Mirror of https://github.com/immich-app/immich.git (synced 2025-12-08 09:13:07 +03:00)

Compare commits: v1.106.3 ... feat/ml-ar (261 commits)
72269ab58c
3db69b94ed
b5acb71b05
b39cca1b43
3d62011ae3
1ad348c407
5dae920ac6
956480ab2c
5748f50c1f
1b3a7feb67
d68bd876c1
c50ac55892
b2dd4e1c2b
ff2ba240c9
96084355f0
25a380d023
3cb42de931
8dd1d95913
0ee2390c7f
52db9558b3
0fbfbc86d2
c7432834d0
a971fae81f
a58a2eec53
f43721ec92
59aa347912
1dd1d36120
545b206076
cf77487c00
9d8b755c07
bd88b079ea
27b13b82f5
79c8412660
a078dde241
7e4e96c440
94f129d632
678111ed3b
c1036d6f88
e8af0e859e
a0f6d7444a
eb89208abb
af94f0f979
025a54c462
334a709cc6
5f25e2ce82
04d0f575b7
e9683b326a
cb40db9555
39221c8d1f
a5467d60ea
d582ec02b1
59cdbdc492
01706ccf5c
6c49a4ba34
e1f25b44d2
f6cafa3290
53d4a5268b
cf88f4b6f8
ac8d8d91f7
842291124c
6f5b3c47b0
b25642b889
7bde19d842
eb1ba11d60
23b3073687
3cd187dced
6791af8c2c
e566fbb009
e5c92912fc
f33d5b0a38
df10618a7e
6030349a6f
6629bf50ae
e32ce82179
10ea894186
81d12c0586
0b88bef157
2b8942026c
f5937a5a9b
04f0ac1aad
4a481acca6
de62bd3ba5
ab2ea28ed9
96f29cefeb
6f950ea45d
99c45bd4d2
312030f275
bed9ccadbc
d55499eba0
910b75c6cc
6a11464d60
aa29f5d69c
1ee10ee2d6
f23401d911
14d94df1b8
b47ec2f88f
b5c8ca075c
7bfa642fa3
9a83038728
a1629f0793
d4cba57102
2934676594
ebea793534
eeae77422f
850424e960
58298bd038
e46af5c26b
3b37b70626
4193b0dede
ac51cad075
b54dd4e135
f5164b42e0
783088afbe
744dfb675b
1d282851e2
d00d33d8a5
560dbd3c65
c58148af35
e54c18367b
8b6d27f1bc
887acb9d9f
8f553ddb39
24c1855899
6ebae3c84f
e0bb9add91
821570f2fb
a2364a12cf
e361640e39
37b5d92110
325aa1d392
72bf9439b0
7e99394c70
8ff9c37d79
0b4153e256
12b9f3ad91
9fc9465cec
d8175d8da8
922430da36
a3c3619811
7f5a3e5adb
63041674c2
8a445cac07
15c1cd6449
8198259de8
6decf33226
df0064c83b
c754f2504b
0891658668
5b909eeaf0
0484a4e252
bf83fdee49
9eafbb0524
6356c28f64
6538ad8de7
9f9e42a96a
905d6c1508
91af793b52
5912fcc393
b5b0c6fe8b
330648ff44
54d1dc56a2
d8e6b17ef9
d7a33c8ec2
0012369c67
cb3ac4ff9f
4988df3fcb
fc6c9a19d9
13cc1f0aa6
ba72802888
04f0e29df6
c83de5213f
dd2c7400a6
df9e074304
5f47cf604a
8e2f6f1f41
32da9d90e4
6164640575
4cb165304b
1200265425
0a3aafd439
aaf7c0b6db
b3252ffdac
1129020159
61a5d67674
42f3b50422
5e9a7b17d9
0fda67543d
5cde52eec9
eff839251c
a42af06889
79a8ab71ef
1191978d50
7ea0278b32
4ef033aa55
660afa9fad
104048ecd5
bec77f926e
ba57a1144d
b3f9641edf
86cbc6e125
968553a50e
5813dc02d1
58b17a866b
c58b0ac66a
517a83cfa9
7daa761eed
e58131492d
b21572cb32
8332efcd04
b71aa4473b
99c6fdbc1c
c1a5ed3526
9000ce4283
e8994d9ffd
1b67ea2d91
38e26fd67c
29e4666dfa
7ce87abc95
eb987c14c1
a6e767e46d
8e373cee8d
6b1b5054f8
0fe152b1ef
e77e87b936
0b08af7082
010eb1e0d6
83a851b556
1cd51cc2de
f3c15c7df8
6a5435764e
dfad4f0ff4
aea1c46bea
78f600ebce
c896fe393f
b4b654b53f
dddc06c3b2
596412cb8f
e3a314b649
2bdb4bca9e
211451d234
e1731fe316
ee186a40c2
32a0688028
e5ed7d4af1
30627fe91e
77bd162872
c6ab047167
8c2195c820
5e99f651ec
0de15121f2
212ba35aef
827ec1b63a
e2a2c86a31
df31eb1214
0d6a4975a3
7de2665344
058ca28d88
b9593361a4
a54e01ef2f
fb641c74be
c642150b85
.github/ISSUE_TEMPLATE/config.yml (vendored, 4 changes)

```diff
@@ -1,11 +1,11 @@
 blank_issues_enabled: false
 contact_links:
   - name: I have a question or need support
-    url: https://discord.gg/D8JsnBEuKb
+    url: https://discord.immich.app
     about: We use GitHub for tracking bugs, please check out our Discord channel for freaky fast support.
   - name: Feature Request
     url: https://github.com/immich-app/immich/discussions/new?category=feature-request
     about: Please use our GitHub Discussion for making feature requests.
   - name: I'm unsure where to go
-    url: https://discord.gg/D8JsnBEuKb
+    url: https://discord.immich.app
     about: If you are unsure where to go, then joining our Discord is recommended; Just ask!
```
.github/labeler.yml (vendored, 36 changes)

```diff
@@ -1,23 +1,35 @@
 cli:
-  - changed-files:
-      - any-glob-to-any-file: cli/**
+  - changed-files:
+      - any-glob-to-any-file:
+          - cli/src/**
 
 documentation:
-  - changed-files:
-      - any-glob-to-any-file: docs/**
+  - changed-files:
+      - any-glob-to-any-file:
+          - docs/blob/**
+          - docs/docs/**
+          - docs/src/**
+          - docs/static/**
 
 🖥️web:
-  - changed-files:
-      - any-glob-to-any-file: web/**
+  - changed-files:
+      - any-glob-to-any-file:
+          - web/src/**
+          - web/static/**
 
 📱mobile:
-  - changed-files:
-      - any-glob-to-any-file: mobile/**
+  - changed-files:
+      - any-glob-to-any-file:
+          - mobile/lib/**
+          - mobile/test/**
 
 🗄️server:
-  - changed-files:
-      - any-glob-to-any-file: server/**
+  - changed-files:
+      - any-glob-to-any-file:
+          - server/src/**
+          - server/test/**
 
 🧠machine-learning:
-  - changed-files:
-      - any-glob-to-any-file: machine-learning/**
+  - changed-files:
+      - any-glob-to-any-file:
+          - machine-learning/app/**
```
.github/workflows/cli.yml (vendored, 8 changes)

```diff
@@ -33,7 +33,7 @@ jobs:
       # Setup .npmrc file to publish to npm
       - uses: actions/setup-node@v4
         with:
-          node-version: '20.x'
+          node-version-file: './cli/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
       - name: Prepare SDK
         run: npm ci --prefix ../open-api/typescript-sdk/
@@ -56,10 +56,10 @@
         uses: actions/checkout@v4
 
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.3.0
+        uses: docker/setup-buildx-action@v3.4.0
 
       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -88,7 +88,7 @@
             type=raw,value=latest,enable=${{ github.event_name == 'release' }}
 
       - name: Build and push image
-        uses: docker/build-push-action@v5.4.0
+        uses: docker/build-push-action@v6.3.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64
```
.github/workflows/docker.yml (vendored, 14 changes)

```diff
@@ -63,10 +63,10 @@ jobs:
         uses: actions/checkout@v4
 
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.3.0
+        uses: docker/setup-buildx-action@v3.4.0
 
       - name: Login to Docker Hub
         # Only push to Docker Hub when making a release
@@ -115,7 +115,7 @@
         fi
 
       - name: Build and push image
-        uses: docker/build-push-action@v5.4.0
+        uses: docker/build-push-action@v6.3.0
         with:
           context: ${{ matrix.context }}
           file: ${{ matrix.file }}
@@ -124,7 +124,11 @@
           push: ${{ !github.event.pull_request.head.repo.fork }}
           cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
           cache-to: ${{ steps.cache-target.outputs.cache-to }}
-          build-args: |
-            DEVICE=${{ matrix.device }}
           tags: ${{ steps.metadata.outputs.tags }}
           labels: ${{ steps.metadata.outputs.labels }}
+          build-args: |
+            DEVICE=${{ matrix.device }}
+            BUILD_ID=${{ github.run_id }}
+            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
+            BUILD_SOURCE_REF=${{ github.ref_name }}
+            BUILD_SOURCE_COMMIT=${{ github.sha }}
```
.github/workflows/docs-build.yml (vendored, 5 changes)

```diff
@@ -26,6 +26,11 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
 
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: './docs/.nvmrc'
+
       - name: Run npm install
         run: npm ci
 
```
.github/workflows/sdk.yml (vendored, 2 changes)

```diff
@@ -19,7 +19,7 @@ jobs:
       # Setup .npmrc file to publish to npm
       - uses: actions/setup-node@v4
         with:
-          node-version: '20.x'
+          node-version-file: './open-api/typescript-sdk/.nvmrc'
           registry-url: 'https://registry.npmjs.org'
       - name: Install deps
         run: npm ci
```
.github/workflows/test.yml (vendored, 56 changes)

```diff
@@ -21,6 +21,11 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
 
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: './server/.nvmrc'
+
       - name: Run npm install
         run: npm ci
 
@@ -54,7 +59,7 @@
       - name: Setup Node
         uses: actions/setup-node@v4
         with:
-          node-version: 20
+          node-version-file: './cli/.nvmrc'
 
       - name: Setup typescript-sdk
         run: npm ci && npm run build
@@ -79,6 +84,38 @@
         run: npm run test:cov
         if: ${{ !cancelled() }}
 
+  cli-unit-tests-win:
+    name: CLI (Windows)
+    runs-on: windows-latest
+    defaults:
+      run:
+        working-directory: ./cli
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: './cli/.nvmrc'
+
+      - name: Setup typescript-sdk
+        run: npm ci && npm run build
+        working-directory: ./open-api/typescript-sdk
+
+      - name: Install deps
+        run: npm ci
+
+      # Skip linter & formatter in Windows test.
+      - name: Run tsc
+        run: npm run check
+        if: ${{ !cancelled() }}
+
+      - name: Run unit tests & coverage
+        run: npm run test:cov
+        if: ${{ !cancelled() }}
+
   web-unit-tests:
     name: Web
     runs-on: ubuntu-latest
@@ -90,6 +127,11 @@
       - name: Checkout code
         uses: actions/checkout@v4
 
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: './web/.nvmrc'
+
       - name: Run setup typescript-sdk
         run: npm ci && npm run build
         working-directory: ./open-api/typescript-sdk
@@ -133,7 +175,7 @@
       - name: Setup Node
         uses: actions/setup-node@v4
         with:
-          node-version: 20
+          node-version-file: './e2e/.nvmrc'
 
       - name: Run setup typescript-sdk
         run: npm ci && npm run build
@@ -241,6 +283,11 @@
       - name: Checkout code
         uses: actions/checkout@v4
 
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: './server/.nvmrc'
+
       - name: Install server dependencies
         run: npm --prefix=server ci
 
@@ -291,6 +338,11 @@
       - name: Checkout code
         uses: actions/checkout@v4
 
+      - name: Setup Node
+        uses: actions/setup-node@v4
+        with:
+          node-version-file: './server/.nvmrc'
+
       - name: Install server dependencies
         run: npm ci
 
```
```diff
@@ -131,4 +131,4 @@ conduct enforcement ladder](https://github.com/mozilla/diversity).
 
 For answers to common questions about this code of conduct, see the
 FAQ at https://www.contributor-covenant.org/faq. Translations are
-available at https://www.contributor-covenant.org/translations.
\ No newline at end of file
+available at https://www.contributor-covenant.org/translations.
```
Makefile (48 changes)

```diff
@@ -35,3 +35,51 @@ sql:
 attach-server:
 	docker exec -it docker_immich-server_1 sh
 
+renovate:
+	LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
+
+MODULES = e2e server web cli sdk
+
+audit-%:
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
+install-%:
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
+build-cli: build-sdk
+build-web: build-sdk
+build-%: install-%
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'build' >/dev/null \
+		&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build || true
+format-%:
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'format:fix' >/dev/null \
+		&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run format:fix || true
+lint-%:
+	npm --prefix $* run lint:fix
+check-%:
+	npm --prefix $* run check
+check-web:
+	npm --prefix web run check:typescript
+	npm --prefix web run check:svelte
+test-%:
+	npm --prefix $* run test
+test-e2e:
+	docker compose -f ./e2e/docker-compose.yml build
+	npm --prefix e2e run test
+	npm --prefix e2e run test:web
+
+build-all: $(foreach M,$(MODULES),build-$M) ;
+install-all: $(foreach M,$(MODULES),install-$M) ;
+check-all: $(foreach M,$(MODULES),check-$M) ;
+lint-all: $(foreach M,$(MODULES),lint-$M) ;
+format-all: $(foreach M,$(MODULES),format-$M) ;
+audit-all: $(foreach M,$(MODULES),audit-$M) ;
+hygiene-all: lint-all format-all check-all sql audit-all;
+test-all: $(foreach M,$(MODULES),test-$M) ;
+
+clean:
+	find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
+	find . -name "dist" -type d -prune -exec rm -rf '{}' +
+	find . -name "build" -type d -prune -exec rm -rf '{}' +
+	find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
+	docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
+	docker compose -f ./e2e/docker-compose.yml rm -v -f || true
```
README.md (92 changes)

````diff
@@ -1,7 +1,7 @@
 <p align="center">
   <br/>
   <a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
-  <a href="https://discord.gg/D8JsnBEuKb">
+  <a href="https://discord.immich.app">
     <img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
   </a>
   <br/>
@@ -19,20 +19,21 @@
   <br/>
   <p align="center">
 
-  <a href="readme_i18n/README_ca_ES.md">Català</a>
-  <a href="readme_i18n/README_es_ES.md">Español</a>
-  <a href="readme_i18n/README_fr_FR.md">Français</a>
-  <a href="readme_i18n/README_it_IT.md">Italiano</a>
-  <a href="readme_i18n/README_ja_JP.md">日本語</a>
-  <a href="readme_i18n/README_ko_KR.md">한국어</a>
-  <a href="readme_i18n/README_de_DE.md">Deutsch</a>
-  <a href="readme_i18n/README_nl_NL.md">Nederlands</a>
-  <a href="readme_i18n/README_tr_TR.md">Türkçe</a>
-  <a href="readme_i18n/README_zh_CN.md">中文</a>
-  <a href="readme_i18n/README_ru_RU.md">Русский</a>
-  <a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
-  <a href="readme_i18n/README_sv_SE.md">Svenska</a>
-  <a href="readme_i18n/README_ar_JO.md">العربية</a>
+  <a href="readme_i18n/README_ca_ES.md">Català</a>
+  <a href="readme_i18n/README_es_ES.md">Español</a>
+  <a href="readme_i18n/README_fr_FR.md">Français</a>
+  <a href="readme_i18n/README_it_IT.md">Italiano</a>
+  <a href="readme_i18n/README_ja_JP.md">日本語</a>
+  <a href="readme_i18n/README_ko_KR.md">한국어</a>
+  <a href="readme_i18n/README_de_DE.md">Deutsch</a>
+  <a href="readme_i18n/README_nl_NL.md">Nederlands</a>
+  <a href="readme_i18n/README_tr_TR.md">Türkçe</a>
+  <a href="readme_i18n/README_zh_CN.md">中文</a>
+  <a href="readme_i18n/README_ru_RU.md">Русский</a>
+  <a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
+  <a href="readme_i18n/README_sv_SE.md">Svenska</a>
+  <a href="readme_i18n/README_ar_JO.md">العربية</a>
 
 </p>
 
 ## Disclaimer
@@ -42,45 +43,36 @@
 - ⚠️ **Do not use the app as the only way to store your photos and videos.**
 - ⚠️ Always follow [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) backup plan for your precious photos and videos!
 
-## Content
+> [!NOTE]
+> You can find the main documentation, including installation guides, at https://immich.app/.
 
-- [Official Documentation](https://immich.app/docs)
-- [Roadmap](https://github.com/orgs/immich-app/projects/1)
+## Links
+
+- [Documentation](https://immich.app/docs)
+- [About](https://immich.app/docs/overview/introduction)
+- [Installation](https://immich.app/docs/install/requirements)
+- [Roadmap](https://immich.app/roadmap)
 - [Demo](#demo)
 - [Features](#features)
-- [Introduction](https://immich.app/docs/overview/introduction)
-- [Installation](https://immich.app/docs/install/requirements)
-- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
-
-## Documentation
-
-You can find the main documentation, including installation guides, at https://immich.app/.
+- [Translations](https://immich.app/docs/developer/translations)
+- [Contributing](https://immich.app/docs/overview/support-the-project)
 
 ## Demo
 
-You can access the web demo at https://demo.immich.app
+Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
 
 For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`
 
-```bash title="Demo Credential"
-The credential
-email: demo@immich.app
-password: demo
-```
+### Login credentials
 
-```
-Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
-```
-
-## Activities
-
-
+| Email           | Password |
+| --------------- | -------- |
+| demo@immich.app | demo     |
 
 ## Features
 
 | Features                                      | Mobile | Web |
-| :--------------------------------------------- | -------- | ----- |
+| :-------------------------------------------- | ------ | --- |
 | Upload and view videos and photos             | Yes    | Yes |
 | Auto backup when the app is opened            | Yes    | N/A |
 | Prevent duplication of assets                 | Yes    | Yes |
@@ -110,13 +102,19 @@
 | Read-only gallery                             | Yes    | Yes |
 | Stacked Photos                                | Yes    | Yes |
 
-## Contributors
+## Translations
 
-<a href="https://github.com/alextran1502/immich/graphs/contributors">
-  <img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
+Read more about translations [here](https://immich.app/docs/developer/translations).
+
+<a href="https://hosted.weblate.org/engage/immich/">
+  <img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
 </a>
 
-## Star History
+## Repository activity
+
+
+
+## Star history
 
 <a href="https://star-history.com/#immich-app/immich&Date">
   <picture>
@@ -125,3 +123,9 @@
     <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
   </picture>
 </a>
+
+## Contributors
+
+<a href="https://github.com/alextran1502/immich/graphs/contributors">
+  <img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
+</a>
````
```diff
@@ -2,4 +2,4 @@
 
 ## Reporting a Vulnerability
 
-Please report security issues to `alex.tran1502@gmail.com`
+Please report security issues to `security@immich.app`
```
```diff
@@ -1 +1 @@
-20.14
+20.15.1
```
```diff
@@ -1,4 +1,4 @@
-FROM node:20-alpine3.19@sha256:696ae41fb5880949a15ade7879a2deae93b3f0723f757bdb5b8a9e4a744ce27f as core
+FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as core
 
 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -16,4 +16,4 @@ RUN npm run build
 
 WORKDIR /import
 
-ENTRYPOINT ["node", "/usr/src/app/dist"]
\ No newline at end of file
+ENTRYPOINT ["node", "/usr/src/app/dist"]
```
cli/package-lock.json (generated, 492 changes) — file diff suppressed because it is too large.

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.3",
+  "version": "2.2.8",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -18,7 +18,7 @@
     "@types/cli-progress": "^3.11.0",
     "@types/lodash-es": "^4.17.12",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^20.3.1",
+    "@types/node": "^20.14.10",
     "@typescript-eslint/eslint-plugin": "^7.0.0",
     "@typescript-eslint/parser": "^7.0.0",
     "@vitest/coverage-v8": "^1.2.2",
@@ -28,14 +28,15 @@
     "eslint": "^8.56.0",
     "eslint-config-prettier": "^9.1.0",
     "eslint-plugin-prettier": "^5.1.3",
-    "eslint-plugin-unicorn": "^53.0.0",
+    "eslint-plugin-unicorn": "^54.0.0",
     "mock-fs": "^5.2.0",
     "prettier": "^3.2.5",
-    "prettier-plugin-organize-imports": "^3.2.4",
+    "prettier-plugin-organize-imports": "^4.0.0",
     "typescript": "^5.3.3",
     "vite": "^5.0.12",
     "vite-tsconfig-paths": "^4.3.2",
     "vitest": "^1.2.2",
     "vitest-fetch-mock": "^0.2.2",
     "yaml": "^2.3.1"
   },
   "scripts": {
@@ -59,9 +60,10 @@
   },
   "dependencies": {
     "fast-glob": "^3.3.2",
+    "fastq": "^1.17.1",
     "lodash-es": "^4.17.21"
   },
   "volta": {
-    "node": "20.14.0"
+    "node": "20.15.1"
   }
 }
```
cli/src/commands/asset.spec.ts (new file, 201 lines; `beforeEach` added to the vitest import, which the mirrored page omits)

```typescript
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import { beforeEach, describe, expect, it, vi } from 'vitest';

import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
import createFetchMock from 'vitest-fetch-mock';

import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';

vi.mock('@immich/sdk');

describe('getAlbumName', () => {
  it('should return a non-undefined value', () => {
    if (os.platform() === 'win32') {
      // This is meaningless for Unix systems.
      expect(getAlbumName(String.raw`D:\test\Filename.txt`, {} as UploadOptionsDto)).toBe('test');
    }
    expect(getAlbumName('D:/parentfolder/test/Filename.txt', {} as UploadOptionsDto)).toBe('test');
  });

  it('has higher priority to return `albumName` in `options`', () => {
    expect(getAlbumName('/parentfolder/test/Filename.txt', { albumName: 'example' } as UploadOptionsDto)).toBe(
      'example',
    );
  });
});

describe('uploadFiles', () => {
  const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
  const testFilePath = path.join(testDir, 'test.png');
  const testFileData = 'test';
  const baseUrl = 'http://example.com';
  const apiKey = 'key';
  const retry = 3;

  const fetchMocker = createFetchMock(vi);

  beforeEach(() => {
    // Create a test file
    fs.writeFileSync(testFilePath, testFileData);

    // Defaults
    vi.mocked(defaults).baseUrl = baseUrl;
    vi.mocked(defaults).headers = { 'x-api-key': apiKey };

    fetchMocker.enableMocks();
    fetchMocker.resetMocks();
  });

  it('returns new assets when upload file is successful', async () => {
    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
      return {
        status: 200,
        body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
      };
    });

    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
      {
        filepath: testFilePath,
        id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
      },
    ]);
  });

  it('returns new assets when upload file retry is successful', async () => {
    let counter = 0;
    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
      counter++;
      if (counter < retry) {
        throw new Error('Network error');
      }

      return {
        status: 200,
        body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
      };
    });

    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
      {
        filepath: testFilePath,
        id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
      },
    ]);
  });

  it('returns new assets when upload file retry is failed', async () => {
    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
      throw new Error('Network error');
    });

    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([]);
  });
});

describe('checkForDuplicates', () => {
  const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
  const testFilePath = path.join(testDir, 'test.png');
  const testFileData = 'test';
  const testFileChecksum = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'; // SHA1
  const retry = 3;

  beforeEach(() => {
    // Create a test file
    fs.writeFileSync(testFilePath, testFileData);
  });

  it('checks duplicates', async () => {
    vi.mocked(checkBulkUpload).mockResolvedValue({
      results: [
        {
          action: Action.Accept,
          id: testFilePath,
        },
      ],
    });

    await checkForDuplicates([testFilePath], { concurrency: 1 });

    expect(checkBulkUpload).toHaveBeenCalledWith({
      assetBulkUploadCheckDto: {
        assets: [
          {
            checksum: testFileChecksum,
            id: testFilePath,
          },
        ],
      },
    });
  });

  it('returns duplicates when check duplicates is rejected', async () => {
    vi.mocked(checkBulkUpload).mockResolvedValue({
      results: [
        {
          action: Action.Reject,
          id: testFilePath,
          assetId: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
          reason: Reason.Duplicate,
        },
      ],
    });

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [
        {
          filepath: testFilePath,
          id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
        },
      ],
      newFiles: [],
    });
  });

  it('returns new assets when check duplicates is accepted', async () => {
    vi.mocked(checkBulkUpload).mockResolvedValue({
      results: [
        {
          action: Action.Accept,
          id: testFilePath,
        },
      ],
    });

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [],
      newFiles: [testFilePath],
    });
  });

  it('returns results when check duplicates retry is successful', async () => {
    let mocked = vi.mocked(checkBulkUpload);
    for (let i = 1; i < retry; i++) {
      mocked = mocked.mockRejectedValueOnce(new Error('Network error'));
    }
    mocked.mockResolvedValue({
      results: [
        {
          action: Action.Accept,
          id: testFilePath,
        },
      ],
    });

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [],
      newFiles: [testFilePath],
    });
  });

  it('returns results when check duplicates retry is failed', async () => {
    vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error'));

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [],
      newFiles: [],
    });
  });
});
```
cli/src/commands/asset.ts

```diff
@@ -15,8 +15,8 @@
 import { Presets, SingleBar } from 'cli-progress';
 import { chunk } from 'lodash-es';
 import { Stats, createReadStream } from 'node:fs';
 import { stat, unlink } from 'node:fs/promises';
-import os from 'node:os';
 import path, { basename } from 'node:path';
+import { Queue } from 'src/queue';
 import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
 
 const s = (count: number) => (count === 1 ? '' : 's');
@@ -25,7 +25,7 @@
 type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
 type Asset = { id: string; filepath: string };
 
-interface UploadOptionsDto {
+export interface UploadOptionsDto {
   recursive?: boolean;
   ignore?: string;
   dryRun?: boolean;
@@ -84,7 +84,7 @@
   return files;
 };
 
-const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
+export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
   if (skipHash) {
     console.log('Skipping hash check, assuming all files are new');
     return { newFiles: files, duplicates: [] };
@@ -100,32 +100,50 @@
   const newFiles: string[] = [];
   const duplicates: Asset[] = [];
 
-  try {
-    // TODO refactor into a queue
-    for (const items of chunk(files, concurrency)) {
-      const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
-      const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
-
-      for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
+  const queue = new Queue<string[], AssetBulkUploadCheckResults>(
+    async (filepaths: string[]) => {
+      const dto = await Promise.all(
+        filepaths.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })),
+      );
+      const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
+      const results = response.results as AssetBulkUploadCheckResults;
+      for (const { id: filepath, assetId, action } of results) {
         if (action === Action.Accept) {
           newFiles.push(filepath);
         } else {
           // rejects are always duplicates
           duplicates.push({ id: assetId as string, filepath });
         }
-        progressBar.increment();
       }
-    }
-  } finally {
-    progressBar.stop();
-  }
+      progressBar.increment(filepaths.length);
+      return results;
+    },
+    { concurrency, retry: 3 },
+  );
+
+  for (const items of chunk(files, concurrency)) {
+    await queue.push(items);
+  }
+
+  await queue.drained();
+
+  progressBar.stop();
 
   console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
 
+  // Report failures
+  const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
+  if (failedTasks.length > 0) {
+    console.log(`Failed to verify ${failedTasks.length} file${s(failedTasks.length)}:`);
+    for (const task of failedTasks) {
+      console.log(`- ${task.data} - ${task.error}`);
+    }
+  }
+
   return { newFiles, duplicates };
 };
 
-const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
+export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
   if (files.length === 0) {
     console.log('All assets were already uploaded, nothing to do.');
     return [];
@@ -159,37 +177,52 @@
 
   const newAssets: Asset[] = [];
 
-  try {
-    for (const items of chunk(files, concurrency)) {
-      await Promise.all(
-        items.map(async (filepath) => {
-          const stats = statsMap.get(filepath) as Stats;
-          const response = await uploadFile(filepath, stats);
+  const queue = new Queue<string, AssetMediaResponseDto>(
+    async (filepath: string) => {
+      const stats = statsMap.get(filepath);
+      if (!stats) {
+        throw new Error(`Stats not found for ${filepath}`);
+      }
 
-          newAssets.push({ id: response.id, filepath });
+      const response = await uploadFile(filepath, stats);
+      newAssets.push({ id: response.id, filepath });
+      if (response.status === AssetMediaStatus.Duplicate) {
+        duplicateCount++;
+        duplicateSize += stats.size ?? 0;
+      } else {
+        successCount++;
+        successSize += stats.size ?? 0;
+      }
 
-          if (response.status === AssetMediaStatus.Duplicate) {
-            duplicateCount++;
-            duplicateSize += stats.size ?? 0;
-          } else {
-            successCount++;
-            successSize += stats.size ?? 0;
-          }
+      uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
 
-          uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
+      return response;
+    },
+    { concurrency, retry: 3 },
+  );
 
-          return response;
-        }),
-      );
-    }
-  } finally {
-    uploadProgress.stop();
+  for (const filepath of files) {
+    await queue.push(filepath);
   }
 
+  await queue.drained();
+
+  uploadProgress.stop();
+
   console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
   if (duplicateCount > 0) {
     console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
   }
 
+  // Report failures
+  const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
+  if (failedTasks.length > 0) {
+    console.log(`Failed to upload ${failedTasks.length} asset${s(failedTasks.length)}:`);
+    for (const task of failedTasks) {
+      console.log(`- ${task.data} - ${task.error}`);
+    }
+  }
+
   return newAssets;
 };
@@ -346,7 +379,9 @@
   }
 };
 
-const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
-  const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
-  return options.albumName ?? folderName;
+// `filepath` valid format:
+// - Windows: `D:\\test\\Filename.txt` or `D:/test/Filename.txt`
+// - Unix: `/test/Filename.txt`
+export const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
+  return options.albumName ?? path.basename(path.dirname(filepath));
 };
```
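The rewritten `getAlbumName` no longer branches on the platform: `path.basename(path.dirname(filepath))` yields the parent folder name, and on Windows `node:path` accepts both `\` and `/` separators. A minimal sketch of the same idea (the CLI's `UploadOptionsDto` is simplified here to just `albumName`):

```typescript
import path from 'node:path';

// Simplified stand-in for the CLI's UploadOptionsDto; only albumName matters here.
type Options = { albumName?: string };

// The parent-folder name doubles as the album name unless one is given explicitly.
const getAlbumName = (filepath: string, options: Options) =>
  options.albumName ?? path.basename(path.dirname(filepath));

// On Windows, path handles both separator styles:
//   getAlbumName(String.raw`D:\test\Filename.txt`, {})        -> 'test'
//   getAlbumName('D:/parentfolder/test/Filename.txt', {})     -> 'test'
// An explicit albumName always wins:
//   getAlbumName('/parentfolder/test/Filename.txt', { albumName: 'example' }) -> 'example'
```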
cli/src/queue.ts (new file, 131 lines)

```typescript
import * as fastq from 'fastq';
import { uniqueId } from 'lodash-es';

export type Task<T, R> = {
  readonly id: string;
  status: 'idle' | 'processing' | 'succeeded' | 'failed';
  data: T;
  error: unknown | undefined;
  count: number;
  // TODO: Could be useful to add a progress property.
  // TODO: Could be useful to add start_at/end_at/duration properties.
  result: undefined | R;
};

export type QueueOptions = {
  verbose?: boolean;
  concurrency?: number;
  retry?: number;
  // TODO: Could be useful to add a timeout property for retry.
};

export type ComputedQueueOptions = Required<QueueOptions>;

export const defaultQueueOptions = {
  concurrency: 1,
  retry: 0,
  verbose: false,
};

/**
 * An in-memory queue that processes tasks in parallel with a given concurrency.
 * @see {@link https://www.npmjs.com/package/fastq}
 * @template T - The type of the worker task data.
 * @template R - The type of the worker output data.
 */
export class Queue<T, R> {
  private readonly queue: fastq.queueAsPromised<string, Task<T, R>>;
  private readonly store = new Map<string, Task<T, R>>();
  readonly options: ComputedQueueOptions;
  readonly worker: (data: T) => Promise<R>;

  /**
   * Create a new queue.
   * @param worker - The worker function that processes the task.
   * @param options - The queue options.
   */
  constructor(worker: (data: T) => Promise<R>, options?: QueueOptions) {
    this.options = { ...defaultQueueOptions, ...options };
    this.worker = worker;
    this.store = new Map<string, Task<T, R>>();
    this.queue = this.buildQueue();
  }

  get tasks(): Task<T, R>[] {
    const tasks: Task<T, R>[] = [];
    for (const task of this.store.values()) {
      tasks.push(task);
    }
    return tasks;
  }

  getTask(id: string): Task<T, R> {
    const task = this.store.get(id);
    if (!task) {
      throw new Error(`Task with id ${id} not found`);
    }
    return task;
  }

  /**
   * Wait for the queue to be empty.
   * @returns Promise<void> - The returned Promise will be resolved when all tasks in the queue have been processed by a worker.
   * This promise could be ignored as it will not lead to a `unhandledRejection`.
   */
  async drained(): Promise<void> {
    await this.queue.drain();
  }

  /**
   * Add a task at the end of the queue.
   * @see {@link https://www.npmjs.com/package/fastq}
   * @param data
   * @returns Promise<void> - A Promise that will be fulfilled (rejected) when the task is completed successfully (unsuccessfully).
   * This promise could be ignored as it will not lead to a `unhandledRejection`.
   */
  async push(data: T): Promise<Task<T, R>> {
    const id = uniqueId();
    const task: Task<T, R> = { id, status: 'idle', error: undefined, count: 0, data, result: undefined };
    this.store.set(id, task);
    return this.queue.push(id);
  }

  // TODO: Support more function delegation to fastq.

  private buildQueue(): fastq.queueAsPromised<string, Task<T, R>> {
    return fastq.promise((id: string) => {
      const task = this.getTask(id);
      return this.work(task);
    }, this.options.concurrency);
  }

  private async work(task: Task<T, R>): Promise<Task<T, R>> {
    task.count += 1;
    task.error = undefined;
    task.status = 'processing';
    if (this.options.verbose) {
      console.log('[task] processing:', task);
    }
    try {
      task.result = await this.worker(task.data);
      task.status = 'succeeded';
      if (this.options.verbose) {
        console.log('[task] succeeded:', task);
      }
      return task;
    } catch (error) {
      task.error = error;
      task.status = 'failed';
      if (this.options.verbose) {
        console.log('[task] failed:', task);
      }
      if (this.options.retry > 0 && task.count < this.options.retry) {
        if (this.options.verbose) {
          console.log('[task] retry:', task);
        }
        return this.work(task);
      }
      return task;
    }
  }
}
```
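For context, a small sketch of how this `Queue` is meant to be driven, mirroring the `checkForDuplicates`/`uploadFiles` call sites above: push work, await `drained()`, then inspect `tasks` for failures. The worker and file names below are made up for illustration; the `src/queue` import assumes the CLI's path alias.

```typescript
import { Queue } from 'src/queue';

// Hypothetical worker: pretend to hash a file, failing on a bad path.
const hashWorker = async (filepath: string): Promise<string> => {
  if (filepath.includes('missing')) {
    throw new Error(`ENOENT: ${filepath}`);
  }
  return `sha1-of-${filepath}`;
};

// retry: 3 means a failed task is re-run until it has been attempted 3 times.
const queue = new Queue<string, string>(hashWorker, { concurrency: 3, retry: 3 });

for (const filepath of ['a.jpg', 'b.jpg', 'missing.jpg']) {
  // push() resolves once the task settles; the CLI awaits it the same way.
  await queue.push(filepath);
}

await queue.drained();

// Failed tasks stay in the store with their last error attached.
const failed = queue.tasks.filter((task) => task.status === 'failed');
for (const task of failed) {
  console.log(`- ${task.data} - ${task.error}`);
}
```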
```diff
@@ -1,4 +1,5 @@
 import mockfs from 'mock-fs';
+import { readFileSync } from 'node:fs';
 import { CrawlOptions, crawl } from 'src/utils';
 
 interface Test {
@@ -9,6 +10,10 @@
 
 const cwd = process.cwd();
 
+const readContent = (path: string) => {
+  return readFileSync(path).toString();
+};
+
 const extensions = [
   '.jpg',
   '.jpeg',
@@ -256,7 +261,8 @@
   {
     test: 'should support ignoring absolute paths',
     options: {
-      pathsToCrawl: ['/'],
+      // Currently, fast-glob has some caveat when dealing with `/`.
+      pathsToCrawl: ['/*s'],
       recursive: true,
       exclusionPattern: '/images/**',
     },
@@ -276,14 +282,16 @@
 describe('crawl', () => {
   for (const { test, options, files } of tests) {
     it(test, async () => {
-      mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));
+      // The file contents is the same as the path.
+      mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
 
       const actual = await crawl({ ...options, extensions });
       const expected = Object.entries(files)
         .filter((entry) => entry[1])
         .map(([file]) => file);
 
-      expect(actual.sort()).toEqual(expected.sort());
+      // Compare file's content instead of path since a file can be represent in multiple ways.
+      expect(actual.map((path) => readContent(path)).sort()).toEqual(expected.sort());
     });
   }
 });
```
```diff
@@ -1,8 +1,9 @@
 import { getMyUser, init, isHttpError } from '@immich/sdk';
-import { glob } from 'fast-glob';
+import { convertPathToPattern, glob } from 'fast-glob';
 import { createHash } from 'node:crypto';
 import { createReadStream } from 'node:fs';
 import { readFile, stat, writeFile } from 'node:fs/promises';
+import { platform } from 'node:os';
 import { join, resolve } from 'node:path';
 import yaml from 'yaml';
 
@@ -106,6 +107,11 @@
   exclusionPattern?: string;
   extensions: string[];
 }
 
+const convertPathToPatternOnWin = (path: string) => {
+  return platform() === 'win32' ? convertPathToPattern(path) : path;
+};
+
 export const crawl = async (options: CrawlOptions): Promise<string[]> => {
   const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
   const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
@@ -124,11 +130,11 @@
     if (stats.isFile() || stats.isSymbolicLink()) {
       crawledFiles.push(absolutePath);
     } else {
-      patterns.push(absolutePath);
+      patterns.push(convertPathToPatternOnWin(absolutePath));
     }
   } catch (error: any) {
     if (error.code === 'ENOENT') {
-      patterns.push(currentPath);
+      patterns.push(convertPathToPatternOnWin(currentPath));
    } else {
       throw error;
     }
```
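fast-glob treats `\` as an escape character inside patterns, so raw Windows paths cannot be fed to it as globs directly; `convertPathToPattern` rewrites them into glob-safe form. A rough sketch of the wrapper's effect (the example path is illustrative):

```typescript
import { convertPathToPattern } from 'fast-glob';
import { platform } from 'node:os';

// Windows paths become glob-safe patterns; POSIX paths pass through untouched.
const convertPathToPatternOnWin = (path: string) =>
  platform() === 'win32' ? convertPathToPattern(path) : path;

// On win32, something like 'C:\\photos\\2024' would come back roughly as
// 'C:/photos/2024', which fast-glob can then expand with a suffix such as '/**/*.jpg'.
console.log(convertPathToPatternOnWin('C:\\photos\\2024'));
```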
```diff
@@ -2,6 +2,7 @@ import { defineConfig } from 'vite';
 import tsconfigPaths from 'vite-tsconfig-paths';
 
 export default defineConfig({
+  resolve: { alias: { src: '/src' } },
   build: {
     rollupOptions: {
       input: 'src/index.ts',
```
```diff
@@ -2,37 +2,37 @@
 # Manual edits may be lost in future updates.
 
 provider "registry.opentofu.org/cloudflare/cloudflare" {
-  version     = "4.34.0"
-  constraints = "4.34.0"
+  version     = "4.36.0"
+  constraints = "4.36.0"
   hashes = [
-    "h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
-    "h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
-    "h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
-    "h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
-    "h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
-    "h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
-    "h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
-    "h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
-    "h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
-    "h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
-    "h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
-    "h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
-    "h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
-    "h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
-    "zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
-    "zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
-    "zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
-    "zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
-    "zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
-    "zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
+    "h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
+    "h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
+    "h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
+    "h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
+    "h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
+    "h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
+    "h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
+    "h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
+    "h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
+    "h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
+    "h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
+    "h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
+    "h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
+    "h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
+    "zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
+    "zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
+    "zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
+    "zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
+    "zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
+    "zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
     "zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
     "zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
     "zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
     "zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
     "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
     "zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
     "zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
     "zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
     "zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
     "zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
     "zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
     "zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
     "zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
+    "zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
+    "zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
+    "zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
+    "zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
   ]
 }
@@ -5,7 +5,7 @@ terraform {
   required_providers {
     cloudflare = {
       source  = "cloudflare/cloudflare"
-      version = "4.34.0"
+      version = "4.36.0"
     }
   }
 }
```
```diff
@@ -2,37 +2,37 @@
 # Manual edits may be lost in future updates.
 
 provider "registry.opentofu.org/cloudflare/cloudflare" {
-  version     = "4.34.0"
-  constraints = "4.34.0"
+  version     = "4.36.0"
+  constraints = "4.36.0"
   hashes = [
-    "h1:+W0+Xe1AUh7yvHjDbgR9T7CY1UbBC3Y6U7Eo+ucLnJM=",
-    "h1:2+1lKObDDdFZRluvROF3RKtXD66CFT3PfnHOvR6CmfA=",
-    "h1:7vluN2wmw8D9nI11YwTgoGv3hGDXlkt8xqQ4L/JABeQ=",
-    "h1:B0Urm8ZKTJ8cXzSCtEpJ+o+LsD8MXaD6LU59qVbh50Q=",
-    "h1:FpGLCm5oF12FaRti3E4iQJlkVbdCC7toyGVuH8og7KY=",
-    "h1:FunTmrCMDy+rom7YskY0WiL5/Y164zFrrD9xnBxU5NY=",
-    "h1:GrxZhEb+5HzmHF/BvZBdGKBJy6Wyjme0+ABVDz/63to=",
-    "h1:J36dda2K42/oTfHuZ4jKkW5+nI6BTWFRUvo60P17NJg=",
-    "h1:Kq0Wyn+j6zoQeghMYixbnfnyP9ZSIEJbOCzMbaCiAQQ=",
-    "h1:TKxunXCiS/z105sN/kBNFwU6tIKD67JKJ3ZKjwzoCuI=",
-    "h1:TR0URKFQxsRO5/v7bKm5hkD/CTTjsG7aVGllL/Mf25c=",
-    "h1:V+3Qs0Reb6r+8p4XjE5ZFDWYrOIN0x5SwORz4wvHOJ4=",
-    "h1:mZB3Ui7V/lPQMQK53eBOjIHcrul74252dT06Kgn3J+s=",
-    "h1:wJwZrIXxoki8omXLJ7XA7B1KaSrtcLMJp090fRtFRAc=",
-    "zh:02aa46743c1585ada8faa7db23af68ea614053a506f88f05d1090ff5e0e68076",
-    "zh:1e1a545e83e6457a0e15357b23139bc288fb4fbd5e9a5ddfedc95a6a0216b08c",
-    "zh:29eef2621e0b1501f620e615bf73b1b90d5417d745e38af63634bc03250faf87",
-    "zh:3c20989d7e1e141882e6091384bf85fdc83f70f3d29e3e047c493a07de992095",
-    "zh:3d39619379ba29c7ffb15196f0ea72a04c84cfcdf4b39ac42ac4cf4c19f3eae2",
-    "zh:805f4a2774e9279c590b8214aabe6df9dcc22bb995df2530513f2f78c647ce75",
+    "h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
+    "h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
+    "h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
+    "h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
+    "h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
+    "h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
+    "h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
+    "h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
+    "h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
+    "h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
+    "h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
+    "h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
+    "h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
+    "h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
+    "zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
+    "zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
+    "zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
+    "zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
+    "zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
+    "zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
     "zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
     "zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
     "zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
     "zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
     "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
     "zh:8af716f8655a57aa986861a8a7fa1d724594a284bd77c870eaea4db5f8b9732d",
     "zh:a3d13c93b4e6ee6004782debaa9a17f990f2fe8ec8ba545c232818bb6064aba9",
     "zh:bfa136acf82d3719473c0064446cc16d1b0303d98b06f55f503b7abeebceadb1",
     "zh:ca6cf9254ae5436f2efbc01a0e3f7e4aa3c08b45182037b3eb3eb9539b2f7aec",
     "zh:cba32d5de02674004e0a5955bd5222016d9991ca0553d4bd3bea517cd9def6ab",
     "zh:d22c8cd527c6d0e84567f57be5911792e2fcd5969e3bba3747489f18bb16705b",
     "zh:e4eeede9b3e72cdadd6cc252d4cbcf41baee6ecfd12bacd927e2dcbe733ab210",
     "zh:facdaa787a69f86203cd3cc6922baea0b4a18bd9c36b0a8162e2e88ef6c90655",
+    "zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
+    "zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
+    "zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
+    "zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
   ]
 }
@@ -5,7 +5,7 @@ terraform {
   required_providers {
     cloudflare = {
       source  = "cloudflare/cloudflare"
-      version = "4.34.0"
+      version = "4.36.0"
     }
   }
 }
```
```diff
@@ -26,6 +26,16 @@ services:
       - /etc/localtime:/etc/localtime:ro
     env_file:
       - .env
+    environment:
+      IMMICH_REPOSITORY: immich-app/immich
+      IMMICH_REPOSITORY_URL: https://github.com/immich-app/immich
+      IMMICH_SOURCE_REF: local
+      IMMICH_SOURCE_COMMIT: af2efbdbbddc27cd06142f22253ccbbbbeec1f55
+      IMMICH_SOURCE_URL: https://github.com/immich-app/immich/commit/af2efbdbbddc27cd06142f22253ccbbbbeec1f55
+      IMMICH_BUILD: '9654404849'
+      IMMICH_BUILD_URL: https://github.com/immich-app/immich/actions/runs/9654404849
+      IMMICH_BUILD_IMAGE: development
+      IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server
     ulimits:
       nofile:
         soft: 1048576
@@ -84,7 +94,7 @@
 
   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
+    image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
     healthcheck:
       test: redis-cli ping || exit 1
 
@@ -103,11 +113,26 @@
     ports:
       - 5432:5432
     healthcheck:
-      test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+      test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
       interval: 5m
       start_interval: 30s
       start_period: 5m
-    command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
+    command:
+      [
+        'postgres',
+        '-c',
+        'shared_preload_libraries=vectors.so',
+        '-c',
+        'search_path="$$user", public, vectors',
+        '-c',
+        'logging_collector=on',
+        '-c',
+        'max_wal_size=2GB',
+        '-c',
+        'shared_buffers=512MB',
+        '-c',
+        'wal_compression=on',
+      ]
 
   # set IMMICH_METRICS=true in .env to enable metrics
   # immich-prometheus:
```
@@ -41,7 +41,7 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
|
||||
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
@@ -61,7 +61,7 @@ services:
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
@@ -73,7 +73,7 @@ services:
|
||||
container_name: immich_prometheus
|
||||
ports:
|
||||
- 9090:9090
|
||||
image: prom/prometheus@sha256:5c435642ca4d8427ca26f4901c11114023004709037880cd7860d5b7176aa731
|
||||
image: prom/prometheus@sha256:f20d3127bf2876f4a1df76246fca576b41ddf1125ed1c546fbd8b16ea55117e6
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
- prometheus-data:/prometheus
|
||||
@@ -85,7 +85,7 @@ services:
|
||||
command: ['./run.sh', '-disable-reporting']
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:11.0.0-ubuntu@sha256:dcd3ae78713958a862732c3608d32c03f0c279c35a2032d74b80b12c5cdc47b8
|
||||
image: grafana/grafana:11.1.0-ubuntu@sha256:c7fc29ec783d5e7fc1bdfaad6f92345a345cffbc5d21c388ca228175006fc107
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
||||
@@ -43,7 +43,7 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
|
||||
image: docker.io/redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
@@ -59,7 +59,7 @@ services:
|
||||
volumes:
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
|
||||
@@ -1 +1 @@
|
||||
20.14
|
||||
20.15.1
|
||||
|
||||
@@ -94,7 +94,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.

Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything
Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything

Cheer!

@@ -142,7 +142,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.

Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything
Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything

Cheer!

77
docs/blog/2024/update-july-2024.mdx
Normal file
@@ -0,0 +1,77 @@
---
title: Immich Update - July 2024
authors: [alextran]
tags: [update, v1.106.0]
---

Hello everybody! Alex from Immich here, and I am back with another development progress update for the project.

Summer has returned once again, and the night sky is filled with stars. Thank you for the **38,000 shining stars** you have sent to our [GitHub repo](https://github.com/immich-app/immich)! Since the last announcement, several core contributors have started full time. Everything is going great with development, PRs get merged at a _brrrrrrr_ rate, collaboration between team members is at a new high, and we met and are working with the great engineers at FUTO. The spirit is high, and we have a lot of things brewing that we think you will like.

Let's go over some of the updates we have had since the last post.

### Container consolidation

We reduced the total number of containers from 5 to 4 by spawning the microservices worker directly inside the server container. Woohoo, remember when Immich had 7 containers?

### Email notifications



We added email notifications to the app, with SMTP settings that you can configure for the following events:

- A new account is created for you.
- You are added to a shared album.
- New media is added to an album.

### Versioned docs

You can now jump back into the past or take a peek at the unreleased version of the documentation by selecting the version on the website.



### Similarity deduplication

With more machine learning and CLIP magic, we now have similarity deduplication built into the application: it searches for closely similar images and lets you decide what to do with them, i.e., keep or trash.



### Permanent URL for asset on the web

The detail view for an asset now has a permanent URL, so you can easily share it with your loved ones.

### Web app translations

We now have a public Weblate project which the community can use to translate the web app to their native languages. We are planning to port the mobile app translations to this platform as well. If you would like to contribute, you can take a look [here](https://hosted.weblate.org/projects/immich/immich/). We're already close to 50% translated -- we really appreciate everyone contributing to that!



### Read-only/Editor mode on shared album

As the owner of the album, you can choose whether a shared user can edit the album or only view its content without making any modifications.



### Better video thumbnails

Immich now tries to find a descriptive video thumbnail instead of simply using the first frame. No more black images for thumbnails!

### Public Roadmap

We now have a [public roadmap](https://immich.app/roadmap), giving you a high-level overview of things the team is working on. The first goal of this roadmap is to bring Immich to a stable release, which is expected sometime later this year. Some of the highlights include:

- Auto stacking - Auto stacking of burst photos
- Basic editor - Basic photo editing capabilities
- Workflows - Automate tasks with workflows
- Fine grained access controls - Granular access controls for users and api keys
- Better background backups - Rework background backups to be more reliable
- Private/locked photos - Private assets with extra protections

Beyond the items in the roadmap, we have _many many_ more ideas for Immich. The team and I hope that you are enjoying the application and finding it helpful in your life, and we have nothing but the intention of building out great software for you all!

Have an amazing Summer, or Winter for those in the southern hemisphere! :D

Until next time,

Cheers!
Alex
@@ -133,40 +133,6 @@ For example, say you have existing transcodes with the policy "Videos higher tha

No. Our design principle is that the original assets should always be untouched.

### How can I move all data (photos, persons, albums, libraries) from one user to another?

This is not officially supported but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the `psql` command), or you can add, for example, an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file so that you can use a web interface.

<details>
<summary>Steps</summary>

1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).

2. Find the ID of both the 'source' and the 'destination' user (it's the id column in the `users` table)

3. Four tables need to be updated:

```sql
BEGIN;
-- reassign albums
UPDATE albums SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';

-- reassign people
UPDATE person SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';

-- reassign assets
UPDATE assets SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>'
AND CHECKSUM NOT IN (SELECT CHECKSUM FROM assets WHERE "ownerId" = '<destinationId>');

-- reassign external libraries
UPDATE libraries SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
COMMIT;
```

4. There might be left-over assets in the 'source' user's library if they were skipped by the last query because of duplicate checksums. These are probably duplicates anyway and can likely be removed; the query below lists them.

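To see which assets were skipped, a query along these lines can help. This is a sketch rather than an official step: it simply lists the assets still owned by the source user after the updates above, and the `"originalPath"` column is an assumption based on the Immich schema.

```sql
-- assets that stayed with the source user because the destination
-- already had an asset with the same checksum
SELECT id, "originalPath"
FROM assets
WHERE "ownerId" = '<sourceId>';
```
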
</details>

---

## Albums
@@ -442,4 +408,11 @@ docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --

</details>

If corruption is detected, you should immediately make a backup before performing any other work in the database.
To do so, you may need to set the `zero_damaged_pages=on` flag for the database server to allow `pg_dumpall` to succeed.
After taking a backup, the recommended next step is to restore the database from a healthy backup before corruption was detected.
The damaged database dump can be used to manually recover any changes made since the last backup, if needed.
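With the default compose setup, taking that dump might look roughly like this. This is a sketch, not an official procedure: the container name, database name, and `<DB_USERNAME>` placeholder follow the examples above, and the flag is reset once the dump succeeds.

```bash
# allow reads to skip over damaged pages (their contents are lost in the dump)
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="ALTER SYSTEM SET zero_damaged_pages=on;"
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="SELECT pg_reload_conf();"

# dump everything while the flag is active
docker exec -it immich_postgres pg_dumpall --username=<DB_USERNAME> > damaged-db-dump.sql

# reset the flag afterwards
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="ALTER SYSTEM RESET zero_damaged_pages;"
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="SELECT pg_reload_conf();"
```
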

The causes of possible corruption are many, but can include unexpected poweroffs or unmounts, use of a network share for Postgres data, or a poor storage medium such as an SD card or a failing HDD/SSD.

[huggingface]: https://huggingface.co/immich-app

@@ -76,6 +76,7 @@ services:
backup:
container_name: immich_db_dumper
image: prodrigestivill/postgres-backup-local:14
restart: always
env_file:
- .env
environment:
@@ -191,6 +192,6 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
</Tabs>

:::danger
Do not touch the files inside these folders under any circumstances except taking a backup, changing or removing an asset can cause untracked and missing files.
Do not touch the files inside these folders under any circumstances except taking a backup. Changing or removing an asset can cause untracked and missing files.
You can think of it as the App-Which-Must-Not-Be-Named: the only way to view, change, or delete assets is through the mobile or browser interface.
:::

@@ -27,7 +27,7 @@ Copy the entire `immich-server` block as a new service and make the following ch
+ container_name: immich_microservices
```

Once you have two copies of the immich-server service, make the following chnages to each one. This will allow one container to only serve the web UI and API, and the other one to handle all other tasks.
Once you have two copies of the immich-server service, make the following changes to each one. This will allow one container to only serve the web UI and API, and the other one to handle all other tasks.

```diff
services:

21
docs/docs/developer/translations.md
Normal file
@@ -0,0 +1,21 @@
# Translations

:::tip
You can request a new language [here](https://hosted.weblate.org/new-lang/immich/immich/).
:::

## Weblate

[Weblate](https://weblate.org/) is a "libre software web-based continuous localization system". Immich localization efforts are managed on their [hosted platform](https://hosted.weblate.org/projects/immich/immich/).

## International message format

Plurals, numbers, dates and other locale specific message formats can be handled by using the [ICU message format](https://unicode-org.github.io/icu/userguide/format_parse/messages/). Internally, this is handled by the [intl-messageformat](https://www.npmjs.com/package/intl-messageformat) library. Their [documentation](https://formatjs.io/docs/intl-messageformat/) includes common, editable examples via a "live editor" feature, which can be useful to test and debug message formats.

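For example, a plural message can be constructed and formatted like this. This is a minimal sketch using the `intl-messageformat` package mentioned above; the message string itself is made up for illustration.

```ts
import { IntlMessageFormat } from 'intl-messageformat';

// '#' is replaced with the formatted number inside a plural clause
const message = new IntlMessageFormat(
  '{count, plural, =0 {no photos} one {# photo} other {# photos}}',
  'en',
);

console.log(message.format({ count: 0 })); // "no photos"
console.log(message.format({ count: 5 })); // "5 photos"
```
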
## Progress

Immich currently supports the following languages:

<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
</a>
@@ -1,7 +1,7 @@
# Troubleshooting

:::tip
A great option to get assistance with troubleshooting is to join our [Discord](https://discord.gg/D8JsnBEuKb) server, where we have a dedicated channel for `#contributing`.
A great option to get assistance with troubleshooting is to join our [Discord](https://discord.immich.app) server, where we have a dedicated channel for `#contributing`.
:::

## Known Issues

@@ -4,7 +4,7 @@ This page gives a few pointers on how to access your Immich instance from outsid
You can read the [full discussion in Discord](https://discord.com/channels/979116623879368755/1122615710846308484)

:::danger
Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you succeptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you susceptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
:::

## Option 1: VPN to home network

@@ -45,8 +45,6 @@ Regardless of filesystem, it is not recommended to use a network share for your
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `IMMICH_WEB_ROOT` | Path of root index.html | `/usr/src/app/www` | server | api |
| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory | `/usr/src/resources` | server | microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |

@@ -27,7 +27,7 @@ For more information about setting up the community image see [here](https://git

:::info

- Guide was written using Unraid v6.12.10
- Guide was written using Unraid v6.12.10.
- Requires you to have installed the plugin: [Docker Compose Manager](https://forums.unraid.net/topic/114415-plugin-docker-compose-manager/)
- An Unraid share created for your images
- There has been a [report](https://forums.unraid.net/topic/130006-errortraps-traps-node27707-trap-invalid-opcode-ip14fcfc8d03c0-sp7fff32889dd8-more/#comment-1189395) of this not working if your Unraid server doesn't support AVX _(e.g. using a T610)_
@@ -46,7 +46,8 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
/>

3. Select the cog ⚙️ next to Immich, then click "**Edit Stack**"
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default.
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default. Note that Unraid v6.12.10 uses version 24.0.9 of the Docker Engine, which does not support healthcheck `start_interval` as defined in the `database` service of the Docker compose file (version 25 or higher is needed). This parameter defines an initial waiting period before starting health checks, to give the container time to start up. Commenting out the `start_interval` and `start_period` parameters will allow the containers to start up normally. The only downside to this is that the database container will not receive an initial health check until `interval` time has passed.

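On a Docker Engine older than version 25, the relevant part of the `database` service would end up looking roughly like this (a sketch with the long checksum test trimmed for brevity; the remaining values match the compose file above):

```yaml
healthcheck:
  test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1
  interval: 5m
  # start_interval: 30s  # requires Docker Engine 25 or newer
  # start_period: 5m     # requires Docker Engine 25 or newer
```
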
<details >
<summary>Using an existing Postgres container? Click me! Otherwise proceed to step 5.</summary>
<ul>
@@ -70,6 +71,7 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
/>
</ul>
</details>

5. Click "**Save Changes**", you will be prompted to edit stack UI labels, just leave this blank and click "**Ok**"
6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:

@@ -13,4 +13,4 @@ Running into an issue or have a question? Try the following:

[github-issues]: https://github.com/immich-app/immich/issues
[github-releases]: https://github.com/immich-app/immich/releases
[discord-link]: https://discord.com/invite/D8JsnBEuKb
[discord-link]: https://discord.immich.app

@@ -5,21 +5,21 @@ sidebar_position: 3
# Quick Start

Here is a quick, no-choices path to install Immich and take it for a test drive.
Once you've tried it, perhaps you'll use one of the many other ways
Once you've tried it, you might use one of the many other ways
to install and use it.

## Requirements

Check the [requirements page](/docs/install/requirements) to get started.

## Install and launch via Docker Compose
## Install and Launch via Docker Compose

Follow the [Docker Compose (Recommended)](/docs/install/docker-compose) instructions
to install the server.

- Where random passwords are required, `pwgen` is a handy utility.
- `UPLOAD_LOCATION` should be set to some new directory on the server
with free space.
with enough free space.
- You may ignore "Step 4 - Upgrading".

## Try the Web UI
@@ -48,26 +48,26 @@ import MobileAppLogin from '/docs/partials/_mobile-app-login.md';

In the mobile app, you should see the photo you uploaded from the web UI.

### Transfer Photos from your Mobile Device
### Transfer Photos from Your Mobile Device

import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';

<MobileAppBackup />

Depending on how many photos are on your mobile device, this backup may
The backup time differs depending on how many photos are on your mobile device. Large uploads may
take quite a while.

You can select the Jobs tab to see Immich processing your photos.
You can select the **Jobs** tab to see Immich processing your photos.

<img src={require('/docs/guides/img/jobs-tab.png').default} title="Jobs tab" />

## Set up your backups
## Set up Your Backups

You may want to back up the content of your Immich instance
along with other parts of your server; be sure to read about
[database backup](/docs/administration/backup-and-restore).

## Where to go from here?
## Where to Go From Here

You may decide you'd like to install the server a different way;
the Install category on the left menu provides many options.

@@ -4,11 +4,17 @@ sidebar_position: 5

# Support The Project

## Contributing
## Report issues

1. Testing - Using Immich and reporting bugs is a great way to help support the project. Found a bug? [Open an issue on GitHub][github-issue].
1. Translations - The Immich mobile app has been translated into [17 languages][github-langs] so far! To contribute with translations, email me at alex.tran1502@gmail.com or send me a message on discord.
1. Development - If you are a programmer or developer, take a look at Immich's [technology stack](/docs/developer/architecture.mdx) and consider fixing bugs or building new features. The team and I are always looking for new contributors. For information about how to contribute as a developer, see the [Developer](/docs/developer/architecture.mdx) section.
By far the easiest way to help make Immich better is to use it and report issues and bugs. Found a bug? [Open an issue on GitHub][github-issue].

## Translations

Support the project by localizing on [Weblate](https://hosted.weblate.org/projects/immich/immich/). For more information, see the [Translations](/docs/developer/translations) section.

## Development

If you are a programmer or developer, take a look at Immich's [technology stack](/docs/developer/architecture.mdx) and consider fixing bugs or building new features. The team and I are always looking for new contributors. For information about how to contribute as a developer, see the [Developer](/docs/developer/architecture.mdx) section.

[github-issue]: https://github.com/immich-app/immich/issues/new/choose
[github-langs]: https://github.com/immich-app/immich/tree/main/mobile/assets/i18n

@@ -92,6 +92,7 @@ const config = {
alt: 'Immich Logo',
src: 'img/immich-logo-inline-light.png',
srcDark: 'img/immich-logo-inline-dark.png',
className: 'rounded-none',
},
items: [
{
@@ -124,7 +125,7 @@ const config = {
position: 'right',
},
{
href: 'https://discord.gg/D8JsnBEuKb',
href: 'https://discord.immich.app',
label: 'Discord',
position: 'right',
},
@@ -151,7 +152,7 @@ const config = {
items: [
{
label: 'Discord',
href: 'https://discord.com/invite/D8JsnBEuKb',
href: 'https://discord.immich.app',
},
{
label: 'Reddit',

379
docs/package-lock.json
generated
@@ -2155,9 +2155,10 @@
}
},
"node_modules/@docusaurus/core": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.3.2.tgz",
"integrity": "sha512-PzKMydKI3IU1LmeZQDi+ut5RSuilbXnA8QdowGeJEgU8EJjmx3rBHNT1LxQxOVqNEwpWi/csLwd9bn7rUjggPA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.4.0.tgz",
"integrity": "sha512-g+0wwmN2UJsBqy2fQRQ6fhXruoEa62JDeEa5d8IdTJlMoaDaEDfHh7WjwGRn4opuTQWpjAwP/fbcgyHKlE+64w==",
"license": "MIT",
"dependencies": {
"@babel/core": "^7.23.3",
"@babel/generator": "^7.23.3",
@@ -2169,12 +2170,12 @@
"@babel/runtime": "^7.22.6",
"@babel/runtime-corejs3": "^7.22.6",
"@babel/traverse": "^7.22.8",
"@docusaurus/cssnano-preset": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/cssnano-preset": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"autoprefixer": "^10.4.14",
"babel-loader": "^9.1.3",
"babel-plugin-dynamic-import-node": "^2.3.3",
@@ -2240,9 +2241,10 @@
}
},
"node_modules/@docusaurus/cssnano-preset": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.3.2.tgz",
"integrity": "sha512-+5+epLk/Rp4vFML4zmyTATNc3Is+buMAL6dNjrMWahdJCJlMWMPd/8YfU+2PA57t8mlSbhLJ7vAZVy54cd1vRQ==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.4.0.tgz",
"integrity": "sha512-qwLFSz6v/pZHy/UP32IrprmH5ORce86BGtN0eBtG75PpzQJAzp9gefspox+s8IEOr0oZKuQ/nhzZ3xwyc3jYJQ==",
"license": "MIT",
"dependencies": {
"cssnano-preset-advanced": "^6.1.2",
"postcss": "^8.4.38",
@@ -2254,9 +2256,10 @@
}
},
"node_modules/@docusaurus/logger": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.3.2.tgz",
"integrity": "sha512-Ldu38GJ4P8g4guN7d7pyCOJ7qQugG7RVyaxrK8OnxuTlaImvQw33aDRwaX2eNmX8YK6v+//Z502F4sOZbHHCHQ==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.4.0.tgz",
"integrity": "sha512-bZwkX+9SJ8lB9kVRkXw+xvHYSMGG4bpYHKGXeXFvyVc79NMeeBSGgzd4TQLHH+DYeOJoCdl8flrFJVxlZ0wo/Q==",
"license": "MIT",
"dependencies": {
"chalk": "^4.1.2",
"tslib": "^2.6.0"
@@ -2266,13 +2269,14 @@
}
},
"node_modules/@docusaurus/mdx-loader": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.3.2.tgz",
"integrity": "sha512-AFRxj/aOk3/mfYDPxE3wTbrjeayVRvNSZP7mgMuUlrb2UlPRbSVAFX1k2RbgAJrnTSwMgb92m2BhJgYRfptN3g==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.4.0.tgz",
"integrity": "sha512-kSSbrrk4nTjf4d+wtBA9H+FGauf2gCax89kV8SUSJu3qaTdSIKdWERlngsiHaCFgZ7laTJ8a67UFf+xlFPtuTw==",
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@mdx-js/mdx": "^3.0.0",
"@slorber/remark-comment": "^1.0.0",
"escape-html": "^1.0.3",
@@ -2304,11 +2308,12 @@
}
},
"node_modules/@docusaurus/module-type-aliases": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.3.2.tgz",
"integrity": "sha512-b/XB0TBJah5yKb4LYuJT4buFvL0MGAb0+vJDrJtlYMguRtsEBkf2nWl5xP7h4Dlw6ol0hsHrCYzJ50kNIOEclw==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.4.0.tgz",
"integrity": "sha512-A1AyS8WF5Bkjnb8s+guTDuYmUiwJzNrtchebBHpc0gz0PyHJNMaybUlSrmJjHVcGrya0LKI4YcR3lBDQfXRYLw==",
"license": "MIT",
"dependencies": {
"@docusaurus/types": "3.3.2",
"@docusaurus/types": "3.4.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2322,17 +2327,18 @@
}
},
"node_modules/@docusaurus/plugin-content-blog": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.3.2.tgz",
"integrity": "sha512-fJU+dmqp231LnwDJv+BHVWft8pcUS2xVPZdeYH6/ibH1s2wQ/sLcmUrGWyIv/Gq9Ptj8XWjRPMghlxghuPPoxg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.4.0.tgz",
"integrity": "sha512-vv6ZAj78ibR5Jh7XBUT4ndIjmlAxkijM3Sx5MAAzC1gyv0vupDQNhzuFg1USQmQVj3P5I6bquk12etPV3LJ+Xw==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"cheerio": "^1.0.0-rc.12",
"feed": "^4.2.2",
"fs-extra": "^11.1.1",
@@ -2353,18 +2359,19 @@
}
},
"node_modules/@docusaurus/plugin-content-docs": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.3.2.tgz",
"integrity": "sha512-Dm1ri2VlGATTN3VGk1ZRqdRXWa1UlFubjaEL6JaxaK7IIFqN/Esjpl+Xw10R33loHcRww/H76VdEeYayaL76eg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.4.0.tgz",
"integrity": "sha512-HkUCZffhBo7ocYheD9oZvMcDloRnGhBMOZRyVcAQRFmZPmNqSyISlXA1tQCIxW+r478fty97XXAGjNYzBjpCsg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/module-type-aliases": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@types/react-router-config": "^5.0.7",
"combine-promises": "^1.1.0",
"fs-extra": "^11.1.1",
@@ -2383,15 +2390,16 @@
}
},
"node_modules/@docusaurus/plugin-content-pages": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.3.2.tgz",
"integrity": "sha512-EKc9fQn5H2+OcGER8x1aR+7URtAGWySUgULfqE/M14+rIisdrBstuEZ4lUPDRrSIexOVClML82h2fDS+GSb8Ew==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.4.0.tgz",
"integrity": "sha512-h2+VN/0JjpR8fIkDEAoadNjfR3oLzB+v1qSXbIAKjQ46JAHx3X22n9nqS+BWSQnTnp1AjkjSvZyJMekmcwxzxg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"fs-extra": "^11.1.1",
"tslib": "^2.6.0",
"webpack": "^5.88.1"
@@ -2405,13 +2413,14 @@
}
},
"node_modules/@docusaurus/plugin-debug": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.3.2.tgz",
"integrity": "sha512-oBIBmwtaB+YS0XlmZ3gCO+cMbsGvIYuAKkAopoCh0arVjtlyPbejzPrHuCoRHB9G7abjNZw7zoONOR8+8LM5+Q==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.4.0.tgz",
"integrity": "sha512-uV7FDUNXGyDSD3PwUaf5YijX91T5/H9SX4ErEcshzwgzWwBtK37nUWPU3ZLJfeTavX3fycTOqk9TglpOLaWkCg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"fs-extra": "^11.1.1",
"react-json-view-lite": "^1.2.0",
"tslib": "^2.6.0"
@@ -2425,13 +2434,14 @@
}
},
"node_modules/@docusaurus/plugin-google-analytics": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.3.2.tgz",
"integrity": "sha512-jXhrEIhYPSClMBK6/IA8qf1/FBoxqGXZvg7EuBax9HaK9+kL3L0TJIlatd8jQJOMtds8mKw806TOCc3rtEad1A==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.4.0.tgz",
"integrity": "sha512-mCArluxEGi3cmYHqsgpGGt3IyLCrFBxPsxNZ56Mpur0xSlInnIHoeLDH7FvVVcPJRPSQ9/MfRqLsainRw+BojA==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"tslib": "^2.6.0"
},
"engines": {
@@ -2443,13 +2453,14 @@
}
},
"node_modules/@docusaurus/plugin-google-gtag": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.3.2.tgz",
"integrity": "sha512-vcrKOHGbIDjVnNMrfbNpRQR1x6Jvcrb48kVzpBAOsKbj9rXZm/idjVAXRaewwobHdOrJkfWS/UJoxzK8wyLRBQ==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.4.0.tgz",
"integrity": "sha512-Dsgg6PLAqzZw5wZ4QjUYc8Z2KqJqXxHxq3vIoyoBWiLEEfigIs7wHR+oiWUQy3Zk9MIk6JTYj7tMoQU0Jm3nqA==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@types/gtag.js": "^0.0.12",
"tslib": "^2.6.0"
},
@@ -2462,13 +2473,14 @@
}
},
"node_modules/@docusaurus/plugin-google-tag-manager": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.3.2.tgz",
"integrity": "sha512-ldkR58Fdeks0vC+HQ+L+bGFSJsotQsipXD+iKXQFvkOfmPIV6QbHRd7IIcm5b6UtwOiK33PylNS++gjyLUmaGw==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.4.0.tgz",
"integrity": "sha512-O9tX1BTwxIhgXpOLpFDueYA9DWk69WCbDRrjYoMQtFHSkTyE7RhNgyjSPREUWJb9i+YUg3OrsvrBYRl64FCPCQ==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"tslib": "^2.6.0"
},
"engines": {
@@ -2480,16 +2492,17 @@
}
},
"node_modules/@docusaurus/plugin-sitemap": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.3.2.tgz",
"integrity": "sha512-/ZI1+bwZBhAgC30inBsHe3qY9LOZS+79fRGkNdTcGHRMcdAp6Vw2pCd1gzlxd/xU+HXsNP6cLmTOrggmRp3Ujg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.4.0.tgz",
"integrity": "sha512-+0VDvx9SmNrFNgwPoeoCha+tRoAjopwT0+pYO1xAbyLcewXSemq+eLxEa46Q1/aoOaJQ0qqHELuQM7iS2gp33Q==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"fs-extra": "^11.1.1",
"sitemap": "^7.1.1",
"tslib": "^2.6.0"
@@ -2503,23 +2516,24 @@
}
},
"node_modules/@docusaurus/preset-classic": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.3.2.tgz",
"integrity": "sha512-1SDS7YIUN1Pg3BmD6TOTjhB7RSBHJRpgIRKx9TpxqyDrJ92sqtZhomDc6UYoMMLQNF2wHFZZVGFjxJhw2VpL+Q==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.4.0.tgz",
"integrity": "sha512-Ohj6KB7siKqZaQhNJVMBBUzT3Nnp6eTKqO+FXO3qu/n1hJl3YLwVKTWBg28LF7MWrKu46UuYavwMRxud0VyqHg==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/plugin-content-blog": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/plugin-content-pages": "3.3.2",
"@docusaurus/plugin-debug": "3.3.2",
"@docusaurus/plugin-google-analytics": "3.3.2",
"@docusaurus/plugin-google-gtag": "3.3.2",
"@docusaurus/plugin-google-tag-manager": "3.3.2",
"@docusaurus/plugin-sitemap": "3.3.2",
"@docusaurus/theme-classic": "3.3.2",
"@docusaurus/theme-common": "3.3.2",
"@docusaurus/theme-search-algolia": "3.3.2",
"@docusaurus/types": "3.3.2"
"@docusaurus/core": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/plugin-debug": "3.4.0",
"@docusaurus/plugin-google-analytics": "3.4.0",
"@docusaurus/plugin-google-gtag": "3.4.0",
"@docusaurus/plugin-google-tag-manager": "3.4.0",
"@docusaurus/plugin-sitemap": "3.4.0",
"@docusaurus/theme-classic": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-search-algolia": "3.4.0",
"@docusaurus/types": "3.4.0"
},
"engines": {
"node": ">=18.0"
@@ -2530,22 +2544,23 @@
}
},
"node_modules/@docusaurus/theme-classic": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.3.2.tgz",
"integrity": "sha512-gepHFcsluIkPb4Im9ukkiO4lXrai671wzS3cKQkY9BXQgdVwsdPf/KS0Vs4Xlb0F10fTz+T3gNjkxNEgSN9M0A==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.4.0.tgz",
"integrity": "sha512-0IPtmxsBYv2adr1GnZRdMkEQt1YW6tpzrUPj02YxNpvJ5+ju4E13J5tB4nfdaen/tfR1hmpSPlTFPvTf4kwy8Q==",
"license": "MIT",
"dependencies": {
"@docusaurus/core": "3.3.2",
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/module-type-aliases": "3.3.2",
"@docusaurus/plugin-content-blog": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/plugin-content-pages": "3.3.2",
"@docusaurus/theme-common": "3.3.2",
"@docusaurus/theme-translations": "3.3.2",
"@docusaurus/types": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-translations": "3.4.0",
"@docusaurus/types": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"@mdx-js/react": "^3.0.0",
"clsx": "^2.0.0",
"copy-text-to-clipboard": "^3.2.0",
@@ -2569,17 +2584,18 @@
}
},
"node_modules/@docusaurus/theme-common": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.3.2.tgz",
"integrity": "sha512-kXqSaL/sQqo4uAMQ4fHnvRZrH45Xz2OdJ3ABXDS7YVGPSDTBC8cLebFrRR4YF9EowUHto1UC/EIklJZQMG/usA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.4.0.tgz",
"integrity": "sha512-0A27alXuv7ZdCg28oPE8nH/Iz73/IUejVaCazqu9elS4ypjiLhK3KfzdSQBnL/g7YfHSlymZKdiOHEo8fJ0qMA==",
"license": "MIT",
"dependencies": {
"@docusaurus/mdx-loader": "3.3.2",
"@docusaurus/module-type-aliases": "3.3.2",
"@docusaurus/plugin-content-blog": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/plugin-content-pages": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/mdx-loader": "3.4.0",
"@docusaurus/module-type-aliases": "3.4.0",
"@docusaurus/plugin-content-blog": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/plugin-content-pages": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@types/history": "^4.7.11",
"@types/react": "*",
"@types/react-router-config": "*",
@@ -2598,18 +2614,19 @@
}
},
"node_modules/@docusaurus/theme-search-algolia": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.3.2.tgz",
"integrity": "sha512-qLkfCl29VNBnF1MWiL9IyOQaHxUvicZp69hISyq/xMsNvFKHFOaOfk9xezYod2Q9xx3xxUh9t/QPigIei2tX4w==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.4.0.tgz",
"integrity": "sha512-aiHFx7OCw4Wck1z6IoShVdUWIjntC8FHCw9c5dR8r3q4Ynh+zkS8y2eFFunN/DL6RXPzpnvKCg3vhLQYJDmT9Q==",
"license": "MIT",
"dependencies": {
"@docsearch/react": "^3.5.2",
"@docusaurus/core": "3.3.2",
"@docusaurus/logger": "3.3.2",
"@docusaurus/plugin-content-docs": "3.3.2",
"@docusaurus/theme-common": "3.3.2",
"@docusaurus/theme-translations": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-validation": "3.3.2",
"@docusaurus/core": "3.4.0",
"@docusaurus/logger": "3.4.0",
"@docusaurus/plugin-content-docs": "3.4.0",
"@docusaurus/theme-common": "3.4.0",
"@docusaurus/theme-translations": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-validation": "3.4.0",
"algoliasearch": "^4.18.0",
"algoliasearch-helper": "^3.13.3",
"clsx": "^2.0.0",
@@ -2628,9 +2645,10 @@
}
},
"node_modules/@docusaurus/theme-translations": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.3.2.tgz",
"integrity": "sha512-bPuiUG7Z8sNpGuTdGnmKl/oIPeTwKr0AXLGu9KaP6+UFfRZiyWbWE87ti97RrevB2ffojEdvchNujparR3jEZQ==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.4.0.tgz",
"integrity": "sha512-zSxCSpmQCCdQU5Q4CnX/ID8CSUUI3fvmq4hU/GNP/XoAWtXo9SAVnM3TzpU8Gb//H3WCsT8mJcTfyOk3d9ftNg==",
"license": "MIT",
"dependencies": {
"fs-extra": "^11.1.1",
"tslib": "^2.6.0"
@@ -2640,9 +2658,10 @@
}
},
"node_modules/@docusaurus/types": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.3.2.tgz",
"integrity": "sha512-5p201S7AZhliRxTU7uMKtSsoC8mgPA9bs9b5NQg1IRdRxJfflursXNVsgc3PcMqiUTul/v1s3k3rXXFlRE890w==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.4.0.tgz",
"integrity": "sha512-4jcDO8kXi5Cf9TcyikB/yKmz14f2RZ2qTRerbHAsS+5InE9ZgSLBNLsewtFTcTOXSVcbU3FoGOzcNWAmU1TR0A==",
"license": "MIT",
"dependencies": {
"@mdx-js/mdx": "^3.0.0",
"@types/history": "^4.7.11",
@@ -2660,12 +2679,13 @@
}
},
"node_modules/@docusaurus/utils": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.3.2.tgz",
"integrity": "sha512-f4YMnBVymtkSxONv4Y8js3Gez9IgHX+Lcg6YRMOjVbq8sgCcdYK1lf6SObAuz5qB/mxiSK7tW0M9aaiIaUSUJg==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.4.0.tgz",
"integrity": "sha512-fRwnu3L3nnWaXOgs88BVBmG1yGjcQqZNHG+vInhEa2Sz2oQB+ZjbEMO5Rh9ePFpZ0YDiDUhpaVjwmS+AU2F14g==",
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"@svgr/webpack": "^8.1.0",
"escape-string-regexp": "^4.0.0",
"file-loader": "^6.2.0",
@@ -2682,6 +2702,7 @@
"shelljs": "^0.8.5",
"tslib": "^2.6.0",
"url-loader": "^4.1.1",
"utility-types": "^3.10.0",
"webpack": "^5.88.1"
},
"engines": {
@@ -2697,9 +2718,10 @@
}
},
"node_modules/@docusaurus/utils-common": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.3.2.tgz",
"integrity": "sha512-QWFTLEkPYsejJsLStgtmetMFIA3pM8EPexcZ4WZ7b++gO5jGVH7zsipREnCHzk6+eDgeaXfkR6UPaTt86bp8Og==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.4.0.tgz",
"integrity": "sha512-NVx54Wr4rCEKsjOH5QEVvxIqVvm+9kh7q8aYTU5WzUU9/Hctd6aTrcZ3G0Id4zYJ+AeaG5K5qHA4CY5Kcm2iyQ==",
"license": "MIT",
"dependencies": {
"tslib": "^2.6.0"
},
@@ -2716,15 +2738,18 @@
}
},
"node_modules/@docusaurus/utils-validation": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.3.2.tgz",
"integrity": "sha512-itDgFs5+cbW9REuC7NdXals4V6++KifgVMzoGOOOSIifBQw+8ULhy86u5e1lnptVL0sv8oAjq2alO7I40GR7pA==",
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.4.0.tgz",
"integrity": "sha512-hYQ9fM+AXYVTWxJOT1EuNaRnrR2WGpRdLDQG07O8UOpsvCPWUVOeo26Rbm0JWY2sGLfzAb+tvJ62yF+8F+TV0g==",
"license": "MIT",
"dependencies": {
"@docusaurus/logger": "3.3.2",
"@docusaurus/utils": "3.3.2",
"@docusaurus/utils-common": "3.3.2",
"@docusaurus/logger": "3.4.0",
"@docusaurus/utils": "3.4.0",
"@docusaurus/utils-common": "3.4.0",
"fs-extra": "^11.2.0",
"joi": "^17.9.2",
"js-yaml": "^4.1.0",
"lodash": "^4.17.21",
"tslib": "^2.6.0"
},
"engines": {
@@ -12615,9 +12640,10 @@
}
},
"node_modules/picocolors": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ=="
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
"integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
"license": "ISC"
},
"node_modules/picomatch": {
"version": "2.3.1",
@@ -12728,9 +12754,9 @@
}
},
"node_modules/postcss": {
"version": "8.4.38",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz",
"integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==",
"version": "8.4.39",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
"integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
"funding": [
{
"type": "opencollective",
@@ -12745,9 +12771,10 @@
"url": "https://github.com/sponsors/ai"
}
],
"license": "MIT",
"dependencies": {
"nanoid": "^3.3.7",
"picocolors": "^1.0.0",
"picocolors": "^1.0.1",
"source-map-js": "^1.2.0"
},
"engines": {
@@ -13573,10 +13600,11 @@
}
},
"node_modules/prettier": {
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz",
"integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz",
"integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
@@ -15986,9 +16014,10 @@
}
},
"node_modules/tailwindcss": {
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.3.tgz",
"integrity": "sha512-U7sxQk/n397Bmx4JHbJx/iSOOv5G+II3f1kpLpY2QeUv5DcPdcTsYLlusZfq1NthHS1c1cZoyFmmkex1rzke0A==",
"version": "3.4.4",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.4.tgz",
"integrity": "sha512-ZoyXOdJjISB7/BcLTR6SEsLgKtDStYyYZVLsUtWChO4Ps20CBad7lfJKVDiejocV4ME1hLmyY0WJE3hSDcmQ2A==",
"license": "MIT",
"dependencies": {
"@alloc/quick-lru": "^5.2.0",
"arg": "^5.0.2",
@@ -16025,6 +16054,7 @@
"version": "6.0.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
"integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
"license": "ISC",
"dependencies": {
"is-glob": "^4.0.3"
},
@@ -16346,9 +16376,10 @@
}
},
"node_modules/typescript": {
"version": "5.4.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
"version": "5.5.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
"integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"

@@ -56,6 +56,6 @@
"node": ">=20"
},
"volta": {
"node": "20.14.0"
"node": "20.15.1"
}
}

@@ -38,6 +38,11 @@ const guides: CommunityGuidesProps[] = [
description: 'Import your Google Photos files into Immich and add your albums',
url: 'https://github.com/immich-app/immich/discussions/1340',
},
{
title: 'Access Immich with custom domain',
description: 'Access your local Immich installation over the internet using your own domain',
url: 'https://github.com/ppr88/immich-guides/blob/main/open-immich-custom-domain.md',
},
];

function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {

@@ -49,7 +49,7 @@ export function Timeline({ items }: Props): JSX.Element {
<div className="flex flex-col flex-grow justify-between gap-2">
<div className="flex gap-2 items-center">
{cardIcon === 'immich' ? (
<img src="img/immich-logo.svg" height="30" />
<img src="img/immich-logo.svg" height="30" className="rounded-none" />
) : (
<Icon path={cardIcon} size={1} color={item.iconColor} />
)}

@@ -8,7 +8,6 @@
@tailwind utilities;

@import url('https://fonts.googleapis.com/css2?family=Overpass:ital,wght@0,300;0,400;0,500;0,600;0,700;1,300;1,400;1,500;1,600;1,700&display=swap');
@import url('https://fonts.googleapis.com/css2?family=Snowburst+One&display=swap');

html,
button {
@@ -48,7 +47,3 @@ img {
div[class^='announcementBar_'] {
min-height: 2rem;
}

.navbar__brand .navbar__title {
@apply font-immich-title text-2xl font-normal text-immich-primary dark:text-immich-dark-primary;
}

77
docs/src/pages/cursed-knowledge.tsx
Normal file
@@ -0,0 +1,77 @@
|
||||
import { mdiCalendarToday, mdiLeadPencil, mdiLockOutline, mdiSpeedometerSlow, mdiWeb } from '@mdi/js';
|
||||
import Layout from '@theme/Layout';
|
||||
import React from 'react';
|
||||
import { Item as TimelineItem, Timeline } from '../components/timeline';
|
||||
|
||||
const withLanguage = (date: Date) => (language: string) => date.toLocaleDateString(language);
|
||||
|
||||
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
|
||||
|
||||
const items: Item[] = [
|
||||
{
|
||||
icon: mdiLeadPencil,
|
||||
iconColor: 'gold',
|
||||
title: 'PostgreSQL NOTIFY is cursed',
|
||||
description:
|
||||
'PostgreSQL does everything in a transaction, including NOTIFY. This means that using the socket.io postgres-adapter causes a write to the WAL every 5 seconds.',
link: { url: 'https://github.com/immich-app/immich/pull/10801', text: '#10801' },
date: new Date(2024, 6, 3),
},
{
icon: mdiWeb,
iconColor: 'lightskyblue',
title: 'npm scripts are cursed',
description:
'npm scripts make an HTTP call to the npm registry each time they run, which makes them a terrible way to execute a health check.',
link: { url: 'https://github.com/immich-app/immich/issues/10796', text: '#10796' },
date: new Date(2024, 6, 3),
},
{
icon: mdiSpeedometerSlow,
iconColor: 'brown',
title: '50 extra packages are cursed',
description:
'There is a user in the JavaScript community who goes around adding "backwards compatibility" to projects. They do this by adding 50 extra package dependencies to your project, which are maintained by them.',
link: { url: 'https://github.com/immich-app/immich/pull/10690', text: '#10690' },
date: new Date(2024, 5, 28),
},
{
icon: mdiLockOutline,
iconColor: 'gold',
title: 'Long passwords are cursed',
description:
'The bcrypt implementation only uses the first 72 bytes of a string. Any characters after that are ignored.',
// link: GHSA-4p64-9f7h-3432
date: new Date(2024, 5, 25),
},
{
icon: mdiCalendarToday,
iconColor: 'greenyellow',
title: 'JavaScript Date objects are cursed',
description: 'JavaScript date objects are 1 indexed for years and days, but 0 indexed for months.',
link: { url: 'https://github.com/immich-app/immich/pull/6787', text: '#6787' },
date: new Date(2024, 0, 31),
},
];

export default function CursedKnowledgePage(): JSX.Element {
return (
<Layout title="Cursed Knowledge" description="Things we wish we didn't know">
<section className="my-8">
<h1 className="md:text-6xl text-center mb-10 text-immich-primary dark:text-immich-dark-primary px-2">
Cursed Knowledge
</h1>
<p className="text-center text-xl px-2">
Cursed knowledge we have learned as a result of building Immich that we wish we never knew.
</p>
<div className="flex justify-around mt-8 w-full max-w-full">
<Timeline
items={items
.sort((a, b) => b.date.getTime() - a.date.getTime())
.map((item) => ({ ...item, getDateLabel: withLanguage(item.date) }))}
/>
</div>
</section>
</Layout>
);
}
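The bcrypt and Date entries above are easy to verify in isolation. A minimal standalone check, assuming Node.js with the bcryptjs package available (this diff does not show which bcrypt implementation Immich itself uses):

// verify-cursed.ts — standalone sketch; bcryptjs is an assumption, not Immich's code.
import bcrypt from 'bcryptjs';

// bcrypt only hashes the first 72 bytes: two passwords that share the
// same 72-byte prefix compare as equal.
const prefix = 'a'.repeat(72);
const hash = bcrypt.hashSync(prefix + 'secret-suffix', 10);
console.log(bcrypt.compareSync(prefix + 'different-suffix', hash)); // true

// Date months are 0-indexed while years and days are 1-indexed, so
// new Date(2024, 0, 31) is January 31, 2024 — exactly how the items above encode dates.
const d = new Date(2024, 0, 31);
console.log(d.getFullYear(), d.getMonth() + 1, d.getDate()); // 2024 1 31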

@@ -10,7 +10,7 @@ function HomepageHeader() {
<section className="text-center m-6 p-12 border border-red-400 rounded-[50px] bg-slate-200 dark:bg-immich-dark-gray">
<img
src={isDarkTheme ? 'img/immich-logo-stacked-dark.svg' : 'img/immich-logo-stacked-light.svg'}
className="md:h-60 h-44 mb-2 antialiased"
className="md:h-60 h-44 mb-2 antialiased rounded-none"
alt="Immich logo"
/>
<div className="sm:text-2xl text-lg md:text-4xl mb-12 sm:leading-tight">
@@ -36,7 +36,7 @@ function HomepageHeader() {

<Link
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-dark-primary dark:bg-immich-primary rounded-full hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
to="https://discord.gg/D8JsnBEuKb"
to="https://discord.immich.app"
>
Discord
</Link>

@@ -14,6 +14,7 @@ import {
mdiCheckboxMarked,
mdiCloudUploadOutline,
mdiCollage,
mdiContentDuplicate,
mdiDevices,
mdiEmailOutline,
mdiExpansionCard,
@@ -28,12 +29,14 @@ import {
mdiForum,
mdiHandshakeOutline,
mdiHeart,
mdiHistory,
mdiImage,
mdiImageAlbum,
mdiImageEdit,
mdiImageMultipleOutline,
mdiImageSearch,
mdiKeyboardSettingsOutline,
mdiLockOutline,
mdiMagnify,
mdiMagnifyScan,
mdiMap,
@@ -63,14 +66,13 @@ import {
mdiVectorCombine,
mdiVideo,
mdiWeb,
mdiContentDuplicate,
} from '@mdi/js';
import Layout from '@theme/Layout';
import React from 'react';
import { Item, Timeline } from '../components/timeline';

const releases = {
'v1.106.0': new Date(2024, 5, 11),
'v1.106.1': new Date(2024, 5, 11),
'v1.104.0': new Date(2024, 4, 13),
'v1.103.0': new Date(2024, 3, 29),
'v1.102.0': new Date(2024, 3, 15),
@@ -159,6 +161,14 @@ const withRelease = ({
};

const roadmap: Item[] = [
{
done: false,
icon: mdiLockOutline,
iconColor: 'sandybrown',
title: 'Private/locked photos',
description: 'Private assets with extra protections',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiRocketLaunch,
@@ -199,14 +209,6 @@ const roadmap: Item[] = [
description: 'Granular access controls for users and api keys',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiWeb,
iconColor: 'royalblue',
title: 'Web translations',
description: 'Translate the web application to multiple languages',
getDateLabel: () => 'Planned for 2024',
},
{
done: false,
icon: mdiCameraBurst,
@@ -218,18 +220,31 @@ const roadmap: Item[] = [
];

const milestones: Item[] = [
withRelease({
icon: mdiHistory,
title: 'Versioned documentation',
description: 'View documentation as it was at the time of past releases',
release: 'v1.106.1',
}),
withRelease({
icon: mdiWeb,
iconColor: 'royalblue',
title: 'Web translations',
description: 'Translate the web application to multiple languages',
release: 'v1.106.1',
}),
withRelease({
icon: mdiContentDuplicate,
title: 'Similar image detection',
description: 'Detect duplicate assets that aren’t exactly identical',
release: 'v1.106.0',
release: 'v1.106.1',
}),
withRelease({
icon: mdiVectorCombine,
title: 'Container consolidation',
description:
'The microservices container can be run as a worker within the server image, allowing us to remove it from the default stack.',
release: 'v1.106.0',
release: 'v1.106.1',
}),
withRelease({
icon: mdiPencil,
20 docs/static/archived-versions.json vendored
@@ -1,4 +1,24 @@
[
{
"label": "v1.108.0",
"url": "https://v1.108.0.archive.immich.app"
},
{
"label": "v1.107.2",
"url": "https://v1.107.2.archive.immich.app"
},
{
"label": "v1.107.1",
"url": "https://v1.107.1.archive.immich.app"
},
{
"label": "v1.107.0",
"url": "https://v1.107.0.archive.immich.app"
},
{
"label": "v1.106.4",
"url": "https://v1.106.4.archive.immich.app"
},
{
"label": "v1.106.3",
"url": "https://v1.106.3.archive.immich.app"

@@ -21,9 +21,6 @@ module.exports = {
'immich-dark-fg': '#e5e7eb',
'immich-dark-gray': '#212121',
},
fontFamily: {
'immich-title': ['Snowburst One', 'cursive'],
},
},
},
plugins: [],

@@ -1 +1 @@
20.14
20.15.1

@@ -10,6 +10,11 @@ services:
build:
context: ../
dockerfile: server/Dockerfile
args:
- BUILD_ID=1234567890
- BUILD_IMAGE=e2e
- BUILD_SOURCE_REF=e2e
- BUILD_SOURCE_COMMIT=e2eeeeeeeeeeeeeeeeee
environment:
- DB_HOSTNAME=database
- DB_USERNAME=postgres
@@ -17,9 +22,12 @@ services:
- DB_DATABASE_NAME=immich
- IMMICH_MACHINE_LEARNING_ENABLED=false
- IMMICH_METRICS=true
- IMMICH_ENV=testing
volumes:
- upload:/usr/src/app/upload
- ./test-assets:/test-assets
extra_hosts:
- 'auth-server:host-gateway'
depends_on:
- redis
- database
@@ -27,7 +35,7 @@ services:
- 2283:3001

redis:
image: redis:6.2-alpine@sha256:d6c2911ac51b289db208767581a5d154544f2b2fe4914ea5056443f62dc6e900
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b

database:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
1272 e2e/package-lock.json generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.106.3",
"version": "1.108.0",
"description": "",
"main": "index.js",
"type": "module",
@@ -23,7 +23,8 @@
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^20.11.17",
"@types/node": "^20.14.10",
"@types/oidc-provider": "^8.5.1",
"@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2",
@@ -33,13 +34,15 @@
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^53.0.0",
"exiftool-vendored": "^26.0.0",
"eslint-plugin-unicorn": "^54.0.0",
"exiftool-vendored": "^27.0.0",
"jose": "^5.6.3",
"luxon": "^3.4.4",
"oidc-provider": "^8.5.1",
"pg": "^8.11.3",
"pngjs": "^7.0.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^3.2.4",
"prettier-plugin-organize-imports": "^4.0.0",
"socket.io-client": "^4.7.4",
"supertest": "^7.0.0",
"typescript": "^5.3.3",
@@ -47,6 +50,6 @@
"vitest": "^1.3.0"
},
"volta": {
"node": "20.14.0"
"node": "20.15.1"
}
}
@@ -88,7 +88,7 @@ describe('/albums', () => {
});

await addAssetsToAlbum(
{ id: user2Albums[0].id, bulkIdsDto: { ids: [user1Asset1.id] } },
{ id: user2Albums[0].id, bulkIdsDto: { ids: [user1Asset1.id, user1Asset2.id] } },
{ headers: asBearerAuth(user1.accessToken) },
);

@@ -261,7 +261,7 @@ describe('/albums', () => {
.get(`/albums?assetId=${user1Asset2.id}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(200);
expect(body).toHaveLength(1);
expect(body).toHaveLength(2);
});

it('should return the album collection filtered by assetId and ignores shared=true', async () => {
@@ -509,7 +509,17 @@ describe('/albums', () => {
expect(body).toEqual(errorDto.unauthorized);
});

it('should not be able to remove foreign asset from own album', async () => {
it('should require authorization', async () => {
const { status, body } = await request(app)
.delete(`/albums/${user1Albums[1].id}/assets`)
.set('Authorization', `Bearer ${user2.accessToken}`)
.send({ ids: [user1Asset1.id] });

expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});

it('should be able to remove foreign asset from owned album', async () => {
const { status, body } = await request(app)
.delete(`/albums/${user2Albums[0].id}/assets`)
.set('Authorization', `Bearer ${user2.accessToken}`)
@@ -519,8 +529,7 @@ describe('/albums', () => {
expect(body).toEqual([
expect.objectContaining({
id: user1Asset1.id,
success: false,
error: 'no_permission',
success: true,
}),
]);
});
@@ -555,10 +564,10 @@ describe('/albums', () => {
const { status, body } = await request(app)
.delete(`/albums/${user2Albums[0].id}/assets`)
.set('Authorization', `Bearer ${user1.accessToken}`)
.send({ ids: [user1Asset1.id] });
.send({ ids: [user1Asset2.id] });

expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ id: user1Asset1.id, success: true })]);
expect(body).toEqual([expect.objectContaining({ id: user1Asset2.id, success: true })]);
});

it('should not be able to remove assets from album as a viewer', async () => {

@@ -588,6 +588,58 @@ describe('/asset', () => {
const after = await utils.getAssetInfo(admin.accessToken, assetId);
expect(after.isTrashed).toBe(true);
});

it('should clean up live photos', async () => {
const { id: motionId } = await utils.createAsset(admin.accessToken, {
assetData: { filename: 'test.mp4', bytes: makeRandomImage() },
});
const { id: photoId } = await utils.createAsset(admin.accessToken, { livePhotoVideoId: motionId });

await utils.waitForWebsocketEvent({ event: 'assetUpload', id: photoId });
await utils.waitForWebsocketEvent({ event: 'assetHidden', id: motionId });

const asset = await utils.getAssetInfo(admin.accessToken, photoId);
expect(asset.livePhotoVideoId).toBe(motionId);

const { status } = await request(app)
.delete('/assets')
.send({ ids: [photoId], force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(204);

await utils.waitForWebsocketEvent({ event: 'assetDelete', id: photoId });
await utils.waitForWebsocketEvent({ event: 'assetDelete', id: motionId });
});

it('should not delete a shared motion asset', async () => {
const { id: motionId } = await utils.createAsset(admin.accessToken, {
assetData: { filename: 'test.mp4', bytes: makeRandomImage() },
});
const { id: asset1 } = await utils.createAsset(admin.accessToken, { livePhotoVideoId: motionId });
const { id: asset2 } = await utils.createAsset(admin.accessToken, { livePhotoVideoId: motionId });

await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset1 });
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset2 });
await utils.waitForWebsocketEvent({ event: 'assetHidden', id: motionId });

const asset = await utils.getAssetInfo(admin.accessToken, asset1);
expect(asset.livePhotoVideoId).toBe(motionId);

const { status } = await request(app)
.delete('/assets')
.send({ ids: [asset1], force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(204);

await utils.waitForWebsocketEvent({ event: 'assetDelete', id: asset1 });
await utils.waitForQueueFinish(admin.accessToken, 'backgroundTask');

await expect(utils.getAssetInfo(admin.accessToken, motionId)).resolves.toMatchObject({ id: motionId });
await expect(utils.getAssetInfo(admin.accessToken, asset2)).resolves.toMatchObject({
id: asset2,
livePhotoVideoId: motionId,
});
});
});

describe('GET /assets/:id/thumbnail', () => {

@@ -100,6 +100,12 @@ describe('/auth/*', () => {
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});

it('should reject an invalid email', async () => {
const { status, body } = await request(app).post('/auth/login').send({ email: [], password });
expect(status).toBe(400);
expect(body).toEqual(errorDto.invalidEmail);
});
}

it('should accept a correct password', async () => {

@@ -1,12 +1,85 @@
import {
LoginResponseDto,
SystemConfigOAuthDto,
getConfigDefaults,
getMyUser,
startOAuth,
updateConfig,
} from '@immich/sdk';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import { OAuthClient, OAuthUser } from 'src/setup/auth-server';
import { app, asBearerAuth, baseUrl, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';

const authServer = {
internal: 'http://auth-server:3000',
external: 'http://127.0.0.1:3000',
};

const redirect = async (url: string, cookies?: string[]) => {
const { headers } = await request(url)
.get('/')
.set('Cookie', cookies || []);
return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location };
};

const loginWithOAuth = async (sub: OAuthUser | string) => {
const { url } = await startOAuth({ oAuthConfigDto: { redirectUri: `${baseUrl}/auth/login` } });

// login
const response1 = await redirect(url.replace(authServer.internal, authServer.external));
const response2 = await request(authServer.external + response1.location)
.post('/')
.set('Cookie', response1.cookies)
.type('form')
.send({ prompt: 'login', login: sub, password: 'password' });

// approve
const response3 = await redirect(response2.header.location, response1.cookies);
const response4 = await request(authServer.external + response3.location)
.post('/')
.type('form')
.set('Cookie', response3.cookies)
.send({ prompt: 'consent' });

const response5 = await redirect(response4.header.location, response3.cookies.slice(1));
const redirectUrl = response5.location;

expect(redirectUrl).toBeDefined();
const params = new URL(redirectUrl).searchParams;
expect(params.get('code')).toBeDefined();
expect(params.get('state')).toBeDefined();

return redirectUrl;
};

const setupOAuth = async (token: string, dto: Partial<SystemConfigOAuthDto>) => {
const options = { headers: asBearerAuth(token) };
const defaults = await getConfigDefaults(options);
const merged = {
...defaults.oauth,
buttonText: 'Login with Immich',
issuerUrl: `${authServer.internal}/.well-known/openid-configuration`,
...dto,
};
await updateConfig({ systemConfigDto: { ...defaults, oauth: merged } }, options);
};

describe(`/oauth`, () => {
let admin: LoginResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
await utils.adminSetup();
admin = await utils.adminSetup();

await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
buttonText: 'Login with Immich',
storageLabelClaim: 'immich_username',
});
});

describe('POST /oauth/authorize', () => {
@@ -15,5 +88,171 @@ describe(`/oauth`, () => {
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['redirectUri must be a string', 'redirectUri should not be empty']));
});

it('should return a redirect uri', async () => {
const { status, body } = await request(app)
.post('/oauth/authorize')
.send({ redirectUri: 'http://127.0.0.1:2283/auth/login' });
expect(status).toBe(201);
expect(body).toEqual({ url: expect.stringContaining(`${authServer.internal}/auth?`) });

const params = new URL(body.url).searchParams;
expect(params.get('client_id')).toBe('client-default');
expect(params.get('response_type')).toBe('code');
expect(params.get('redirect_uri')).toBe('http://127.0.0.1:2283/auth/login');
expect(params.get('state')).toBeDefined();
});
});

describe('POST /oauth/callback', () => {
it(`should throw an error if a url is not provided`, async () => {
const { status, body } = await request(app).post('/oauth/callback').send({});
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['url must be a string', 'url should not be empty']));
});

it(`should throw an error if the url is empty`, async () => {
const { status, body } = await request(app).post('/oauth/callback').send({ url: '' });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['url should not be empty']));
});

it('should auto register the user by default', async () => {
const url = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
isAdmin: false,
name: 'OAuth User',
userEmail: 'oauth-auto-register@immich.app',
userId: expect.any(String),
});
});

it('should handle a user without an email', async () => {
const url = await loginWithOAuth(OAuthUser.NO_EMAIL);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('OAuth profile does not have an email address'));
});

it('should set the quota from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_QUOTA);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-with-quota@immich.app',
});

const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
expect(user.quotaSizeInBytes).toBe(25 * 2 ** 30); // 25 GiB;
});

it('should set the storage label from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_USERNAME);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-with-username@immich.app',
});

const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
expect(user.storageLabel).toBe('user-username');
});

it('should work with RS256 signed tokens', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.RS256_TOKENS,
clientSecret: OAuthClient.RS256_TOKENS,
autoRegister: true,
buttonText: 'Login with Immich',
signingAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-RS256-token');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
isAdmin: false,
name: 'OAuth User',
userEmail: 'oauth-RS256-token@immich.app',
userId: expect.any(String),
});
});

it('should work with RS256 signed user profiles', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.RS256_PROFILE,
clientSecret: OAuthClient.RS256_PROFILE,
buttonText: 'Login with Immich',
profileSigningAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-signed-profile');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
userId: expect.any(String),
userEmail: 'oauth-signed-profile@immich.app',
});
});

it('should throw an error for an invalid token algorithm', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
buttonText: 'Login with Immich',
signingAlgorithm: 'something-that-does-not-work',
});
const url = await loginWithOAuth('oauth-signed-bad');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(500);
expect(body).toMatchObject({
error: 'Internal Server Error',
message: 'Failed to finish oauth',
statusCode: 500,
});
});

describe('autoRegister: false', () => {
beforeAll(async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
autoRegister: false,
buttonText: 'Login with Immich',
});
});

it('should not auto register the user', async () => {
const url = await loginWithOAuth('oauth-no-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('User does not exist and auto registering is disabled.'));
});

it('should link to an existing user by email', async () => {
const { userId } = await utils.userSetup(admin.accessToken, {
name: 'OAuth User 3',
email: 'oauth-user3@immich.app',
password: 'password',
});
const url = await loginWithOAuth('oauth-user3');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
userId,
userEmail: 'oauth-user3@immich.app',
});
});
});
});
});

@@ -230,4 +230,21 @@ describe('/people', () => {
expect(body).toMatchObject({ birthDate: null });
});
});

describe('POST /people/:id/merge', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/people/${uuidDto.notFound}/merge`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should not support merging a person into themselves', async () => {
const { status, body } = await request(app)
.post(`/people/${visiblePerson.id}/merge`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ ids: [visiblePerson.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('Cannot merge a person into themselves'));
});
});
});

@@ -339,6 +339,13 @@ describe('/search', () => {
should: 'should search by model',
deferred: () => ({ dto: { model: 'Canon EOS 7D' }, assets: [assetDenali] }),
},
{
should: 'should allow searching the upload library (libraryId: null)',
deferred: () => ({
dto: { libraryId: null, size: 1 },
assets: [assetLast],
}),
},
];

for (const { should, deferred } of searchTests) {

@@ -15,6 +15,40 @@ describe('/server-info', () => {
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
});

describe('GET /server-info/about', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server-info/about');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should return about information', async () => {
const { status, body } = await request(app)
.get('/server-info/about')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
version: expect.any(String),
versionUrl: expect.any(String),
repository: 'immich-app/immich',
repositoryUrl: 'https://github.com/immich-app/immich',
build: '1234567890',
buildUrl: 'https://github.com/immich-app/immich/actions/runs/1234567890',
buildImage: 'e2e',
buildImageUrl: 'https://github.com/immich-app/immich/pkgs/container/immich-server',
sourceRef: 'e2e',
sourceCommit: 'e2eeeeeeeeeeeeeeeeee',
sourceUrl: 'https://github.com/immich-app/immich/commit/e2eeeeeeeeeeeeeeeeee',
nodejs: expect.any(String),
ffmpeg: expect.any(String),
imagemagick: expect.any(String),
libvips: expect.any(String),
exiftool: expect.any(String),
licensed: false,
});
});
});

describe('GET /server-info/storage', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server-info/storage');
307 e2e/src/api/specs/server.e2e-spec.ts Normal file
@@ -0,0 +1,307 @@
import { LoginResponseDto } from '@immich/sdk';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';

const serverLicense = {
licenseKey: 'IMSV-6ECZ-91TE-WZRM-Q7AQ-MBN4-UW48-2CPT-71X9',
activationKey:
'4kJUNUWMq13J14zqPFm1NodRcI6MV6DeOGvQNIgrM8Sc9nv669wyEVvFw1Nz4Kb1W7zLWblOtXEQzpRRqC4r4fKjewJxfbpeo9sEsqAVIfl4Ero-Vp1Dg21-sVdDGZEAy2oeTCXAyCT5d1JqrqR6N1qTAm4xOx9ujXQRFYhjRG8uwudw7_Q49pF18Tj5OEv9qCqElxztoNck4i6O_azsmsoOQrLIENIWPh3EynBN3ESpYERdCgXO8MlWeuG14_V1HbNjnJPZDuvYg__YfMzoOEtfm1sCqEaJ2Ww-BaX7yGfuCL4XsuZlCQQNHjfscy_WywVfIZPKCiW8QR74i0cSzQ',
};

describe('/server', () => {
let admin: LoginResponseDto;
let nonAdmin: LoginResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup({ onboarding: false });
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
});

describe('GET /server/about', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/about');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should return about information', async () => {
const { status, body } = await request(app)
.get('/server/about')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
version: expect.any(String),
versionUrl: expect.any(String),
repository: 'immich-app/immich',
repositoryUrl: 'https://github.com/immich-app/immich',
build: '1234567890',
buildUrl: 'https://github.com/immich-app/immich/actions/runs/1234567890',
buildImage: 'e2e',
buildImageUrl: 'https://github.com/immich-app/immich/pkgs/container/immich-server',
sourceRef: 'e2e',
sourceCommit: 'e2eeeeeeeeeeeeeeeeee',
sourceUrl: 'https://github.com/immich-app/immich/commit/e2eeeeeeeeeeeeeeeeee',
nodejs: expect.any(String),
ffmpeg: expect.any(String),
imagemagick: expect.any(String),
libvips: expect.any(String),
exiftool: expect.any(String),
licensed: false,
});
});
});

describe('GET /server/storage', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/storage');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should return the disk information', async () => {
const { status, body } = await request(app)
.get('/server/storage')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
diskAvailable: expect.any(String),
diskAvailableRaw: expect.any(Number),
diskSize: expect.any(String),
diskSizeRaw: expect.any(Number),
diskUsagePercentage: expect.any(Number),
diskUse: expect.any(String),
diskUseRaw: expect.any(Number),
});
});
});

describe('GET /server/ping', () => {
it('should respond with pong', async () => {
const { status, body } = await request(app).get('/server/ping');
expect(status).toBe(200);
expect(body).toEqual({ res: 'pong' });
});
});

describe('GET /server/version', () => {
it('should respond with the server version', async () => {
const { status, body } = await request(app).get('/server/version');
expect(status).toBe(200);
expect(body).toEqual({
major: expect.any(Number),
minor: expect.any(Number),
patch: expect.any(Number),
});
});
});

describe('GET /server/features', () => {
it('should respond with the server features', async () => {
const { status, body } = await request(app).get('/server/features');
expect(status).toBe(200);
expect(body).toEqual({
smartSearch: false,
configFile: false,
duplicateDetection: false,
facialRecognition: false,
map: true,
reverseGeocoding: true,
oauth: false,
oauthAutoLaunch: false,
passwordLogin: true,
search: true,
sidecar: true,
trash: true,
email: false,
});
});
});

describe('GET /server/config', () => {
it('should respond with the server configuration', async () => {
const { status, body } = await request(app).get('/server/config');
expect(status).toBe(200);
expect(body).toEqual({
loginPageMessage: '',
oauthButtonText: 'Login with OAuth',
trashDays: 30,
userDeleteDelay: 7,
isInitialized: true,
externalDomain: '',
isOnboarded: false,
});
});
});

describe('GET /server/statistics', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/statistics');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should only work for admins', async () => {
const { status, body } = await request(app)
.get('/server/statistics')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});

it('should return the server stats', async () => {
const { status, body } = await request(app)
.get('/server/statistics')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
photos: 0,
usage: 0,
usageByUser: [
{
quotaSizeInBytes: null,
photos: 0,
usage: 0,
userName: 'Immich Admin',
userId: admin.userId,
videos: 0,
},
{
quotaSizeInBytes: null,
photos: 0,
usage: 0,
userName: 'User 1',
userId: nonAdmin.userId,
videos: 0,
},
],
videos: 0,
});
});
});

describe('GET /server/media-types', () => {
it('should return accepted media types', async () => {
const { status, body } = await request(app).get('/server/media-types');
expect(status).toBe(200);
expect(body).toEqual({
sidecar: ['.xmp'],
image: expect.any(Array),
video: expect.any(Array),
});
});
});

describe('GET /server/theme', () => {
it('should respond with the server theme', async () => {
const { status, body } = await request(app).get('/server/theme');
expect(status).toBe(200);
expect(body).toEqual({
customCss: '',
});
});
});

describe('GET /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/server/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should only work for admins', async () => {
const { status, body } = await request(app)
.get('/server/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});

it('should return the server license', async () => {
await request(app).put('/server/license').set('Authorization', `Bearer ${admin.accessToken}`).send(serverLicense);
const { status, body } = await request(app)
.get('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
...serverLicense,
activatedAt: expect.any(String),
});
});
});

describe('DELETE /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete('/server/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should only work for admins', async () => {
const { status, body } = await request(app)
.delete('/server/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});

it('should delete the server license', async () => {
await request(app)
.delete('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(serverLicense);
const { status } = await request(app).get('/server/license').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
});
});

describe('PUT /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put('/server/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should only work for admins', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(403);
expect(body).toEqual(errorDto.forbidden);
});

it('should set the server license', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(serverLicense);
expect(status).toBe(200);
expect(body).toEqual({ ...serverLicense, activatedAt: expect.any(String) });
const { body: licenseBody } = await request(app)
.get('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(licenseBody).toEqual({ ...serverLicense, activatedAt: expect.any(String) });
});

it('should reject license not starting with IMSV-', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ licenseKey: 'IMCL-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD', activationKey: 'activationKey' });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});

it('should reject license with invalid activation key', async () => {
const { status, body } = await request(app)
.put('/server/license')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ licenseKey: serverLicense.licenseKey, activationKey: `invalid${serverLicense.activationKey}` });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});
});
});
@@ -5,6 +5,7 @@ import {
getUserAdmin,
getUserPreferencesAdmin,
login,
updateAssets,
} from '@immich/sdk';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
@@ -20,18 +21,16 @@ describe('/admin/users', () => {
let nonAdmin: LoginResponseDto;
let deletedUser: LoginResponseDto;
let userToDelete: LoginResponseDto;
let userToHardDelete: LoginResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
admin = await utils.adminSetup({ onboarding: false });

[websocket, nonAdmin, deletedUser, userToDelete, userToHardDelete] = await Promise.all([
[websocket, nonAdmin, deletedUser, userToDelete] = await Promise.all([
utils.connectWebsocket(admin.accessToken),
utils.userSetup(admin.accessToken, createUserDto.user1),
utils.userSetup(admin.accessToken, createUserDto.user2),
utils.userSetup(admin.accessToken, createUserDto.user3),
utils.userSetup(admin.accessToken, createUserDto.user4),
]);

await deleteUserAdmin(
@@ -64,13 +63,12 @@ describe('/admin/users', () => {
.get(`/admin/users`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body).toHaveLength(4);
expect(body).toHaveLength(3);
expect(body).toEqual(
expect.arrayContaining([
expect.objectContaining({ email: admin.userEmail }),
expect.objectContaining({ email: nonAdmin.userEmail }),
expect.objectContaining({ email: userToDelete.userEmail }),
expect.objectContaining({ email: userToHardDelete.userEmail }),
]),
);
});
@@ -81,13 +79,12 @@ describe('/admin/users', () => {
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toHaveLength(5);
expect(body).toHaveLength(4);
expect(body).toEqual(
expect.arrayContaining([
expect.objectContaining({ email: admin.userEmail }),
expect.objectContaining({ email: nonAdmin.userEmail }),
expect.objectContaining({ email: userToDelete.userEmail }),
expect.objectContaining({ email: userToHardDelete.userEmail }),
expect.objectContaining({ email: deletedUser.userEmail }),
]),
);
@@ -250,18 +247,23 @@ describe('/admin/users', () => {
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toEqual({
avatar: { color: 'orange' },
memories: { enabled: false },
emailNotifications: { enabled: true, albumInvite: true, albumUpdate: true },
});
expect(body).toMatchObject({ avatar: { color: 'orange' } });

const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toEqual({
avatar: { color: 'orange' },
memories: { enabled: false },
emailNotifications: { enabled: true, albumInvite: true, albumUpdate: true },
});
expect(after).toMatchObject({ avatar: { color: 'orange' } });
});

it('should update download archive size', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)
.send({ download: { archiveSize: 1_234_567 } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ download: { archiveSize: 1_234_567 } });

const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ download: { archiveSize: 1_234_567 } });
});
});

@@ -294,19 +296,49 @@ describe('/admin/users', () => {
});

it('should hard delete a user', async () => {
const user = await utils.userSetup(admin.accessToken, createUserDto.create('hard-delete-1'));

const { status, body } = await request(app)
.delete(`/admin/users/${userToHardDelete.userId}`)
.delete(`/admin/users/${user.userId}`)
.send({ force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({
id: userToHardDelete.userId,
id: user.userId,
updatedAt: expect.any(String),
deletedAt: expect.any(String),
});

await utils.waitForWebsocketEvent({ event: 'userDelete', id: userToHardDelete.userId, timeout: 5000 });
await utils.waitForWebsocketEvent({ event: 'userDelete', id: user.userId, timeout: 5000 });
});

it('should hard delete a user with stacked assets', async () => {
const user = await utils.userSetup(admin.accessToken, createUserDto.create('hard-delete-1'));

const [asset1, asset2] = await Promise.all([
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
]);

await updateAssets(
{ assetBulkUpdateDto: { stackParentId: asset1.id, ids: [asset2.id] } },
{ headers: asBearerAuth(user.accessToken) },
);

const { status, body } = await request(app)
.delete(`/admin/users/${user.userId}`)
.send({ force: true })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({
id: user.userId,
updatedAt: expect.any(String),
deletedAt: expect.any(String),
});

await utils.waitForWebsocketEvent({ event: 'userDelete', id: user.userId, timeout: 5000 });
});
});

@@ -5,6 +5,12 @@ import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';

const userLicense = {
licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
activationKey:
'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
};

describe('/users', () => {
let admin: LoginResponseDto;
let deletedUser: LoginResponseDto;
@@ -72,6 +78,24 @@ describe('/users', () => {
quotaUsageInBytes: 0,
});
});

it('should get my user with license info', async () => {
const { status: licenseStatus } = await request(app)
.put(`/users/me/license`)
.send(userLicense)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(licenseStatus).toBe(200);
const { status, body } = await request(app)
.get(`/users/me`)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({
id: nonAdmin.userId,
email: nonAdmin.userEmail,
quotaUsageInBytes: 0,
license: userLicense,
});
});
});

describe('PUT /users/me', () => {
@@ -173,6 +197,45 @@ describe('/users', () => {
const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ memories: { enabled: false } });
});

it('should update avatar color', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ avatar: { color: 'blue' } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ avatar: { color: 'blue' } });

const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatar: { color: 'blue' } });
});

it('should require an integer for download archive size', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ download: { archiveSize: 1_234_567.89 } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['download.archiveSize must be an integer number']));
});

it('should update download archive size', async () => {
const before = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(before).toMatchObject({ download: { archiveSize: 4 * 2 ** 30 } });

const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ download: { archiveSize: 1_234_567 } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ download: { archiveSize: 1_234_567 } });

const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ download: { archiveSize: 1_234_567 } });
});
});

describe('GET /users/:id', () => {
@@ -197,4 +260,81 @@ describe('/users', () => {
});
});
});

describe('GET /users/me/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/users/me/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should return the user license', async () => {
await request(app)
.put('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
.send(userLicense);
const { status, body } = await request(app)
.get('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toEqual({
...userLicense,
activatedAt: expect.any(String),
});
});
});

describe('PUT /users/me/license', () => {
it('should require authentication', async () => {
const { status } = await request(app).put(`/users/me/license`);
expect(status).toEqual(401);
});

it('should set the user license', async () => {
const { status, body } = await request(app)
.put(`/users/me/license`)
.send(userLicense)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
expect(body).toMatchObject({ ...userLicense, activatedAt: expect.any(String) });
expect(status).toBe(200);
expect(body).toEqual({ ...userLicense, activatedAt: expect.any(String) });
const { body: licenseBody } = await request(app)
.get('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(licenseBody).toEqual({ ...userLicense, activatedAt: expect.any(String) });
});

it('should reject license not starting with IMCL-', async () => {
const { status, body } = await request(app)
.put('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
.send({ licenseKey: 'IMSV-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD-ABCD', activationKey: 'activationKey' });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});

it('should reject license with invalid activation key', async () => {
const { status, body } = await request(app)
.put('/users/me/license')
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
.send({ licenseKey: userLicense.licenseKey, activationKey: `invalid${userLicense.activationKey}` });
expect(status).toBe(400);
expect(body.message).toBe('Invalid license key');
});
});

describe('DELETE /users/me/license', () => {
it('should require authentication', async () => {
const { status } = await request(app).delete(`/users/me/license`);
expect(status).toEqual(401);
});

it('should delete the user license', async () => {
const { status } = await request(app)
.delete(`/users/me/license`)
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
expect(status).toBe(200);
});
});
});

@@ -9,11 +9,30 @@ describe(`immich-admin`, () => {

describe('list-users', () => {
it('should list the admin user', async () => {
const { stdout, stderr, exitCode } = await immichAdmin(['list-users']);
const { stdout, stderr, exitCode } = await immichAdmin(['list-users']).promise;
expect(exitCode).toBe(0);
expect(stderr).toBe('');
expect(stdout).toContain("email: 'admin@immich.cloud'");
expect(stdout).toContain("name: 'Immich Admin'");
});
});

describe('reset-admin-password', () => {
it('should reset admin password', async () => {
const { child, promise } = immichAdmin(['reset-admin-password']);

let data = '';
child.stdout.on('data', (chunk) => {
data += chunk;
if (data.includes('Please choose a new password (optional)')) {
child.stdin.end('\n');
}
});

const { stderr, stdout, exitCode } = await promise;
expect(exitCode).toBe(0);
expect(stderr).toBe('');
expect(stdout).toContain('The admin password has been updated to:');
});
});
});

@@ -61,6 +61,12 @@ export const errorDto = {
message: 'The server already has an admin',
correlationId: expect.any(String),
},
invalidEmail: {
error: 'Bad Request',
statusCode: 400,
message: ['email must be an email'],
correlationId: expect.any(String),
},
};

export const signupResponseDto = {
@@ -81,6 +87,7 @@ export const signupResponseDto = {
quotaUsageInBytes: 0,
quotaSizeInBytes: null,
status: 'active',
license: null,
},
};

117 e2e/src/setup/auth-server.ts Normal file
@@ -0,0 +1,117 @@
import { exportJWK, generateKeyPair } from 'jose';
import Provider from 'oidc-provider';

export enum OAuthClient {
DEFAULT = 'client-default',
RS256_TOKENS = 'client-RS256-tokens',
RS256_PROFILE = 'client-RS256-profile',
}

export enum OAuthUser {
NO_EMAIL = 'no-email',
NO_NAME = 'no-name',
WITH_QUOTA = 'with-quota',
WITH_USERNAME = 'with-username',
}

const claims = [
{ sub: OAuthUser.NO_EMAIL },
{
sub: OAuthUser.NO_NAME,
email: 'oauth-no-name@immich.app',
email_verified: true,
},
{
sub: OAuthUser.WITH_USERNAME,
email: 'oauth-with-username@immich.app',
email_verified: true,
immich_username: 'user-username',
},
{
sub: OAuthUser.WITH_QUOTA,
email: 'oauth-with-quota@immich.app',
email_verified: true,
preferred_username: 'user-quota',
immich_quota: 25,
},
];

const withDefaultClaims = (sub: string) => ({
sub,
email: `${sub}@immich.app`,
name: 'OAuth User',
given_name: `OAuth`,
family_name: 'User',
email_verified: true,
});

const getClaims = (sub: string) => claims.find((user) => user.sub === sub) || withDefaultClaims(sub);

const setup = async () => {
const { privateKey, publicKey } = await generateKeyPair('RS256');

const port = 3000;
const host = '0.0.0.0';
const oidc = new Provider(`http://${host}:${port}`, {
renderError: async (ctx, out, error) => {
console.error(out);
console.error(error);
ctx.body = 'Internal Server Error';
},
findAccount: (ctx, sub) => ({ accountId: sub, claims: () => getClaims(sub) }),
scopes: ['openid', 'email', 'profile'],
claims: {
openid: ['sub'],
email: ['email', 'email_verified'],
profile: ['name', 'given_name', 'family_name', 'preferred_username', 'immich_quota', 'immich_username'],
},
features: {
jwtUserinfo: {
enabled: true,
},
},
cookies: {
names: {
session: 'oidc.session',
interaction: 'oidc.interaction',
resume: 'oidc.resume',
state: 'oidc.state',
},
},
pkce: {
required: () => false,
},
jwks: { keys: [await exportJWK(privateKey)] },
clients: [
{
client_id: OAuthClient.DEFAULT,
client_secret: OAuthClient.DEFAULT,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
response_types: ['code'],
},
{
client_id: OAuthClient.RS256_TOKENS,
client_secret: OAuthClient.RS256_TOKENS,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
id_token_signed_response_alg: 'RS256',
jwks: { keys: [await exportJWK(publicKey)] },
},
{
client_id: OAuthClient.RS256_PROFILE,
client_secret: OAuthClient.RS256_PROFILE,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
userinfo_signed_response_alg: 'RS256',
jwks: { keys: [await exportJWK(publicKey)] },
},
],
});

const onStart = () => console.log(`[auth-server] http://${host}:${port}/.well-known/openid-configuration`);
const app = oidc.listen(port, host, onStart);
return () => app.close();
};

export default setup;
|
||||
@@ -47,14 +47,13 @@ import { makeRandomImage } from 'src/generators';
import request from 'supertest';

type CommandResponse = { stdout: string; stderr: string; exitCode: number | null };
type EventType = 'assetUpload' | 'assetUpdate' | 'assetDelete' | 'userDelete';
type EventType = 'assetUpload' | 'assetUpdate' | 'assetDelete' | 'userDelete' | 'assetHidden';
type WaitOptions = { event: EventType; id?: string; total?: number; timeout?: number };
type AdminSetupOptions = { onboarding?: boolean };
type AssetData = { bytes?: Buffer; filename: string };

const dbUrl = 'postgres://postgres:postgres@127.0.0.1:5433/immich';
const baseUrl = 'http://127.0.0.1:2283';

export const baseUrl = 'http://127.0.0.1:2283';
export const shareUrl = `${baseUrl}/share`;
export const app = `${baseUrl}/api`;
// TODO move test assets into e2e/assets
@@ -64,13 +63,13 @@ export const tempDir = tmpdir();
export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
export const immichCli = (args: string[]) =>
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]);
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]).promise;
export const immichAdmin = (args: string[]) =>
executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);

const executeCommand = (command: string, args: string[]) => {
let _resolve: (value: CommandResponse) => void;
const deferred = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
const promise = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
const child = spawn(command, args, { stdio: 'pipe' });

let stdout = '';
@@ -86,12 +85,13 @@ const executeCommand = (command: string, args: string[]) => {
});
});

return deferred;
return { promise, child };
};

let client: pg.Client | null = null;

const events: Record<EventType, Set<string>> = {
assetHidden: new Set<string>(),
assetUpload: new Set<string>(),
assetUpdate: new Set<string>(),
assetDelete: new Set<string>(),
@@ -151,10 +151,6 @@ export const utils = {

const sql: string[] = [];

if (tables.includes('asset_stack')) {
sql.push('UPDATE "assets" SET "stackId" = NULL;');
}

for (const table of tables) {
if (table === 'system_metadata') {
// prevent reverse geocoder from being re-initialized
@@ -203,6 +199,7 @@ export const utils = {
.on('connect', () => resolve(websocket))
.on('on_upload_success', (data: AssetResponseDto) => onEvent({ event: 'assetUpload', id: data.id }))
.on('on_asset_update', (data: AssetResponseDto) => onEvent({ event: 'assetUpdate', id: data.id }))
.on('on_asset_hidden', (assetId: string) => onEvent({ event: 'assetHidden', id: assetId }))
.on('on_asset_delete', (assetId: string) => onEvent({ event: 'assetDelete', id: assetId }))
.on('on_user_delete', (userId: string) => onEvent({ event: 'userDelete', id: userId }))
.connect();
@@ -398,14 +395,7 @@ export const utils = {
return;
}

const vector = Array.from({ length: 512 }, Math.random);
const embedding = `[${vector.join(',')}]`;

await client.query('INSERT INTO asset_faces ("assetId", "personId", "embedding") VALUES ($1, $2, $3)', [
assetId,
personId,
embedding,
]);
await client.query('INSERT INTO asset_faces ("assetId", "personId") VALUES ($1, $2)', [assetId, personId]);
},

setPersonThumbnail: async (personId: string) => {
@@ -51,6 +51,13 @@ test.describe('Shared Links', () => {
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
});

test('download all from shared link', async ({ page }) => {
await page.goto(`/share/${sharedLink.key}`);
await page.getByRole('heading', { name: 'Test Album' }).waitFor();
await page.getByRole('button', { name: 'Download' }).click();
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
});

test('enter password for a shared link', async ({ page }) => {
await page.goto(`/share/${sharedLinkPassword.key}`);
await page.getByPlaceholder('Password').fill('test-password');
@@ -1,11 +1,11 @@
import { defineConfig } from 'vitest/config';

// skip `docker compose up` if `make e2e` was already run
const globalSetup: string[] = [];
const globalSetup: string[] = ['src/setup/auth-server.ts'];
try {
await fetch('http://127.0.0.1:2283/api/server-info/ping');
} catch {
globalSetup.push('src/setup.ts');
globalSetup.push('src/setup/docker-compose.ts');
}

export default defineConfig({
@@ -1,6 +1,6 @@
ARG DEVICE=cpu

FROM python:3.11-bookworm@sha256:96de1ea4821d73fd2c1853d1fdc3cf794ccfe2fae4c3f08579e846de51760a61 as builder-cpu
FROM python:3.11-bookworm@sha256:7bec1574675e7fd9e3a540a03cd7d6811c59ca261bd300cd665369d8f435298a as builder-cpu

FROM openvino/ubuntu22_runtime:2023.3.0@sha256:176646df619032ea6c10faf842867119c393e7497b7f88b5e307e932a0fd5aa8 as builder-openvino
USER root
@@ -36,7 +36,7 @@ RUN python3 -m venv /opt/venv
COPY poetry.lock pyproject.toml ./
RUN poetry install --sync --no-interaction --no-ansi --no-root --with ${DEVICE} --without dev

FROM python:3.11-slim-bookworm@sha256:fc39d2e68b554c3f0a5cb8a776280c0b3d73b4c04b83dbade835e2a171ca27ef as prod-cpu
FROM python:3.11-slim-bookworm@sha256:17ec9dc2367aa748559d0212f34665ec4df801129de32db705ea34654b5bc77a as prod-cpu

FROM openvino/ubuntu22_runtime:2023.3.0@sha256:176646df619032ea6c10faf842867119c393e7497b7f88b5e307e932a0fd5aa8 as prod-openvino
USER root
@@ -52,8 +52,6 @@ class Ann(metaclass=_Singleton):
def __init__(self, log_level: int = 3, tuning_level: int = 1, tuning_file: str | None = None) -> None:
if not is_available:
raise RuntimeError("libann is not available!")
if tuning_file and not exists(tuning_file):
raise ValueError("tuning_file must point to an existing (possibly empty) file!")
if tuning_level == 0 and tuning_file is None:
raise ValueError("tuning_level == 0 reads existing tuning information and requires a tuning_file")
if tuning_level < 0 or tuning_level > 3:
@@ -67,6 +65,12 @@ class Ann(metaclass=_Singleton):
self.input_shapes: dict[int, tuple[tuple[int], ...]] = {}
self.ann: int | None = None
self.new()

if self.tuning_file is not None:
# make sure tuning file exists (without clearing contents)
# once filled, the tuning file reduces the cost/time of the first
# inference after model load by 10s of seconds
open(self.tuning_file, "a").close()

def new(self) -> None:
if self.ann is None:
@@ -95,17 +99,19 @@ class Ann(metaclass=_Singleton):
model_path: str,
fast_math: bool = True,
fp16: bool = False,
save_cached_network: bool = False,
cached_network_path: str | None = None,
) -> int:
if not model_path.endswith((".armnn", ".tflite", ".onnx")):
raise ValueError("model_path must be a file with extension .armnn, .tflite or .onnx")
if not exists(model_path):
raise ValueError("model_path must point to an existing file!")

save_cached_network = False
if cached_network_path is not None and not exists(cached_network_path):
raise ValueError("cached_network_path must point to an existing (possibly empty) file!")
if save_cached_network and cached_network_path is None:
raise ValueError("save_cached_network is True, cached_network_path must be specified!")
save_cached_network = True
# create empty model cache file
open(cached_network_path, "a").close()

net_id: int = libann.load(
self.ann,
model_path.encode(),
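Note: in the Ann changes above, both the GPU tuning file and the cached-network file are opened in append mode purely to create them if missing, never truncating existing contents. A minimal Python sketch of that idiom (the paths are illustrative, not from the diff):

from pathlib import Path

def ensure_file(path: str) -> None:
    # "a" creates the file when absent but preserves any existing bytes,
    # so a previously populated tuning/cache file survives restarts
    open(path, "a").close()

ensure_file("/cache/gpu-tuning.ann")
Path("/cache/model.anncache").touch(exist_ok=True)  # pathlib equivalent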
@@ -1,3 +0,0 @@
#!/usr/bin/env sh

g++ -shared -O3 -o libann.so -fuse-ld=gold -std=c++17 -I"$ARMNN_PATH"/include -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -L"$ARMNN_PATH" ann.cpp
@@ -1,4 +0,0 @@
#!/usr/bin/env sh

cd armnn-23.11/ || exit
g++ -o ../armnnconverter -O1 -DARMNN_ONNX_PARSER -DARMNN_SERIALIZER -DARMNN_TF_LITE_PARSER -fuse-ld=gold -std=c++17 -Iinclude -Isrc/armnnUtils -Ithird-party -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -larmnnSerializer -L../armnn src/armnnConverter/ArmnnConverter.cpp
@@ -1,201 +0,0 @@
name: annexport
channels:
- pytorch
- nvidia
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_kmp_llvm
- aiohttp=3.9.1=py310h2372a71_0
- aiosignal=1.3.1=pyhd8ed1ab_0
- arpack=3.8.0=nompi_h0baa96a_101
- async-timeout=4.0.3=pyhd8ed1ab_0
- attrs=23.1.0=pyh71513ae_1
- aws-c-auth=0.7.3=h28f7589_1
- aws-c-cal=0.6.1=hc309b26_1
- aws-c-common=0.9.0=hd590300_0
- aws-c-compression=0.2.17=h4d4d85c_2
- aws-c-event-stream=0.3.1=h2e3709c_4
- aws-c-http=0.7.11=h00aa349_4
- aws-c-io=0.13.32=he9a53bd_1
- aws-c-mqtt=0.9.3=hb447be9_1
- aws-c-s3=0.3.14=hf3aad02_1
- aws-c-sdkutils=0.1.12=h4d4d85c_1
- aws-checksums=0.1.17=h4d4d85c_1
- aws-crt-cpp=0.21.0=hb942446_5
- aws-sdk-cpp=1.10.57=h85b1a90_19
- blas=2.120=openblas
- blas-devel=3.9.0=20_linux64_openblas
- brotli-python=1.0.9=py310hd8f1fbe_9
- bzip2=1.0.8=hd590300_5
- c-ares=1.23.0=hd590300_0
- ca-certificates=2023.11.17=hbcca054_0
- certifi=2023.11.17=pyhd8ed1ab_0
- charset-normalizer=3.3.2=pyhd8ed1ab_0
- click=8.1.7=unix_pyh707e725_0
- colorama=0.4.6=pyhd8ed1ab_0
- coloredlogs=15.0.1=pyhd8ed1ab_3
- cuda-cudart=11.7.99=0
- cuda-cupti=11.7.101=0
- cuda-libraries=11.7.1=0
- cuda-nvrtc=11.7.99=0
- cuda-nvtx=11.7.91=0
- cuda-runtime=11.7.1=0
- dataclasses=0.8=pyhc8e2a94_3
- datasets=2.14.7=pyhd8ed1ab_0
- dill=0.3.7=pyhd8ed1ab_0
- filelock=3.13.1=pyhd8ed1ab_0
- flatbuffers=23.5.26=h59595ed_1
- freetype=2.12.1=h267a509_2
- frozenlist=1.4.0=py310h2372a71_1
- fsspec=2023.10.0=pyhca7485f_0
- ftfy=6.1.3=pyhd8ed1ab_0
- gflags=2.2.2=he1b5a44_1004
- glog=0.6.0=h6f12383_0
- glpk=5.0=h445213a_0
- gmp=6.3.0=h59595ed_0
- gmpy2=2.1.2=py310h3ec546c_1
- huggingface_hub=0.17.3=pyhd8ed1ab_0
- humanfriendly=10.0=pyhd8ed1ab_6
- icu=73.2=h59595ed_0
- idna=3.6=pyhd8ed1ab_0
- importlib-metadata=7.0.0=pyha770c72_0
- importlib_metadata=7.0.0=hd8ed1ab_0
- joblib=1.3.2=pyhd8ed1ab_0
- keyutils=1.6.1=h166bdaf_0
- krb5=1.21.2=h659d440_0
- lcms2=2.15=h7f713cb_2
- ld_impl_linux-64=2.40=h41732ed_0
- lerc=4.0.0=h27087fc_0
- libabseil=20230125.3=cxx17_h59595ed_0
- libarrow=12.0.1=hb87d912_8_cpu
- libblas=3.9.0=20_linux64_openblas
- libbrotlicommon=1.0.9=h166bdaf_9
- libbrotlidec=1.0.9=h166bdaf_9
- libbrotlienc=1.0.9=h166bdaf_9
- libcblas=3.9.0=20_linux64_openblas
- libcrc32c=1.1.2=h9c3ff4c_0
- libcublas=11.10.3.66=0
- libcufft=10.7.2.124=h4fbf590_0
- libcufile=1.8.1.2=0
- libcurand=10.3.4.101=0
- libcurl=8.5.0=hca28451_0
- libcusolver=11.4.0.1=0
- libcusparse=11.7.4.91=0
- libdeflate=1.19=hd590300_0
- libedit=3.1.20191231=he28a2e2_2
- libev=4.33=hd590300_2
- libevent=2.1.12=hf998b51_1
- libffi=3.4.2=h7f98852_5
- libgcc-ng=13.2.0=h807b86a_3
- libgfortran-ng=13.2.0=h69a702a_3
- libgfortran5=13.2.0=ha4646dd_3
- libgoogle-cloud=2.12.0=hac9eb74_1
- libgrpc=1.54.3=hb20ce57_0
- libhwloc=2.9.3=default_h554bfaf_1009
- libiconv=1.17=hd590300_1
- libjpeg-turbo=2.1.5.1=hd590300_1
- liblapack=3.9.0=20_linux64_openblas
- liblapacke=3.9.0=20_linux64_openblas
- libnghttp2=1.58.0=h47da74e_1
- libnpp=11.7.4.75=0
- libnsl=2.0.1=hd590300_0
- libnuma=2.0.16=h0b41bf4_1
- libnvjpeg=11.8.0.2=0
- libopenblas=0.3.25=pthreads_h413a1c8_0
- libpng=1.6.39=h753d276_0
- libprotobuf=3.21.12=hfc55251_2
- libsentencepiece=0.1.99=h180e1df_0
- libsqlite=3.44.2=h2797004_0
- libssh2=1.11.0=h0841786_0
- libstdcxx-ng=13.2.0=h7e041cc_3
- libthrift=0.18.1=h8fd135c_2
- libtiff=4.6.0=h29866fb_1
- libutf8proc=2.8.0=h166bdaf_0
- libuuid=2.38.1=h0b41bf4_0
- libwebp-base=1.3.2=hd590300_0
- libxcb=1.15=h0b41bf4_0
- libxml2=2.11.6=h232c23b_0
- libzlib=1.2.13=hd590300_5
- llvm-openmp=17.0.6=h4dfa4b3_0
- lz4-c=1.9.4=hcb278e6_0
- mkl=2022.2.1=h84fe81f_16997
- mkl-devel=2022.2.1=ha770c72_16998
- mkl-include=2022.2.1=h84fe81f_16997
- mpc=1.3.1=hfe3b2da_0
- mpfr=4.2.1=h9458935_0
- mpmath=1.3.0=pyhd8ed1ab_0
- multidict=6.0.4=py310h2372a71_1
- multiprocess=0.70.15=py310h2372a71_1
- ncurses=6.4=h59595ed_2
- numpy=1.26.2=py310hb13e2d6_0
- onnx=1.14.0=py310ha3deec4_1
- onnx2torch=1.5.13=pyhd8ed1ab_0
- onnxruntime=1.16.3=py310hd4b7fbc_1_cpu
- open-clip-torch=2.23.0=pyhd8ed1ab_1
- openblas=0.3.25=pthreads_h7a3da1a_0
- openjpeg=2.5.0=h488ebb8_3
- openssl=3.2.0=hd590300_1
- orc=1.9.0=h2f23424_1
- packaging=23.2=pyhd8ed1ab_0
- pandas=2.1.4=py310hcc13569_0
- pillow=10.0.1=py310h29da1c1_1
- pip=23.3.1=pyhd8ed1ab_0
- protobuf=4.21.12=py310heca2aa9_0
- pthread-stubs=0.4=h36c2ea0_1001
- pyarrow=12.0.1=py310h0576679_8_cpu
- pyarrow-hotfix=0.6=pyhd8ed1ab_0
- pysocks=1.7.1=pyha2e5f31_6
- python=3.10.13=hd12c33a_0_cpython
- python-dateutil=2.8.2=pyhd8ed1ab_0
- python-flatbuffers=23.5.26=pyhd8ed1ab_0
- python-tzdata=2023.3=pyhd8ed1ab_0
- python-xxhash=3.4.1=py310h2372a71_0
- python_abi=3.10=4_cp310
- pytorch=1.13.1=cpu_py310hd11e9c7_1
- pytorch-cuda=11.7=h778d358_5
- pytorch-mutex=1.0=cuda
- pytz=2023.3.post1=pyhd8ed1ab_0
- pyyaml=6.0.1=py310h2372a71_1
- rdma-core=28.9=h59595ed_1
- re2=2023.03.02=h8c504da_0
- readline=8.2=h8228510_1
- regex=2023.10.3=py310h2372a71_0
- requests=2.31.0=pyhd8ed1ab_0
- s2n=1.3.49=h06160fa_0
- sacremoses=0.0.53=pyhd8ed1ab_0
- safetensors=0.3.3=py310hcb5633a_1
- sentencepiece=0.1.99=hff52083_0
- sentencepiece-python=0.1.99=py310hebdb9f0_0
- sentencepiece-spm=0.1.99=h180e1df_0
- setuptools=68.2.2=pyhd8ed1ab_0
- six=1.16.0=pyh6c4a22f_0
- sleef=3.5.1=h9b69904_2
- snappy=1.1.10=h9fff704_0
- sympy=1.12=pypyh9d50eac_103
- tbb=2021.11.0=h00ab1b0_0
- texttable=1.7.0=pyhd8ed1ab_0
- timm=0.9.12=pyhd8ed1ab_0
- tk=8.6.13=noxft_h4845f30_101
- tokenizers=0.14.1=py310h320607d_2
- torchvision=0.14.1=cpu_py310hd3d2ac3_1
- tqdm=4.66.1=pyhd8ed1ab_0
- transformers=4.35.2=pyhd8ed1ab_0
- typing-extensions=4.9.0=hd8ed1ab_0
- typing_extensions=4.9.0=pyha770c72_0
- tzdata=2023c=h71feb2d_0
- ucx=1.14.1=h64cca9d_5
- urllib3=2.1.0=pyhd8ed1ab_0
- wcwidth=0.2.12=pyhd8ed1ab_0
- wheel=0.42.0=pyhd8ed1ab_0
- xorg-libxau=1.0.11=hd590300_0
- xorg-libxdmcp=1.1.3=h7f98852_0
- xxhash=0.8.2=hd590300_0
- xz=5.2.6=h166bdaf_0
- yaml=0.2.5=h7f98852_2
- yarl=1.9.3=py310h2372a71_0
- zipp=3.17.0=pyhd8ed1ab_0
- zlib=1.2.13=hd590300_5
- zstd=1.5.5=hfc55251_0
- pip:
- git+https://github.com/fyfrey/TinyNeuralNetwork.git
@@ -1,157 +0,0 @@
import logging
import os
import platform
import subprocess
from abc import abstractmethod

import onnx
import open_clip
import torch
from onnx2torch import convert
from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed
from tinynn.converter import TFLiteConverter


class ExportBase(torch.nn.Module):
input_shape: tuple[int, ...]

def __init__(self, device: torch.device, name: str):
super().__init__()
self.device = device
self.name = name
self.optimize = 5
self.nchw_transpose = False

@abstractmethod
def forward(self, input_tensor: torch.Tensor) -> torch.Tensor | tuple[torch.Tensor]:
pass

def dummy_input(self) -> torch.FloatTensor:
return torch.rand((1, 3, 224, 224), device=self.device)


class ArcFace(ExportBase):
input_shape = (1, 3, 112, 112)

def __init__(self, onnx_model_path: str, device: torch.device):
name, _ = os.path.splitext(os.path.basename(onnx_model_path))
super().__init__(device, name)
onnx_model = onnx.load_model(onnx_model_path)
make_input_shape_fixed(onnx_model.graph, onnx_model.graph.input[0].name, self.input_shape)
fix_output_shapes(onnx_model)
self.model = convert(onnx_model).to(device)
if self.device.type == "cuda":
self.model = self.model.half()

def forward(self, input_tensor: torch.Tensor) -> torch.FloatTensor:
embedding: torch.FloatTensor = self.model(
input_tensor.half() if self.device.type == "cuda" else input_tensor
).float()
assert isinstance(embedding, torch.FloatTensor)
return embedding

def dummy_input(self) -> torch.FloatTensor:
return torch.rand(self.input_shape, device=self.device)


class RetinaFace(ExportBase):
input_shape = (1, 3, 640, 640)

def __init__(self, onnx_model_path: str, device: torch.device):
name, _ = os.path.splitext(os.path.basename(onnx_model_path))
super().__init__(device, name)
self.optimize = 3
self.model = convert(onnx_model_path).eval().to(device)
if self.device.type == "cuda":
self.model = self.model.half()

def forward(self, input_tensor: torch.Tensor) -> tuple[torch.FloatTensor]:
out: torch.Tensor = self.model(input_tensor.half() if self.device.type == "cuda" else input_tensor)
return tuple(o.float() for o in out)

def dummy_input(self) -> torch.FloatTensor:
return torch.rand(self.input_shape, device=self.device)


class ClipVision(ExportBase):
input_shape = (1, 3, 224, 224)

def __init__(self, model_name: str, weights: str, device: torch.device):
super().__init__(device, model_name + "__" + weights)
self.model = open_clip.create_model(
model_name,
weights,
precision="fp16" if device.type == "cuda" else "fp32",
jit=False,
require_pretrained=True,
device=device,
)

def forward(self, input_tensor: torch.Tensor) -> torch.FloatTensor:
embedding: torch.Tensor = self.model.encode_image(
input_tensor.half() if self.device.type == "cuda" else input_tensor,
normalize=True,
).float()
return embedding


def export(model: ExportBase) -> None:
model.eval()
for param in model.parameters():
param.requires_grad = False
dummy_input = model.dummy_input()
model(dummy_input)
jit = torch.jit.trace(model, dummy_input)  # type: ignore[no-untyped-call,attr-defined]
tflite_model_path = f"output/{model.name}.tflite"
os.makedirs("output", exist_ok=True)

converter = TFLiteConverter(
jit,
dummy_input,
tflite_model_path,
optimize=model.optimize,
nchw_transpose=model.nchw_transpose,
)
# segfaults on ARM, must run on x86_64 / AMD64
converter.convert()

armnn_model_path = f"output/{model.name}.armnn"
os.environ["LD_LIBRARY_PATH"] = "armnn"
subprocess.run(
[
"./armnnconverter",
"-f",
"tflite-binary",
"-m",
tflite_model_path,
"-i",
"input_tensor",
"-o",
"output_tensor",
"-p",
armnn_model_path,
]
)


def main() -> None:
if platform.machine() not in ("x86_64", "AMD64"):
raise RuntimeError(f"Can only run on x86_64 / AMD64, not {platform.machine()}")

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
if device.type != "cuda":
logging.warning(
"No CUDA available, cannot create fp16 model! proceeding to create a fp32 model (use only for testing)"
)
models = [
ClipVision("ViT-B-32", "openai", device),
ArcFace("buffalo_l_rec.onnx", device),
RetinaFace("buffalo_l_det.onnx", device),
]
for model in models:
export(model)


if __name__ == "__main__":
with torch.no_grad():
main()
@@ -8,6 +8,8 @@ from fastapi.testclient import TestClient
from numpy.typing import NDArray
from PIL import Image

from app.config import log

from .main import app


@@ -96,12 +98,83 @@ def clip_tokenizer_cfg() -> dict[str, Any]:


@pytest.fixture(scope="function")
def providers(request: pytest.FixtureRequest) -> Iterator[dict[str, Any]]:
def providers(request: pytest.FixtureRequest) -> Iterator[mock.Mock]:
marker = request.node.get_closest_marker("providers")
if marker is None:
raise ValueError("Missing marker 'providers'")

providers = marker.args[0]
with mock.patch("app.models.base.ort.get_available_providers") as mocked:
with mock.patch("app.sessions.ort.ort.get_available_providers") as mocked:
mocked.return_value = providers
yield providers


@pytest.fixture(scope="function")
def ort_pybind() -> Iterator[mock.Mock]:
with mock.patch("app.sessions.ort.ort.capi._pybind_state") as mocked:
yield mocked


@pytest.fixture(scope="function")
def ov_device_ids(request: pytest.FixtureRequest, ort_pybind: mock.Mock) -> Iterator[mock.Mock]:
marker = request.node.get_closest_marker("ov_device_ids")
if marker is None:
raise ValueError("Missing marker 'ov_device_ids'")
ort_pybind.get_available_openvino_device_ids.return_value = marker.args[0]
return ort_pybind


@pytest.fixture(scope="function")
def ort_session() -> Iterator[mock.Mock]:
with mock.patch("app.sessions.ort.ort.InferenceSession") as mocked:
yield mocked


@pytest.fixture(scope="function")
def ann_session() -> Iterator[mock.Mock]:
with mock.patch("app.sessions.ann.Ann") as mocked:
yield mocked


@pytest.fixture(scope="function")
def rmtree() -> Iterator[mock.Mock]:
with mock.patch("app.models.base.rmtree", autospec=True) as mocked:
mocked.avoids_symlink_attacks = True
yield mocked


@pytest.fixture(scope="function")
def path() -> Iterator[mock.Mock]:
path = mock.MagicMock()
path.exists.return_value = True
path.is_dir.return_value = True
path.is_file.return_value = True
path.with_suffix.return_value = path
path.return_value = path

with mock.patch("app.models.base.Path", return_value=path) as mocked:
yield mocked


@pytest.fixture(scope="function")
def info() -> Iterator[mock.Mock]:
with mock.patch.object(log, "info") as mocked:
yield mocked


@pytest.fixture(scope="function")
def warning() -> Iterator[mock.Mock]:
with mock.patch.object(log, "warning") as mocked:
yield mocked


@pytest.fixture(scope="function")
def exception() -> Iterator[mock.Mock]:
with mock.patch.object(log, "exception") as mocked:
yield mocked


@pytest.fixture(scope="function")
def snapshot_download() -> Iterator[mock.Mock]:
with mock.patch("app.models.base.snapshot_download") as mocked:
yield mocked
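Note: the fixtures above pull their arguments from custom pytest markers. A minimal sketch of how a test consumes them, assuming the `providers` marker is registered in the project's pytest configuration (the test body is illustrative):

import pytest
from app.sessions import ort as ort_module

@pytest.mark.providers(["CUDAExecutionProvider", "CPUExecutionProvider"])
def test_sees_mocked_providers(providers: list[str]) -> None:
    # the fixture patched app.sessions.ort.ort.get_available_providers
    # to return the marker's argument list
    assert ort_module.ort.get_available_providers() == providers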
@@ -29,6 +29,7 @@ from .schemas import (
InferenceEntry,
InferenceResponse,
MessageResponse,
ModelFormat,
ModelIdentity,
ModelTask,
ModelType,
@@ -192,23 +193,28 @@ async def load(model: InferenceModel) -> InferenceModel:
return model

def _load(model: InferenceModel) -> InferenceModel:
if model.load_attempts > 1:
raise HTTPException(500, f"Failed to load model '{model.model_name}'")
with lock:
model.load()
try:
model.load()
except FileNotFoundError as e:
if model.model_format == ModelFormat.ONNX:
raise e
log.exception(e)
log.warning(
f"{model.model_format.upper()} is available, but model '{model.model_name}' does not support it."
)
model.model_format = ModelFormat.ONNX
model.load()
return model

try:
await run(_load, model)
return model
return await run(_load, model)
except (OSError, InvalidProtobuf, BadZipFile, NoSuchFile):
log.warning(
(
f"Failed to load {model.model_type.replace('_', ' ')} model '{model.model_name}'."
"Clearing cache and retrying."
)
)
log.warning(f"Failed to load {model.model_type.replace('_', ' ')} model '{model.model_name}'. Clearing cache.")
model.clear_cache()
await run(_load, model)
return model
return await run(_load, model)


async def idle_shutdown_task() -> None:
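Note: the reworked `_load` above retries in ONNX format when the file for the preferred format is missing. A condensed sketch of that control flow, with the locking, attempt counting, and HTTP error handling stripped out (names mirror the diff, but this is a simplification, not the shipped function):

from app.schemas import ModelFormat

def load_with_fallback(model) -> None:
    try:
        model.load()
    except FileNotFoundError:
        if model.model_format == ModelFormat.ONNX:
            raise  # nothing left to fall back to
        # e.g. ARMNN was preferred but this model ships no .armnn file
        model.model_format = ModelFormat.ONNX
        model.load()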
@@ -5,15 +5,14 @@ from pathlib import Path
from shutil import rmtree
from typing import Any, ClassVar

import onnxruntime as ort
from huggingface_hub import snapshot_download

import ann.ann
from app.models.constants import SUPPORTED_PROVIDERS
from app.sessions.ort import OrtSession

from ..config import clean_name, log, settings
from ..schemas import ModelFormat, ModelIdentity, ModelSession, ModelTask, ModelType
from .ann import AnnSession
from ..sessions.ann import AnnSession


class InferenceModel(ABC):
@@ -24,19 +23,17 @@ class InferenceModel(ABC):
self,
model_name: str,
cache_dir: Path | str | None = None,
providers: list[str] | None = None,
provider_options: list[dict[str, Any]] | None = None,
sess_options: ort.SessionOptions | None = None,
preferred_format: ModelFormat | None = None,
model_format: ModelFormat | None = None,
session: ModelSession | None = None,
**model_kwargs: Any,
) -> None:
self.loaded = False
self.loaded = session is not None
self.load_attempts = 0
self.model_name = clean_name(model_name)
self.cache_dir = Path(cache_dir) if cache_dir is not None else self.cache_dir_default
self.providers = providers if providers is not None else self.providers_default
self.provider_options = provider_options if provider_options is not None else self.provider_options_default
self.sess_options = sess_options if sess_options is not None else self.sess_options_default
self.preferred_format = preferred_format if preferred_format is not None else self.preferred_format_default
self.cache_dir = Path(cache_dir) if cache_dir is not None else self._cache_dir_default
self.model_format = model_format if model_format is not None else self._model_format_default
if session is not None:
self.session = session

def download(self) -> None:
if not self.cached:
@@ -48,9 +45,11 @@ class InferenceModel(ABC):
def load(self) -> None:
if self.loaded:
return
self.load_attempts += 1

self.download()
log.info(f"Loading {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
attempt = f"Attempt #{self.load_attempts} to load" if self.load_attempts > 1 else "Loading"
log.info(f"{attempt} {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
self.session = self._load()
self.loaded = True

@@ -67,7 +66,7 @@ class InferenceModel(ABC):
pass

def _download(self) -> None:
ignore_patterns = [] if self.preferred_format == ModelFormat.ARMNN else ["*.armnn"]
ignore_patterns = [] if self.model_format == ModelFormat.ARMNN else ["*.armnn"]
snapshot_download(
f"immich-app/{clean_name(self.model_name)}",
cache_dir=self.cache_dir,
@@ -103,25 +102,13 @@ class InferenceModel(ABC):

def _make_session(self, model_path: Path) -> ModelSession:
if not model_path.is_file():
onnx_path = model_path.with_suffix(".onnx")
if not onnx_path.is_file():
raise ValueError(f"Model path '{model_path}' does not exist")

log.warning(
f"Could not find model path '{model_path}'. " f"Falling back to ONNX model path '{onnx_path}' instead.",
)
model_path = onnx_path
raise FileNotFoundError(f"Model file not found: {model_path}")

match model_path.suffix:
case ".armnn":
session = AnnSession(model_path)
session: ModelSession = AnnSession(model_path)
case ".onnx":
session = ort.InferenceSession(
model_path.as_posix(),
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
)
session = OrtSession(model_path)
case _:
raise ValueError(f"Unsupported model file type: {model_path.suffix}")
return session
@@ -132,7 +119,7 @@ class InferenceModel(ABC):

@property
def model_path(self) -> Path:
return self.model_dir / f"model.{self.preferred_format}"
return self.model_dir / f"model.{self.model_format}"

@property
def model_task(self) -> ModelTask:
@@ -151,7 +138,7 @@ class InferenceModel(ABC):
self._cache_dir = cache_dir

@property
def cache_dir_default(self) -> Path:
def _cache_dir_default(self) -> Path:
return settings.cache_folder / self.model_task.value / self.model_name

@property
@@ -159,95 +146,14 @@ class InferenceModel(ABC):
return self.model_path.is_file()

@property
def providers(self) -> list[str]:
return self._providers
def model_format(self) -> ModelFormat:
return self._model_format

@providers.setter
def providers(self, providers: list[str]) -> None:
log.info(
(f"Setting '{self.model_name}' execution providers to {providers}, " "in descending order of preference"),
)
self._providers = providers
@model_format.setter
def model_format(self, model_format: ModelFormat) -> None:
log.debug(f"Setting model format to {model_format}")
self._model_format = model_format

@property
def providers_default(self) -> list[str]:
available_providers = set(ort.get_available_providers())
log.debug(f"Available ORT providers: {available_providers}")
if (openvino := "OpenVINOExecutionProvider") in available_providers:
device_ids: list[str] = ort.capi._pybind_state.get_available_openvino_device_ids()
log.debug(f"Available OpenVINO devices: {device_ids}")

gpu_devices = [device_id for device_id in device_ids if device_id.startswith("GPU")]
if not gpu_devices:
log.warning("No GPU device found in OpenVINO. Falling back to CPU.")
available_providers.remove(openvino)
return [provider for provider in SUPPORTED_PROVIDERS if provider in available_providers]

@property
def provider_options(self) -> list[dict[str, Any]]:
return self._provider_options

@provider_options.setter
def provider_options(self, provider_options: list[dict[str, Any]]) -> None:
log.debug(f"Setting execution provider options to {provider_options}")
self._provider_options = provider_options

@property
def provider_options_default(self) -> list[dict[str, Any]]:
options = []
for provider in self.providers:
match provider:
case "CPUExecutionProvider" | "CUDAExecutionProvider":
option = {"arena_extend_strategy": "kSameAsRequested"}
case "OpenVINOExecutionProvider":
option = {"device_type": "GPU_FP32", "cache_dir": (self.cache_dir / "openvino").as_posix()}
case _:
option = {}
options.append(option)
return options

@property
def sess_options(self) -> ort.SessionOptions:
return self._sess_options

@sess_options.setter
def sess_options(self, sess_options: ort.SessionOptions) -> None:
log.debug(f"Setting execution_mode to {sess_options.execution_mode.name}")
log.debug(f"Setting inter_op_num_threads to {sess_options.inter_op_num_threads}")
log.debug(f"Setting intra_op_num_threads to {sess_options.intra_op_num_threads}")
self._sess_options = sess_options

@property
def sess_options_default(self) -> ort.SessionOptions:
sess_options = ort.SessionOptions()
sess_options.enable_cpu_mem_arena = False

# avoid thread contention between models
if settings.model_inter_op_threads > 0:
sess_options.inter_op_num_threads = settings.model_inter_op_threads
# these defaults work well for CPU, but bottleneck GPU
elif settings.model_inter_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.inter_op_num_threads = 1

if settings.model_intra_op_threads > 0:
sess_options.intra_op_num_threads = settings.model_intra_op_threads
elif settings.model_intra_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.intra_op_num_threads = 2

if sess_options.inter_op_num_threads > 1:
sess_options.execution_mode = ort.ExecutionMode.ORT_PARALLEL

return sess_options

@property
def preferred_format(self) -> ModelFormat:
return self._preferred_format

@preferred_format.setter
def preferred_format(self, preferred_format: ModelFormat) -> None:
log.debug(f"Setting preferred format to {preferred_format}")
self._preferred_format = preferred_format

@property
def preferred_format_default(self) -> ModelFormat:
def _model_format_default(self) -> ModelFormat:
return ModelFormat.ARMNN if ann.ann.is_available and settings.ann else ModelFormat.ONNX
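Note: `_make_session` now fails fast on a missing file and dispatches purely on the file suffix. A reduced sketch of that dispatch, assuming `AnnSession` and `OrtSession` are imported as in the diff:

from pathlib import Path

def make_session(model_path: Path):
    if not model_path.is_file():
        raise FileNotFoundError(f"Model file not found: {model_path}")
    match model_path.suffix:
        case ".armnn":
            return AnnSession(model_path)
        case ".onnx":
            return OrtSession(model_path)
        case _:
            raise ValueError(f"Unsupported model file type: {model_path.suffix}")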
@@ -22,11 +22,12 @@ class BaseCLIPTextualEncoder(InferenceModel):
return res

def _load(self) -> ModelSession:
session = super()._load()
log.debug(f"Loading tokenizer for CLIP model '{self.model_name}'")
self.tokenizer = self._load_tokenizer()
log.debug(f"Loaded tokenizer for CLIP model '{self.model_name}'")

return super()._load()
return session

@abstractmethod
def _load_tokenizer(self) -> Tokenizer:
@@ -1,4 +1,3 @@
from pathlib import Path
from typing import Any

import numpy as np
@@ -14,15 +13,9 @@ class FaceDetector(InferenceModel):
depends = []
identity = (ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)

def __init__(
self,
model_name: str,
min_score: float = 0.7,
cache_dir: Path | str | None = None,
**model_kwargs: Any,
) -> None:
def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
self.min_score = model_kwargs.pop("minScore", min_score)
super().__init__(model_name, cache_dir, **model_kwargs)
super().__init__(model_name, **model_kwargs)

def _load(self) -> ModelSession:
session = self._make_session(self.model_path)
@@ -3,37 +3,32 @@ from typing import Any

import numpy as np
import onnx
import onnxruntime as ort
from insightface.model_zoo import ArcFaceONNX
from insightface.utils.face_align import norm_crop
from numpy.typing import NDArray
from onnx.tools.update_model_dims import update_inputs_outputs_dims
from PIL import Image

from app.config import clean_name, log
from app.config import log
from app.models.base import InferenceModel
from app.models.transforms import decode_cv2
from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelSession, ModelTask, ModelType
from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelFormat, ModelSession, ModelTask, ModelType
from app.sessions import has_batch_axis


class FaceRecognizer(InferenceModel):
depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)

def __init__(
self,
model_name: str,
min_score: float = 0.7,
cache_dir: Path | str | None = None,
**model_kwargs: Any,
) -> None:
def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs)
self.min_score = model_kwargs.pop("minScore", min_score)
super().__init__(clean_name(model_name), cache_dir, **model_kwargs)
self.batch = self.model_format == ModelFormat.ONNX

def _load(self) -> ModelSession:
session = self._make_session(self.model_path)
if not self._has_batch_dim(session):
self._add_batch_dim(self.model_path)
if self.batch and not has_batch_axis(session):
self._add_batch_axis(self.model_path)
session = self._make_session(self.model_path)
self.model = ArcFaceONNX(
self.model_path.with_suffix(".onnx").as_posix(),
@@ -47,9 +42,20 @@ class FaceRecognizer(InferenceModel):
if faces["boxes"].shape[0] == 0:
return []
inputs = decode_cv2(inputs)
embeddings: NDArray[np.float32] = self.model.get_feat(self._crop(inputs, faces))
cropped_faces = self._crop(inputs, faces)
embeddings = self._predict_batch(cropped_faces) if self.batch else self._predict_single(cropped_faces)
return self.postprocess(faces, embeddings)

def _predict_batch(self, cropped_faces: list[NDArray[np.uint8]]) -> NDArray[np.float32]:
embeddings: NDArray[np.float32] = self.model.get_feat(cropped_faces)
return embeddings

def _predict_single(self, cropped_faces: list[NDArray[np.uint8]]) -> NDArray[np.float32]:
embeddings: list[NDArray[np.float32]] = []
for face in cropped_faces:
embeddings.append(self.model.get_feat(face))
return np.concatenate(embeddings, axis=0)

def postprocess(self, faces: FaceDetectionOutput, embeddings: NDArray[np.float32]) -> FacialRecognitionOutput:
return [
{
@@ -63,11 +69,8 @@ class FaceRecognizer(InferenceModel):
def _crop(self, image: NDArray[np.uint8], faces: FaceDetectionOutput) -> list[NDArray[np.uint8]]:
return [norm_crop(image, landmark) for landmark in faces["landmarks"]]

def _has_batch_dim(self, session: ort.InferenceSession) -> bool:
return not isinstance(session, ort.InferenceSession) or session.get_inputs()[0].shape[0] == "batch"

def _add_batch_dim(self, model_path: Path) -> None:
log.debug(f"Adding batch dimension to model {model_path}")
def _add_batch_axis(self, model_path: Path) -> None:
log.debug(f"Adding batch axis to model {model_path}")
proto = onnx.load(model_path)
static_input_dims = [shape.dim_value for shape in proto.graph.input[0].type.tensor_type.shape.dim[1:]]
static_output_dims = [shape.dim_value for shape in proto.graph.output[0].type.tensor_type.shape.dim[1:]]
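Note: the recognizer above batches all cropped faces into one `get_feat` call only for ONNX sessions; non-ONNX (ARM NN) models run with a fixed batch of one, so faces are embedded individually and the per-face results are stacked. A sketch of the split (the 512-dim embedding width is the usual ArcFace size, not stated in this hunk):

import numpy as np

def embed(model, cropped_faces, batch: bool):
    if batch:
        return model.get_feat(cropped_faces)  # one (N, 512) inference call
    # one (1, 512) call per face, concatenated to (N, 512)
    return np.concatenate([model.get_feat(face) for face in cropped_faces], axis=0)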
@@ -54,6 +54,14 @@ class ModelSource(StrEnum):
ModelIdentity = tuple[ModelType, ModelTask]


class SessionNode(Protocol):
@property
def name(self) -> str | None: ...

@property
def shape(self) -> tuple[int, ...]: ...


class ModelSession(Protocol):
def run(
self,
@@ -62,6 +70,10 @@ class ModelSession(Protocol):
run_options: Any = None,
) -> list[npt.NDArray[np.float32]]: ...

def get_inputs(self) -> list[SessionNode]: ...

def get_outputs(self) -> list[SessionNode]: ...


class HasProfiling(Protocol):
profiling: dict[str, float]
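Note: `SessionNode` and `ModelSession` are structural (typing.Protocol) types, so any object with matching methods type-checks without inheriting from them. An illustrative stub, not from the codebase:

import numpy as np
from app.schemas import ModelSession

class FakeSession:
    def run(self, output_names, input_feed, run_options=None):
        return [np.zeros((1, 512), dtype=np.float32)]

    def get_inputs(self):
        return []

    def get_outputs(self):
        return []

session: ModelSession = FakeSession()  # accepted by a static type checker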
machine-learning/app/sessions/__init__.py (new file, 5 additions)
@@ -0,0 +1,5 @@
from app.schemas import ModelSession


def has_batch_axis(session: ModelSession) -> bool:
return not isinstance(session.get_inputs()[0].shape[0], int) or session.get_inputs()[0].shape[0] < 0
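Note: `has_batch_axis` relies on how ONNX reports dynamic dimensions: a symbolic axis surfaces as a string (e.g. "batch") and some backends use a negative sentinel, while a fixed axis is a plain non-negative int. The same predicate applied directly to example shape tuples:

def has_batch_axis_shape(shape) -> bool:
    # same test as has_batch_axis, applied to a raw shape tuple
    return not isinstance(shape[0], int) or shape[0] < 0

assert has_batch_axis_shape(("batch", 3, 112, 112))  # symbolic dim
assert has_batch_axis_shape((-1, 3, 112, 112))       # negative sentinel
assert not has_batch_axis_shape((1, 3, 112, 112))    # fixed batch of 1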
@@ -7,6 +7,7 @@ import numpy as np
from numpy.typing import NDArray

from ann.ann import Ann
from app.schemas import SessionNode

from ..config import log, settings

@@ -16,27 +17,15 @@ class AnnSession:
Wrapper for ANN to be drop-in replacement for ONNX session.
"""

def __init__(self, model_path: Path):
tuning_file = Path(settings.cache_folder) / "gpu-tuning.ann"
with tuning_file.open(mode="a"):
# make sure tuning file exists (without clearing contents)
# once filled, the tuning file reduces the cost/time of the first
# inference after model load by 10s of seconds
pass
self.ann = Ann(tuning_level=3, tuning_file=tuning_file.as_posix())
log.info("Loading ANN model %s ...", model_path)
cache_file = model_path.with_suffix(".anncache")
save = False
if not cache_file.is_file():
save = True
with cache_file.open(mode="a"):
# create empty model cache file
pass
def __init__(self, model_path: Path, cache_dir: Path = settings.cache_folder) -> None:
self.model_path = model_path
self.cache_dir = cache_dir
self.ann = Ann(tuning_level=3, tuning_file=(cache_dir / "gpu-tuning.ann").as_posix())

log.info("Loading ANN model %s ...", model_path)
self.model = self.ann.load(
model_path.as_posix(),
save_cached_network=save,
cached_network_path=cache_file.as_posix(),
cached_network_path=model_path.with_suffix(".anncache").as_posix(),
)
log.info("Loaded ANN model with ID %d", self.model)

@@ -45,11 +34,11 @@ class AnnSession:
log.info("Unloaded ANN model %d", self.model)
self.ann.destroy()

def get_inputs(self) -> list[AnnNode]:
def get_inputs(self) -> list[SessionNode]:
shapes = self.ann.input_shapes[self.model]
return [AnnNode(None, s) for s in shapes]

def get_outputs(self) -> list[AnnNode]:
def get_outputs(self) -> list[SessionNode]:
shapes = self.ann.output_shapes[self.model]
return [AnnNode(None, s) for s in shapes]
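Note: after the refactor above, the `.anncache` sibling file is derived from the model path and the GPU tuning file lives under `cache_dir`. A minimal construction sketch with hypothetical paths:

from pathlib import Path

session = AnnSession(
    Path("/cache/clip/ViT-B-32__openai/model.armnn"),  # hypothetical path
    cache_dir=Path("/cache"),
)
shapes = [node.shape for node in session.get_inputs()]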
machine-learning/app/sessions/ort.py (new file, 129 additions)
@@ -0,0 +1,129 @@
from __future__ import annotations

from pathlib import Path
from typing import Any

import numpy as np
import onnxruntime as ort
from numpy.typing import NDArray

from app.models.constants import SUPPORTED_PROVIDERS
from app.schemas import SessionNode

from ..config import log, settings


class OrtSession:
def __init__(
self,
model_path: Path | str,
providers: list[str] | None = None,
provider_options: list[dict[str, Any]] | None = None,
sess_options: ort.SessionOptions | None = None,
):
self.model_path = Path(model_path)
self.providers = providers if providers is not None else self._providers_default
self.provider_options = provider_options if provider_options is not None else self._provider_options_default
self.sess_options = sess_options if sess_options is not None else self._sess_options_default
self.session = ort.InferenceSession(
self.model_path.as_posix(),
providers=self.providers,
provider_options=self.provider_options,
sess_options=self.sess_options,
)

def get_inputs(self) -> list[SessionNode]:
inputs: list[SessionNode] = self.session.get_inputs()
return inputs

def get_outputs(self) -> list[SessionNode]:
outputs: list[SessionNode] = self.session.get_outputs()
return outputs

def run(
self,
output_names: list[str] | None,
input_feed: dict[str, NDArray[np.float32]] | dict[str, NDArray[np.int32]],
run_options: Any = None,
) -> list[NDArray[np.float32]]:
outputs: list[NDArray[np.float32]] = self.session.run(output_names, input_feed, run_options)
return outputs

@property
def providers(self) -> list[str]:
return self._providers

@providers.setter
def providers(self, providers: list[str]) -> None:
log.info(f"Setting execution providers to {providers}, in descending order of preference")
self._providers = providers

@property
def _providers_default(self) -> list[str]:
available_providers = set(ort.get_available_providers())
log.debug(f"Available ORT providers: {available_providers}")
if (openvino := "OpenVINOExecutionProvider") in available_providers:
device_ids: list[str] = ort.capi._pybind_state.get_available_openvino_device_ids()
log.debug(f"Available OpenVINO devices: {device_ids}")

gpu_devices = [device_id for device_id in device_ids if device_id.startswith("GPU")]
if not gpu_devices:
log.warning("No GPU device found in OpenVINO. Falling back to CPU.")
available_providers.remove(openvino)
return [provider for provider in SUPPORTED_PROVIDERS if provider in available_providers]

@property
def provider_options(self) -> list[dict[str, Any]]:
return self._provider_options

@provider_options.setter
def provider_options(self, provider_options: list[dict[str, Any]]) -> None:
log.debug(f"Setting execution provider options to {provider_options}")
self._provider_options = provider_options

@property
def _provider_options_default(self) -> list[dict[str, Any]]:
options = []
for provider in self.providers:
match provider:
case "CPUExecutionProvider" | "CUDAExecutionProvider":
option = {"arena_extend_strategy": "kSameAsRequested"}
case "OpenVINOExecutionProvider":
option = {"device_type": "GPU_FP32", "cache_dir": (self.model_path.parent / "openvino").as_posix()}
case _:
option = {}
options.append(option)
return options

@property
def sess_options(self) -> ort.SessionOptions:
return self._sess_options

@sess_options.setter
def sess_options(self, sess_options: ort.SessionOptions) -> None:
log.debug(f"Setting execution_mode to {sess_options.execution_mode.name}")
log.debug(f"Setting inter_op_num_threads to {sess_options.inter_op_num_threads}")
log.debug(f"Setting intra_op_num_threads to {sess_options.intra_op_num_threads}")
self._sess_options = sess_options

@property
def _sess_options_default(self) -> ort.SessionOptions:
sess_options = ort.SessionOptions()
sess_options.enable_cpu_mem_arena = False

# avoid thread contention between models
if settings.model_inter_op_threads > 0:
sess_options.inter_op_num_threads = settings.model_inter_op_threads
# these defaults work well for CPU, but bottleneck GPU
elif settings.model_inter_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.inter_op_num_threads = 1

if settings.model_intra_op_threads > 0:
sess_options.intra_op_num_threads = settings.model_intra_op_threads
elif settings.model_intra_op_threads == 0 and self.providers == ["CPUExecutionProvider"]:
sess_options.intra_op_num_threads = 2

if sess_options.inter_op_num_threads > 1:
sess_options.execution_mode = ort.ExecutionMode.ORT_PARALLEL

return sess_options
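Note: `OrtSession` wraps `ort.InferenceSession` behind the same `ModelSession` protocol as `AnnSession`, resolving providers, provider options, and session options to the defaults above when not supplied. A minimal usage sketch (the model path and input name are hypothetical):

import numpy as np

session = OrtSession("/cache/clip/ViT-B-32__openai/model.onnx")
dummy = np.random.rand(1, 3, 224, 224).astype(np.float32)
embedding = session.run(None, {"input": dummy})[0]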
@@ -11,6 +11,7 @@ import cv2
|
||||
import numpy as np
|
||||
import onnxruntime as ort
|
||||
import pytest
|
||||
from fastapi import HTTPException
|
||||
from fastapi.testclient import TestClient
|
||||
from PIL import Image
|
||||
from pytest import MonkeyPatch
|
||||
@@ -21,129 +22,16 @@ from app.models.clip.textual import MClipTextualEncoder, OpenClipTextualEncoder
|
||||
from app.models.clip.visual import OpenClipVisualEncoder
|
||||
from app.models.facial_recognition.detection import FaceDetector
|
||||
from app.models.facial_recognition.recognition import FaceRecognizer
|
||||
from app.sessions.ann import AnnSession
|
||||
from app.sessions.ort import OrtSession
|
||||
|
||||
from .config import Settings, log, settings
|
||||
from .config import Settings, settings
|
||||
from .models.base import InferenceModel
|
||||
from .models.cache import ModelCache
|
||||
from .schemas import ModelFormat, ModelTask, ModelType
|
class TestBase:
    CPU_EP = ["CPUExecutionProvider"]
    CUDA_EP = ["CUDAExecutionProvider", "CPUExecutionProvider"]
    OV_EP = ["OpenVINOExecutionProvider", "CPUExecutionProvider"]
    CUDA_EP_OUT_OF_ORDER = ["CPUExecutionProvider", "CUDAExecutionProvider"]
    TRT_EP = ["TensorrtExecutionProvider", "CUDAExecutionProvider", "CPUExecutionProvider"]

    @pytest.mark.providers(CPU_EP)
    def test_sets_cpu_provider(self, providers: list[str]) -> None:
        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

        assert encoder.providers == self.CPU_EP

    @pytest.mark.providers(CUDA_EP)
    def test_sets_cuda_provider_if_available(self, providers: list[str]) -> None:
        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

        assert encoder.providers == self.CUDA_EP

    @pytest.mark.providers(OV_EP)
    def test_sets_openvino_provider_if_available(self, providers: list[str], mocker: MockerFixture) -> None:
        mocked = mocker.patch("app.models.base.ort.capi._pybind_state")
        mocked.get_available_openvino_device_ids.return_value = ["GPU.0", "CPU"]

        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

        assert encoder.providers == self.OV_EP

    @pytest.mark.providers(OV_EP)
    def test_avoids_openvino_if_gpu_not_available(self, providers: list[str], mocker: MockerFixture) -> None:
        mocked = mocker.patch("app.models.base.ort.capi._pybind_state")
        mocked.get_available_openvino_device_ids.return_value = ["CPU"]

        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

        assert encoder.providers == self.CPU_EP

    @pytest.mark.providers(CUDA_EP_OUT_OF_ORDER)
    def test_sets_providers_in_correct_order(self, providers: list[str]) -> None:
        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

        assert encoder.providers == self.CUDA_EP

    @pytest.mark.providers(TRT_EP)
    def test_ignores_unsupported_providers(self, providers: list[str]) -> None:
        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

        assert encoder.providers == self.CUDA_EP

    def test_sets_provider_kwarg(self) -> None:
        providers = ["CUDAExecutionProvider"]
        encoder = OpenClipTextualEncoder("ViT-B-32__openai", providers=providers)

        assert encoder.providers == providers

    def test_sets_default_provider_options(self, mocker: MockerFixture) -> None:
        mocked = mocker.patch("app.models.base.ort.capi._pybind_state")
        mocked.get_available_openvino_device_ids.return_value = ["GPU.0", "CPU"]

        encoder = OpenClipTextualEncoder(
            "ViT-B-32__openai", providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"]
        )

        assert encoder.provider_options == [
            {"device_type": "GPU_FP32", "cache_dir": (encoder.cache_dir / "openvino").as_posix()},
            {"arena_extend_strategy": "kSameAsRequested"},
        ]

    def test_sets_provider_options_kwarg(self) -> None:
        encoder = OpenClipTextualEncoder(
            "ViT-B-32__openai",
            providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"],
            provider_options=[],
        )

        assert encoder.provider_options == []

    def test_sets_default_sess_options(self) -> None:
        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

        assert encoder.sess_options.execution_mode == ort.ExecutionMode.ORT_SEQUENTIAL
        assert encoder.sess_options.inter_op_num_threads == 1
        assert encoder.sess_options.intra_op_num_threads == 2
        assert encoder.sess_options.enable_cpu_mem_arena is False

    def test_sets_default_sess_options_does_not_set_threads_if_non_cpu_and_default_threads(self) -> None:
        encoder = OpenClipTextualEncoder(
            "ViT-B-32__openai", providers=["CUDAExecutionProvider", "CPUExecutionProvider"]
        )

        assert encoder.sess_options.inter_op_num_threads == 0
        assert encoder.sess_options.intra_op_num_threads == 0

    def test_sets_default_sess_options_sets_threads_if_non_cpu_and_set_threads(self, mocker: MockerFixture) -> None:
        mock_settings = mocker.patch("app.models.base.settings", autospec=True)
        mock_settings.model_inter_op_threads = 2
        mock_settings.model_intra_op_threads = 4

        encoder = OpenClipTextualEncoder(
            "ViT-B-32__openai", providers=["CUDAExecutionProvider", "CPUExecutionProvider"]
        )

        assert encoder.sess_options.inter_op_num_threads == 2
        assert encoder.sess_options.intra_op_num_threads == 4

    def test_sets_sess_options_kwarg(self) -> None:
        sess_options = ort.SessionOptions()
        encoder = OpenClipTextualEncoder(
            "ViT-B-32__openai",
            providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"],
            provider_options=[],
            sess_options=sess_options,
        )

        assert sess_options is encoder.sess_options

    def test_sets_default_cache_dir(self) -> None:
        encoder = OpenClipTextualEncoder("ViT-B-32__openai")
@@ -155,29 +43,30 @@ class TestBase:

        assert encoder.cache_dir == cache_dir

-    def test_sets_default_preferred_format(self, mocker: MockerFixture) -> None:
+    def test_sets_default_model_format(self, mocker: MockerFixture) -> None:
        mocker.patch.object(settings, "ann", True)
        mocker.patch("ann.ann.is_available", False)

        encoder = OpenClipTextualEncoder("ViT-B-32__openai")

-        assert encoder.preferred_format == ModelFormat.ONNX
+        assert encoder.model_format == ModelFormat.ONNX

-    def test_sets_default_preferred_format_to_armnn_if_available(self, mocker: MockerFixture) -> None:
+    def test_sets_default_model_format_to_armnn_if_available(self, path: mock.Mock, mocker: MockerFixture) -> None:
        mocker.patch.object(settings, "ann", True)
        mocker.patch("ann.ann.is_available", True)
+        path.suffix = ".armnn"

-        encoder = OpenClipTextualEncoder("ViT-B-32__openai")
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)

-        assert encoder.preferred_format == ModelFormat.ARMNN
+        assert encoder.model_format == ModelFormat.ARMNN

-    def test_sets_preferred_format_kwarg(self, mocker: MockerFixture) -> None:
+    def test_sets_model_format_kwarg(self, mocker: MockerFixture) -> None:
        mocker.patch.object(settings, "ann", False)
        mocker.patch("ann.ann.is_available", False)

-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)

-        assert encoder.preferred_format == ModelFormat.ARMNN
+        assert encoder.model_format == ModelFormat.ARMNN

    def test_casts_cache_dir_string_to_path(self) -> None:
        cache_dir = "/test_cache"
@@ -185,120 +74,53 @@ class TestBase:

        assert encoder.cache_dir == Path(cache_dir)

-    def test_clear_cache(self, mocker: MockerFixture) -> None:
-        mock_rmtree = mocker.patch("app.models.base.rmtree", autospec=True)
-        mock_rmtree.avoids_symlink_attacks = True
-        mock_cache_dir = mocker.Mock()
-        mock_cache_dir.exists.return_value = True
-        mock_cache_dir.is_dir.return_value = True
-        mocker.patch("app.models.base.Path", return_value=mock_cache_dir)
-        info = mocker.spy(log, "info")
-
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=mock_cache_dir)
+    def test_clear_cache(self, rmtree: mock.Mock, path: mock.Mock, info: mock.Mock) -> None:
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)
        encoder.clear_cache()

-        mock_rmtree.assert_called_once_with(encoder.cache_dir)
+        rmtree.assert_called_once_with(encoder.cache_dir)
        info.assert_called_with(f"Cleared cache directory for model '{encoder.model_name}'.")

-    def test_clear_cache_warns_if_path_does_not_exist(self, mocker: MockerFixture) -> None:
-        mock_rmtree = mocker.patch("app.models.base.rmtree", autospec=True)
-        mock_rmtree.avoids_symlink_attacks = True
-        mock_cache_dir = mocker.Mock()
-        mock_cache_dir.exists.return_value = False
-        mock_cache_dir.is_dir.return_value = True
-        mocker.patch("app.models.base.Path", return_value=mock_cache_dir)
-        warning = mocker.spy(log, "warning")
+    def test_clear_cache_warns_if_path_does_not_exist(
+        self, rmtree: mock.Mock, path: mock.Mock, warning: mock.Mock
+    ) -> None:
+        path.return_value.exists.return_value = False

-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=mock_cache_dir)
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)
        encoder.clear_cache()

-        mock_rmtree.assert_not_called()
+        rmtree.assert_not_called()
        warning.assert_called_once()

-    def test_clear_cache_raises_exception_if_vulnerable_to_symlink_attack(self, mocker: MockerFixture) -> None:
-        mock_rmtree = mocker.patch("app.models.base.rmtree", autospec=True)
-        mock_rmtree.avoids_symlink_attacks = False
-        mock_cache_dir = mocker.Mock()
-        mock_cache_dir.exists.return_value = True
-        mock_cache_dir.is_dir.return_value = True
-        mocker.patch("app.models.base.Path", return_value=mock_cache_dir)
+    def test_clear_cache_raises_exception_if_vulnerable_to_symlink_attack(
+        self, rmtree: mock.Mock, path: mock.Mock
+    ) -> None:
+        rmtree.avoids_symlink_attacks = False

-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=mock_cache_dir)
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)
        with pytest.raises(RuntimeError):
            encoder.clear_cache()

-        mock_rmtree.assert_not_called()
+        rmtree.assert_not_called()

-    def test_clear_cache_replaces_file_with_dir_if_path_is_file(self, mocker: MockerFixture) -> None:
-        mock_rmtree = mocker.patch("app.models.base.rmtree", autospec=True)
-        mock_rmtree.avoids_symlink_attacks = True
-        mock_cache_dir = mocker.Mock()
-        mock_cache_dir.exists.return_value = True
-        mock_cache_dir.is_dir.return_value = False
-        mocker.patch("app.models.base.Path", return_value=mock_cache_dir)
-        warning = mocker.spy(log, "warning")
+    def test_clear_cache_replaces_file_with_dir_if_path_is_file(
+        self, rmtree: mock.Mock, path: mock.Mock, warning: mock.Mock
+    ) -> None:
+        path.return_value.is_dir.return_value = False

-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=mock_cache_dir)
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)
        encoder.clear_cache()

-        mock_rmtree.assert_not_called()
-        mock_cache_dir.unlink.assert_called_once()
-        mock_cache_dir.mkdir.assert_called_once()
+        rmtree.assert_not_called()
+        path.return_value.unlink.assert_called_once()
+        path.return_value.mkdir.assert_called_once()
        warning.assert_called_once()

-    def test_make_session_return_ann_if_available(self, mocker: MockerFixture) -> None:
-        mock_model_path = mocker.Mock()
-        mock_model_path.is_file.return_value = True
-        mock_model_path.suffix = ".armnn"
-        mock_model_path.with_suffix.return_value = mock_model_path
-        mock_ann = mocker.patch("app.models.base.AnnSession")
-
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai")
-        encoder._make_session(mock_model_path)
-
-        mock_ann.assert_called_once()
-
-    def test_make_session_return_ort_if_available_and_ann_is_not(self, mocker: MockerFixture) -> None:
-        mock_armnn_path = mocker.Mock()
-        mock_armnn_path.is_file.return_value = False
-        mock_armnn_path.suffix = ".armnn"
-
-        mock_onnx_path = mocker.Mock()
-        mock_onnx_path.is_file.return_value = True
-        mock_onnx_path.suffix = ".onnx"
-        mock_armnn_path.with_suffix.return_value = mock_onnx_path
-
-        mock_ann = mocker.patch("app.models.base.AnnSession")
-        mock_ort = mocker.patch("app.models.base.ort.InferenceSession")
-
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai")
-        encoder._make_session(mock_armnn_path)
-
-        mock_ort.assert_called_once()
-        mock_ann.assert_not_called()
-
-    def test_make_session_raises_exception_if_path_does_not_exist(self, mocker: MockerFixture) -> None:
-        mock_model_path = mocker.Mock()
-        mock_model_path.is_file.return_value = False
-        mock_model_path.suffix = ".onnx"
-        mock_model_path.with_suffix.return_value = mock_model_path
-        mock_ann = mocker.patch("app.models.base.AnnSession")
-        mock_ort = mocker.patch("app.models.base.ort.InferenceSession")
-
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai")
-        with pytest.raises(ValueError):
-            encoder._make_session(mock_model_path)
-
-        mock_ann.assert_not_called()
-        mock_ort.assert_not_called()

-    def test_download(self, mocker: MockerFixture) -> None:
-        mock_snapshot_download = mocker.patch("app.models.base.snapshot_download")
-
+    def test_download(self, snapshot_download: mock.Mock) -> None:
        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir="/path/to/cache")
        encoder.download()

-        mock_snapshot_download.assert_called_once_with(
+        snapshot_download.assert_called_once_with(
            "immich-app/ViT-B-32__openai",
            cache_dir=encoder.cache_dir,
            local_dir=encoder.cache_dir,
@@ -306,13 +128,11 @@ class TestBase:
            ignore_patterns=["*.armnn"],
        )

-    def test_download_downloads_armnn_if_preferred_format(self, mocker: MockerFixture) -> None:
-        mock_snapshot_download = mocker.patch("app.models.base.snapshot_download")
-
-        encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
+    def test_download_downloads_armnn_if_preferred_format(self, snapshot_download: mock.Mock) -> None:
+        encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)
        encoder.download()

-        mock_snapshot_download.assert_called_once_with(
+        snapshot_download.assert_called_once_with(
            "immich-app/ViT-B-32__openai",
            cache_dir=encoder.cache_dir,
            local_dir=encoder.cache_dir,
@@ -320,6 +140,180 @@ class TestBase:
            ignore_patterns=[],
        )

    def test_throws_exception_if_model_path_does_not_exist(
        self, snapshot_download: mock.Mock, ort_session: mock.Mock, path: mock.Mock
    ) -> None:
        path.return_value.__truediv__.return_value.__truediv__.return_value.is_file.return_value = False

        encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)

        with pytest.raises(FileNotFoundError):
            encoder.load()

        snapshot_download.assert_called_once()
        ort_session.assert_not_called()
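
These test classes lean on shared fixtures (providers, ov_device_ids, ort_session, ann_session, path, rmtree, info, warning, snapshot_download) that are defined outside this hunk. For orientation, a minimal sketch of how the marker-driven providers fixture could be wired up in conftest.py; the patch target and the fixture body are assumptions for illustration, not the repository's actual conftest:

import pytest
from pytest_mock import MockerFixture


@pytest.fixture
def providers(request: pytest.FixtureRequest, mocker: MockerFixture) -> list[str]:
    marker = request.node.get_closest_marker("providers")
    assert marker is not None, "test must be marked with @pytest.mark.providers(...)"
    available: list[str] = marker.args[0]
    # Patch what ONNX Runtime reports as installed so the code under test
    # sees exactly the providers the marker asked for.
    mocker.patch("app.sessions.ort.ort.get_available_providers", return_value=available)
    return available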


@pytest.mark.usefixtures("ort_session")
class TestOrtSession:
    CPU_EP = ["CPUExecutionProvider"]
    CUDA_EP = ["CUDAExecutionProvider", "CPUExecutionProvider"]
    OV_EP = ["OpenVINOExecutionProvider", "CPUExecutionProvider"]
    CUDA_EP_OUT_OF_ORDER = ["CPUExecutionProvider", "CUDAExecutionProvider"]
    TRT_EP = ["TensorrtExecutionProvider", "CUDAExecutionProvider", "CPUExecutionProvider"]

    @pytest.mark.providers(CPU_EP)
    def test_sets_cpu_provider(self, providers: list[str]) -> None:
        session = OrtSession("ViT-B-32__openai")

        assert session.providers == self.CPU_EP

    @pytest.mark.providers(CUDA_EP)
    def test_sets_cuda_provider_if_available(self, providers: list[str]) -> None:
        session = OrtSession("ViT-B-32__openai")

        assert session.providers == self.CUDA_EP

    @pytest.mark.ov_device_ids(["GPU.0", "CPU"])
    @pytest.mark.providers(OV_EP)
    def test_sets_openvino_provider_if_available(self, providers: list[str], ov_device_ids: list[str]) -> None:
        session = OrtSession("ViT-B-32__openai")

        assert session.providers == self.OV_EP

    @pytest.mark.ov_device_ids(["CPU"])
    @pytest.mark.providers(OV_EP)
    def test_avoids_openvino_if_gpu_not_available(self, providers: list[str], ov_device_ids: list[str]) -> None:
        session = OrtSession("ViT-B-32__openai")

        assert session.providers == self.CPU_EP

    @pytest.mark.providers(CUDA_EP_OUT_OF_ORDER)
    def test_sets_providers_in_correct_order(self, providers: list[str]) -> None:
        session = OrtSession("ViT-B-32__openai")

        assert session.providers == self.CUDA_EP

    @pytest.mark.providers(TRT_EP)
    def test_ignores_unsupported_providers(self, providers: list[str]) -> None:
        session = OrtSession("ViT-B-32__openai")

        assert session.providers == self.CUDA_EP

    def test_sets_provider_kwarg(self) -> None:
        providers = ["CUDAExecutionProvider"]
        session = OrtSession("ViT-B-32__openai", providers=providers)

        assert session.providers == providers

    @pytest.mark.ov_device_ids(["GPU.0", "CPU"])
    def test_sets_default_provider_options(self, ov_device_ids: list[str]) -> None:
        model_path = "/cache/ViT-B-32__openai/model.onnx"
        session = OrtSession(model_path, providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"])

        assert session.provider_options == [
            {"device_type": "GPU_FP32", "cache_dir": "/cache/ViT-B-32__openai/openvino"},
            {"arena_extend_strategy": "kSameAsRequested"},
        ]

    def test_sets_provider_options_kwarg(self) -> None:
        session = OrtSession(
            "ViT-B-32__openai",
            providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"],
            provider_options=[],
        )

        assert session.provider_options == []

    def test_sets_default_sess_options(self) -> None:
        session = OrtSession("ViT-B-32__openai")

        assert session.sess_options.execution_mode == ort.ExecutionMode.ORT_SEQUENTIAL
        assert session.sess_options.inter_op_num_threads == 1
        assert session.sess_options.intra_op_num_threads == 2
        assert session.sess_options.enable_cpu_mem_arena is False

    def test_sets_default_sess_options_does_not_set_threads_if_non_cpu_and_default_threads(self) -> None:
        session = OrtSession("ViT-B-32__openai", providers=["CUDAExecutionProvider", "CPUExecutionProvider"])

        assert session.sess_options.inter_op_num_threads == 0
        assert session.sess_options.intra_op_num_threads == 0

    def test_sets_default_sess_options_sets_threads_if_non_cpu_and_set_threads(self, mocker: MockerFixture) -> None:
        mock_settings = mocker.patch("app.sessions.ort.settings", autospec=True)
        mock_settings.model_inter_op_threads = 2
        mock_settings.model_intra_op_threads = 4

        session = OrtSession("ViT-B-32__openai", providers=["CUDAExecutionProvider", "CPUExecutionProvider"])

        assert session.sess_options.inter_op_num_threads == 2
        assert session.sess_options.intra_op_num_threads == 4

    def test_sets_sess_options_kwarg(self) -> None:
        sess_options = ort.SessionOptions()
        session = OrtSession(
            "ViT-B-32__openai",
            providers=["OpenVINOExecutionProvider", "CPUExecutionProvider"],
            provider_options=[],
            sess_options=sess_options,
        )

        assert sess_options is session.sess_options


class TestAnnSession:
    def test_creates_ann_session(self, ann_session: mock.Mock, info: mock.Mock) -> None:
        model_path = mock.MagicMock(spec=Path)
        cache_dir = mock.MagicMock(spec=Path)

        AnnSession(model_path, cache_dir)

        ann_session.assert_called_once_with(tuning_level=3, tuning_file=(cache_dir / "gpu-tuning.ann").as_posix())
        ann_session.return_value.load.assert_called_once_with(
            model_path.as_posix(), cached_network_path=model_path.with_suffix(".anncache").as_posix()
        )
        info.assert_has_calls(
            [
                mock.call("Loading ANN model %s ...", model_path),
                mock.call("Loaded ANN model with ID %d", ann_session.return_value.load.return_value),
            ]
        )

    def test_get_inputs(self, ann_session: mock.Mock) -> None:
        ann_session.return_value.load.return_value = 123
        ann_session.return_value.input_shapes = {123: [(1, 3, 224, 224)]}
        session = AnnSession(Path("ViT-B-32__openai"))

        inputs = session.get_inputs()

        assert len(inputs) == 1
        assert inputs[0].name is None
        assert inputs[0].shape == (1, 3, 224, 224)

    def test_get_outputs(self, ann_session: mock.Mock) -> None:
        ann_session.return_value.load.return_value = 123
        ann_session.return_value.output_shapes = {123: [(1, 3, 224, 224)]}
        session = AnnSession(Path("ViT-B-32__openai"))

        outputs = session.get_outputs()

        assert len(outputs) == 1
        assert outputs[0].name is None
        assert outputs[0].shape == (1, 3, 224, 224)

    def test_run(self, ann_session: mock.Mock, mocker: MockerFixture) -> None:
        ann_session.return_value.load.return_value = 123
        np_spy = mocker.spy(np, "ascontiguousarray")
        session = AnnSession(Path("ViT-B-32__openai"))
        [input1, input2] = [np.random.rand(1, 3, 224, 224).astype(np.float32) for _ in range(2)]
        input_feed = {"input.1": input1, "input.2": input2}

        session.run(None, input_feed)

        ann_session.return_value.execute.assert_called_once_with(123, [input1, input2])
        assert np_spy.call_count == 2
        np_spy.assert_has_calls([mock.call(input1), mock.call(input2)])
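
test_run pins down the execution contract: every array in input_feed is made contiguous (spied through np.ascontiguousarray) before being handed to the native session, which executes by the integer model ID returned from load. A hedged sketch of a run method with that shape; attribute names are assumptions, and the real class lives in app/sessions/ann.py:

from typing import Any

import numpy as np
from numpy.typing import NDArray


def run(self: Any, output_names: list[str] | None, input_feed: dict[str, NDArray[np.float32]]) -> list[NDArray[np.float32]]:
    # ARM NN wants contiguous buffers; ascontiguousarray is a no-op for
    # arrays that are already C-contiguous.
    inputs = [np.ascontiguousarray(arr) for arr in input_feed.values()]
    # self.model is assumed to hold the integer ID returned by Ann.load (123 in the test).
    return self.ann.execute(self.model, inputs)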


class TestCLIP:
    embedding = np.random.rand(512).astype(np.float32)
@@ -486,6 +480,88 @@ class TestFaceRecognition:
        assert isinstance(call_args[0][0], np.ndarray)
        assert call_args[0][0].shape == (112, 112, 3)

    def test_recognition_adds_batch_axis_for_ort(
        self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
    ) -> None:
        onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
        update_dims = mocker.patch(
            "app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
        )
        mocker.patch("app.models.base.InferenceModel.download")
        mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
        ort_session.return_value.get_inputs.return_value = [SimpleNamespace(name="input.1", shape=(1, 3, 224, 224))]
        ort_session.return_value.get_outputs.return_value = [SimpleNamespace(name="output.1", shape=(1, 800))]
        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"

        proto = mock.Mock()

        input_dims = mock.Mock()
        input_dims.name = "input.1"
        input_dims.type.tensor_type.shape.dim = [SimpleNamespace(dim_value=size) for size in [1, 3, 224, 224]]
        proto.graph.input = [input_dims]

        output_dims = mock.Mock()
        output_dims.name = "output.1"
        output_dims.type.tensor_type.shape.dim = [SimpleNamespace(dim_value=size) for size in [1, 800]]
        proto.graph.output = [output_dims]

        onnx.load.return_value = proto

        face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
        face_recognizer.load()

        assert face_recognizer.batch is True
        update_dims.assert_called_once_with(proto, {"input.1": ["batch", 3, 224, 224]}, {"output.1": ["batch", 800]})
        onnx.save.assert_called_once_with(update_dims.return_value, face_recognizer.model_path)

    def test_recognition_does_not_add_batch_axis_if_exists(
        self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
    ) -> None:
        onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
        update_dims = mocker.patch(
            "app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
        )
        mocker.patch("app.models.base.InferenceModel.download")
        mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"

        inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
        outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
        ort_session.return_value.get_inputs.return_value = inputs
        ort_session.return_value.get_outputs.return_value = outputs

        face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
        face_recognizer.load()

        assert face_recognizer.batch is True
        update_dims.assert_not_called()
        onnx.load.assert_not_called()
        onnx.save.assert_not_called()

    def test_recognition_does_not_add_batch_axis_for_armnn(
        self, ann_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
    ) -> None:
        onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
        update_dims = mocker.patch(
            "app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
        )
        mocker.patch("app.models.base.InferenceModel.download")
        mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
        path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".armnn"

        inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
        outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
        ann_session.return_value.get_inputs.return_value = inputs
        ann_session.return_value.get_outputs.return_value = outputs

        face_recognizer = FaceRecognizer("buffalo_s", model_format=ModelFormat.ARMNN, cache_dir=path)
        face_recognizer.load()

        assert face_recognizer.batch is False
        update_dims.assert_not_called()
        onnx.load.assert_not_called()
        onnx.save.assert_not_called()
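
The three tests above encode a batching rewrite that is not itself shown in this hunk: when an ONNX recognition model still carries a fixed batch dimension of 1, its inputs and outputs are rewritten to a symbolic "batch" dimension and the graph is saved back; nothing happens if the dimension is already symbolic, and ARM NN models stay static. A hedged sketch of that rewrite, matching the update_dims expectation asserted above (the helper name is invented; the real logic lives in app/models/facial_recognition/recognition.py):

import onnx
from onnx.tools.update_model_dims import update_inputs_outputs_dims


def make_batchable(model_path: str) -> None:
    proto = onnx.load(model_path)
    # Swap the leading dim of every input/output for the symbolic "batch",
    # keeping the rest fixed: [1, 3, 224, 224] becomes ["batch", 3, 224, 224].
    input_dims = {
        i.name: ["batch", *[d.dim_value for d in i.type.tensor_type.shape.dim][1:]]
        for i in proto.graph.input
    }
    output_dims = {
        o.name: ["batch", *[d.dim_value for d in o.type.tensor_type.shape.dim][1:]]
        for o in proto.graph.output
    }
    updated = update_inputs_outputs_dims(proto, input_dims, output_dims)
    onnx.save(updated, model_path)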


@pytest.mark.asyncio
class TestCache:
@@ -627,6 +703,7 @@ class TestLoad:
    async def test_load(self) -> None:
        mock_model = mock.Mock(spec=InferenceModel)
        mock_model.loaded = False
+        mock_model.load_attempts = 0

        res = await load(mock_model)

@@ -650,6 +727,7 @@
        mock_model.model_task = ModelTask.SEARCH
        mock_model.load.side_effect = [OSError, None]
        mock_model.loaded = False
+        mock_model.load_attempts = 0

        res = await load(mock_model)

@@ -657,6 +735,41 @@
        mock_model.clear_cache.assert_called_once()
        assert mock_model.load.call_count == 2

    async def test_load_raises_if_os_error_and_already_retried(self) -> None:
        mock_model = mock.Mock(spec=InferenceModel)
        mock_model.model_name = "test_model_name"
        mock_model.model_type = ModelType.VISUAL
        mock_model.model_task = ModelTask.SEARCH
        mock_model.loaded = False
        mock_model.load_attempts = 2

        with pytest.raises(HTTPException):
            await load(mock_model)

        mock_model.clear_cache.assert_not_called()
        mock_model.load.assert_not_called()

    async def test_falls_back_to_onnx_if_other_format_does_not_exist(
        self, exception: mock.Mock, warning: mock.Mock
    ) -> None:
        mock_model = mock.Mock(spec=InferenceModel)
        mock_model.model_name = "test_model_name"
        mock_model.model_type = ModelType.VISUAL
        mock_model.model_task = ModelTask.SEARCH
        mock_model.model_format = ModelFormat.ARMNN
        mock_model.loaded = False
        mock_model.load_attempts = 0
        error = FileNotFoundError()
        mock_model.load.side_effect = [error, None]

        await load(mock_model)

        mock_model.clear_cache.assert_not_called()
        assert mock_model.load.call_count == 2
        exception.assert_called_once_with(error)
        warning.assert_called_once_with("ARMNN is available, but model 'test_model_name' does not support it.")
        assert mock_model.model_format == ModelFormat.ONNX
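
Taken together, the TestLoad cases encode the retry contract for load: give up with an HTTP error once two attempts have failed, clear the cache and retry on OSError, and fall back from ARM NN to ONNX on FileNotFoundError. A hedged sketch of a load helper consistent with those expectations; the function lives elsewhere in the app, and the status code and the incrementing of load_attempts are assumptions:

async def load(model: InferenceModel) -> InferenceModel:
    if model.loaded:
        return model
    if model.load_attempts > 1:
        # Two strikes: surface the failure to the API layer instead of looping.
        raise HTTPException(500, f"Failed to load model '{model.model_name}'")
    model.load_attempts += 1
    try:
        model.load()
    except FileNotFoundError as e:
        if model.model_format != ModelFormat.ARMNN:
            raise
        log.exception(e)
        log.warning(f"ARMNN is available, but model '{model.model_name}' does not support it.")
        model.model_format = ModelFormat.ONNX
        model.load()
    except OSError:
        # A corrupt or partial download can cause this; clear the cache and retry once.
        model.clear_cache()
        model.load()
    return model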


@pytest.mark.skipif(
    not settings.test_full,

machine-learning/export/ann/Dockerfile (new file, 35 lines)
@@ -0,0 +1,35 @@
FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a as builder

USER root
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    cmake \
    curl \
    git
USER $MAMBA_USER

WORKDIR /home/mambauser
ENV ARMNN_PATH=armnn
COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/* .
RUN ./download-armnn.sh && \
    ./build-converter.sh && \
    ./build.sh

COPY --chown=$MAMBA_USER:$MAMBA_USER conda-lock.yml .
RUN micromamba create -y -p /home/mambauser/venv -f conda-lock.yml && \
    micromamba clean --all --yes
ENV PATH="/home/mambauser/venv/bin:${PATH}"

FROM gcr.io/distroless/base-debian12
# FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a

WORKDIR /export/ann
ENV PYTHONDONTWRITEBYTECODE=1 \
    LD_LIBRARY_PATH=/export/ann/armnn \
    PATH="/opt/venv/bin:${PATH}"

COPY --from=builder /home/mambauser/armnnconverter /home/mambauser/armnn ./
COPY --from=builder /home/mambauser/venv /opt/venv
COPY --chown=$MAMBA_USER:$MAMBA_USER onnx2ann onnx2ann

ENTRYPOINT ["python", "-m", "onnx2ann"]

machine-learning/export/ann/conda-lock.yml (new file, 1600 lines; diff suppressed because it is too large)

machine-learning/export/ann/env.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@
name: onnx2ann
channels:
  - conda-forge
dependencies:
  - python>=3.11,<4.0
  - onnx>=1.16.1
  # - onnxruntime>=1.18.1 # conda only has gpu version
  - psutil>=6.0.0
  - flatbuffers>=24.3.25
  - ml_dtypes>=0.3.1
  - typer-slim>=0.12.3
  - huggingface_hub>=0.23.4
  - pip
  - pip:
      - onnxruntime>=1.18.1 # conda only has gpu version
      - onnxsim>=0.4.36
      - onnx2tf>=1.24.1
      - onnx_graphsurgeon>=0.5.2
      - simple_onnx_processing_tools>=1.1.32
      - tf_keras>=2.16.0
      - git+https://github.com/microsoft/onnxconverter-common.git

machine-learning/export/ann/onnx2ann/__main__.py (new file, 99 lines)
@@ -0,0 +1,99 @@
import os
import platform
from typing import Annotated, Optional

import typer

from onnx2ann.export import Exporter, ModelType, Precision

app = typer.Typer(add_completion=False, pretty_exceptions_show_locals=False)


@app.command()
def export(
    model_name: Annotated[
        str, typer.Argument(..., help="The name of the model to be exported as it exists in Hugging Face.")
    ],
    model_type: Annotated[ModelType, typer.Option(..., "--type", "-t", help="The type of model to be exported.")],
    input_shapes: Annotated[
        list[str],
        typer.Option(
            ...,
            "--input-shape",
            "-s",
            help="The shape of an input tensor to the model, each dimension separated by commas. "
            "Multiple shapes can be provided for multiple inputs.",
        ),
    ],
    precision: Annotated[
        Precision,
        typer.Option(
            ...,
            "--precision",
            "-p",
            help="The precision of the exported model. `float16` requires a GPU.",
        ),
    ] = Precision.FLOAT32,
    cache_dir: Annotated[
        str,
        typer.Option(
            ...,
            "--cache-dir",
            "-c",
            help="Directory where pre-export models will be stored.",
            envvar="CACHE_DIR",
            show_envvar=True,
        ),
    ] = "~/.cache/huggingface",
    output_dir: Annotated[
        str,
        typer.Option(
            ...,
            "--output-dir",
            "-o",
            help="Directory where exported models will be stored.",
        ),
    ] = "output",
    auth_token: Annotated[
        Optional[str],
        typer.Option(
            ...,
            "--auth-token",
            "-a",  # short flag -a; -t is already taken by --type
            help="If uploading models to Hugging Face, the auth token of the user or organisation.",
            envvar="HF_AUTH_TOKEN",
            show_envvar=True,
        ),
    ] = None,
    force_export: Annotated[
        bool,
        typer.Option(
            ...,
            "--force-export",
            "-f",
            help="Export the model even if an exported model already exists in the output directory.",
        ),
    ] = False,
) -> None:
    if platform.machine() not in ("x86_64", "AMD64"):
        msg = f"Can only run on x86_64 / AMD64, not {platform.machine()}"
        raise RuntimeError(msg)
    os.environ.setdefault("LD_LIBRARY_PATH", "armnn")
    parsed_input_shapes = [tuple(map(int, shape.split(","))) for shape in input_shapes]
    model = Exporter(
        model_name, model_type, input_shapes=parsed_input_shapes, cache_dir=cache_dir, force_export=force_export
    )
    model_dir = os.path.join(output_dir, model_name)  # honour --output-dir rather than a hardcoded "output"
    export_dir = os.path.join(model_dir, model_type)
    armnn_model = model.to_armnn(export_dir, precision)

    if not auth_token:
        return

    from huggingface_hub import upload_file

    relative_path = os.path.relpath(armnn_model, start=model_dir)
    upload_file(path_or_fileobj=armnn_model, path_in_repo=relative_path, repo_id=model.repo_name, token=auth_token)


app()
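
Because the module invokes app() at import time, the CLI is meant to be run as a module. A hedged example invocation; the model name and input shape are illustrative, and a real run also needs the env.yaml environment plus the armnnconverter binary from the Dockerfile's builder stage:

import subprocess

# Export the textual CLIP tower with a fixed (batch=1, context=77) input shape.
subprocess.run(
    ["python", "-m", "onnx2ann", "ViT-B-32__openai", "--type", "textual", "--input-shape", "1,77"],
    check=True,
)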

machine-learning/export/ann/onnx2ann/export.py (new file, 129 lines)
@@ -0,0 +1,129 @@
import os
import subprocess
from enum import StrEnum

from onnx2ann.helpers import onnx_make_armnn_compatible, onnx_make_inputs_fixed


class ModelType(StrEnum):
    VISUAL = "visual"
    TEXTUAL = "textual"
    RECOGNITION = "recognition"
    DETECTION = "detection"


class Precision(StrEnum):
    FLOAT16 = "float16"
    FLOAT32 = "float32"


class Exporter:
    def __init__(
        self,
        model_name: str,
        model_type: str,
        input_shapes: list[tuple[int, ...]],
        optimization_level: int = 5,
        cache_dir: str = os.environ.get("CACHE_DIR", "~/.cache/huggingface"),
        force_export: bool = False,
    ):
        self.model_name = model_name.split("/")[-1]
        self.model_type = model_type
        self.optimize = optimization_level
        self.input_shapes = input_shapes
        self.cache_dir = os.path.join(cache_dir, self.repo_name)
        self.force_export = force_export

    def download(self) -> str:
        model_path = os.path.join(self.cache_dir, self.model_type, "model.onnx")
        if os.path.isfile(model_path):
            print(f"Model is already downloaded at {model_path}")
            return model_path
        from huggingface_hub import snapshot_download

        snapshot_download(
            self.repo_name, cache_dir=self.cache_dir, local_dir=self.cache_dir, local_dir_use_symlinks=False
        )
        return model_path

    def to_onnx_static(self, precision: Precision) -> str:
        import onnx
        from onnxconverter_common import float16

        onnx_path_original = self.download()
        static_dir = os.path.join(self.cache_dir, self.model_type, "static")

        static_path = os.path.join(static_dir, "model.onnx")
        # Re-export when forced or when the static model does not exist yet.
        if self.force_export or not os.path.isfile(static_path):
            print(f"Making {self} static")
            os.makedirs(static_dir, exist_ok=True)
            onnx_make_inputs_fixed(onnx_path_original, static_path, self.input_shapes)
            onnx_make_armnn_compatible(static_path)
            print(f"Finished making {self} static")

        model = onnx.load(static_path)
        self.inputs = [input_.name for input_ in model.graph.input]
        self.outputs = [output_.name for output_ in model.graph.output]
        if precision == Precision.FLOAT16:
            static_path = os.path.join(static_dir, f"model_{precision}.onnx")
            print(f"Converting {self} to {precision} precision")
            model = float16.convert_float_to_float16(model, keep_io_types=True, disable_shape_infer=True)
            onnx.save(model, static_path)
            print(f"Finished converting {self} to {precision} precision")
        # self.inputs, self.outputs = onnx_get_inputs_outputs(static_path)
        return static_path

    def to_tflite(self, output_dir: str, precision: Precision) -> str:
        onnx_model = self.to_onnx_static(precision)
        tflite_dir = os.path.join(output_dir, precision)
        tflite_model = os.path.join(tflite_dir, f"model_{precision}.tflite")
        if self.force_export or not os.path.isfile(tflite_model):
            import onnx2tf

            print(f"Exporting {self} to TFLite with {precision} precision (this might take a few minutes)")
            onnx2tf.convert(
                input_onnx_file_path=onnx_model,
                output_folder_path=tflite_dir,
                keep_shape_absolutely_input_names=self.inputs,
                # verbosity="warn",
                copy_onnx_input_output_names_to_tflite=True,
                output_signaturedefs=True,
                not_use_onnxsim=True,
            )
            print(f"Finished exporting {self} to TFLite with {precision} precision")

        return tflite_model

    def to_armnn(self, output_dir: str, precision: Precision) -> str:
        armnn_model = os.path.join(output_dir, "model.armnn")
        if not self.force_export and os.path.isfile(armnn_model):
            return armnn_model

        tflite_model_dir = os.path.join(output_dir, "tflite")
        tflite_model = self.to_tflite(tflite_model_dir, precision)

        args = ["./armnnconverter", "-f", "tflite-binary", "-m", tflite_model, "-p", armnn_model]
        args.append("-i")
        args.extend(self.inputs)
        args.append("-o")
        args.extend(self.outputs)

        print(f"Exporting {self} to ARM NN with {precision} precision")
        try:
            if (stdout := subprocess.check_output(args, stderr=subprocess.STDOUT).decode()):
                print(stdout)
            print(f"Finished exporting {self} to ARM NN with {precision} precision")
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            try:
                from shutil import rmtree

                rmtree(tflite_model_dir, ignore_errors=True)
            finally:
                raise e
        return armnn_model

    @property
    def repo_name(self) -> str:
        return f"immich-app/{self.model_name}"

    def __repr__(self) -> str:
        return f"{self.model_name} ({self.model_type})"
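
For orientation, a hedged end-to-end driver for the class above, following the chain download -> static ONNX -> TFLite -> ARM NN that the methods implement; the model, shape, and paths are examples, and a real run needs the conda environment from env.yaml plus armnnconverter in the working directory:

from onnx2ann.export import Exporter, ModelType, Precision

exporter = Exporter("immich-app/ViT-B-32__openai", ModelType.TEXTUAL, input_shapes=[(1, 77)])
armnn_path = exporter.to_armnn("output/ViT-B-32__openai/textual", Precision.FLOAT32)
print(f"ARM NN model written to {armnn_path}")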

machine-learning/export/ann/onnx2ann/helpers.py (new file, 260 lines)
@@ -0,0 +1,260 @@
from typing import Any


def onnx_make_armnn_compatible(model_path: str) -> None:
    """
    I can explain.

    ARM NN only supports up to 4D transposes, but the model has a 5D transpose due to a redundant unsqueeze;
    this function folds the unsqueeze + transpose + squeeze into a single 4D transpose.
    It also switches from Gather ops to Slices, since ARM NN has different dimension semantics for Gathers,
    and fixes batch normalization being left in training mode.
    """

    import numpy as np
    import onnx
    from onnx_graphsurgeon import Constant, Node, Variable, export_onnx, import_onnx

    proto = onnx.load(model_path)
    graph = import_onnx(proto)

    gather_idx = 1
    squeeze_idx = 1
    for node in graph.nodes:
        for link1 in node.outputs:
            if "Unsqueeze" in link1.name:
                for node1 in link1.outputs:
                    for link2 in node1.outputs:
                        if "Transpose" in link2.name:
                            for node2 in link2.outputs:
                                if node2.attrs.get("perm") == [3, 1, 2, 0, 4]:
                                    node2.attrs["perm"] = [2, 0, 1, 3]
                                    link2.shape = link1.shape
                                    for link3 in node2.outputs:
                                        if "Squeeze" in link3.name:
                                            link3.shape = [link3.shape[x] for x in [0, 1, 2, 4]]
                                            for node3 in link3.outputs:
                                                for link4 in node3.outputs:
                                                    link4.shape = link3.shape
                                    try:
                                        idx = link2.inputs.index(node1)
                                        link2.inputs[idx] = node
                                    except ValueError:
                                        pass

                                    node.outputs = [link2]
                                    # note: link3/link4 carry the last values from the loops above
                                    if "Gather" in link4.name:
                                        for node4 in link4.outputs:
                                            axis = node1.attrs.get("axis", 0)
                                            index = node4.inputs[1].values
                                            slice_link = Variable(
                                                f"onnx::Slice_123{gather_idx}",
                                                dtype=link4.dtype,
                                                shape=[1] + link3.shape[1:],
                                            )
                                            slice_node = Node(
                                                op="Slice",
                                                inputs=[
                                                    link3,
                                                    Constant(
                                                        f"SliceStart_123{gather_idx}",
                                                        np.array([index]),
                                                    ),
                                                    Constant(
                                                        f"SliceEnd_123{gather_idx}",
                                                        np.array([index + 1]),
                                                    ),
                                                    Constant(
                                                        f"SliceAxis_123{gather_idx}",
                                                        np.array([axis]),
                                                    ),
                                                ],
                                                outputs=[slice_link],
                                                name=f"Slice_123{gather_idx}",
                                            )
                                            graph.nodes.append(slice_node)
                                            gather_idx += 1

                                            for link5 in node4.outputs:
                                                for node5 in link5.outputs:
                                                    try:
                                                        idx = node5.inputs.index(link5)
                                                        node5.inputs[idx] = slice_link
                                                    except ValueError:
                                                        pass
            elif node.op == "LayerNormalization":
                for node1 in link1.outputs:
                    if node1.op == "Gather":
                        for link2 in node1.outputs:
                            for node2 in link2.outputs:
                                axis = node1.attrs.get("axis", 0)
                                index = node1.inputs[1].values
                                slice_link = Variable(
                                    f"onnx::Slice_123{gather_idx}",
                                    dtype=link2.dtype,
                                    shape=[1, *link2.shape],
                                )
                                slice_node = Node(
                                    op="Slice",
                                    inputs=[
                                        node1.inputs[0],
                                        Constant(
                                            f"SliceStart_123{gather_idx}",
                                            np.array([index]),
                                        ),
                                        Constant(
                                            f"SliceEnd_123{gather_idx}",
                                            np.array([index + 1]),
                                        ),
                                        Constant(
                                            f"SliceAxis_123{gather_idx}",
                                            np.array([axis]),
                                        ),
                                    ],
                                    outputs=[slice_link],
                                    name=f"Slice_123{gather_idx}",
                                )
                                graph.nodes.append(slice_node)
                                gather_idx += 1

                                squeeze_link = Variable(
                                    f"onnx::Squeeze_123{squeeze_idx}",
                                    dtype=link2.dtype,
                                    shape=link2.shape,
                                )
                                squeeze_node = Node(
                                    op="Squeeze",
                                    inputs=[
                                        slice_link,
                                        Constant(
                                            f"SqueezeAxis_123{squeeze_idx}",
                                            np.array([0]),
                                        ),
                                    ],
                                    outputs=[squeeze_link],
                                    name=f"Squeeze_123{squeeze_idx}",
                                )
                                graph.nodes.append(squeeze_node)
                                squeeze_idx += 1
                                try:
                                    idx = node2.inputs.index(link2)
                                    node2.inputs[idx] = squeeze_link
                                except ValueError:
                                    pass
            elif node.op == "Reshape":
                for node1 in link1.outputs:
                    if node1.op == "Gather":
                        node2s = [n for link in node1.outputs for n in link.outputs]
                        if any(n.op == "Abs" for n in node2s):
                            axis = node1.attrs.get("axis", 0)
                            index = node1.inputs[1].values
                            slice_link = Variable(
                                f"onnx::Slice_123{gather_idx}",
                                dtype=node1.outputs[0].dtype,
                                shape=[1, *node1.outputs[0].shape],
                            )
                            slice_node = Node(
                                op="Slice",
                                inputs=[
                                    node1.inputs[0],
                                    Constant(
                                        f"SliceStart_123{gather_idx}",
                                        np.array([index]),
                                    ),
                                    Constant(
                                        f"SliceEnd_123{gather_idx}",
                                        np.array([index + 1]),
                                    ),
                                    Constant(
                                        f"SliceAxis_123{gather_idx}",
                                        np.array([axis]),
                                    ),
                                ],
                                outputs=[slice_link],
                                name=f"Slice_123{gather_idx}",
                            )
                            graph.nodes.append(slice_node)
                            gather_idx += 1

                            squeeze_link = Variable(
                                f"onnx::Squeeze_123{squeeze_idx}",
                                dtype=node1.outputs[0].dtype,
                                shape=node1.outputs[0].shape,
                            )
                            squeeze_node = Node(
                                op="Squeeze",
                                inputs=[
                                    slice_link,
                                    Constant(
                                        f"SqueezeAxis_123{squeeze_idx}",
                                        np.array([0]),
                                    ),
                                ],
                                outputs=[squeeze_link],
                                name=f"Squeeze_123{squeeze_idx}",
                            )
                            graph.nodes.append(squeeze_node)
                            squeeze_idx += 1
                            for node2 in node2s:
                                node2.inputs[0] = squeeze_link
            elif node.op == "BatchNormalization" and node.attrs.get("training_mode") == 1:
                node.attrs["training_mode"] = 0
                node.outputs = node.outputs[:1]

    graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True)
    graph.toposort()
    graph.fold_constants()
    updated = export_onnx(graph)
    onnx_save(updated, model_path)

    # For some reason, reloading the model is necessary to apply the correct shape.
    proto = onnx.load(model_path)
    graph = import_onnx(proto)
    for node in graph.nodes:
        if node.op == "Slice":
            for link in node.outputs:
                if "Slice_123" in link.name and link.shape[0] == 3:  # noqa: PLR2004
                    link.shape[0] = 1

    graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True)
    graph.toposort()
    graph.fold_constants()
    updated = export_onnx(graph)
    onnx_save(updated, model_path)
    onnx.shape_inference.infer_shapes_path(model_path, check_type=True, strict_mode=True, data_prop=True)
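
The Gather-to-Slice rewrite above is easier to see in plain numpy. A minimal, self-contained sketch of the equivalence it relies on (illustration only, not part of the exporter):

import numpy as np

x = np.random.rand(4, 3, 8)
axis, index = 1, 2

# Gather(axis=1, indices=2) drops the gathered axis...
gathered = np.take(x, index, axis=axis)
# ...which is exactly Slice(starts=[2], ends=[3], axes=[1]) followed by Squeeze(axes=[1]).
sliced = np.take(x, [index], axis=axis)
squeezed = np.squeeze(sliced, axis=axis)

assert gathered.shape == squeezed.shape == (4, 8)
assert np.array_equal(gathered, squeezed)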


def onnx_make_inputs_fixed(input_path: str, output_path: str, input_shapes: list[tuple[int, ...]]) -> None:
    import onnx
    import onnxsim
    from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed

    model, success = onnxsim.simplify(input_path)
    if not success:
        msg = f"Failed to simplify {input_path}"
        raise RuntimeError(msg)
    onnx_save(model, output_path)
    onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True)
    model = onnx.load_model(output_path)
    for input_node, shape in zip(model.graph.input, input_shapes, strict=False):
        make_input_shape_fixed(model.graph, input_node.name, shape)
    fix_output_shapes(model)
    onnx_save(model, output_path)
    onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True)


def onnx_get_inputs_outputs(model_path: str) -> tuple[list[str], list[str]]:
    import onnx

    model = onnx.load(model_path)
    inputs = [input_.name for input_ in model.graph.input]
    outputs = [output_.name for output_ in model.graph.output]
    return inputs, outputs


def onnx_save(model: Any, output_path: str) -> None:
    import onnx

    try:
        onnx.save(model, output_path)
    except ValueError:
        # Protobuf caps a single file at 2GB; fall back to external tensor data.
        onnx.save(
            model, output_path, save_as_external_data=True, all_tensors_to_one_file=False, size_threshold=1_000_000
        )
Some files were not shown because too many files have changed in this diff.