Compare commits

fix/folder...fix/mobile (428 Commits)
| Author | SHA1 | Date |
|---|---|---|
| | 9bd298a160 | |
| | f78b151b64 | |
| | dfd9ed988e | |
| | a25f14e1b9 | |
| | a896c5a4dd | |
| | c74989d304 | |
| | 1283491cc2 | |
| | 89522daaac | |
| | 011a667314 | |
| | df2525ee08 | |
| | 01a9f735c8 | |
| | af10c3bc2f | |
| | 395f2e155d | |
| | 10cbed55c4 | |
| | 325d5f7ba9 | |
| | 746252fe39 | |
| | f36efd128b | |
| | f1c494ef97 | |
| | 9c8c52874a | |
| | 68b617130a | |
| | 89292fecb4 | |
| | 1193a23282 | |
| | bbfff64927 | |
| | c5c9a522c1 | |
| | 3cd7f5ab90 | |
| | f2067221c5 | |
| | 89598cf0be | |
| | 0121043d7d | |
| | 1ca46fbd98 | |
| | 6ddef3a7e4 | |
| | 0d9ebdc46a | |
| | fa26d0de33 | |
| | a5760129f0 | |
| | d430b869ac | |
| | 4179c8a17d | |
| | 0a9cbf01d2 | |
| | 9567a2a560 | |
| | 58dd6f094c | |
| | 02381343ff | |
| | 09a5963eee | |
| | a573a23c83 | |
| | 7118dca559 | |
| | 13d43e193e | |
| | 7a7843467c | |
| | 9e6fee4064 | |
| | 9680f1290d | |
| | ce2ea98926 | |
| | 5c76cc34e1 | |
| | eb2f4c866e | |
| | 2a370087e8 | |
| | 272c8a5812 | |
| | 08fe549ed8 | |
| | ae15efdf2a | |
| | 8e003f95db | |
| | 3e92e837f1 | |
| | 081307ced2 | |
| | a91bb399f0 | |
| | 42b78c59b5 | |
| | 750d21aeba | |
| | 990d9ba9a8 | |
| | 4d0c9172e5 | |
| | 094e3a2757 | |
| | 278668b8c5 | |
| | 10141504a2 | |
| | 67736c8fce | |
| | b56a272f64 | |
| | 5901c2e963 | |
| | be85832b20 | |
| | c8f9a72d3e | |
| | 3d633a81c4 | |
| | 4efbf36d82 | |
| | e2c3c39597 | |
| | 007ba1d9ef | |
| | 4d5cd1a6b5 | |
| | 8108f50c4e | |
| | 1b8354ed36 | |
| | 9242afb4b0 | |
| | c5f14adff0 | |
| | 1378f22368 | |
| | 4bd465e752 | |
| | a07531be3b | |
| | 3cdc6844a1 | |
| | c3263e50fc | |
| | 7391ea6ff9 | |
| | f972b8d514 | |
| | 6b50d958f4 | |
| | 27c456eb75 | |
| | e7d051db3c | |
| | 86d31d7d29 | |
| | f416342eff | |
| | d73335ecbc | |
| | 641a3baadd | |
| | f85d8add01 | |
| | c278b7ad17 | |
| | 10e9c278ee | |
| | 47a025f39f | |
| | 749f999f2a | |
| | d5a01c0310 | |
| | 097e132fba | |
| | da5deffd03 | |
| | 9f20522df5 | |
| | baadf9db20 | |
| | 4ea4ee40af | |
| | d8a6552811 | |
| | 444133a72b | |
| | 29f16c6a47 | |
| | 268b411a6f | |
| | 07ed060c32 | |
| | 2f5d543ad9 | |
| | 9b65cd4d7b | |
| | 290e325c5c | |
| | 58521c9efb | |
| | 4cae15f28d | |
| | e6ec019852 | |
| | 3b5e00131b | |
| | a0fa7318ed | |
| | 2a005629a0 | |
| | 59a50b8697 | |
| | 90eac40e02 | |
| | ad6f7f8089 | |
| | 056b262cba | |
| | cfae134ecf | |
| | fbbb6af27a | |
| | 1804a8fe58 | |
| | ae1d60e259 | |
| | 7d759edfcc | |
| | 34974b036c | |
| | e52b9d15b5 | |
| | 9b3718120b | |
| | 16b14b390f | |
| | 7e7b8da128 | |
| | 66ea75072d | |
| | d34670bae6 | |
| | 1e1c2ea627 | |
| | c7fcb23a23 | |
| | 708e42d8a3 | |
| | d15f67da5d | |
| | 6becf409da | |
| | ee4ae40d61 | |
| | ebd644eedd | |
| | 7c36cbaf0f | |
| | 3a5d82f790 | |
| | b14c768208 | |
| | 07cb2fb04e | |
| | 9bbad45990 | |
| | e85655d34c | |
| | d0576697c3 | |
| | f9847bee51 | |
| | f2141de5bb | |
| | cb344cb014 | |
| | c6b25ef111 | |
| | 0fdeac0417 | |
| | 153bb70f6e | |
| | da80b69062 | |
| | f9292c9c96 | |
| | 2e0ee6ec05 | |
| | 7f2e4f85f8 | |
| | c63f805cb4 | |
| | 03a13828e1 | |
| | e5ee1c8db6 | |
| | 25e2d37490 | |
| | ed5759fe07 | |
| | edefed56ae | |
| | 13281f8531 | |
| | b48406bd20 | |
| | 06c78dfa91 | |
| | de67d22bc0 | |
| | b4780e89af | |
| | ad65e9011a | |
| | 977c9b96ba | |
| | aa2828ab33 | |
| | a36840d7cc | |
| | e34f46fa0d | |
| | 6170a3843c | |
| | 563e2ab503 | |
| | 79157e1043 | |
| | 02688a2a03 | |
| | 3b9bfceef0 | |
| | 089085fcdb | |
| | fc68cf4f32 | |
| | 0051a9bba5 | |
| | f27bdf7523 | |
| | c1c9f30ea4 | |
| | bc8cb9b671 | |
| | a675922172 | |
| | 2bead445bd | |
| | 0e1c8c2b80 | |
| | 0174de19dd | |
| | 1a35a01149 | |
| | 08122d6871 | |
| | 92384c28de | |
| | ab597155fa | |
| | 1d9cc4ca5f | |
| | 05d26dc683 | |
| | c91382625c | |
| | c7853fbe9d | |
| | 1a70896113 | |
| | 1011cdb376 | |
| | f1cac122ed | |
| | 3c7f0a2900 | |
| | 277e39ac98 | |
| | b3061f1e4f | |
| | 250548dea6 | |
| | ac44f6d1e0 | |
| | ab61bcfcc8 | |
| | aa344a3989 | |
| | 2efca67217 | |
| | 97daf42fd5 | |
| | 5548033cae | |
| | 637eba6e08 | |
| | f16457d2f9 | |
| | 7b41c6348c | |
| | bd92f6b12d | |
| | 496b0c7076 | |
| | 166452640d | |
| | 30f6dc3a6e | |
| | 02c423b326 | |
| | df318ac641 | |
| | 826eaedae6 | |
| | 737e768212 | |
| | 99e5b33969 | |
| | 7ea8783593 | |
| | 4d27f187ea | |
| | 1dc62fce5f | |
| | 5fc4393e7a | |
| | dee6d072fb | |
| | 261818ddd9 | |
| | fafb88d31c | |
| | f929dc0816 | |
| | 9e94f52b05 | |
| | 5d244c6fec | |
| | dcfe8d5ade | |
| | 635f5de186 | |
| | 9719965caf | |
| | f33e1ad94c | |
| | 576f681b5c | |
| | 493d85b021 | |
| | f32d4f15b6 | |
| | 7bae49ebd5 | |
| | 2e63b9d951 | |
| | 137f0d48c0 | |
| | 53acf08263 | |
| | f32cd74232 | |
| | 546f841b2c | |
| | 8491fe459d | |
| | 2046dcc5b4 | |
| | 03ff425664 | |
| | 055b930066 | |
| | 531515daf9 | |
| | b256c51b6b | |
| | 238dc7c085 | |
| | 184c7390a1 | |
| | 649221176c | |
| | eae2471ab5 | |
| | bfceed15da | |
| | d9891f759e | |
| | 32f23b8d38 | |
| | 743b6644e9 | |
| | 34620e1e9a | |
| | bcb968e3d1 | |
| | e73abe0762 | |
| | 920d7de349 | |
| | 351701c4d6 | |
| | 68f249bc03 | |
| | eca54871d0 | |
| | b359eea124 | |
| | c18f167e29 | |
| | ba262fbaa8 | |
| | 59e7754bdc | |
| | 0acbf1199a | |
| | daea57f7d2 | |
| | 82c3165247 | |
| | 3a854d77ac | |
| | ccd0c35ca1 | |
| | 5f10a4cae7 | |
| | 9abb95d34a | |
| | 805ec3e351 | |
| | a97ba4862f | |
| | c699df002a | |
| | 33c29e4305 | |
| | b0098d6d23 | |
| | 04aab6ecce | |
| | 47c0dc0d7e | |
| | df581cc0d5 | |
| | 9e48ae3052 | |
| | 1d19d308e2 | |
| | de4217cefc | |
| | 617a2f146d | |
| | 2b07d7ac63 | |
| | 1cc5ca14ca | |
| | a625921e8f | |
| | a17bba3328 | |
| | 4b3a4725c6 | |
| | 34f0f6c813 | |
| | 906d14c172 | |
| | d087f7c870 | |
| | de345a9524 | |
| | badd7ea2a9 | |
| | 7d8f56b483 | |
| | 70b73145f1 | |
| | d178c52ba6 | |
| | 55fe67dd20 | |
| | ed4c7817e7 | |
| | 39c95f1280 | |
| | 4ddd3764b4 | |
| | 68db17028b | |
| | 1f50a0075e | |
| | b19884d01e | |
| | feff1899ee | |
| | 977d6452f6 | |
| | f778adea92 | |
| | 818bdde317 | |
| | fd48a33686 | |
| | a918481c0b | |
| | a201665b7e | |
| | 2a222fcfba | |
| | d902e7f87d | |
| | 6278fe43c0 | |
| | dfe6d27bbd | |
| | 51ab7498e9 | |
| | 4db76ddcf0 | |
| | d03eb87058 | |
| | a556de67b0 | |
| | e703685d8d | |
| | 172388c455 | |
| | df4a27e8a7 | |
| | 1f9813a28e | |
| | bbfff45058 | |
| | 87dd09d103 | |
| | dd94ad17aa | |
| | a87c2e82cd | |
| | a11ab4c3f7 | |
| | ebf2f9fd7b | |
| | 683af67344 | |
| | d149d6fa3f | |
| | 8c5269c002 | |
| | cf91d9bdfc | |
| | 5579554532 | |
| | 7e35e6985e | |
| | 56756baea2 | |
| | d5923241b5 | |
| | cc471806fe | |
| | 4ce9bce414 | |
| | 2f5d75ce21 | |
| | fb384fe90b | |
| | 73733370a2 | |
| | 4a2cf28882 | |
| | 181efb9010 | |
| | b00d44a00c | |
| | 6044663e26 | |
| | 484529e61e | |
| | 445f9174ea | |
| | 7855974a29 | |
| | ec603a008c | |
| | 14276f41d8 | |
| | a644cabab6 | |
| | b8e67d0ef9 | |
| | ca78bc91b6 | |
| | f2f3db3a79 | |
| | c435bdb5d3 | |
| | 15da0d5a71 | |
| | 090d87f82e | |
| | 25efba8fe6 | |
| | 83afd49f5c | |
| | 639ede78c2 | |
| | 15be3437bf | |
| | f59b0bab5a | |
| | fa418d778b | |
| | e0c4b8df6f | |
| | 7f9689b4bc | |
| | e6f8bfdf5e | |
| | 8ccca04e27 | |
| | 53f80393bf | |
| | e5e857edc3 | |
| | 590f96246d | |
| | 38d73f2bc6 | |
| | 96e3b96d57 | |
| | 36b018e355 | |
| | 214ca50406 | |
| | 29b3981609 | |
| | a068a41c06 | |
| | 3c6e9e1191 | |
| | db0415bbcc | |
| | a5c431fbf5 | |
| | a3d588f6bd | |
| | 21f500191a | |
| | 5011636d95 | |
| | 3f330c6476 | |
| | bb8755021d | |
| | 93f9e118ad | |
| | 58ca1402ed | |
| | 32a7087883 | |
| | 53020852ec | |
| | 181a7e115f | |
| | 095ace8687 | |
| | 4c3fcdc745 | |
| | fa5f30d9ca | |
| | e60bc3c304 | |
| | 09cbc5d3f4 | |
| | a2a9797fab | |
| | 3d35e65f27 | |
| | df76735f4a | |
| | 6feca56da8 | |
| | 97aabe466e | |
| | 72a53f43c8 | |
| | 30b4f334d8 | |
| | 6c6a32c63e | |
| | 6fed223405 | |
| | 3105094a3d | |
| | b96c95beda | |
| | 926ff075a3 | |
| | 934649c8df | |
| | a43159f4ba | |
| | ea3a14ed25 | |
| | 24a4cba953 | |
| | fda22c83b9 | |
| | 2a8019726c | |
| | 5f76cdddc7 | |
| | 48be10e48b | |
| | 6c11ef62e8 | |
| | 65dce58aa4 | |
| | 64cc7239fe | |
| | 5f89c2d111 | |
| | 4621ec5ea2 | |
| | 881a96cdf9 | |
| | b001ba44f5 | |
| | afb444c92c | |
| | 027c4a8b34 | |
@@ -11,8 +11,8 @@ services:
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION}/photos:/workspaces/immich/server/upload
- ${UPLOAD_LOCATION}/photos/upload:/workspaces/immich/server/upload/upload
- ${UPLOAD_LOCATION}/photos:/data
- ${UPLOAD_LOCATION}/photos/upload:/data/upload
- /etc/localtime:/etc/localtime:ro

database:

@@ -73,10 +73,8 @@ install_dependencies() {
log "Installing dependencies"
(
cd "${IMMICH_WORKSPACE}" || exit 1
run_cmd make ci-server
run_cmd make ci-sdk
run_cmd make build-sdk
run_cmd make ci-web
export CI=1 FROZEN=1 OFFLINE=1
run_cmd make setup-web-dev setup-server-dev
)
log ""
}

@@ -13,8 +13,8 @@ services:
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/workspaces/immich/server/upload
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/workspaces/immich/server/upload/upload
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/data
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/data/upload
- /etc/localtime:/etc/localtime:ro

immich-web:

@@ -22,7 +22,7 @@ services:

immich-machine-learning:
env_file: !reset []

database:
env_file: !reset []
environment: !override

@@ -31,7 +31,7 @@ services:
POSTGRES_DB: ${DB_DATABASE_NAME-immich}
POSTGRES_INITDB_ARGS: '--data-checksums'
POSTGRES_HOST_AUTH_METHOD: md5
volumes:
volumes:
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data

redis:
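
The `${VAR:-default}${VAR:+suffix}` pairs in the hunk above use POSIX-style parameter expansion, which Docker Compose also applies inside compose files: `:-` substitutes a fallback when the variable is unset or empty, while `:+` substitutes the suffix only when the variable is set. A minimal bash sketch of how the two combine (the `demo` helper and the echoed output are illustrative, not part of the change):

```bash
#!/usr/bin/env bash
# With UPLOAD_LOCATION set, the pair expands to "$UPLOAD_LOCATION/photos";
# unset, it falls back to the named volume "upload1-devcontainer-volume".
demo() {
  echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
}

unset UPLOAD_LOCATION
demo                        # prints: upload1-devcontainer-volume

UPLOAD_LOCATION=/mnt/media
demo                        # prints: /mnt/media/photos
```
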
@@ -1,37 +1,41 @@
.vscode/
.github/
.git/
.env*
*.log
*.tmp
*.temp

**/Dockerfile
**/node_modules/
**/.pnpm-store/
**/dist/
**/coverage/
**/build/

design/
docker/
!docker/scripts

docs/
!docs/package.json
!docs/package-lock.json

e2e/
!e2e/package.json
!e2e/package-lock.json

fastlane/
machine-learning/
misc/
mobile/

cli/coverage/
cli/dist/
cli/node_modules/

open-api/typescript-sdk/build/
open-api/typescript-sdk/node_modules/
!open-api/typescript-sdk/package.json
!open-api/typescript-sdk/package-lock.json

server/coverage/
server/node_modules/
server/upload/
server/src/queries
server/dist/
server/www/

web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env

6 .gitattributes vendored
@@ -12,6 +12,12 @@ mobile/lib/**/*.drift.dart linguist-generated=true
mobile/drift_schemas/main/drift_schema_*.json -diff -merge
mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true

mobile/lib/infrastructure/repositories/db.repository.steps.dart -diff -merge
mobile/lib/infrastructure/repositories/db.repository.steps.dart linguist-generated=true

mobile/test/drift/main/generated/** -diff -merge
mobile/test/drift/main/generated/** linguist-generated=true

open-api/typescript-sdk/fetch-client.ts -diff -merge
open-api/typescript-sdk/fetch-client.ts linguist-generated=true

2 .github/.nvmrc vendored
@@ -1 +1 @@
22.16.0
22.18.0

4 .github/.prettierignore vendored Normal file
@@ -0,0 +1,4 @@
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

6 .github/package-lock.json generated vendored
@@ -9,9 +9,9 @@
}
},
"node_modules/prettier": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz",
"integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
"dev": true,
"license": "MIT",
"bin": {

54 .github/workflows/build-mobile.yml vendored
@@ -58,7 +58,7 @@ jobs:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: macos-14
runs-on: mich

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -66,24 +66,40 @@ jobs:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false

- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks

- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
distribution: 'zulu'
java-version: '17'
cache: 'gradle'

- name: Restore Gradle Cache
id: cache-gradle-restore
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: build-mobile-gradle-${{ runner.os }}-main

- name: Setup Flutter SDK
uses: subosito/flutter-action@395322a6cded4e9ed503aebd4cc1965625f8e59a # v2.20.0
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
cache: true

- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: echo $KEY_JKS | base64 -d > android/key.jks
- name: Setup Android SDK
uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
with:
packages: ''

- name: Get Packages
working-directory: ./mobile
@@ -103,12 +119,30 @@ jobs:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
run: |
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
if [[ $IS_MAIN == 'true' ]]; then
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
else
flutter build apk --debug --split-per-abi --target-platform android-arm64
fi

- name: Publish Android Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk

- name: Save Gradle Cache
id: cache-gradle-save
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4
if: github.ref == 'refs/heads/main'
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}
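
The keystore step above swaps `echo $KEY_JKS | base64 -d` for `printf "%s" $KEY_JKS | base64 -d`. A plausible reading of the change: `printf "%s"` emits the secret exactly as stored, whereas `echo` appends a newline and, in some shells, interprets backslash escapes. A small sketch of the difference (the sample value is a stand-in, not the real secret):

```bash
#!/usr/bin/env bash
KEY_JKS="aGVsbG8="          # stand-in; in CI this comes from secrets.KEY_JKS

# base64 -d tolerates a trailing newline, but printf "%s" is the portable way
# to emit a string byte-for-byte, with no escape interpretation or extra "\n".
echo "$KEY_JKS"        | base64 -d   # works here, but shell-dependent for odd inputs
printf "%s" "$KEY_JKS" | base64 -d   # emits the payload unmodified before decoding
```
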
96 .github/workflows/close-duplicates.yml vendored Normal file
@@ -0,0 +1,96 @@
on:
issues:
types: [opened]
discussion:
types: [created]

name: Close likely duplicates
permissions: {}

jobs:
get_body:
runs-on: ubuntu-latest
env:
EVENT: ${{ toJSON(github.event) }}
outputs:
body: ${{ steps.get_body.outputs.body }}
steps:
- id: get_body
run: |
BODY=$(echo """$EVENT""" | jq -r '.issue // .discussion | .body' | base64 -w 0)
echo "body=$BODY" >> $GITHUB_OUTPUT

get_checkbox_json:
runs-on: ubuntu-latest
needs: get_body
container:
image: yshavit/mdq:0.7.2
outputs:
json: ${{ steps.get_checkbox.outputs.json }}
steps:
- id: get_checkbox
env:
BODY: ${{ needs.get_body.outputs.body }}
run: |
JSON=$(echo "$BODY" | base64 -d | /mdq --output json '# I have searched | - [?] Yes')
echo "json=$JSON" >> $GITHUB_OUTPUT

close_and_comment:
runs-on: ubuntu-latest
needs: get_checkbox_json
if: ${{ !fromJSON(needs.get_checkbox_json.outputs.json).items[0].list[0].checked }}
permissions:
issues: write
discussions: write
steps:
- name: Close issue
if: ${{ github.event_name == 'issues' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.issue.node_id }}
run: |
gh api graphql \
-f issueId="$NODE_ID" \
-f body="This issue has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one." \
-f query='
mutation CommentAndCloseIssue($issueId: ID!, $body: String!) {
addComment(input: {
subjectId: $issueId,
body: $body
}) {
__typename
}

closeIssue(input: {
issueId: $issueId,
stateReason: DUPLICATE
}) {
__typename
}
}'

- name: Close discussion
if: ${{ github.event_name == 'discussion' && github.event.discussion.category.name == 'Feature Request' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.discussion.node_id }}
run: |
gh api graphql \
-f discussionId="$NODE_ID" \
-f body="This discussion has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one." \
-f query='
mutation CommentAndCloseDiscussion($discussionId: ID!, $body: String!) {
addDiscussionComment(input: {
discussionId: $discussionId,
body: $body
}) {
__typename
}

closeDiscussion(input: {
discussionId: $discussionId,
reason: DUPLICATE
}) {
__typename
}
}'
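
In the `get_body` job above, the issue or discussion body is base64-encoded before being written to `$GITHUB_OUTPUT`. Step outputs are line-oriented `key=value` records, so a multi-line body would otherwise break the record; `base64 -w 0` (GNU coreutils) produces a single unwrapped line the next job can decode losslessly. A minimal sketch of the round trip outside of Actions:

```bash
#!/usr/bin/env bash
BODY=$'First line\nSecond line'        # stand-in for a multi-line issue body

# Flatten to one line so it fits a single key=value output record.
ENCODED=$(printf '%s' "$BODY" | base64 -w 0)
echo "body=$ENCODED"                   # what gets appended to "$GITHUB_OUTPUT"

# The consuming job reverses the encoding.
printf '%s' "$ENCODED" | base64 -d
```
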
6 .github/workflows/codeql-analysis.yml vendored
@@ -50,7 +50,7 @@ jobs:

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +63,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/autobuild@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5

# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +76,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
with:
category: '/language:${{matrix.language}}'

4 .github/workflows/docker.yml vendored
@@ -131,7 +131,7 @@ jobs:
tag-suffix: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
permissions:
contents: read
actions: read
@@ -154,7 +154,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
permissions:
contents: read
actions: read

4 .github/workflows/docs-build.yml vendored
@@ -18,7 +18,7 @@ jobs:
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.found_paths.outputs.open-api == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -32,6 +32,8 @@ jobs:
- 'docs/**'
workflow:
- '.github/workflows/docs-build.yml'
open-api:
- 'open-api/immich-openapi-specs.json'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"

13 .github/workflows/org-checks.yml vendored Normal file
@@ -0,0 +1,13 @@
name: Org Checks

on:
pull_request_review:
pull_request:

jobs:
check-approvals:
name: Check for Team/Admin Review
uses: immich-app/devtools/.github/workflows/required-approval.yml@main
permissions:
pull-requests: read
contents: read

2 .github/workflows/pr-label-validation.yml vendored
@@ -14,7 +14,7 @@ jobs:
pull-requests: write
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
with:
mode: exactly
count: 1

14 .github/workflows/preview-label.yaml vendored
@@ -20,7 +20,7 @@ jobs:

remove-label:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'closed' && contains(github.event.pull_request.labels.*.name, 'preview') }}
if: ${{ (github.event.action == 'closed' || github.event.pull_request.head.repo.fork) && contains(github.event.pull_request.labels.*.name, 'preview') }}
permissions:
pull-requests: write
steps:
@@ -33,3 +33,15 @@ jobs:
repo: context.repo.repo,
name: 'preview'
})

- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'PRs from forks cannot have preview environments.'

- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'Preview environment has been removed.'

31 .github/workflows/static_analysis.yml vendored
@@ -42,6 +42,9 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./mobile
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -49,34 +52,29 @@ jobs:
persist-credentials: false

- name: Setup Flutter SDK
uses: subosito/flutter-action@395322a6cded4e9ed503aebd4cc1965625f8e59a # v2.20.0
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml

- name: Install dependencies
run: dart pub get
working-directory: ./mobile

- name: Install DCM
run: |
sudo apt-get update
wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg
echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list
sudo apt-get update
sudo apt-get install dcm
uses: CQLabs/setup-dcm@8697ae0790c0852e964a6ef1d768d62a6675481a # v2.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
version: auto
working-directory: ./mobile

- name: Generate translation file
run: make translation
working-directory: ./mobile

- name: Run Build Runner
run: make build
working-directory: ./mobile

- name: Generate platform API
run: make pigeon
working-directory: ./mobile

- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
@@ -92,25 +90,22 @@ jobs:
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
echo "ERROR: Generated files not up to date! Run 'make build' and 'make pigeon' inside the mobile directory"
echo "Changed files: ${CHANGED_FILES}"
exit 1

- name: Run dart analyze
run: dart analyze --fatal-infos
working-directory: ./mobile

- name: Run dart format
run: dart format lib/ --set-exit-if-changed
working-directory: ./mobile
run: make format

- name: Run dart custom_lint
run: dart run custom_lint
working-directory: ./mobile

# TODO: Use https://github.com/CQLabs/dcm-action
- name: Run DCM
run: dcm analyze lib --fatal-style --fatal-warnings
working-directory: ./mobile

zizmor:
name: zizmor
@@ -134,7 +129,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
uses: github/codeql-action/upload-sarif@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
with:
sarif_file: results.sarif
category: zizmor

6 .github/workflows/test.yml vendored
@@ -516,7 +516,7 @@ jobs:
persist-credentials: false

- name: Setup Flutter SDK
uses: subosito/flutter-action@395322a6cded4e9ed503aebd4cc1965625f8e59a # v2.20.0
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -609,7 +609,7 @@ jobs:
persist-credentials: false

- name: Run ShellCheck
uses: ludeeus/action-shellcheck@master
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
with:
ignore_paths: >-
**/open-api/**
@@ -668,7 +668,7 @@ jobs:
contents: read
services:
postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:1f5583fe3397210a0fbc7f11b0cec18bacc4a99e3e8ea0548e9bd6bcf26ec37a
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:ec713143dca1a426eba2e03707c319e2ec3cc9d304ef767f777f8e297dee820c
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres

2 .github/workflows/weblate-lock.yml vendored
@@ -38,7 +38,7 @@ jobs:
exit 1
fi
- name: Find Pull Request
uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1.9.0
uses: juliangruber/find-pull-request-action@952b3bb1ddb2dcc0aa3479e98bb1c2d1a922f096 # v1.10.0
id: find-pr
with:
branch: chore/translations

1 .gitignore vendored
@@ -24,3 +24,4 @@ mobile/android/fastlane/report.xml
mobile/ios/fastlane/report.xml

vite.config.js.timestamp-*
.pnpm-store

4 .vscode/launch.json vendored
@@ -7,7 +7,7 @@
"restart": true,
"port": 9231,
"name": "Immich API Server",
"remoteRoot": "/usr/src/app",
"remoteRoot": "/usr/src/app/server",
"localRoot": "${workspaceFolder}/server"
},
{
@@ -16,7 +16,7 @@
"restart": true,
"port": 9230,
"name": "Immich Workers",
"remoteRoot": "/usr/src/app",
"remoteRoot": "/usr/src/app/server",
"localRoot": "${workspaceFolder}/server"
}
]

32 Makefile
@@ -1,27 +1,36 @@
dev:
docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans || make dev-down
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans

dev-down:
docker compose -f ./docker/docker-compose.dev.yml down --remove-orphans

dev-update:
docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans

dev-scale:
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

dev-docs:
npm --prefix docs run start

.PHONY: e2e
e2e:
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans

e2e-update:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans

e2e-down:
docker compose -f ./e2e/docker-compose.yml down --remove-orphans

prod:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans

prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans

prod-scale:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

.PHONY: open-api
open-api:
@@ -83,7 +92,7 @@ test-medium-dev:
docker exec -it immich_server /bin/sh -c "npm run test:medium"

build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(filter-out .github,$(MODULES)),install-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
ci-all: $(foreach M,$(filter-out .github,$(MODULES)),ci-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
@@ -93,9 +102,12 @@ hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;

clean:
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
find . -name "node_modules" -type d -prune -exec rm -rf {} +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
docker compose -f ./e2e/docker-compose.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true

setup-server-dev: install-server
setup-web-dev: install-sdk build-sdk install-web
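
The Makefile hunks above replace the `... || make dev-down` pattern with `@trap 'make dev-down' EXIT; ...`. Because each recipe line runs in a single shell, an EXIT trap set at the start of that line fires on normal exit, on failure, and on Ctrl-C alike, whereas `||` only triggers on a non-zero exit status. A minimal standalone sketch of the behavior (the `cleanup` function stands in for `make dev-down`):

```bash
#!/usr/bin/env bash
cleanup() { echo "tearing down stack (stand-in for 'make dev-down')"; }
trap cleanup EXIT    # runs on success, on failure, and on interrupt

echo "stack is up; press Ctrl-C or wait"
sleep 5              # stand-in for the long-running "docker compose up"
```
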
@@ -1 +1 @@
22.16.0
22.18.0

2 cli/bin/immich Executable file
@@ -0,0 +1,2 @@
#!/usr/bin/env node
import '../dist/index.js';

687 cli/package-lock.json generated
@@ -1,11 +1,11 @@
{
"name": "@immich/cli",
"version": "2.2.72",
"version": "2.2.77",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
"bin": {
"immich": "dist/index.js"
"immich": "./bin/immich"
},
"license": "GNU Affero General Public License version 3",
"keywords": [
@@ -21,22 +21,22 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.15.32",
"@types/node": "^22.17.0",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
"commander": "^12.0.0",
"eslint": "^9.14.0",
"eslint-config-prettier": "^10.0.0",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^59.0.0",
"eslint-plugin-unicorn": "^60.0.0",
"globals": "^16.0.0",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^6.0.0",
"vite": "^7.0.0",
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0",
"vitest-fetch-mock": "^0.4.0",
@@ -69,6 +69,6 @@
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.16.0"
"node": "22.18.0"
}
}

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.

provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.0"
constraints = "4.52.0"
version = "4.52.1"
constraints = "4.52.1"
hashes = [
"h1:2BEJyXJtYC4B4nda/WCYUmuJYDaYk88F8t1pwPzr0iQ=",
"h1:4IASk5SESeWKQ7JU0+M7KApuF5mZyklvwMXPBabim3c=",
"h1:5ImZxxALSnWfH/4EXw/wFirSmk5Tr0ACmcysy51AafE=",
"h1:6TJ3dxLSin4ZKBJLsZDn95H2ZYnGm8S7GGHvvXuuMQU=",
"h1:IzTUjg9kQ4N3qizP9CjYLeHwjsuGgtxwXvfUQWyOLcA=",
"h1:NTaOQfYINA0YTG/V1/9+SYtgX1it63+cBugj4WK4FWc=",
"h1:PXH48LuJn329sCfMXprdMDk51EZaWFyajVvS03qhQLs=",
"h1:Pi5M+GeoMSN2eJ6QnIeXjBf19O+rby/74CfB2ocpv20=",
"h1:ShXZ2ZjBvm3thfoPPzPT8+OhyismnydQVkUAfI8X12w=",
"h1:WQ9hu0Wge2msBbODfottCSKgu8oKUrw4Opz+fDPVVHk=",
"h1:Z5yXML2DE0uH9UU+M0ut9JMQAORcwVZz1CxBHzeBmao=",
"h1:jqI2qKknpleS3JDSplyGYHMu0u9K/tor1ZOjFwDgEMk=",
"h1:kgfutDh14Q5nw4eg6qGFamFxIiY8Ae0FPKRBLDOzpcI=",
"h1:zCAO7GZmfYhWb+i6TfqlqhMeDyPZWGio2IzEzAh3YTs=",
"zh:19be1a91c982b902c42aba47766860dfa5dc151eed1e95fd39ca642229381ef0",
"zh:1de451c4d1ecf7efbe67b6dace3426ba810711afdd644b0f1b870364c8ae91f8",
"zh:352b4a2120173298622e669258744554339d959ac3a95607b117a48ee4a83238",
"zh:3c6f1346d9154afbd2d558fabb4b0150fc8d559aa961254144fe1bc17fe6032f",
"zh:4c4c92d53fb535b1e0eff26f222bbd627b97d3b4c891ec9c321268676d06152f",
"zh:53276f68006c9ceb7cdb10a6ccf91a5c1eadd1407a28edb5741e84e88d7e29e8",
"zh:7925a97773948171a63d4f65bb81ee92fd6d07a447e36012977313293a5435c9",
"zh:7dfb0a4496cfe032437386d0a2cd9229a1956e9c30bd920923c141b0f0440060",
"h1:2lHvafwGbLdmc9lYkuJFw3nsInaQjRpjX/JfIRKmq/M=",
"h1:596JomwjrtUrOSreq9NNCS+rj70+jOV+0pfja5MXiTI=",
"h1:7mBOA5TVAIt3qAwPXKCtE0RSYeqij9v30mnksuBbpEg=",
"h1:ELVgzh4kHKBCYdL+2A8JjWS0E1snLUN3Mmz3Vo6qSfw=",
"h1:FGGM5yLFf72g3kSXM3LAN64Gf/AkXr5WCmhixgnP+l4=",
"h1:JupkJbQALcIVoMhHImrLeLDsQR1ET7VJLGC7ONxjqGU=",
"h1:KsaE4JNq+1uV1nJsuTcYar/8lyY6zKS5UBEpfYg3wvc=",
"h1:NHZ5RJIzQDLhie/ykl3uI6UPfNQR9Lu5Ti7JPR6X904=",
"h1:NfAuMbn6LQPLDtJhbzO1MX9JMIGLMa8K6CpekvtsuX8=",
"h1:e+vNKokamDsp/kJvFr2pRudzwEz2r49iZ/oSggw+1LY=",
"h1:jnb4VdfNZ79I3yj7Q8x+JmOT+FxbfjjRfrF0dL0yCW8=",
"h1:kmF//O539d7NuHU7qIxDj7Wz4eJmLKFiI5glwQivldU=",
"h1:s6XriaKwOgV4jvKAGPXkrxhhOQxpNU5dceZwi9Z/1k8=",
"h1:wt3WBEBAeSGTlC9OlnTlAALxRiK4SQgLy0KgBIS7qzs=",
"zh:2fb95e1d3229b9b6c704e1a413c7481c60f139780d9641f657b6eb9b633b90f2",
"zh:379c7680983383862236e9e6e720c3114195c40526172188e88d0ffcf50dfe2e",
"zh:55533beb6cfc02d22ffda8cba8027bc2c841bb172cd637ed0d28323d41395f8f",
"zh:5abd70760e4eb1f37a1c307cbd2989ea7c9ba0afb93818c67c1d363a31f75703",
"zh:699f1c8cd66129176fe659ebf0e6337632a8967a28d2630b6ae5948665c0c2ae",
"zh:69c15acd73c451e89de6477059cda2f3ec200b48ae4b9ff3646c4d389fd3205e",
"zh:6e02b687de21b844f8266dff99e93e7c61fc8eb688f4bbb23803caceb251839e",
"zh:7a51d17b87ed87b7bebf2ad9fc7c3a74f16a1b44eee92c779c08eb89258c0496",
"zh:88ad84436837b0f55302f22748505972634e87400d6902260fd6b7ba1610f937",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8d4aa79f0a414bb4163d771063c70cd991c8fac6c766e685bac2ee12903c5bd6",
"zh:a67540c13565616a7e7e51ee9366e88b0dc60046e1d75c72680e150bd02725bb",
"zh:a936383a4767f5393f38f622e92bf2d0c03fe04b69c284951f27345766c7b31b",
"zh:d4887d73c466ff036eecf50ad6404ba38fd82ea4855296b1846d244b0f13c380",
"zh:e9093c8bd5b6cd99c81666e315197791781b8f93afa14fc2e0f732d1bb2a44b7",
"zh:efd3b3f1ec59a37f635aa1d4efcf178734c2fcf8ddb0d56ea690bec342da8672",
"zh:8d46c3d9f4f7ad20ac6ef01daa63f4e30a2d16dcb1bb5c7c7ee3dc6be38e9ca1",
"zh:913d64e72a4929dae1d4793e2004f4f9a58b138ea337d9d94fa35cafbf06550a",
"zh:c8d93cf86e2e49f6cec665cfe78b82c144cce15a8b2e30f343385fadd1251849",
"zh:cc4f69397d9bc34a528a5609a024c3a48f54f21616c0008792dd417297add955",
"zh:df99cdb8b064aad35ffea77e645cf6541d0b1b2ebc51b6d26c42031de60ab69e",
]
}

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.52.0"
version = "4.52.1"
}
}
}

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.

provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.0"
constraints = "4.52.0"
version = "4.52.1"
constraints = "4.52.1"
hashes = [
"h1:2BEJyXJtYC4B4nda/WCYUmuJYDaYk88F8t1pwPzr0iQ=",
"h1:4IASk5SESeWKQ7JU0+M7KApuF5mZyklvwMXPBabim3c=",
"h1:5ImZxxALSnWfH/4EXw/wFirSmk5Tr0ACmcysy51AafE=",
"h1:6TJ3dxLSin4ZKBJLsZDn95H2ZYnGm8S7GGHvvXuuMQU=",
"h1:IzTUjg9kQ4N3qizP9CjYLeHwjsuGgtxwXvfUQWyOLcA=",
"h1:NTaOQfYINA0YTG/V1/9+SYtgX1it63+cBugj4WK4FWc=",
"h1:PXH48LuJn329sCfMXprdMDk51EZaWFyajVvS03qhQLs=",
"h1:Pi5M+GeoMSN2eJ6QnIeXjBf19O+rby/74CfB2ocpv20=",
"h1:ShXZ2ZjBvm3thfoPPzPT8+OhyismnydQVkUAfI8X12w=",
"h1:WQ9hu0Wge2msBbODfottCSKgu8oKUrw4Opz+fDPVVHk=",
"h1:Z5yXML2DE0uH9UU+M0ut9JMQAORcwVZz1CxBHzeBmao=",
"h1:jqI2qKknpleS3JDSplyGYHMu0u9K/tor1ZOjFwDgEMk=",
"h1:kgfutDh14Q5nw4eg6qGFamFxIiY8Ae0FPKRBLDOzpcI=",
"h1:zCAO7GZmfYhWb+i6TfqlqhMeDyPZWGio2IzEzAh3YTs=",
"zh:19be1a91c982b902c42aba47766860dfa5dc151eed1e95fd39ca642229381ef0",
"zh:1de451c4d1ecf7efbe67b6dace3426ba810711afdd644b0f1b870364c8ae91f8",
"zh:352b4a2120173298622e669258744554339d959ac3a95607b117a48ee4a83238",
"zh:3c6f1346d9154afbd2d558fabb4b0150fc8d559aa961254144fe1bc17fe6032f",
"zh:4c4c92d53fb535b1e0eff26f222bbd627b97d3b4c891ec9c321268676d06152f",
"zh:53276f68006c9ceb7cdb10a6ccf91a5c1eadd1407a28edb5741e84e88d7e29e8",
"zh:7925a97773948171a63d4f65bb81ee92fd6d07a447e36012977313293a5435c9",
"zh:7dfb0a4496cfe032437386d0a2cd9229a1956e9c30bd920923c141b0f0440060",
"h1:2lHvafwGbLdmc9lYkuJFw3nsInaQjRpjX/JfIRKmq/M=",
"h1:596JomwjrtUrOSreq9NNCS+rj70+jOV+0pfja5MXiTI=",
"h1:7mBOA5TVAIt3qAwPXKCtE0RSYeqij9v30mnksuBbpEg=",
"h1:ELVgzh4kHKBCYdL+2A8JjWS0E1snLUN3Mmz3Vo6qSfw=",
"h1:FGGM5yLFf72g3kSXM3LAN64Gf/AkXr5WCmhixgnP+l4=",
"h1:JupkJbQALcIVoMhHImrLeLDsQR1ET7VJLGC7ONxjqGU=",
"h1:KsaE4JNq+1uV1nJsuTcYar/8lyY6zKS5UBEpfYg3wvc=",
"h1:NHZ5RJIzQDLhie/ykl3uI6UPfNQR9Lu5Ti7JPR6X904=",
"h1:NfAuMbn6LQPLDtJhbzO1MX9JMIGLMa8K6CpekvtsuX8=",
"h1:e+vNKokamDsp/kJvFr2pRudzwEz2r49iZ/oSggw+1LY=",
"h1:jnb4VdfNZ79I3yj7Q8x+JmOT+FxbfjjRfrF0dL0yCW8=",
"h1:kmF//O539d7NuHU7qIxDj7Wz4eJmLKFiI5glwQivldU=",
"h1:s6XriaKwOgV4jvKAGPXkrxhhOQxpNU5dceZwi9Z/1k8=",
"h1:wt3WBEBAeSGTlC9OlnTlAALxRiK4SQgLy0KgBIS7qzs=",
"zh:2fb95e1d3229b9b6c704e1a413c7481c60f139780d9641f657b6eb9b633b90f2",
"zh:379c7680983383862236e9e6e720c3114195c40526172188e88d0ffcf50dfe2e",
"zh:55533beb6cfc02d22ffda8cba8027bc2c841bb172cd637ed0d28323d41395f8f",
"zh:5abd70760e4eb1f37a1c307cbd2989ea7c9ba0afb93818c67c1d363a31f75703",
"zh:699f1c8cd66129176fe659ebf0e6337632a8967a28d2630b6ae5948665c0c2ae",
"zh:69c15acd73c451e89de6477059cda2f3ec200b48ae4b9ff3646c4d389fd3205e",
"zh:6e02b687de21b844f8266dff99e93e7c61fc8eb688f4bbb23803caceb251839e",
"zh:7a51d17b87ed87b7bebf2ad9fc7c3a74f16a1b44eee92c779c08eb89258c0496",
"zh:88ad84436837b0f55302f22748505972634e87400d6902260fd6b7ba1610f937",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8d4aa79f0a414bb4163d771063c70cd991c8fac6c766e685bac2ee12903c5bd6",
"zh:a67540c13565616a7e7e51ee9366e88b0dc60046e1d75c72680e150bd02725bb",
"zh:a936383a4767f5393f38f622e92bf2d0c03fe04b69c284951f27345766c7b31b",
"zh:d4887d73c466ff036eecf50ad6404ba38fd82ea4855296b1846d244b0f13c380",
"zh:e9093c8bd5b6cd99c81666e315197791781b8f93afa14fc2e0f732d1bb2a44b7",
"zh:efd3b3f1ec59a37f635aa1d4efcf178734c2fcf8ddb0d56ea690bec342da8672",
"zh:8d46c3d9f4f7ad20ac6ef01daa63f4e30a2d16dcb1bb5c7c7ee3dc6be38e9ca1",
"zh:913d64e72a4929dae1d4793e2004f4f9a58b138ea337d9d94fa35cafbf06550a",
"zh:c8d93cf86e2e49f6cec665cfe78b82c144cce15a8b2e30f343385fadd1251849",
"zh:cc4f69397d9bc34a528a5609a024c3a48f54f21616c0008792dd417297add955",
"zh:df99cdb8b064aad35ffea77e645cf6541d0b1b2ebc51b6d26c42031de60ab69e",
]
}

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.52.0"
version = "4.52.1"
}
}
}

@@ -16,7 +16,7 @@ name: immich-dev
services:
immich-server:
container_name: immich_server
command: ['/usr/src/app/bin/immich-dev']
command: ['immich-dev']
image: immich-server-dev:latest
# extends:
# file: hwaccel.transcoding.yml
@@ -27,11 +27,11 @@ services:
target: dev
restart: unless-stopped
volumes:
- ../server:/usr/src/app
- ../open-api:/usr/src/open-api
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
- /usr/src/app/node_modules
- ../server:/usr/src/app/server
- ../open-api:/usr/src/app/open-api
- ${UPLOAD_LOCATION}/photos:/data
- ${UPLOAD_LOCATION}/photos/upload:/data/upload
- /usr/src/app/server/node_modules
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
@@ -69,19 +69,20 @@ services:
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
# user: 0:0
build:
context: ../web
command: ['/usr/src/app/bin/immich-web']
context: ../
dockerfile: web/Dockerfile
command: ['immich-web']
env_file:
- .env
ports:
- 3000:3000
- 24678:24678
volumes:
- ../web:/usr/src/app
- ../i18n:/usr/src/i18n
- ../open-api/:/usr/src/open-api/
- ../web:/usr/src/app/web
- ../i18n:/usr/src/app/i18n
- ../open-api/:/usr/src/app/open-api/
# - ../../ui:/usr/ui
- /usr/src/app/node_modules
- /usr/src/app/web/node_modules
ulimits:
nofile:
soft: 1048576
@@ -116,13 +117,13 @@ services:

redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
healthcheck:
test: redis-cli ping || exit 1

database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:32324a2f41df5de9efe1af166b7008c3f55646f8d0e00d9550c16c9822366b4a
env_file:
- .env
environment:
@@ -134,6 +135,7 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# immich-prometheus:
# container_name: immich_prometheus

@@ -20,7 +20,7 @@ services:
context: ../
dockerfile: server/Dockerfile
volumes:
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos:/data
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
@@ -56,14 +56,14 @@ services:

redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
healthcheck:
test: redis-cli ping || exit 1
restart: always

database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:32324a2f41df5de9efe1af166b7008c3f55646f8d0e00d9550c16c9822366b4a
env_file:
- .env
environment:
@@ -75,6 +75,7 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
restart: always

# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -82,7 +83,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:9abc6cf6aea7710d163dbb28d8eeb7dc5baef01e38fa4cd146a406dd9f07f70d
image: prom/prometheus@sha256:63805ebb8d2b3920190daf1cb14a60871b16fd38bed42b857a3182bc621f4996
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -94,7 +95,7 @@ services:
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:12.0.2-ubuntu@sha256:0512d81cdeaaff0e370a9aa66027b465d1f1f04379c3a9c801a905fabbdbc7a5
image: grafana/grafana:12.1.0-ubuntu@sha256:397aa30dd1af16cb6c5c9879498e467973a7f87eacf949f6d5a29407a3843809
volumes:
- grafana-data:/var/lib/grafana

@@ -18,7 +18,7 @@ services:
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
volumes:
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- ${UPLOAD_LOCATION}:/data
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
@@ -49,14 +49,14 @@ services:

redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
healthcheck:
test: redis-cli ping || exit 1
restart: always

database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:32324a2f41df5de9efe1af166b7008c3f55646f8d0e00d9550c16c9822366b4a
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
@@ -67,6 +67,7 @@ services:
volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
shm_size: 128mb
restart: always

volumes:

4 docs/.gitignore vendored
@@ -18,4 +18,6 @@
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock
yarn.lock

/static/openapi.json

@@ -1 +1 @@
22.16.0
22.18.0

@@ -180,7 +180,7 @@ services:
...
volumes:
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- ${UPLOAD_LOCATION}:/data
- /etc/localtime:/etc/localtime:ro
+ - originals:/usr/src/app/originals
...

@@ -490,7 +490,7 @@ You can also scan the Postgres database file structure for errors:
<details>
<summary>Scan for file structure errors</summary>
```bash
docker exec -it immich_postgres pg_amcheck --username=postgres --heapallindexed --parent-check --rootdescend --progress --all --install-missing
docker exec -it immich_postgres pg_amcheck --username=<DB_USERNAME> --heapallindexed --parent-check --rootdescend --progress --all --install-missing
```

A normal result will end something like this and return with an exit code of `0`:

@@ -57,7 +57,7 @@ Then please follow the steps in the following section for restoring the database
<TabItem value="Linux system" label="Linux system" default>

```bash title='Backup'
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
```

```bash title='Restore'
@@ -79,7 +79,7 @@ docker compose up -d # Start remainder of Immich apps
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">

```powershell title='Backup'
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres))
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>))
```

```powershell title='Restore'
@@ -150,12 +150,10 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:**

- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.

- **Postgres**

- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.
@@ -201,7 +199,6 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
- **Postgres**

- The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`.

BIN docs/docs/administration/img/admin-nightly-tasks.webp Normal file (added, 33 KiB)

@@ -46,6 +46,12 @@ services:

When a new asset is uploaded, it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job, navigate to the Administration -> Jobs page.

Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.

<img src={require('./img/admin-jobs.webp').default} width="60%" title="Admin jobs" />

Additionally, some jobs (such as memories generation) run on a schedule, which is every night at midnight by default. To change when they run, or to enable or disable a job, navigate to System Settings -> [Nightly Tasks Settings](https://my.immich.app/admin/system-settings?isOpen=nightly-tasks).

<img src={require('./img/admin-nightly-tasks.webp').default} width="60%" title="Admin nightly tasks" />

:::note
Some jobs ([External Libraries](/docs/features/libraries) scanning, Database Dump) are configured in their own sections in System Settings.
:::

@@ -20,7 +20,6 @@ Immich supports 3rd party authentication via [OpenID Connect][oidc] (OIDC), an i

Before enabling OAuth in Immich, a new client application needs to be configured in the 3rd-party authentication server. While the specifics of this setup vary from provider to provider, the general approach should be the same.

1. Create a new (Client) Application

   1. The **Provider** type should be `OpenID Connect` or `OAuth2`
   2. The **Client type** should be `Confidential`
   3. The **Application** type should be `Web`

@@ -29,7 +28,6 @@ Before enabling OAuth in Immich, a new client application needs to be configured

2. Configure Redirect URIs/Origins

   The **Sign-in redirect URIs** should include:

   - `app.immich:///oauth-callback` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
   - `http://DOMAIN:PORT/auth/login` - for logging in with OAuth from the Web Client
   - `http://DOMAIN:PORT/user-settings` - for manually linking OAuth in the Web Client

@@ -37,21 +35,17 @@ Before enabling OAuth in Immich, a new client application needs to be configured

Redirect URIs should contain all the domains you will be using to access Immich. Some examples include:

Mobile

- `app.immich:///oauth-callback` (You **MUST** include this for iOS and Android mobile apps to work properly)

Localhost

- `http://localhost:2283/auth/login`
- `http://localhost:2283/user-settings`

Local IP

- `http://192.168.0.200:2283/auth/login`
- `http://192.168.0.200:2283/user-settings`

Hostname

- `https://immich.example.com/auth/login`
- `https://immich.example.com/user-settings`

@@ -68,8 +62,9 @@ Once you have a new OAuth client application configured, Immich can be configure

| Scope                       | string  | openid email profile | Full list of scopes to send with the request (space delimited)                       |
| Signing Algorithm           | string  | RS256                | The algorithm used to sign the id token (examples: RS256, HS256)                     |
| Storage Label Claim         | string  | preferred_username   | Claim mapping for the user's storage label**¹**                                      |
| Role Claim                  | string  | immich_role          | Claim mapping for the user's role (should return "user" or "admin")**¹**             |
| Storage Quota Claim         | string  | immich_quota         | Claim mapping for the user's storage**¹**                                            |
| Default Storage Quota (GiB) | number  | 0                    | Default quota for user without storage quota claim (Enter 0 for unlimited quota)     |
| Default Storage Quota (GiB) | number  | 0                    | Default quota for user without storage quota claim (empty for unlimited quota)       |
| Button Text                 | string  | Login with OAuth     | Text for the OAuth button on the web                                                 |
| Auto Register               | boolean | true                 | When true, will automatically register a user the first time they sign in            |
| [Auto Launch](#auto-launch) | boolean | false                | When true, will skip the login page and automatically start the OAuth login process  |

@@ -111,6 +106,89 @@ Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to

## Example Configuration

<details>
<summary>Authelia Example</summary>

### Authelia Example

Here's an example of OAuth configured for Authelia:

This assumes there exists an attribute `immichquota` in the user schema, which is used to set the user's storage quota in Immich.
The configuration concerning the quota is optional.
```yaml
authentication_backend:
  ldap:
    # The LDAP server configuration goes here.
    # See: https://www.authelia.com/c/ldap
    attributes:
      extra:
        immichquota: # The attribute name from LDAP
          name: 'immich_quota'
          multi_valued: false
          value_type: 'integer'

identity_providers:
  oidc:
    ## The other portions of the mandatory OpenID Connect 1.0 configuration go here.
    ## See: https://www.authelia.com/c/oidc
    claims_policies:
      immich_policy:
        custom_claims:
          immich_quota:
            attribute: 'immich_quota'
    scopes:
      immich_scope:
        claims:
          - 'immich_quota'

    clients:
      - client_id: 'immich'
        client_name: 'Immich'
        # https://www.authelia.com/integration/openid-connect/frequently-asked-questions/#how-do-i-generate-a-client-identifier-or-client-secret
        client_secret: '$pbkdf2-sha512$310000$c8p78n7pUMln0jzvd4aK4Q$JNRBzwAo0ek5qKn50cFzzvE9RXV88h1wJn5KGiHrD0YKtZaR/nCb2CJPOsKaPK0hjf.9yHxzQGZziziccp6Yng'
        public: false
        require_pkce: false
        redirect_uris:
          - 'https://example.immich.app/auth/login'
          - 'https://example.immich.app/user-settings'
          - 'app.immich:///oauth-callback'
        scopes:
          - 'openid'
          - 'profile'
          - 'email'
          - 'immich_scope'
        claims_policy: 'immich_policy'
        response_types:
          - 'code'
        grant_types:
          - 'authorization_code'
        id_token_signed_response_alg: 'RS256'
        userinfo_signed_response_alg: 'RS256'
        token_endpoint_auth_method: 'client_secret_post'
```

Configuration of OAuth in Immich System Settings

| Setting                            | Value                                                               |
| ---------------------------------- | ------------------------------------------------------------------- |
| Issuer URL                         | `https://example.immich.app/.well-known/openid-configuration`       |
| Client ID                          | immich                                                               |
| Client Secret                      | 0v89FXkQOWO\***\*\*\*\*\***\*\*\***\*\*\*\*\***mprbvXD549HH6s1iw...  |
| Token Endpoint Auth Method         | client_secret_post                                                   |
| Scope                              | openid email profile immich_scope                                    |
| ID Token Signed Response Algorithm | RS256                                                                |
| Userinfo Signed Response Algorithm | RS256                                                                |
| Storage Label Claim                | uid                                                                  |
| Storage Quota Claim                | immich_quota                                                         |
| Default Storage Quota (GiB)        | 0 (empty for unlimited quota)                                        |
| Button Text                        | Sign in with Authelia (optional)                                     |
| Auto Register                      | Enabled (optional)                                                   |
| Auto Launch                        | Enabled (optional)                                                   |
| Mobile Redirect URI Override       | Disable                                                              |
| Mobile Redirect URI                |                                                                      |

</details>

<details>
<summary>Authentik Example</summary>

@@ -133,7 +211,7 @@ Configuration of OAuth in Immich System Settings

| Signing Algorithm           | RS256                             |
| Storage Label Claim         | preferred_username                |
| Storage Quota Claim         | immich_quota                      |
| Default Storage Quota (GiB) | 0 (0 for unlimited quota)         |
| Default Storage Quota (GiB) | 0 (empty for unlimited quota)     |
| Button Text                 | Sign in with Authentik (optional) |
| Auto Register               | Enabled (optional)                |
| Auto Launch                 | Enabled (optional)                |

@@ -164,7 +242,7 @@ Configuration of OAuth in Immich System Settings

| Signing Algorithm           | RS256                          |
| Storage Label Claim         | preferred_username             |
| Storage Quota Claim         | immich_quota                   |
| Default Storage Quota (GiB) | 0 (0 for unlimited quota)      |
| Default Storage Quota (GiB) | 0 (empty for unlimited quota)  |
| Button Text                 | Sign in with Google (optional) |
| Auto Register               | Enabled (optional)             |
| Auto Launch                 | Enabled                        |

@@ -2,10 +2,6 @@

Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Real-IP`, `X-Forwarded-Proto`, and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow sufficiently large uploads. Following these practices ensures that any custom reverse proxy is fully compatible with Immich.

:::note
The Repair page can take a long time to load. To avoid server timeouts or errors, we recommend specifying a timeout of at least 10 minutes on your proxy server.
:::

:::caution
Immich does not support being served on a sub-path such as `location /immich {`. It has to be served on the root path of a (sub)domain.
:::

@@ -2,16 +2,17 @@

The `immich-server` docker image comes preinstalled with an administrative CLI (`immich-admin`) that supports the following commands:

| Command                  | Description                            |
| ------------------------ | -------------------------------------- |
| `help`                   | Display help                           |
| `reset-admin-password`   | Reset the password for the admin user  |
| `disable-password-login` | Disable password login                 |
| `enable-password-login`  | Enable password login                  |
| `enable-oauth-login`     | Enable OAuth login                     |
| `disable-oauth-login`    | Disable OAuth login                    |
| `list-users`             | List Immich users                      |
| `version`                | Print Immich version                   |

| Command                  | Description                                                    |
| ------------------------ | -------------------------------------------------------------- |
| `help`                   | Display help                                                   |
| `reset-admin-password`   | Reset the password for the admin user                          |
| `disable-password-login` | Disable password login                                         |
| `enable-password-login`  | Enable password login                                          |
| `enable-oauth-login`     | Enable OAuth login                                             |
| `disable-oauth-login`    | Disable OAuth login                                            |
| `list-users`             | List Immich users                                              |
| `version`                | Print Immich version                                           |
| `change-media-location`  | Change database file paths to align with a new media location  |

## How to run a command
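
A quick way to run one of these is to exec into the server container (a minimal sketch; it assumes the default container name `immich_server` from the standard docker-compose setup):

```bash
# Run an immich-admin command inside the running server container
docker exec -it immich_server immich-admin list-users
```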

@@ -88,3 +89,21 @@ Print Immich Version

immich-admin version
v1.129.0
```

Change media location

```
immich-admin change-media-location
? Enter the previous value of IMMICH_MEDIA_LOCATION: /data
? Enter the new value of IMMICH_MEDIA_LOCATION: /my-data
...
Previous value: /data
Current value: /my-data

Changing database paths from "/data/*" to "/my-data/*"

? Do you want to proceed? [Y/n] y

Database file paths updated successfully! 🎉
...
```

@@ -7,7 +7,7 @@ sidebar_position: 3

Dev Containers provide a consistent, reproducible development environment using Docker containers. With a single click, you can get started with an Immich development environment on Mac, Linux, Windows, or in the cloud using GitHub Codespaces.

[](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/immich-app/immich/)
Get started fast!

[](https://codespaces.new/immich-app/immich/)

@@ -71,7 +71,7 @@ cd immich

The Immich dev containers read environment variables from your shell environment, not from `.env` files. This allows them to work in cloud environments without pre-configuration.

:::important Required Configuration
:::important Configuration
When running locally, if you want to create (or use an existing) database and/or photo storage folder, you must set the `UPLOAD_LOCATION` variable in your shell environment before launching the Dev Container. This determines where uploaded files are stored and also where the database stores its data.

```bash
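# A minimal sketch (the path is a hypothetical placeholder); persist the
# variable so future shells, and the Dev Container, can see it:
export UPLOAD_LOCATION=/path/to/photo-storage
echo 'export UPLOAD_LOCATION=/path/to/photo-storage' >> ~/.bashrc
```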

@@ -88,6 +88,10 @@ source ~/.bashrc

### Step 3: Launch the Dev Container

:::tip
Immich development makes extensive use of specialized [base images](https://github.com/immich-app/base-images) for its docker-compose based development. For this reason, you won't be able to use VS Code's **_Clone Repository in a Container Volume_** command.
:::

#### Using VS Code UI:

1. Open the cloned repository in VS Code

@@ -199,13 +203,11 @@ To use your SSH key for commit signing, see the [GitHub guide on SSH commit sign

When the Dev Container starts, it automatically:

1. **Runs post-create script** (`container-server-post-create.sh`):

   - Adjusts file permissions for the `node` user
   - Installs dependencies: `npm install` in all packages
   - Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`

2. **Starts development servers** via VS Code tasks:

   - `Immich API Server (Nest)` - API server with hot-reloading on port 2283
   - `Immich Web Server (Vite)` - Web frontend with hot-reloading on port 3000
   - Both servers watch for file changes and recompile automatically
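
Once the tasks are running, a quick smoke test from the host confirms the API server is answering (a hedged sketch; the port assumes the default above, and the exact ping route may vary between Immich versions):

```bash
# Quick health check against the dev API server
curl http://localhost:2283/api/server/ping
# Expected response: {"res":"pong"}
```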

@@ -335,14 +337,12 @@ make install-all # Install all dependencies

The Dev Container is pre-configured for debugging:

1. **API Server Debugging**:

   - Set breakpoints in VS Code
   - Press `F5` or use the "Run and Debug" panel
   - Select the "Attach to Server" configuration
   - Debug port: 9231

2. **Worker Debugging**:

   - Use the "Attach to Workers" configuration
   - Debug port: 9230

@@ -428,7 +428,6 @@ While the Dev Container focuses on server and web development, you can connect m

```

2. **Configure mobile app**:

   - Server URL: `http://YOUR_IP:2283/api`
   - Ensure firewall allows port 2283

@@ -38,6 +38,19 @@ Run all server checks with `npm run check:all`

You can use `npm run __:fix` to potentially correct some issues automatically for `npm run format` and `lint`.
:::

## Mobile Checks

The following commands must be executed from within the mobile app directory of the codebase.

- [ ] `make build` (auto-generate files using build_runner)
- [ ] `make analyze` (static analysis via Dart Analyzer and DCM)
- [ ] `make format` (formatting via Dart Formatter)
- [ ] `make test` (unit tests)

:::info Auto Fix
You can use `dart fix --apply` and `dcm fix lib` to potentially correct some issues automatically for `make analyze`.
:::

## OpenAPI

The OpenAPI client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file. Note that you should not modify this file directly, as it is auto-generated. See [OpenAPI](/docs/developer/open-api.md) for more details.

@@ -2,7 +2,7 @@

Folder view provides an additional view besides the timeline that is similar to a file explorer. It allows you to navigate through the folders and files in the library. This feature is handy for a highly curated and customized external library or a nicely configured storage template.

You can enable this feature under [`Account Settings > Features > Folder View`](https://my.immich.app/user-settings?isOpen=feature+folders)
You can enable this feature under [`Account Settings > Features > Folders`](https://my.immich.app/user-settings?isOpen=feature+folders)

## Enable folder view

@@ -56,9 +56,9 @@ Internally, Immich uses the [glob](https://www.npmjs.com/package/glob) package t

### Automatic watching (EXPERIMENTAL)

This feature - currently hidden in the config file - is considered experimental and for advanced users only. If enabled, it will allow automatic watching of the filesystem, which means new assets are automatically imported to Immich without needing to rescan.
This feature is considered experimental and for advanced users only. If enabled, it will allow automatic watching of the filesystem, which means new assets are automatically imported to Immich without needing to rescan.

If your photos are on a network drive, automatic file watching likely won't work. In that case, you will have to rely on a periodic library refresh to pull in your changes.
If your photos are on a network drive, automatic file watching likely won't work. In that case, you will have to rely on a [periodic library refresh](#set-custom-scan-interval) to pull in your changes.

#### Troubleshooting

@@ -72,7 +72,9 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.

### Nightly job

There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" on the library management page.
There is an automatic scan job that is scheduled to run once a day. Its schedule is configurable; see [Set Custom Scan Interval](#set-custom-scan-interval).

This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" on the library management page.

## Usage

@@ -91,7 +93,7 @@ The `immich-server` container will need access to the gallery. Modify your docke

```diff title="docker-compose.yml"
  immich-server:
    volumes:
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
      - ${UPLOAD_LOCATION}:/data
+     - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
+     - /home/user/old-pics:/mnt/media/old-pics:ro
+     - /mnt/media/videos:/mnt/media/videos:ro

@@ -112,7 +114,7 @@ _Remember to run `docker compose up -d` to register the changes. Make sure you c

These actions must be performed by the Immich administrator.

- Click on your avatar on the upper right corner
- Click on your avatar in the upper right corner
- Click on Administration -> External Libraries
- Click on Create an external library…
- Select which user owns the library; this cannot be changed later

@@ -159,9 +161,7 @@ Within seconds, the assets from the old-pics and videos folders should show up i

Folder view provides an additional view besides the timeline that is similar to a file explorer. It allows you to navigate through the folders and files in the library. This feature is handy for a highly curated and customized external library or a nicely configured storage template.

You can enable this feature under [`Account Settings > Features > Folder View`](https://my.immich.app/user-settings?isOpen=feature+folders)

The UI is currently only available for the web; mobile will come in a subsequent release.
You can enable this feature under [`Account Settings > Features > Folders`](https://my.immich.app/user-settings?isOpen=feature+folders)

<img src={require('./img/folder-view-1.webp').default} width="100%" title='Folder-view' />

@@ -171,7 +171,7 @@ The UI is currently only available for the web; mobile will come in a subsequent

Only an admin can do this.
:::

You can define a custom interval for triggering the external library rescan under Administration -> Settings -> Library.
You can define a custom interval for triggering the external library rescan under Administration -> Settings -> External Library.
You can set the scanning interval using the preset or cron format. For more information, you can refer to [Crontab Guru](https://crontab.guru/).
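
For instance (illustrative values only), a cron expression here might look like one of the following:

```bash
# Illustrative cron expressions for the external library scan interval:
#   0 3 * * *    -> every day at 03:00
#   0 */6 * * *  -> every six hours
#   0 2 * * 1    -> every Monday at 02:00
```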
<img src={require('./img/library-custom-scan-interval.webp').default} width="75%" title='Set custom scan interval for external library' />

@@ -88,9 +88,9 @@ It will only reflect files you add.
:::

If the same asset is in more than one album, it will only sync to the first album it's in; after that, it won't sync again even if the user clicks sync albums manually.
To overcome this limitation, the files must be removed from the blacklist via
To overcome this limitation, the files must be removed from the ignore list via
App settings -> Advanced -> Duplicate Assets -> Clear

:::info
Clearing duplicate assets from the list will cause all the previously uploaded duplicate files to be re-uploaded; the files will not actually be uploaded and will be rejected on the server side (due to duplication), but they will be synchronized to the album and added to the black list again at the end of the synchronization.
Clearing duplicate assets from the list will cause all the previously uploaded duplicate files to be re-uploaded; the files will not actually be uploaded and will be rejected on the server side (due to duplication), but they will be synchronized to the album and added to the ignore list again at the end of the synchronization.
:::

@@ -16,7 +16,7 @@ For the full list, refer to the [Immich source code](https://github.com/immich-a

| `HEIC`      | `.heic`                       | :white_check_mark: |                 |
| `HEIF`      | `.heif`                       | :white_check_mark: |                 |
| `JPEG 2000` | `.jp2`                        | :white_check_mark: |                 |
| `JPEG`      | `.webp` `.jpg` `.jpe` `.insp` | :white_check_mark: |                 |
| `JPEG`      | `.jpeg` `.jpg` `.jpe` `.insp` | :white_check_mark: |                 |
| `JPEG XL`   | `.jxl`                        | :white_check_mark: |                 |
| `PNG`       | `.png`                        | :white_check_mark: |                 |
| `PSD`       | `.psd`                        | :white_check_mark: | Adobe Photoshop |

@@ -27,11 +27,11 @@ After defining the locations of these files, we will edit the `docker-compose.ym

  services:
    immich-server:
      volumes:
        - ${UPLOAD_LOCATION}:/usr/src/app/upload
+       - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
+       - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
+       - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
+       - ${BACKUP_LOCATION}:/usr/src/app/upload/backups
        - ${UPLOAD_LOCATION}:/data
+       - ${THUMB_LOCATION}:/data/thumbs
+       - ${ENCODED_VIDEO_LOCATION}:/data/encoded-video
+       - ${PROFILE_LOCATION}:/data/profile
+       - ${BACKUP_LOCATION}:/data/backups
        - /etc/localtime:/etc/localtime:ro
```
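
The `*_LOCATION` variables referenced above come from your `.env` file; a hypothetical layout (all paths are placeholders to adapt to your own disks):

```bash
# Hypothetical .env entries matching the mounts above
UPLOAD_LOCATION=/mnt/hdd/immich
THUMB_LOCATION=/mnt/ssd/immich/thumbs
ENCODED_VIDEO_LOCATION=/mnt/hdd/immich/encoded-video
PROFILE_LOCATION=/mnt/ssd/immich/profile
BACKUP_LOCATION=/mnt/hdd/immich/backups
```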

@@ -44,7 +44,7 @@ docker compose up -d

:::note
Because of the underlying properties of docker bind mounts, it is not recommended to mount the `upload/` and `library/` folders as separate bind mounts if they are on the same device.
For this reason, we mount the HDD or the network storage (NAS) to `/usr/src/app/upload` and then mount the folders we want to access under that folder.
For this reason, we mount the HDD or the network storage (NAS) to `/data` and then mount the folders we want to access under that folder.

The `thumbs/` folder contains both the small thumbnails displayed in the timeline and the larger previews shown when clicking into an image. These cannot be separated.

@@ -12,118 +12,148 @@ Run `docker exec -it immich_postgres psql --dbname=<DB_DATABASE_NAME> --username

## Assets

### Name

:::note
The `"originalFileName"` column is the name of the file at time of upload, including the extension.
:::

```sql title="Find by original filename"
SELECT * FROM "assets" WHERE "originalFileName" = 'PXL_20230903_232542848.jpg';
SELECT * FROM "assets" WHERE "originalFileName" LIKE 'PXL_%'; -- all files starting with PXL_
SELECT * FROM "assets" WHERE "originalFileName" LIKE '%_2023_%'; -- all files with _2023_ in the middle
SELECT * FROM "asset" WHERE "originalFileName" = 'PXL_20230903_232542848.jpg';
SELECT * FROM "asset" WHERE "originalFileName" LIKE 'PXL_%'; -- all files starting with PXL_
SELECT * FROM "asset" WHERE "originalFileName" LIKE '%_2023_%'; -- all files with _2023_ in the middle
```

```sql title="Find by path"
SELECT * FROM "assets" WHERE "originalPath" = 'upload/library/admin/2023/2023-09-03/PXL_2023.jpg';
SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
SELECT * FROM "asset" WHERE "originalPath" = 'upload/library/admin/2023/2023-09-03/PXL_2023.jpg';
SELECT * FROM "asset" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
```

### ID

```sql title="Find by ID"
SELECT * FROM "assets" WHERE "id" = '9f94e60f-65b6-47b7-ae44-a4df7b57f0e9';
SELECT * FROM "asset" WHERE "id" = '9f94e60f-65b6-47b7-ae44-a4df7b57f0e9';
```

```sql title="Find by partial ID"
SELECT * FROM "assets" WHERE "id"::text LIKE '%ab431d3a%';
SELECT * FROM "asset" WHERE "id"::text LIKE '%ab431d3a%';
```

### Checksum

:::note
You can calculate the checksum for a particular file by using the command `sha1sum <filename>`.
:::

```sql title="Find by checksum (SHA-1)"
SELECT encode("checksum", 'hex') FROM "assets";
SELECT * FROM "assets" WHERE "checksum" = decode('69de19c87658c4c15d9cacb9967b8e033bf74dd1', 'hex');
SELECT * FROM "assets" WHERE "checksum" = '\x69de19c87658c4c15d9cacb9967b8e033bf74dd1'; -- alternate notation
SELECT encode("checksum", 'hex') FROM "asset";
SELECT * FROM "asset" WHERE "checksum" = decode('69de19c87658c4c15d9cacb9967b8e033bf74dd1', 'hex');
SELECT * FROM "asset" WHERE "checksum" = '\x69de19c87658c4c15d9cacb9967b8e033bf74dd1'; -- alternate notation
```

```sql title="Find duplicate assets with identical checksum (SHA-1) (excluding trashed files)"
SELECT T1."checksum", array_agg(T2."id") ids FROM "assets" T1
INNER JOIN "assets" T2 ON T1."checksum" = T2."checksum" AND T1."id" != T2."id" AND T2."deletedAt" IS NULL
SELECT T1."checksum", array_agg(T2."id") ids FROM "asset" T1
INNER JOIN "asset" T2 ON T1."checksum" = T2."checksum" AND T1."id" != T2."id" AND T2."deletedAt" IS NULL
WHERE T1."deletedAt" IS NULL GROUP BY T1."checksum";
```
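
As a quick way to tie those two together, something like this works from the host (a minimal sketch; it assumes the default container name, database name, and username from the `psql` command at the top of this page):

```bash
# Compute a file's SHA-1 and look it up in the asset table
HASH=$(sha1sum PXL_20230903_232542848.jpg | cut -d' ' -f1)
docker exec -t immich_postgres psql --dbname=immich --username=postgres \
  -c "SELECT \"id\", \"originalFileName\" FROM \"asset\" WHERE \"checksum\" = decode('$HASH', 'hex');"
```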

### Metadata

```sql title="Live photos"
SELECT * FROM "assets" WHERE "livePhotoVideoId" IS NOT NULL;
SELECT * FROM "asset" WHERE "livePhotoVideoId" IS NOT NULL;
```

```sql title="By description"
SELECT "assets".*, "exif"."description" FROM "exif"
JOIN "assets" ON "assets"."id" = "exif"."assetId"
WHERE TRIM("exif"."description") <> ''; -- all files with a description
SELECT "assets".*, "exif"."description" FROM "exif"
JOIN "assets" ON "assets"."id" = "exif"."assetId"
WHERE "exif"."description" ILIKE '%string to match%'; -- search by string
SELECT "asset".*, "asset_exif"."description" FROM "asset_exif"
JOIN "asset" ON "asset"."id" = "asset_exif"."assetId"
WHERE TRIM("asset_exif"."description") <> ''; -- all files with a description
SELECT "asset".*, "asset_exif"."description" FROM "asset_exif"
JOIN "asset" ON "asset"."id" = "asset_exif"."assetId"
WHERE "asset_exif"."description" ILIKE '%string to match%'; -- search by string
```

```sql title="Without metadata"
SELECT "assets".* FROM "exif"
LEFT JOIN "assets" ON "assets"."id" = "exif"."assetId"
WHERE "exif"."assetId" IS NULL;
SELECT "asset".* FROM "asset_exif"
LEFT JOIN "asset" ON "asset"."id" = "asset_exif"."assetId"
WHERE "asset_exif"."assetId" IS NULL;
```

```sql title="size < 100,000 bytes, smallest to largest"
SELECT * FROM "assets"
JOIN "exif" ON "assets"."id" = "exif"."assetId"
WHERE "exif"."fileSizeInByte" < 100000
ORDER BY "exif"."fileSizeInByte" ASC;
SELECT * FROM "asset"
JOIN "asset_exif" ON "asset"."id" = "asset_exif"."assetId"
WHERE "asset_exif"."fileSizeInByte" < 100000
ORDER BY "asset_exif"."fileSizeInByte" ASC;
```

```sql title="Without thumbnails"
SELECT * FROM "assets" WHERE "assets"."previewPath" IS NULL OR "assets"."thumbnailPath" IS NULL;
```

### Type

```sql title="By type"
SELECT * FROM "assets" WHERE "assets"."type" = 'VIDEO';
SELECT * FROM "assets" WHERE "assets"."type" = 'IMAGE';
SELECT * FROM "asset" WHERE "asset"."type" = 'VIDEO';
SELECT * FROM "asset" WHERE "asset"."type" = 'IMAGE';
```

```sql title="Count by type"
SELECT "assets"."type", COUNT(*) FROM "assets" GROUP BY "assets"."type";
SELECT "asset"."type", COUNT(*) FROM "asset" GROUP BY "asset"."type";
```

```sql title="Count by type (per user)"
SELECT "users"."email", "assets"."type", COUNT(*) FROM "assets"
JOIN "users" ON "assets"."ownerId" = "users"."id"
GROUP BY "assets"."type", "users"."email" ORDER BY "users"."email";
SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"
JOIN "user" ON "asset"."ownerId" = "user"."id"
GROUP BY "asset"."type", "user"."email" ORDER BY "user"."email";
```

```sql title="Failed file movements"
SELECT * FROM "move_history";
```

## Tags

```sql title="Count by tag"
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
```

```sql title="Count by tag (per user)"
SELECT "t"."value" AS "tag_name", "u"."email" AS "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
```

## Users

```sql title="List all users"
SELECT * FROM "users";
SELECT * FROM "user";
```

```sql title="Get owner info from asset ID"
SELECT "users".* FROM "users" JOIN "assets" ON "users"."id" = "assets"."ownerId" WHERE "assets"."id" = 'fa310b01-2f26-4b7a-9042-d578226e021f';
SELECT "user".* FROM "user" JOIN "asset" ON "user"."id" = "asset"."ownerId" WHERE "asset"."id" = 'fa310b01-2f26-4b7a-9042-d578226e021f';
```

## System Config

```sql title="Custom settings"
SELECT "key", "value" FROM "system_metadata" WHERE "key" = 'system-config';
```

(Only used when not using the [config file](/docs/install/config-file))

## Persons

```sql title="Delete person and unset it for the faces it was associated with"
DELETE FROM "person" WHERE "name" = 'PersonNameHere';
```

## System

### Config

```sql title="Custom settings"
SELECT "key", "value" FROM "system_metadata" WHERE "key" = 'system-config';
```

(Only used when not using the [config file](/docs/install/config-file))

### File properties

```sql title="Without thumbnails"
SELECT * FROM "asset" WHERE "asset"."previewPath" IS NULL OR "asset"."thumbnailPath" IS NULL;
```

```sql title="Failed file movements"
SELECT * FROM "move_history";
```

## Postgres internal

```sql title="Change DB_PASSWORD"

@@ -12,7 +12,7 @@ If you want Immich to be able to delete the images in the external library or ad

```diff
  immich-server:
    volumes:
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
      - ${UPLOAD_LOCATION}:/data
+     - /home/user/photos1:/home/user/photos1:ro
+     - /mnt/photos2:/mnt/photos2:ro # you can delete this line if you only have one mount point, or you can add more lines if you have more than two
```

@@ -41,7 +41,7 @@ In the Immich web UI:

- Click Add path
  <img src={require('./img/add-path-button.webp').default} width="50%" title="Add Path button" />

- Enter **/usr/src/app/external** as the path and click Add
- Enter **/home/user/photos1** as the path and click Add
  <img src={require('./img/add-path-field.webp').default} width="50%" title="Add Path field" />

- Save the new path

@@ -52,9 +52,9 @@ REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"

### Local

# Backup Immich database
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
# For deduplicating backup programs such as Borg or Restic, compressing the content can increase backup size by making it harder to deduplicate. If you are using a different program or still prefer to compress, you can use the following command instead:
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz

### Append to local Borg repository
borg create "$BACKUP_PATH/immich-borg::{now}" "$UPLOAD_LOCATION" --exclude "$UPLOAD_LOCATION"/thumbs/ --exclude "$UPLOAD_LOCATION"/encoded-video/

@@ -34,7 +34,7 @@ These environment variables are used by the `docker-compose.yml` file and do **N

| `TZ`                    | Timezone                                                                                   | <sup>\*1</sup>               | server                   | microservices      |
| `IMMICH_ENV`            | Environment (production, development)                                                      | `production`                 | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL`      | Log level (verbose, debug, log, warn, error)                                               | `log`                        | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `./upload`<sup>\*3</sup>     | server                   | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `/data`                      | server                   | api, microservices |
| `IMMICH_CONFIG_FILE`    | Path to config file                                                                        |                              | server                   | api, microservices |
| `NO_COLOR`              | Set to `true` to disable color-coded log output                                            | `false`                      | server, machine learning |                    |
| `CPU_CORES`             | Number of cores available to the Immich server                                             | auto-detected CPU core count | server                   |                    |

@@ -49,9 +49,6 @@ These environment variables are used by the `docker-compose.yml` file and do **N

\*2: This path is where the Immich code looks for the files, which is internal to the docker container. Setting it to a path on your host will certainly break things; you should use the `UPLOAD_LOCATION` variable instead.

\*3: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
It only needs to be set if the Immich deployment method is changing.

## Workers

| Variable | Description | Default | Containers |
@@ -72,22 +69,25 @@ Information on the current workers can be found [here](/docs/administration/jobs

## Database

| Variable                            | Description                                                                   | Default    | Containers                     |
| :---------------------------------- | :---------------------------------------------------------------------------- | :--------: | :----------------------------- |
| `DB_URL`                            | Database URL                                                                  |            | server                         |
| `DB_HOSTNAME`                       | Database host                                                                 | `database` | server                         |
| `DB_PORT`                           | Database port                                                                 | `5432`     | server                         |
| `DB_USERNAME`                       | Database user                                                                 | `postgres` | server, database<sup>\*1</sup> |
| `DB_PASSWORD`                       | Database password                                                             | `postgres` | server, database<sup>\*1</sup> |
| `DB_DATABASE_NAME`                  | Database name                                                                 | `immich`   | server, database<sup>\*1</sup> |
| `DB_SSL_MODE`                       | Database SSL mode                                                             |            | server                         |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`])  |            | server                         |
| `DB_SKIP_MIGRATIONS`                | Whether to skip running migrations on startup (one of [`true`, `false`])      | `false`    | server                         |

| Variable                            | Description                                                                             | Default    | Containers                     |
| :---------------------------------- | :--------------------------------------------------------------------------------------- | :--------: | :----------------------------- |
| `DB_URL`                            | Database URL                                                                            |            | server                         |
| `DB_HOSTNAME`                       | Database host                                                                           | `database` | server                         |
| `DB_PORT`                           | Database port                                                                           | `5432`     | server                         |
| `DB_USERNAME`                       | Database user                                                                           | `postgres` | server, database<sup>\*1</sup> |
| `DB_PASSWORD`                       | Database password                                                                       | `postgres` | server, database<sup>\*1</sup> |
| `DB_DATABASE_NAME`                  | Database name                                                                           | `immich`   | server, database<sup>\*1</sup> |
| `DB_SSL_MODE`                       | Database SSL mode                                                                       |            | server                         |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`])            |            | server                         |
| `DB_SKIP_MIGRATIONS`                | Whether to skip running migrations on startup (one of [`true`, `false`])                | `false`    | server                         |
| `DB_STORAGE_TYPE`                   | Optimize concurrent IO on SSDs or sequential IO on HDDs ([`SSD`, `HDD`])<sup>\*3</sup>  | `SSD`      | server                         |

\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.

\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector.

\*3: Uses either [`postgresql.ssd.conf`](https://github.com/immich-app/base-images/blob/main/postgres/postgresql.ssd.conf) or [`postgresql.hdd.conf`](https://github.com/immich-app/base-images/blob/main/postgres/postgresql.hdd.conf), which mainly controls the Postgres `effective_io_concurrency` setting to allow for concurrent IO on SSDs and sequential IO on HDDs.
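
For example, a deployment with the database on spinning disks might add this to its `.env` (the value comes from the table above):

```bash
# Hypothetical .env entry for an HDD-backed Postgres data volume
DB_STORAGE_TYPE=HDD
```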

:::info

All `DB_` variables must be provided to all Immich workers, including `api` and `microservices`.

@@ -199,12 +199,11 @@ Additional machine learning parameters can be tuned from the admin UI.

| `IMMICH_TELEMETRY_INCLUDE` | Collect these telemetries. List of `host`, `api`, `io`, `repo`, `job`. Note: You can also specify `all` to enable all | | server | api, microservices |
| `IMMICH_TELEMETRY_EXCLUDE` | Do not collect these telemetries. List of `host`, `api`, `io`, `repo`, `job` | | server | api, microservices |

## Docker Secrets
## Secrets

The following variables support the use of [Docker secrets][docker-secrets] for additional security.
The following variables support reading from files, either via [Systemd Credentials][systemd-creds] or [Docker secrets][docker-secrets], for additional security.

To use any of these, replace the regular environment variable with the equivalent `_FILE` environment variable. The value of
the `_FILE` variable should be set to the path of a file containing the variable value.
To use any of these, either set `CREDENTIALS_DIRECTORY` to a directory that contains files whose names are the regular variable names and whose contents are the secrets (if using Docker secrets, setting `CREDENTIALS_DIRECTORY=/run/secrets` will cause all secrets present to be used), or replace the regular variable with the equivalent `_FILE` environment variable as below. The value of the `_FILE` variable should be set to the path of a file containing the variable value.
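
For example, a minimal sketch of the `_FILE` approach (the password and path here are hypothetical placeholders):

```bash
# Store the DB password in a file instead of the environment
echo 'my-database-password' > /path/to/secrets/db_password
# Then replace DB_PASSWORD in the server's environment with its _FILE equivalent:
# DB_PASSWORD_FILE=/path/to/secrets/db_password
```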

| Regular Variable   | Equivalent Docker Secrets '\_FILE' Variable |
| :----------------- | :------------------------------------------ |

@@ -226,3 +225,4 @@ to use a Docker secret for the password in the Redis container.

[docker-secrets-docs]: https://github.com/docker-library/docs/tree/master/postgres#docker-secrets
[docker-secrets]: https://docs.docker.com/engine/swarm/secrets/
[ioredis]: https://ioredis.readthedocs.io/en/latest/README/#connect-to-redis
[systemd-creds]: https://systemd.io/CREDENTIALS/

BIN docs/docs/install/img/truenas/truenas00.webp (new file, 2.6 KiB)
BIN docs/docs/install/img/truenas/truenas04.webp (new file, 101 KiB)
BIN docs/docs/install/img/truenas/truenas06.webp (new file, 5.1 KiB)
BIN docs/docs/install/img/truenas/truenas07.webp (new file, 49 KiB)
BIN docs/docs/install/img/truenas/truenas08.webp (new file, 26 KiB)
BIN docs/docs/install/img/truenas/truenas09.webp (new file, 24 KiB)
BIN docs/docs/install/img/truenas/truenas10.webp (new file, 17 KiB)

@@ -2,6 +2,9 @@
sidebar_position: 80
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

# TrueNAS [Community]

:::note
@@ -9,211 +12,324 @@ This is a community contribution and not officially supported by the Immich team

Community support can be found in the dedicated channel on the [Discord Server](https://discord.immich.app/).

**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
**Please report app issues to the corresponding [GitHub Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
:::

:::warning
This guide covers the installation of Immich on TrueNAS Community Edition 24.10.2.2 (Electric Eel) and later.

We recommend keeping TrueNAS Community Edition and Immich relatively up to date with the latest versions to avoid any issues.

If you are using an older version of TrueNAS, we ask that you upgrade to the latest version before installing Immich. Check the [TrueNAS Community Edition Release Notes](https://www.truenas.com/docs/softwarereleases/) for more information on breaking changes, new features, and how to upgrade your system.
:::

Immich can easily be installed on TrueNAS Community Edition via the **Community** train application.
Consider reviewing the TrueNAS [Apps resources](https://apps.truenas.com/getting-started/) if you have not previously configured applications on your system.

TrueNAS Community Edition makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.

## First Steps

The Immich app in TrueNAS Community Edition installs, completes the initial configuration, then starts the Immich web portal.
When updates become available, TrueNAS alerts you and provides easy updates.

Before installing the Immich app in TrueNAS, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
You may also configure environment variables at any time after deploying the application.
### Setting up Storage Datasets

Before beginning app installation, [create the datasets](https://www.truenas.com/docs/scale/scaletutorials/storage/datasets/datasetsscale/) to use in the **Storage Configuration** section during installation.
Immich requires seven datasets: `library`, `upload`, `thumbs`, `profile`, `video`, `backups`, and `pgData`.
You can organize these as one parent with seven child datasets, for example `/mnt/tank/immich/library`, `/mnt/tank/immich/upload`, and so on.

In TrueNAS, Immich requires 2 datasets for the application to function correctly: `data` and `pgData`. You can set the datasets to any names to match your naming conventions or preferences.
You can organize these as one parent with two child datasets, for example `/mnt/tank/immich/data` and `/mnt/tank/immich/pgData`.

<img
  src={require('./img/truenas12.webp').default}
  width="30%"
  src={require('./img/truenas/truenas00.webp').default}
  width="40%"
  alt="Immich App Widget"
  className="border rounded-xl"
/>

:::info Permissions
The **pgData** dataset must be owned by the user `netdata` (UID 999) for Postgres to start. The other datasets must be owned by the user `root` (UID 0) or a group that includes the user `root` (UID 0) for Immich to have the necessary permissions.
:::info Datasets Permissions

If the **library** dataset uses ACL, it must have [ACL mode](https://www.truenas.com/docs/core/coretutorials/storage/pools/permissions/#access-control-lists) set to `Passthrough` if you plan on using a [storage template](/docs/administration/storage-template.mdx) and the dataset is configured for network sharing (its ACL type is set to `SMB/NFSv4`). When the template is applied and files need to be moved from **upload** to **library**, Immich performs `chmod` internally and needs to be allowed to execute the command. [More info.](https://github.com/immich-app/immich/pull/13017)
The **pgData** dataset must be owned by the user `netdata` (UID 999) for Postgres to start.

The `data` dataset must grant the **_modify_** permission to the user who will run Immich.

Since TrueNAS Community Edition 24.10.2.2, Immich can be run as any user and group; the default user is `apps` (UID 568) and the default group is `apps` (GID 568). This user, either `apps` or another user you choose, must have **_modify_** permissions on the **data** dataset.

For an easy setup:

- Create the parent dataset `immich`, keeping the default **Generic** preset.
- Select the `Dataset Preset` **Apps** instead of **Generic** when creating the `data` dataset. This will automatically give the correct permissions to the dataset. If you want to use another user for Immich, you can keep the **Generic** preset, but you will need to give the **_modify_** permission to that other user.
- For the `pgData` dataset, you can keep the default **Generic** preset, as permissions can be set during the installation of the Immich app (see the [Storage Configuration](#storage-configuration) section).
:::

:::tip
To improve performance, Immich recommends using SSDs for the database. If you have a pool made of SSDs, you can create the `pgData` dataset on that pool.

Thumbnails can also be stored on the SSDs for faster access. This is an advanced option and not required for Immich to run. More information on how you can use multiple datasets to manage Immich storage in a finer-grained manner can be found in the [Advanced: Multiple Datasets for Immich Storage](#advanced-multiple-datasets-for-immich-storage) section below.
:::

:::warning
If you just created the datasets using the **Apps** preset, you can skip this warning section.

If the **data** dataset uses ACL, it must have [ACL mode](https://www.truenas.com/docs/scale/scaletutorials/datasets/permissionsscale/) set to `Passthrough` if you plan on using a [storage template](/docs/administration/storage-template.mdx) and the dataset is configured for network sharing (its ACL type is set to `SMB/NFSv4`). When the template is applied and files need to be moved from **upload** to **library** (an internal folder created by Immich within the **data** dataset), Immich performs `chmod` internally and must be allowed to execute the command. [More info.](https://github.com/immich-app/immich/pull/13017)

To change or verify the ACL mode, go to the **Datasets** screen, select the **library** dataset, click the **Edit** button next to **Dataset Details**, click the **Advanced Options** tab, scroll down to the **ACL Mode** section, and select `Passthrough` from the dropdown menu. Click **Save** to apply the changes. If the option is greyed out, set the **ACL Type** to `SMB/NFSv4` first; then you can change the **ACL Mode** to `Passthrough`.
:::

## Installing the Immich Application

To install the **Immich** application, go to **Apps**, click **Discover Apps**, either begin typing Immich into the search field or scroll down to locate the **Immich** application widget.
To install the **Immich** application, go to **Apps**, click **Discover Apps**, and either begin typing Immich into the search field or scroll down to locate the **Immich** application widget.

<div style={{ marginBottom: '2rem', border: '1px solid #ccc', padding: '1rem', borderRadius: '8px' }}>

Click on the widget to open the **Immich** application details screen.
<img
  src={require('./img/truenas01.webp').default}
  src={require('./img/truenas/truenas01.webp').default}
  width="50%"
  alt="Immich App Widget"
  className="border rounded-xl"
/>

Click on the widget to open the **Immich** application details screen.
</div>

<br/><br/>
<div style={{ marginBottom: '2rem', border: '1px solid #ccc', padding: '1rem', borderRadius: '8px' }}>

Click **Install** to open the Immich application configuration screen.
<img
  src={require('./img/truenas02.webp').default}
  src={require('./img/truenas/truenas02.webp').default}
  width="100%"
  alt="Immich App Details Screen"
  className="border rounded-xl"
/>

Click **Install** to open the Immich application configuration screen.

<br/><br/>
</div>

Application configuration settings are presented in several sections, each explained below.
To find specific fields click in the **Search Input Fields** search field, scroll down to a particular section or click on the section heading on the navigation area in the upper-right corner.
To find specific fields, click in the **Search Input Fields** search field, scroll down to a particular section, or click on the section heading in the navigation area in the upper-right corner.

### Application Name and Version

<img
  src={require('./img/truenas03.webp').default}
  src={require('./img/truenas/truenas03.webp').default}
  width="100%"
  alt="Install Immich Screen"
  className="border rounded-xl"
  className="border rounded-xl mb-4"
/>

Accept the default value or enter a name in the **Application Name** field.
In most cases use the default name, but if adding a second deployment of the application you must change this name.
Keep the default value or enter a name in the **Application Name** field.
Change it if you're deploying a second instance.

Accept the default version number in **Version**.
When a new version becomes available, the application shows an update badge.
The **Installed Applications** screen shows the option to update applications.
This is the Immich version within the TrueNAS catalog (different from the Immich release version).

### Immich Configuration
|
||||
|
||||
<img
|
||||
src={require('./img/truenas05.webp').default}
|
||||
src={require('./img/truenas/truenas04.webp').default}
|
||||
width="40%"
|
||||
alt="Configuration Settings"
|
||||
className="border rounded-xl mb-4"
|
||||
/>
|
||||
|
||||
The **Timezone** is set to the system default, which usually matches your local timezone. You can change it to another timezone if you prefer.
|
||||
|
||||
**Enable Machine Learning** is enabled by default. It allows Immich to use machine learning features such as face recognition, image search, and smart duplicate detection. Untick this option if you do not want to use these features.
|
||||
|
||||
Select the **Machine Learning Image Type** based on the hardware you have. More details here: [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md)
|
||||
|
||||
**Database Password** should be set to a custom value using only the characters `A-Za-z0-9`. This password is used to secure the Postgres database.
|
||||
|
||||
**Redis Password** should be set to a custom value using only the characters `A-Za-z0-9`. Preferably, use a different password from the database password.
|
||||
|
||||
Keep the **Log Level** to the default `Log` value.
|
||||
|
||||
Leave **Hugging Face Endpoint** blank. (This is used to download ML models from a different source.)
|
||||
|
||||
Set **Database Storage Type** to the type of storage (**HDD** or **SSD**) that the pool where the **pgData** dataset is located uses.
|
||||
|
||||
**Additional Environment Variables** can be left blank.
|
||||
|
||||
<details>
|
||||
<summary>Advanced users: Adding Environment Variables</summary>
|
||||
|
||||
Environment variables can be set by clicking the **Add** button and filling in the **Name** and **Value** fields.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas/truenas05.webp').default}
|
||||
width="40%"
|
||||
alt="Environment Variables"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Accept the default value in **Timezone** or change to match your local timezone.
|
||||
**Timezone** is only used by the Immich `exiftool` microservice if it cannot be determined from the image metadata.
|
||||
These are used to add custom configuration options or to enable specific features.
|
||||
More information on available environment variables can be found in the **[environment variables documentation](/docs/install/environment-variables/)**.
|
||||
|
||||
Untick **Enable Machine Learning** if you will not use face recognition, image search, and smart duplicate detection.
|
||||
:::info
|
||||
Some environment variables are not available for the TrueNAS Community Edition app as they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
|
||||
|
||||
Accept the default option or select the **Machine Learning Image Type** for your hardware based on the [Hardware-Accelerated Machine Learning Supported Backends](/docs/features/ml-hardware-acceleration.md#supported-backends).
|
||||
Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
|
||||
:::
|
||||
|
||||
Immich's default is `postgres` but you should consider setting the **Database Password** to a custom value using only the characters `A-Za-z0-9`.
|
||||
</details>

The **Redis Password** should be set to a custom value using only the characters `A-Za-z0-9`.
### User and Group Configuration

Accept the **Log Level** default of **Log**.
Applications in TrueNAS run as a specific user and group. Immich uses the default user `apps` (UID 568) and the default group `apps` (GID 568).

Leave **Hugging Face Endpoint** blank. (This is for downloading ML models from a different source.)
<img
  src={require('./img/truenas/truenas06.webp').default}
  width="40%"
  alt="User and Group Configuration"
  className="border rounded-xl"
/>

Leave **Additional Environment Variables** blank, or see [Environment Variables](#environment-variables) to set them before installing.
- **User ID**: Keep the default value `apps` (UID 568) or define a different one if needed.

- **Group ID**: Keep the default value `apps` (GID 568) or define a different one if needed.

:::warning
If you change the user or group, make sure that the datasets you created for Immich data storage have the correct permissions set for that user and group, as specified in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above.
:::

### Network Configuration

<img
  src={require('./img/truenas06.webp').default}
  src={require('./img/truenas/truenas07.webp').default}
  width="40%"
  alt="Networking Settings"
  className="border rounded-xl"
/>

Accept the default port `30041` in **WebUI Port** or enter a custom port number.
:::info Allowed Port Numbers
Only numbers within the range 9000-65535 may be used on TrueNAS versions below TrueNAS Community Edition 24.10 Electric Eel.
- **Port Bind Mode**: This lets you expose the port to the host system, allowing you to access Immich from outside the TrueNAS system. Keep the default **_Publish port on the host for external access_** value unless you have a specific reason to change it.

Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/solutions/optimizations/security/#truenas-default-ports).
:::
- **Port Number**: Keep the default port `30041` or enter a custom port number.

- **Host IPs**: Leave this at the default (blank) value.

### Storage Configuration

Immich requires seven storage datasets.

<img
  src={require('./img/truenas07.webp').default}
  width="20%"
  alt="Configure Storage ixVolumes"
  className="border rounded-xl"
/>

:::note Default Setting (Not recommended)
The default setting for datasets is **ixVolume (dataset created automatically by the system)**, but this makes your data harder to access manually and can result in data loss if you delete the Immich app. (Not recommended)
:::danger Default Settings (Not recommended)
The default setting for datasets is **ixVolume (dataset created automatically by the system)**. This is not recommended, as it makes your data harder to access manually and can result in data loss if you delete the Immich app. It also makes snapshots and replication tasks harder to manage. Use the **Host Path (Path that already exists on the system)** option instead.
:::

For each Storage option, select **Host Path (Path that already exists on the system)** and then select the matching dataset [created before installing the app](#setting-up-storage-datasets): **Immich Library Storage**: `library`, **Immich Uploads Storage**: `upload`, **Immich Thumbs Storage**: `thumbs`, **Immich Profile Storage**: `profile`, **Immich Video Storage**: `video`, **Immich Backups Storage**: `backups`, **Postgres Data Storage**: `pgData`. These datasets could also be created from the shell, as sketched below.
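
If you prefer the shell, the seven datasets from the list above could be created along these lines (a sketch assuming a pool named `tank` and a parent dataset `immich`; the web UI with the **Apps** preset remains the recommended route because it also applies the correct ACLs):

```bash
# Create a parent dataset plus one child dataset per Immich storage option
zfs create tank/immich
for ds in library upload thumbs profile video backups pgData; do
  zfs create "tank/immich/${ds}"
done
```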
The storage configuration section allows you to set up the storage locations for Immich data. You can select the datasets created in the previous step.

<img
  src={require('./img/truenas08.webp').default}
  src={require('./img/truenas/truenas08.webp').default}
  width="40%"
  alt="Configure Storage Host Paths"
  alt="Configure Storage Volumes"
  className="border rounded-xl"
/>
The image above shows example values.

<br/>
For the Data Storage, select **Host Path (Path that already exists on the system)** and then select the dataset you created for Immich data storage, for example, `data`.

### Additional Storage [(External Libraries)](/docs/features/libraries)
The Machine Learning cache can be left at the default _Temporary_ setting.

For the Postgres Data Storage, select **Host Path (Path that already exists on the system)** and then select the dataset you created for Postgres data storage, for example, `pgData`.

:::info
**Postgres Data Storage**
Once **Host Path** is selected, an **_Automatic Permissions_** checkbox appears. If you have not set the ownership of the **pgData** dataset to `netdata` (UID 999), tick this box; it will automatically set the user ownership to `netdata` (UID 999) and the group ownership to `docker` (GID 999). If you have already set the ownership of the **pgData** dataset to `netdata` (UID 999), you can leave this box unticked.
:::
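
If you would rather set that ownership yourself from the TrueNAS shell instead of using **_Automatic Permissions_**, a command along these lines should work (the dataset path is an example; adjust it to your pool layout):

```bash
# Set pgData ownership to netdata (UID 999) and group docker (GID 999); example path
chown -R 999:999 /mnt/tank/immich/pgData
```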

### Additional Storage (Advanced Users)

<details>
<summary>External Libraries</summary>

:::danger Advanced Users Only
This feature should only be used by advanced users. If this is your first time installing Immich, then DO NOT mount an external library until you have a working setup. Also, your mount path MUST be unique and must NOT be your library or upload location, or a Linux system directory like `/lib`. The picture below shows a valid example.
This feature should only be used by advanced users. If this is your first time installing Immich, then DO NOT mount an external library until you have a working setup.
:::

<img
  src={require('./img/truenas10.webp').default}
  src={require('./img/truenas/truenas09.webp').default}
  width="40%"
  alt="Configure Storage Host Paths"
  alt="Add External Libraries with Additional Storage"
  className="border rounded-xl"
/>

You may configure [External Libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
The **Mount Path** is the location you will need to copy and paste into the External Library settings within Immich.
The **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.
You may configure [external libraries](/docs/features/libraries) by mounting them using **Additional Storage**.

<!-- A section for Labels would go here but I don't know what they do. -->
The dataset that contains your external library files must give at least **read** access to the user running Immich (default: `apps` (UID 568), `apps` (GID 568)).
If you want to be able to delete files or edit metadata in the external library using Immich, you will need to grant the **modify** permission to the user running Immich.

- **Mount Path** is the location you will need to copy and paste into the external library settings within Immich.
- **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.
- **Read Only** is a checkbox that you can tick if you want to prevent Immich from modifying the files in the external library. This is useful if you want to use Immich to view and search your external library without modifying it.

:::warning
Each mount path MUST be unique and must NOT be your library or upload location, or a Linux system directory like `/lib`.

A general recommendation is to mount any external libraries to a path beginning with `/mnt` or `/media` followed by a unique name, such as `/mnt/external-libraries` or `/media/my-external-libraries`. If you plan to mount multiple external libraries, you can use paths like `/mnt/external-libraries/library1`, `/mnt/external-libraries/library2`, etc.
:::
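
As a concrete illustration, a single Additional Storage entry for an external photo collection might look like this (the dataset and folder names are hypothetical):

```
Type:       Host Path (Path that already exists on the system)
Mount Path: /mnt/external-libraries/family-photos
Host Path:  /mnt/tank/media/family-photos
Read Only:  ticked, if Immich should never modify these files
```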

</details>

<details>
<summary>Multiple Datasets for Immich Storage</summary>

:::danger Advanced Users Only
This feature should only be used by advanced users.
:::

Immich can use multiple datasets for its storage, allowing you to manage your data more granularly, similar to the old storage configuration. This is useful if you want to separate your data into different datasets for performance or organizational reasons. There is a general guide for this [here](/docs/guides/custom-locations), but read on for the TrueNAS guide.

Each additional dataset must grant the **_modify_** permission to the user who will run Immich (default: `apps` (UID 568), `apps` (GID 568)).
As described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, you have to create the datasets with the **Apps** preset to ensure the correct permissions are set, or you can set the permissions manually after creating the datasets.
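
If you take the manual route from the shell, ownership for the default Immich user can be set along these lines (the pool and dataset names are examples; ACLs edited through the TrueNAS web UI offer finer-grained control than a plain `chown`):

```bash
# Give the default Immich user/group (apps, UID/GID 568) ownership of each dataset
for ds in library upload thumbs profile encoded-video backups; do
  chown -R 568:568 "/mnt/tank/immich/${ds}"
done
```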

Immich uses six folders for its storage: `library`, `upload`, `thumbs`, `profile`, `encoded-video`, and `backups`. You can create a dataset for each of these folders or only for some of them.

To mount these datasets:

1. Add an **Additional Storage** entry for each dataset you want to use.
2. Set **Type** to **Host Path (Path that already exists on the system)**.
3. Enter `/data/<folder-name>` as the **Mount Path**, where `<folder-name>` is the name of the folder you want to mount, for example, `library`, `upload`, `thumbs`, `profile`, `encoded-video`, or `backups`.
:::danger Important
You have to write the full path, including `/data/`, as Immich expects the data to be in that location.
If you omit this prefix, Immich will not be able to find the data and will not write it to the location you specified.
:::
4. Select the **Host Path** as the dataset you created for that folder, for example, `/mnt/tank/immich/library`, `/mnt/tank/immich/upload`, etc. A complete mapping is sketched below.
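
Put together, the entries might map as follows (assuming a pool named `tank`, a parent dataset `immich`, and datasets named after the Immich folders):

```
Mount Path             Host Path
/data/library          /mnt/tank/immich/library
/data/upload           /mnt/tank/immich/upload
/data/thumbs           /mnt/tank/immich/thumbs
/data/profile          /mnt/tank/immich/profile
/data/encoded-video    /mnt/tank/immich/encoded-video
/data/backups          /mnt/tank/immich/backups
```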

<img
  src={require('./img/truenas/truenas10.webp').default}
  width="40%"
  alt="Use Multiple Datasets for Immich Storage with Additional Storage"
  className="border rounded-xl"
/>

</details>

<!-- A section for Labels could be added, but I don't think it is needed as they are of no use for Immich. -->

### Resources Configuration

<img
  src={require('./img/truenas09.webp').default}
  src={require('./img/truenas/truenas11.webp').default}
  width="40%"
  alt="Resource Limits"
  className="border rounded-xl"
/>

Accept the default **CPU** limit of `2` threads or specify the number of threads (CPUs with multi-/hyper-threading have 2 threads per core).
- **CPU**: Depending on your system resources, you can keep the default value of `2` threads or specify a different number. Immich recommends at least `8` threads.

Specify the **Memory** limit in MB of RAM. Immich recommends at least 6000 MB (6 GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.
- **Memory**: Limit in MB of RAM. Immich recommends at least 6000 MB (6 GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.

:::info Older TrueNAS Versions
Before TrueNAS Community Edition version 24.10 Electric Eel:
Both **CPU** and **Memory** are limits, not reservations. This means that Immich can use up to the specified amount of CPU threads and RAM, but it will not reserve that amount of resources at all times. The system allocates resources as needed, and Immich will use less than the specified amount most of the time.

The **CPU** value was specified in a different format, with a default of `4000m`, which is 4 threads.
- Enable **GPU Configuration** options if you have a GPU or CPU with integrated graphics that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md).

The **Memory** value was specified in a different format, with a default of `8Gi`, which is 8 GiB of RAM. The value was specified in bytes or as a number with a measurement suffix, for example `129M`, `123Mi`, or `1000000000`.
:::

Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)
The process for NVIDIA GPU passthrough requires additional steps.
More details here: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)

### Install

Finally, click **Install**.
The system opens the **Installed Applications** screen with the Immich app in the **Deploying** state.
When the installation completes it changes to **Running**.
When the installation completes, it changes to **Running**.

<img
  src={require('./img/truenas04.webp').default}
  src={require('./img/truenas/truenas12.webp').default}
  width="100%"
  alt="Immich Installed"
  className="border rounded-xl"
/>

Click **Web Portal** on the **Application Info** widget to open the Immich web interface to set up your account and begin uploading photos.
Click **Web Portal** on the **Application Info** widget, or go to `http://<your-truenas-ip>:30041` in your web browser, to open the Immich web interface. This starts the onboarding process, where you set up your first user account, which will be an administrator account.

After that, you can start using Immich to upload and manage your photos and videos.

:::tip
For more information on how to use the application once installed, please refer to the [Post Install](/docs/install/post-install.mdx) guide.
@@ -228,23 +344,6 @@ For more information on how to use the application once installed, please refer
- Click **Update** at the very bottom of the page to save changes.
- TrueNAS automatically updates, recreates, and redeploys the Immich container with the updated settings.

## Environment Variables

You can set [Environment Variables](/docs/install/environment-variables) by clicking **Add** on the **Additional Environment Variables** option and filling in the **Name** and **Value** fields.

<img
  src={require('./img/truenas11.webp').default}
  width="40%"
  alt="Environment Variables"
  className="border rounded-xl"
/>

:::info
Some Environment Variables are not available for the TrueNAS Community Edition app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).

Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
:::

## Updating the App

:::danger
@@ -261,3 +360,116 @@ To update the app to the latest version:
- You may view the Changelog.
- Click **Upgrade** to begin the process and open a counter dialog that shows the upgrade progress.
- When complete, the update badge and buttons disappear, and the application Update state on the Installed screen changes from **Update Available** to **Up to date**.

## Migration

:::danger
Perform a backup of your Immich data before proceeding with the migration steps below. This is crucial to prevent any data loss if something goes wrong during the migration process.

The migration should also be performed while the Immich app is not running, to ensure no data is being written while you are copying it.
:::

### Migration from Old Storage Configuration

There are two ways to migrate from the old storage configuration to the new one, depending on whether you want to keep the existing multiple datasets or move to a two-dataset configuration with a single dataset for Immich data storage and a single dataset for Postgres data storage.

:::note Old TrueNAS Versions Permissions
If you were using an older version of TrueNAS (before 24.10.2.2), the datasets, except the one for **pgData**, only had to be owned by the `root` user (UID 0). You might have to add the **modify** permission for the `apps` user (UID 568), or the user you want to run Immich as, to all of them except **pgData**. The steps to add or change ACL permissions are described in the [TrueNAS documentation](https://www.truenas.com/docs/scale/scaletutorials/datasets/permissionsscale/).
:::

<Tabs groupId="truenas-migration-tabs">
<TabItem value="migrate-new-dataset" label="Migrate data to a new dataset (recommended)" default>

To migrate from the old storage configuration to the new one, you will need to create a new dataset for the Immich data storage and copy the data from the old datasets into it. The steps are as follows:

1. **Stop the Immich app** from the TrueNAS web interface to ensure no data is being written while you are copying the data.
2. **Create a new dataset** for the Immich data storage, for example, `data`. As described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, create the dataset with the **Apps** preset to ensure the correct permissions are set.
3. **Copy the data** from the old datasets to the new dataset. We advise using the `rsync` command to copy the data, as it will preserve the permissions and ownership of the files. The following commands are examples:

```bash
rsync -av /mnt/tank/immich/library/ /mnt/tank/immich/data/library/
rsync -av /mnt/tank/immich/upload/ /mnt/tank/immich/data/upload/
rsync -av /mnt/tank/immich/thumbs/ /mnt/tank/immich/data/thumbs/
rsync -av /mnt/tank/immich/profile/ /mnt/tank/immich/data/profile/
rsync -av /mnt/tank/immich/video/ /mnt/tank/immich/data/encoded-video/
rsync -av /mnt/tank/immich/backups/ /mnt/tank/immich/data/backups/
```

Make sure to replace `/mnt/tank/immich/` with the correct path to your old datasets and `/mnt/tank/immich/data/` with the correct path to your new dataset.

:::tip
If you were using **ixVolume (dataset created automatically by the system)** for Immich data storage, the path to the data should be `/mnt/.ix-apps/app_mounts/immich/`. You have to use this path instead of `/mnt/tank/immich/` in the `rsync` commands above, for example:

```bash
rsync -av /mnt/.ix-apps/app_mounts/immich/library/ /mnt/tank/immich/data/library/
```

If you were also using an ixVolume for Postgres data storage, first create the **pgData** dataset, as described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, and then use the following command to copy the Postgres data:

```bash
rsync -av /mnt/.ix-apps/app_mounts/immich/pgData/ /mnt/tank/immich/pgData/
```

:::

:::warning
Make sure that for each folder, the `.immich` file is copied as well, as it contains important metadata for Immich. If for some reason the `.immich` file is not copied, you can copy it manually with the `rsync` command, for example:

```bash
rsync -av /mnt/tank/immich/library/.immich /mnt/tank/immich/data/library/
```

Replace `library` with the name of the folder you are copying the file into.
:::

4. **Update the permissions**: `rsync` preserves the permissions of the copied data, so you must ensure that the `apps` user (UID 568) has the correct permissions on all of it. If you just created the dataset with the **Apps** preset, go to the **Datasets** screen in the TrueNAS web interface, select the **data** dataset, click the **Edit** button next to **Permissions**, tick the "Apply permissions recursively" checkbox, and click **Save**. This will apply the correct permissions to all the copied data.
5. **Update the Immich app** to use the new dataset:
   - Go to the **Installed Applications** screen and select Immich from the list of installed applications.
   - Click **Edit** on the **Application Info** widget.
   - In the **Storage Configuration** section, untick the **Use Old Storage Configuration (Deprecated)** checkbox.
   - For the **Data Storage**, select **Host Path (Path that already exists on the system)** and then select the new dataset you created for Immich data storage, for example, `data`.
   - For the **Postgres Data Storage**, verify that it is still set to the dataset you created for Postgres data storage, for example, `pgData`.
   - Click **Update** at the bottom of the page to save changes.

6. **Start the Immich app** from the TrueNAS web interface.

This will recreate the Immich container with the new storage configuration and start the app.

If everything went well, you should now be able to access Immich with the new storage configuration. You can verify that the data has been copied correctly by checking the Immich web interface and ensuring that all your photos and videos are still available. You may delete the old datasets using the TrueNAS web interface if you no longer need them.

If you were using **ixVolume (dataset created automatically by the system)** or folders for Immich data storage, you can delete the old data using the following commands:

```bash
rm -r /mnt/.ix-apps/app_mounts/immich/library
rm -r /mnt/.ix-apps/app_mounts/immich/uploads
rm -r /mnt/.ix-apps/app_mounts/immich/thumbs
rm -r /mnt/.ix-apps/app_mounts/immich/profile
rm -r /mnt/.ix-apps/app_mounts/immich/video
rm -r /mnt/.ix-apps/app_mounts/immich/backups
```

</TabItem>
<TabItem value="migrate-old-dataset" label="Keep the existing datasets">

To migrate from the old storage configuration to the new one without creating new datasets, follow these steps:
1. **Stop the Immich app** from the TrueNAS web interface to ensure no data is being written while you are updating the app.
2. **Update the dataset permissions**: Ensure that the datasets used for Immich data storage (`library`, `upload`, `thumbs`, `profile`, `video`, `backups`) have the correct permissions set for the user who will run Immich. The user should have ***modify*** permissions on these datasets. The default user for Immich is `apps` (UID 568) and the default group is `apps` (GID 568). If you are using a different user, make sure to set the permissions accordingly. You can do this from the TrueNAS web interface by going to the **Datasets** screen, selecting each dataset, clicking the **Edit** button next to **Permissions**, and adding the user with ***modify*** permissions.
3. **Update the Immich app** to use the existing datasets:
   - Go to the **Installed Applications** screen and select Immich from the list of installed applications.
   - Click **Edit** on the **Application Info** widget.
   - In the **Storage Configuration** section, untick the **Use Old Storage Configuration (Deprecated)** checkbox.
   - For the **Data Storage**, you can keep the **ixVolume (dataset created automatically by the system)**, as no data will be directly written to it. We recommend selecting **Host Path (Path that already exists on the system)** and then selecting a **new** dataset you created for Immich data storage, for example, `data`.
   - For the **Postgres Data Storage**, keep **Host Path (Path that already exists on the system)** and then select the existing dataset you used for Postgres data storage, for example, `pgData`.
   - Following the instructions in the [Multiple Datasets for Immich Storage](#additional-storage-advanced-users) section, add, **for each old dataset**, a new Additional Storage entry with the following settings:
     - **Type**: `Host Path (Path that already exists on the system)`
     - **Mount Path**: `/data/<folder-name>` (e.g. `/data/library`)
     - **Host Path**: `/mnt/<your-pool-name>/<dataset-name>` (e.g. `/mnt/tank/immich/library`)
   :::danger Use the correct path names
   Make sure to replace `<folder-name>` with the actual name of the folder used by Immich: `library`, `upload`, `thumbs`, `profile`, `encoded-video`, and `backups`. Also, replace `<your-pool-name>` and `<dataset-name>` with the actual names of your pool and dataset.
   :::
     - **Read Only**: Keep it unticked, as Immich needs to write to these datasets.
   - Click **Update** at the bottom of the page to save changes.
4. **Start the Immich app** from the TrueNAS web interface. This will recreate the Immich container with the new storage configuration and start the app. If everything went well, you should now be able to access Immich with the new storage configuration. You can verify that the data is still available by checking the Immich web interface and ensuring that all your photos and videos are still accessible.

</TabItem>
</Tabs>

@@ -75,7 +75,6 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
5. Click "**Save Changes**". You will be prompted to edit stack UI labels; leave this blank and click "**Ok**".
6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**".
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following (a combined sketch follows this list):

- `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and enter its **absolute** path here. For example, my _"images"_ share has a folder within it called _"immich"_. If I browse to this directory in the terminal and type `pwd`, the output is `/mnt/user/images/immich`; this is the exact value I need to enter as my `UPLOAD_LOCATION`.
- `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`). This uses the `appdata` share. Also create the `postgresql` folder by running `mkdir -p /mnt/user/{share_location}/postgresql/data`. If left at the default, it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder, which it doesn't have permissions to, resulting in this container continuously restarting.
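
Put together, the two edited lines in the env file would end up looking something like this (the share and folder names follow the examples above; adjust them to your system):

```bash
UPLOAD_LOCATION=/mnt/user/images/immich
DB_DATA_LOCATION=/mnt/user/appdata/postgresql/data
```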

docs/package-lock.json (generated, 1533 changed lines)
@@ -7,7 +7,8 @@
    "format": "prettier --check .",
    "format:fix": "prettier --write .",
    "start": "docusaurus start --port 3005",
    "build": "docusaurus build",
    "copy:openapi": "jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
    "build": "npm run copy:openapi && docusaurus build",
    "swizzle": "docusaurus swizzle",
    "deploy": "docusaurus deploy",
    "clear": "docusaurus clear",
@@ -16,8 +17,9 @@
    "write-heading-ids": "docusaurus write-heading-ids"
  },
  "dependencies": {
    "@docusaurus/core": "~3.7.0",
    "@docusaurus/preset-classic": "~3.7.0",
    "@docusaurus/core": "~3.8.0",
    "@docusaurus/preset-classic": "~3.8.0",
    "@docusaurus/theme-common": "~3.8.0",
    "@mdi/js": "^7.3.67",
    "@mdi/react": "^1.6.1",
    "@mdx-js/react": "^3.0.0",
@@ -26,6 +28,7 @@
    "clsx": "^2.0.0",
    "docusaurus-lunr-search": "^3.3.2",
    "docusaurus-preset-openapi": "^0.7.5",
    "lunr": "^2.3.9",
    "postcss": "^8.4.25",
    "prism-react-renderer": "^2.3.1",
    "raw-loader": "^4.0.2",
@@ -35,7 +38,7 @@
    "url": "^0.11.0"
  },
  "devDependencies": {
    "@docusaurus/module-type-aliases": "~3.7.0",
    "@docusaurus/module-type-aliases": "~3.8.0",
    "@docusaurus/tsconfig": "^3.7.0",
    "@docusaurus/types": "^3.7.0",
    "prettier": "^3.2.4",
@@ -57,6 +60,6 @@
    "node": ">=20"
  },
  "volta": {
    "node": "22.16.0"
    "node": "22.18.0"
  }
}

@@ -58,6 +58,12 @@ const guides: CommunityGuidesProps[] = [
    description: 'Access Immich with an end-to-end encrypted connection.',
    url: 'https://meshnet.nordvpn.com/how-to/remote-files-media-access/immich-remote-access',
  },
  {
    title: 'Trust Self Signed Certificates with Immich - OAuth Setup',
    description:
      'Set up Certificate Authority trust with Immich, and your private OAuth2/OpenID service, while using a private CA for HTTPS communication.',
    url: 'https://github.com/immich-app/immich/discussions/18614',
  },
];

function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {

@@ -100,6 +100,11 @@ const projects: CommunityProjectProps[] = [
    description: 'Automatically optimize files uploaded to Immich in order to save storage space',
    url: 'https://github.com/miguelangel-nubla/immich-upload-optimizer',
  },
  {
    title: 'Immich Machine Learning Load Balancer',
    description: 'Speed up your machine learning by load balancing your requests to multiple computers',
    url: 'https://github.com/apetersson/immich_ml_balancer',
  },
];

function CommunityProject({ title, description, url }: CommunityProjectProps): JSX.Element {

@@ -2,4 +2,23 @@

## TypeORM Upgrade

The upgrade to Immich `v2.x.x` has a required upgrade path to `v1.132.0+`. This means it is required to start up the application at least once on version `1.132.0` (or later). Doing so will complete database schema upgrades that are required for `v2.0.0`. After Immich has successfully booted on this version, shut the system down and try the `v2.x.x` upgrade again.
In order to update Immich to `v1.137.0` (or above), the application must be started at least once on a version in the range between `1.132.0` and `1.136.0`. Doing so will complete database schema upgrades that are required for `v1.137.0` (and above). After Immich has successfully updated to a version in this range, you can attempt to update to `v1.137.0` (or above). We recommend users upgrade to `1.132.0`, since it does not have any other breaking changes.
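
For Docker Compose installs, one way to step through the required intermediate version is to pin the release in your `.env` file before pulling. `IMMICH_VERSION` is the release pin referenced elsewhere in these docs; the value below is just an example of a version in the allowed range:

```bash
# .env — pin an intermediate release, then run: docker compose pull && docker compose up -d
IMMICH_VERSION=v1.132.0
```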

## Inconsistent Media Location

:::caution
This error is related to the location of media files _inside the container_. Never move files on the host system when you run into this error message.
:::

Immich automatically tries to detect where your Immich data is located. On startup, it compares the detected media location with the file paths in the database and throws an Inconsistent Media Location error when they do not match.

To fix this issue, verify that the `IMMICH_MEDIA_LOCATION` environment variable and the `UPLOAD_LOCATION` volume mount are in sync with the database paths.

If you would like to migrate from one media location to another, first start Immich successfully on `v1.136.0` or later, then do the following steps (a sketch follows the list):

1. Stop Immich
2. Update `IMMICH_MEDIA_LOCATION` to the new location
3. Update the right-hand side of the `UPLOAD_LOCATION` volume mount to the new location
4. Start up Immich

After version `1.136.0`, Immich can detect when a media location has moved and will automatically update the database paths to keep them in sync.
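
As an illustration for a Docker Compose setup (the container-side path below is hypothetical), steps 2 and 3 amount to keeping the environment variable and the right-hand side of the volume mount identical:

```
# docker-compose.yml excerpt (sketch)
services:
  immich-server:
    environment:
      IMMICH_MEDIA_LOCATION: /new-media-location
    volumes:
      - ${UPLOAD_LOCATION}:/new-media-location
```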
@@ -85,6 +85,7 @@ import React from 'react';
|
||||
import { Item, Timeline } from '../components/timeline';
|
||||
|
||||
const releases = {
|
||||
'v1.135.0': new Date(2025, 5, 18),
|
||||
'v1.133.0': new Date(2025, 4, 21),
|
||||
'v1.130.0': new Date(2025, 2, 25),
|
||||
'v1.127.0': new Date(2025, 1, 26),
|
||||
@@ -196,14 +197,6 @@ const roadmap: Item[] = [
|
||||
description: 'Automate tasks with workflows',
|
||||
getDateLabel: () => 'Planned for 2025',
|
||||
},
|
||||
{
|
||||
done: false,
|
||||
icon: mdiTableKey,
|
||||
iconColor: 'gray',
|
||||
title: 'Fine grained access controls',
|
||||
description: 'Granular access controls for users and api keys',
|
||||
getDateLabel: () => 'Planned for 2025',
|
||||
},
|
||||
{
|
||||
done: false,
|
||||
icon: mdiImageEdit,
|
||||
@@ -239,12 +232,26 @@ const roadmap: Item[] = [
|
||||
];
|
||||
|
||||
const milestones: Item[] = [
|
||||
{
|
||||
icon: mdiStar,
|
||||
iconColor: 'gold',
|
||||
title: '70,000 Stars',
|
||||
description: 'Reached 70K Stars on GitHub!',
|
||||
getDateLabel: withLanguage(new Date(2025, 6, 9)),
|
||||
},
|
||||
withRelease({
|
||||
icon: mdiTableKey,
|
||||
iconColor: 'gray',
|
||||
title: 'Fine grained access controls',
|
||||
description: 'Granular access controls for api keys',
|
||||
release: 'v1.135.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiCast,
|
||||
iconColor: 'aqua',
|
||||
title: 'Google Cast (web)',
|
||||
title: 'Google Cast (web and mobile)',
|
||||
description: 'Cast assets to Google Cast/Chromecast compatible devices',
|
||||
release: 'v1.133.0',
|
||||
release: 'v1.135.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiLockOutline,

docs/static/_redirects (vendored, 5 changed lines)
@@ -1,4 +1,5 @@
/docs /docs/overview/introduction 307
/docs /docs/overview/welcome 307
/docs/ /docs/overview/welcome 307
/docs/mobile-app-beta-program /docs/features/mobile-app 307
/docs/contribution-guidelines /docs/overview/support-the-project#contributing 307
/docs/install /docs/install/docker-compose 307
@@ -30,4 +31,4 @@
/docs/guides/api-album-sync /docs/community-projects 307
/docs/guides/remove-offline-files /docs/community-projects 307
/milestones /roadmap 307
/docs/overview/introduction /docs/overview/welcome 307
/docs/overview/introduction /docs/overview/welcome 307

docs/static/archived-versions.json (vendored, 20 changed lines)
@@ -1,4 +1,24 @@
[
  {
    "label": "v1.137.3",
    "url": "https://v1.137.3.archive.immich.app"
  },
  {
    "label": "v1.137.2",
    "url": "https://v1.137.2.archive.immich.app"
  },
  {
    "label": "v1.137.1",
    "url": "https://v1.137.1.archive.immich.app"
  },
  {
    "label": "v1.137.0",
    "url": "https://v1.137.0.archive.immich.app"
  },
  {
    "label": "v1.136.0",
    "url": "https://v1.136.0.archive.immich.app"
  },
  {
    "label": "v1.135.3",
    "url": "https://v1.135.3.archive.immich.app"

@@ -1 +1 @@
22.16.0
22.18.0

@@ -3,7 +3,6 @@ name: immich-e2e
services:
  immich-server:
    container_name: immich-e2e-server
    command: ['./start.sh']
    image: immich-server:latest
    build:
      context: ../
@@ -36,10 +35,10 @@ services:
      - 2285:2285

  redis:
    image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
    image: redis:6.2-alpine@sha256:7fe72c486b910f6b1a9769c937dad5d63648ddee82e056f47417542dd40825bb

  database:
    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:3aef84a0a4fabbda17ef115c3019ba0c914ec73e9f6e59203674322d858b8eea
    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:0e763a2383d56f90364fcd72767ac41400cd30d2627f407f7e7960c9f1923c21
    command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
    environment:
      POSTGRES_PASSWORD: postgres

e2e/package-lock.json (generated, 1439 changed lines)
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "immich-e2e",
|
||||
"version": "1.135.3",
|
||||
"version": "1.137.3",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
@@ -24,17 +24,18 @@
|
||||
"@immich/cli": "file:../cli",
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@socket.io/component-emitter": "^3.1.2",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^22.15.32",
|
||||
"@types/node": "^22.17.0",
|
||||
"@types/oidc-provider": "^9.0.0",
|
||||
"@types/pg": "^8.15.1",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"eslint": "^9.14.0",
|
||||
"eslint-config-prettier": "^10.0.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^59.0.0",
|
||||
"eslint-plugin-unicorn": "^60.0.0",
|
||||
"exiftool-vendored": "^28.3.1",
|
||||
"globals": "^16.0.0",
|
||||
"jose": "^5.6.3",
|
||||
@@ -44,6 +45,7 @@
|
||||
"pngjs": "^7.0.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prettier-plugin-organize-imports": "^4.0.0",
|
||||
"sharp": "^0.34.0",
|
||||
"socket.io-client": "^4.7.4",
|
||||
"supertest": "^7.0.0",
|
||||
"typescript": "^5.3.3",
|
||||
@@ -52,6 +54,6 @@
|
||||
"vitest": "^3.0.0"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.16.0"
|
||||
"node": "22.18.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
ReactionType,
|
||||
createActivity as create,
|
||||
createAlbum,
|
||||
removeAssetFromAlbum,
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
@@ -342,5 +343,36 @@ describe('/activities', () => {
|
||||
|
||||
expect(status).toBe(204);
|
||||
});
|
||||
|
||||
it('should return empty list when asset is removed', async () => {
|
||||
const album3 = await createAlbum(
|
||||
{
|
||||
createAlbumDto: {
|
||||
albumName: 'Album 3',
|
||||
assetIds: [asset.id],
|
||||
},
|
||||
},
|
||||
{ headers: asBearerAuth(admin.accessToken) },
|
||||
);
|
||||
|
||||
await createActivity({ albumId: album3.id, assetId: asset.id, type: ReactionType.Like });
|
||||
|
||||
await removeAssetFromAlbum(
|
||||
{
|
||||
id: album3.id,
|
||||
bulkIdsDto: {
|
||||
ids: [asset.id],
|
||||
},
|
||||
},
|
||||
{ headers: asBearerAuth(admin.accessToken) },
|
||||
);
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
.query({ albumId: album.id })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toEqual(200);
|
||||
expect(body).toEqual([]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -470,7 +470,7 @@ describe('/albums', () => {
|
||||
.send({ ids: [asset.id] });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('Not found or no album.addAsset access'));
|
||||
expect(body).toEqual(errorDto.badRequest('Not found or no albumAsset.create access'));
|
||||
});
|
||||
|
||||
it('should add duplicate assets only once', async () => {
|
||||
@@ -599,7 +599,7 @@ describe('/albums', () => {
|
||||
.send({ ids: [user1Asset1.id] });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('Not found or no album.removeAsset access'));
|
||||
expect(body).toEqual(errorDto.badRequest('Not found or no albumAsset.delete access'));
|
||||
});
|
||||
|
||||
it('should remove duplicate assets only once', async () => {
|
||||
|
||||
@@ -20,7 +20,7 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await utils.resetDatabase(['api_keys']);
|
||||
await utils.resetDatabase(['api_key']);
|
||||
});
|
||||
|
||||
describe('POST /api-keys', () => {
|
||||
|
||||
@@ -15,6 +15,7 @@ import { DateTime } from 'luxon';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import sharp from 'sharp';
|
||||
import { Socket } from 'socket.io-client';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { makeRandomImage } from 'src/generators';
|
||||
@@ -40,6 +41,40 @@ const today = DateTime.fromObject({
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
|
||||
|
||||
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
|
||||
// Generate unique color to ensure different checksums for each image
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
// Create a 100x100 solid color JPEG using Sharp
|
||||
const imageBytes = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
// Add random suffix to filename to avoid collisions
|
||||
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
|
||||
const filepath = join(tempDir, uniqueFilename);
|
||||
await writeFile(filepath, imageBytes);
|
||||
|
||||
// Filter out undefined values before writing EXIF
|
||||
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
|
||||
|
||||
await exiftool.write(filepath, cleanExifData);
|
||||
|
||||
// Re-read the image bytes after EXIF has been written
|
||||
const finalImageBytes = await readFile(filepath);
|
||||
|
||||
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
|
||||
};
|
||||
|
||||
describe('/asset', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let websocket: Socket;
|
||||
@@ -819,6 +854,30 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /assets', () => {
|
||||
it('should update date time original relatively', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ ids: [user1Assets[0].id], dateTimeRelative: -1441 });
|
||||
|
||||
expect(body).toEqual({});
|
||||
expect(status).toEqual(204);
|
||||
|
||||
const result = await request(app)
|
||||
.get(`/assets/${user1Assets[0].id}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send();
|
||||
|
||||
expect(result.body).toMatchObject({
|
||||
id: user1Assets[0].id,
|
||||
exifInfo: expect.objectContaining({
|
||||
dateTimeOriginal: '2023-11-19T01:10:00+00:00',
|
||||
}),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets', () => {
|
||||
beforeAll(setupTests, 30_000);
|
||||
|
||||
@@ -1190,6 +1249,411 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('EXIF metadata extraction', () => {
|
||||
describe('Additional date tag extraction', () => {
|
||||
describe('Date-time vs time-only tag handling', () => {
|
||||
it('should fall back to file timestamps when only time-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
|
||||
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
|
||||
// Exclude all date-time tags to force fallback to file timestamps
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
SonyDateTime2: undefined,
|
||||
GPSDateStamp: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should prefer DateTimeOriginal over time-only tags', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
|
||||
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
|
||||
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal, not TimeCreated
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateTime tag extraction', () => {
|
||||
it('should extract GPSDateTime with GPS coordinates', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: 37.7749,
|
||||
GPSLongitude: -122.4194,
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T12:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('CreateDate tag extraction', () => {
|
||||
it('should extract CreateDate when available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
|
||||
CreateDate: '2023:11:15 10:30:00',
|
||||
// Exclude other higher priority date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T10:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateStamp tag extraction', () => {
|
||||
it('should fall back to file timestamps when only date-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
|
||||
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
|
||||
GPSLatitude: 51.5074,
|
||||
GPSLongitude: -0.1278,
|
||||
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
|
||||
DateTimeOriginal: undefined,
|
||||
CreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
/*
|
||||
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
|
||||
*
|
||||
* NOT WRITABLE to JPEG:
|
||||
* - MediaCreateDate: Can be read from video files but not written to JPEG
|
||||
* - DateTimeCreated: Read-only tag in JPEG format
|
||||
* - DateTimeUTC: Cannot be written to JPEG files
|
||||
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
|
||||
* - SubSecMediaCreateDate: Tag not defined for JPEG format
|
||||
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
|
||||
*
|
||||
* WRITABLE but NOT READABLE from JPEG:
|
||||
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
|
||||
* - SubSecCreateDate: Can be written but not read back from JPEG
|
||||
*
|
||||
* EFFECTIVELY TESTABLE TAGS (writable and readable):
|
||||
* - DateTimeOriginal ✓
|
||||
* - CreateDate ✓
|
||||
* - CreationDate ✓
|
||||
* - GPSDateTime ✓
|
||||
*
|
||||
* The metadata service correctly handles non-readable tags and will fall back to
|
||||
* file timestamps when only non-readable tags are present.
|
||||
*/
|
||||
|
||||
describe('Date tag priority order', () => {
|
||||
it('should respect the complete date tag priority order', async () => {
|
||||
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
|
||||
const testCases = [
|
||||
{
|
||||
name: 'DateTimeOriginal has highest priority among testable tags',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-04-04T04:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreateDate when DateTimeOriginal missing',
|
||||
exifData: {
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-05-05T05:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when standard EXIF tags missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-07-07T07:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'GPSDateTime when no other testable date tags present',
|
||||
exifData: {
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
Make: 'SONY',
|
||||
},
|
||||
expectedDate: '2023-10-10T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const { imageBytes, filename } = await createTestImageWithExif(
|
||||
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
|
||||
testCase.exifData,
|
||||
);
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
|
||||
expect(
|
||||
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
|
||||
`Date mismatch for: ${testCase.name}`,
|
||||
).toBe(new Date(testCase.expectedDate).getTime());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases for date tag handling', () => {
|
||||
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
|
||||
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Intentionally no GPSTimeStamp
|
||||
// Exclude all other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
      it('should handle all testable date tags present to verify complete priority order', async () => {
        const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
          // All TESTABLE date tags for the JPEG format (writable AND readable)
          DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
          CreateDate: '2023:05:05 05:00:00', // TESTABLE
          CreationDate: '2023:07:07 07:00:00', // TESTABLE
          GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
          // Note: Excluded non-testable tags:
          // SubSec tags: writable but not readable from JPEG
          // Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
          // Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
        });

        const asset = await utils.createAsset(admin.accessToken, {
          assetData: {
            filename,
            bytes: imageBytes,
          },
        });

        await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

        const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

        expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
        // Should use DateTimeOriginal as it has the highest priority among testable tags
        expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
          new Date('2023-04-04T04:00:00.000Z').getTime(),
        );
      });

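      // The exclusion lists in these tests hint at the full candidate order. A
      // hypothetical reconstruction of that priority list, inferred only from
      // the tags named here (the real EXIF_DATE_TAGS constant may differ):
      const DATE_TAG_PRIORITY = [
        'SubSecDateTimeOriginal', // writable to JPEG but not readable back here
        'DateTimeOriginal', // highest-priority readable tag; wins in the test above
        'SubSecCreateDate',
        'SubSecMediaCreateDate',
        'CreateDate',
        'MediaCreateDate', // not writable to JPEG
        'CreationDate',
        'DateTimeCreated', // not writable to JPEG
        'GPSDateTime',
        'DateTimeUTC', // not writable to JPEG
      ] as const;
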
      it('should use CreationDate when SubSec tags are missing', async () => {
        const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
          CreationDate: '2023:07:07 07:00:00', // WRITABLE
          GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
          // Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
          // Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
          // Exclude SubSec and standard EXIF tags
          SubSecDateTimeOriginal: undefined,
          DateTimeOriginal: undefined,
          SubSecCreateDate: undefined,
          CreateDate: undefined,
        });

        const asset = await utils.createAsset(admin.accessToken, {
          assetData: {
            filename,
            bytes: imageBytes,
          },
        });

        await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

        const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

        expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
        // Should use CreationDate when available
        expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
          new Date('2023-07-07T07:00:00.000Z').getTime(),
        );
      });

      it('should skip invalid date formats and use next valid tag', async () => {
        const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
          // Note: Testing invalid date handling with only WRITABLE tags
          GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
          CreationDate: '2023:13:13 13:00:00', // WRITABLE - Invalid date (month 13), should be skipped
          // Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
          // Exclude other date tags
          SubSecDateTimeOriginal: undefined,
          DateTimeOriginal: undefined,
          SubSecCreateDate: undefined,
          CreateDate: undefined,
        });

        const asset = await utils.createAsset(admin.accessToken, {
          assetData: {
            filename,
            bytes: imageBytes,
          },
        });

        await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

        const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

        expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
        // CreationDate outranks GPSDateTime but is invalid, so the first valid tag (GPSDateTime) should win
        expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
          new Date('2023-10-10T10:00:00.000Z').getTime(),
        );
      });
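
      // '2023:13:13 13:00:00' is rejected because month 13 does not exist. A
      // minimal sketch of such a validity check (hypothetical helper, not the
      // actual server implementation):
      const isValidExifDate = (value: string): boolean => {
        // normalize 'yyyy:MM:dd HH:mm:ss' to ISO and let Date reject impossible months
        const ms = Date.parse(value.replace(/^(\d{4}):(\d{2}):(\d{2}) /, '$1-$2-$3T'));
        return !Number.isNaN(ms);
      };
      // isValidExifDate('2023:13:13 13:00:00') -> false: CreationDate is skipped
      // isValidExifDate('2023:10:10 10:00:00') -> true:  GPSDateTime is used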
    });
  });
});

describe('POST /assets/exist', () => {
  it('ignores invalid deviceAssetIds', async () => {
    const response = await utils.checkExistingAssets(user1.accessToken, {

@@ -1,146 +0,0 @@
import { LoginResponseDto, login, signUpAdmin } from '@immich/sdk';
import { loginDto, signupDto } from 'src/fixtures';
import { errorDto, loginResponseDto, signupResponseDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeEach, describe, expect, it } from 'vitest';

const { email, password } = signupDto.admin;

describe(`/auth/admin-sign-up`, () => {
  beforeEach(async () => {
    await utils.resetDatabase();
  });

  describe('POST /auth/admin-sign-up', () => {
    it(`should sign up the admin`, async () => {
      const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
      expect(status).toBe(201);
      expect(body).toEqual(signupResponseDto.admin);
    });

    it('should not allow a second admin to sign up', async () => {
      await signUpAdmin({ signUpDto: signupDto.admin });

      const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.alreadyHasAdmin);
    });
  });
});

describe('/auth/*', () => {
  let admin: LoginResponseDto;

  beforeEach(async () => {
    await utils.resetDatabase();
    await signUpAdmin({ signUpDto: signupDto.admin });
    admin = await login({ loginCredentialDto: loginDto.admin });
  });

  describe(`POST /auth/login`, () => {
    it('should reject an incorrect password', async () => {
      const { status, body } = await request(app).post('/auth/login').send({ email, password: 'incorrect' });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.incorrectLogin);
    });

    it('should accept a correct password', async () => {
      const { status, body, headers } = await request(app).post('/auth/login').send({ email, password });
      expect(status).toBe(201);
      expect(body).toEqual(loginResponseDto.admin);

      const token = body.accessToken;
      expect(token).toBeDefined();

      const cookies = headers['set-cookie'];
      expect(cookies).toHaveLength(3);
      expect(cookies[0].split(';').map((item) => item.trim())).toEqual([
        `immich_access_token=${token}`,
        'Max-Age=34560000',
        'Path=/',
        expect.stringContaining('Expires='),
        'HttpOnly',
        'SameSite=Lax',
      ]);
      expect(cookies[1].split(';').map((item) => item.trim())).toEqual([
        'immich_auth_type=password',
        'Max-Age=34560000',
        'Path=/',
        expect.stringContaining('Expires='),
        'HttpOnly',
        'SameSite=Lax',
      ]);
      expect(cookies[2].split(';').map((item) => item.trim())).toEqual([
        'immich_is_authenticated=true',
        'Max-Age=34560000',
        'Path=/',
        expect.stringContaining('Expires='),
        'SameSite=Lax',
      ]);
    });
  });

  describe('POST /auth/validateToken', () => {
    it('should reject an invalid token', async () => {
      const { status, body } = await request(app).post(`/auth/validateToken`).set('Authorization', 'Bearer 123');
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.invalidToken);
    });

    it('should accept a valid token', async () => {
      const { status, body } = await request(app)
        .post(`/auth/validateToken`)
        .send({})
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({ authStatus: true });
    });
  });

  describe('POST /auth/change-password', () => {
    it('should require the current password', async () => {
      const { status, body } = await request(app)
        .post(`/auth/change-password`)
        .send({ password: 'wrong-password', newPassword: 'Password1234' })
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.wrongPassword);
    });

    it('should change the password', async () => {
      const { status } = await request(app)
        .post(`/auth/change-password`)
        .send({ password, newPassword: 'Password1234' })
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(200);

      await login({
        loginCredentialDto: {
          email: 'admin@immich.cloud',
          password: 'Password1234',
        },
      });
    });
  });

  describe('POST /auth/logout', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).post(`/auth/logout`);
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should logout the user', async () => {
      const { status, body } = await request(app)
        .post(`/auth/logout`)
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({
        successful: true,
        redirectUri: '/auth/login?autoLaunch=0',
      });
    });
  });
});

@@ -6,7 +6,7 @@ import {
  createMemory,
  getMemory,
} from '@immich/sdk';
-import { createUserDto, uuidDto } from 'src/fixtures';
+import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
@@ -17,7 +17,6 @@ describe('/memories', () => {
  let user: LoginResponseDto;
  let adminAsset: AssetMediaResponseDto;
  let userAsset1: AssetMediaResponseDto;
-  let userAsset2: AssetMediaResponseDto;
  let userMemory: MemoryResponseDto;

  beforeAll(async () => {
@@ -25,10 +24,9 @@ describe('/memories', () => {

    admin = await utils.adminSetup();
    user = await utils.userSetup(admin.accessToken, createUserDto.user1);
-    [adminAsset, userAsset1, userAsset2] = await Promise.all([
+    [adminAsset, userAsset1] = await Promise.all([
      utils.createAsset(admin.accessToken),
      utils.createAsset(user.accessToken),
-     utils.createAsset(user.accessToken),
    ]);
    userMemory = await createMemory(
      {
@@ -43,121 +41,7 @@ describe('/memories', () => {
    );
  });

  describe('GET /memories', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/memories');

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });
  });

  describe('POST /memories', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).post('/memories');

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should validate data when type is on this day', async () => {
      const { status, body } = await request(app)
        .post('/memories')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({
          type: 'on_this_day',
          data: {},
          memoryAt: new Date(2021).toISOString(),
        });

      expect(status).toBe(400);
      expect(body).toEqual(
        errorDto.badRequest(['data.year must be a positive number', 'data.year must be an integer number']),
      );
    });

    it('should create a new memory', async () => {
      const { status, body } = await request(app)
        .post('/memories')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({
          type: 'on_this_day',
          data: { year: 2021 },
          memoryAt: new Date(2021).toISOString(),
        });

      expect(status).toBe(201);
      expect(body).toEqual({
        id: expect.any(String),
        type: 'on_this_day',
        data: { year: 2021 },
        createdAt: expect.any(String),
        updatedAt: expect.any(String),
        isSaved: false,
        memoryAt: expect.any(String),
        ownerId: user.userId,
        assets: [],
      });
    });

    it('should create a new memory (with assets)', async () => {
      const { status, body } = await request(app)
        .post('/memories')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({
          type: 'on_this_day',
          data: { year: 2021 },
          memoryAt: new Date(2021).toISOString(),
          assetIds: [userAsset1.id, userAsset2.id],
        });

      expect(status).toBe(201);
      expect(body).toMatchObject({
        id: expect.any(String),
        assets: expect.arrayContaining([
          expect.objectContaining({ id: userAsset1.id }),
          expect.objectContaining({ id: userAsset2.id }),
        ]),
      });
      expect(body.assets).toHaveLength(2);
    });

    it('should create a new memory and ignore assets the user does not have access to', async () => {
      const { status, body } = await request(app)
        .post('/memories')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({
          type: 'on_this_day',
          data: { year: 2021 },
          memoryAt: new Date(2021).toISOString(),
          assetIds: [userAsset1.id, adminAsset.id],
        });

      expect(status).toBe(201);
      expect(body).toMatchObject({
        id: expect.any(String),
        assets: [expect.objectContaining({ id: userAsset1.id })],
      });
      expect(body.assets).toHaveLength(1);
    });
  });

  describe('GET /memories/:id', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get(`/memories/${uuidDto.invalid}`);

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require a valid id', async () => {
      const { status, body } = await request(app)
        .get(`/memories/${uuidDto.invalid}`)
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should require access', async () => {
      const { status, body } = await request(app)
        .get(`/memories/${userMemory.id}`)
@@ -176,22 +60,6 @@ describe('/memories', () => {
    });

  describe('PUT /memories/:id', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).put(`/memories/${uuidDto.invalid}`).send({ isSaved: true });

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require a valid id', async () => {
      const { status, body } = await request(app)
        .put(`/memories/${uuidDto.invalid}`)
        .send({ isSaved: true })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should require access', async () => {
      const { status, body } = await request(app)
        .put(`/memories/${userMemory.id}`)
@@ -218,23 +86,6 @@ describe('/memories', () => {
    });

  describe('PUT /memories/:id/assets', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app)
        .put(`/memories/${userMemory.id}/assets`)
        .send({ ids: [userAsset1.id] });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require a valid id', async () => {
      const { status, body } = await request(app)
        .put(`/memories/${uuidDto.invalid}/assets`)
        .send({ ids: [userAsset1.id] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should require access', async () => {
      const { status, body } = await request(app)
        .put(`/memories/${userMemory.id}/assets`)
@@ -244,15 +95,6 @@ describe('/memories', () => {
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should require a valid asset id', async () => {
      const { status, body } = await request(app)
        .put(`/memories/${userMemory.id}/assets`)
        .send({ ids: [uuidDto.invalid] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
    });

    it('should require asset access', async () => {
      const { status, body } = await request(app)
        .put(`/memories/${userMemory.id}/assets`)
@@ -279,23 +121,6 @@ describe('/memories', () => {
    });

  describe('DELETE /memories/:id/assets', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app)
        .delete(`/memories/${userMemory.id}/assets`)
        .send({ ids: [userAsset1.id] });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require a valid id', async () => {
      const { status, body } = await request(app)
        .delete(`/memories/${uuidDto.invalid}/assets`)
        .send({ ids: [userAsset1.id] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should require access', async () => {
      const { status, body } = await request(app)
        .delete(`/memories/${userMemory.id}/assets`)
@@ -305,15 +130,6 @@ describe('/memories', () => {
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should require a valid asset id', async () => {
      const { status, body } = await request(app)
        .delete(`/memories/${userMemory.id}/assets`)
        .send({ ids: [uuidDto.invalid] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
    });

    it('should only remove assets in the memory', async () => {
      const { status, body } = await request(app)
        .delete(`/memories/${userMemory.id}/assets`)
@@ -340,21 +156,6 @@ describe('/memories', () => {
    });

  describe('DELETE /memories/:id', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).delete(`/memories/${uuidDto.invalid}`);

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require a valid id', async () => {
      const { status, body } = await request(app)
        .delete(`/memories/${uuidDto.invalid}`)
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should require access', async () => {
      const { status, body } = await request(app)
        .delete(`/memories/${userMemory.id}`)

@@ -227,6 +227,21 @@ describe(`/oauth`, () => {
    expect(user.storageLabel).toBe('user-username');
  });

  it('should set the admin status from a role claim', async () => {
    const callbackParams = await loginWithOAuth(OAuthUser.WITH_ROLE);
    const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
    expect(status).toBe(201);
    expect(body).toMatchObject({
      accessToken: expect.any(String),
      userId: expect.any(String),
      userEmail: 'oauth-with-role@immich.app',
      isAdmin: true,
    });

    const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
    expect(user.isAdmin).toBe(true);
  });
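
  // The assertion above implies a claim-to-role mapping during the OAuth
  // callback. A minimal sketch under assumed shapes (the claim name and the
  // mapping logic are hypothetical; the real mapping is configured server-side):
  interface OAuthClaims {
    email: string;
    role?: string;
  }

  const isAdminFromClaims = (claims: OAuthClaims): boolean => claims.role === 'admin';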

  it('should work with RS256 signed tokens', async () => {
    await setupOAuth(admin.accessToken, {
      enabled: true,

@@ -9,7 +9,7 @@ import {
} from '@immich/sdk';
import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
-import { app, asBearerAuth, shareUrl, utils } from 'src/utils';
+import { app, asBearerAuth, baseUrl, shareUrl, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';

@@ -78,6 +78,7 @@ describe('/shared-links', () => {
        type: SharedLinkType.Album,
        albumId: metadataAlbum.id,
        showMetadata: true,
+       slug: 'metadata-album',
      }),
      utils.createSharedLink(user1.accessToken, {
        type: SharedLinkType.Album,
@@ -117,6 +118,13 @@ describe('/shared-links', () => {
      const resp = await request(shareUrl).get(`/${linkWithAssets.key}`);
      expect(resp.status).toBe(200);
      expect(resp.header['content-type']).toContain('text/html');
      expect(resp.text).toContain(`<meta property="og:image" content="http://127.0.0.1:2285`);
    });

    it('should fall back to my.immich.app og:image meta tag for shared asset if Host header is not present', async () => {
      const resp = await request(shareUrl).get(`/${linkWithAssets.key}`).set('Host', '');
      expect(resp.status).toBe(200);
      expect(resp.header['content-type']).toContain('text/html');
      expect(resp.text).toContain(`<meta property="og:image" content="https://my.immich.app`);
    });
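
    // Together, the two tests above pin down og:image host selection: use the
    // request's Host header when present, otherwise fall back to
    // https://my.immich.app. A sketch of that decision (hypothetical helper;
    // a real implementation would also consider X-Forwarded-* headers):
    const resolveOgImageBase = (hostHeader: string | undefined): string =>
      hostHeader ? `http://${hostHeader}` : 'https://my.immich.app';

    // resolveOgImageBase('127.0.0.1:2285') -> 'http://127.0.0.1:2285'
    // resolveOgImageBase('') -> 'https://my.immich.app'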

@@ -131,6 +139,17 @@ describe('/shared-links', () => {
    });
  });

  describe('GET /s/:slug', () => {
    it('should work for slug auth', async () => {
      const resp = await request(baseUrl).get(`/s/${linkWithMetadata.slug}`);
      expect(resp.status).toBe(200);
      expect(resp.header['content-type']).toContain('text/html');
      expect(resp.text).toContain(
        `<meta name="description" content="${metadataAlbum.assets.length} shared photos & videos" />`,
      );
    });
  });

  describe('GET /shared-links', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/shared-links');

@@ -15,12 +15,6 @@ describe('/system-config', () => {
  });

  describe('PUT /system-config', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).put('/system-config');
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should always return the new config', async () => {
      const config = await getSystemConfig(admin.accessToken);

@@ -37,7 +37,7 @@ describe('/tags', () => {
  beforeEach(async () => {
    // tagging assets eventually triggers metadata extraction which can impact other tests
    await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
-    await utils.resetDatabase(['tags']);
+    await utils.resetDatabase(['tag']);
  });

  describe('POST /tags', () => {

@@ -1,230 +0,0 @@
import {
  AssetMediaResponseDto,
  AssetVisibility,
  LoginResponseDto,
  SharedLinkType,
  TimeBucketAssetResponseDto,
} from '@immich/sdk';
import { DateTime } from 'luxon';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
import { app, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest';

// TODO this should probably be a test util function
const today = DateTime.fromObject({
  year: 2023,
  month: 11,
  day: 3,
}) as DateTime<true>;
const yesterday = today.minus({ days: 1 });

describe('/timeline', () => {
  let admin: LoginResponseDto;
  let user: LoginResponseDto;
  let timeBucketUser: LoginResponseDto;

  let user1Assets: AssetMediaResponseDto[];
  let user2Assets: AssetMediaResponseDto[];

  beforeAll(async () => {
    await utils.resetDatabase();
    admin = await utils.adminSetup({ onboarding: false });
    [user, timeBucketUser] = await Promise.all([
      utils.userSetup(admin.accessToken, createUserDto.create('1')),
      utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
    ]);

    user1Assets = await Promise.all([
      utils.createAsset(user.accessToken),
      utils.createAsset(user.accessToken),
      utils.createAsset(user.accessToken, {
        isFavorite: true,
        fileCreatedAt: yesterday.toISO(),
        fileModifiedAt: yesterday.toISO(),
        assetData: { filename: 'example.mp4' },
      }),
      utils.createAsset(user.accessToken),
      utils.createAsset(user.accessToken),
    ]);

    user2Assets = await Promise.all([
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-01-01').toISOString() }),
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-10').toISOString() }),
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
      utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-12').toISOString() }),
    ]);

    await utils.deleteAssets(timeBucketUser.accessToken, [user2Assets[4].id]);
  });

  describe('GET /timeline/buckets', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/timeline/buckets');
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should get time buckets by month', async () => {
      const { status, body } = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`);

      expect(status).toBe(200);
      expect(body).toEqual(
        expect.arrayContaining([
          { count: 3, timeBucket: '1970-02-01' },
          { count: 1, timeBucket: '1970-01-01' },
        ]),
      );
    });
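
    // The counts above (1 for 1970-01, 3 for 1970-02; the fifth asset was
    // deleted in beforeAll) follow from truncating each fileCreatedAt to the
    // first day of its month. A sketch of that grouping, reusing the DateTime
    // import at the top of this file (hypothetical helper, not the server's query):
    const bucketByMonth = (dates: string[]): Record<string, number> => {
      const buckets: Record<string, number> = {};
      for (const date of dates) {
        const key = DateTime.fromISO(date, { zone: 'utc' }).startOf('month').toISODate()!;
        buckets[key] = (buckets[key] ?? 0) + 1;
      }
      return buckets;
    };
    // bucketByMonth(['1970-01-01', '1970-02-10', '1970-02-11', '1970-02-11'])
    // -> { '1970-01-01': 1, '1970-02-01': 3 }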

    it('should not allow access for unrelated shared links', async () => {
      const sharedLink = await utils.createSharedLink(user.accessToken, {
        type: SharedLinkType.Individual,
        assetIds: user1Assets.map(({ id }) => id),
      });

      const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should return error if time bucket is requested with partners asset and archived', async () => {
      const req1 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ withPartners: true, visibility: AssetVisibility.Archive });

      expect(req1.status).toBe(400);
      expect(req1.body).toEqual(errorDto.badRequest());

      const req2 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .query({ withPartners: true, visibility: undefined });

      expect(req2.status).toBe(400);
      expect(req2.body).toEqual(errorDto.badRequest());
    });

    it('should return error if time bucket is requested with partners asset and favorite', async () => {
      const req1 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ withPartners: true, isFavorite: true });

      expect(req1.status).toBe(400);
      expect(req1.body).toEqual(errorDto.badRequest());

      const req2 = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ withPartners: true, isFavorite: false });

      expect(req2.status).toBe(400);
      expect(req2.body).toEqual(errorDto.badRequest());
    });

    it('should return error if time bucket is requested with partners asset and trash', async () => {
      const req = await request(app)
        .get('/timeline/buckets')
        .set('Authorization', `Bearer ${user.accessToken}`)
        .query({ withPartners: true, isTrashed: true });

      expect(req.status).toBe(400);
      expect(req.body).toEqual(errorDto.badRequest());
    });
  });

  describe('GET /timeline/bucket', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/timeline/bucket').query({
        timeBucket: '1900-01-01',
      });

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should handle 5 digit years', async () => {
      const { status, body } = await request(app)
        .get('/timeline/bucket')
        .query({ timeBucket: '012345-01-01' })
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`);

      expect(status).toBe(200);
      expect(body).toEqual({
        city: [],
        country: [],
        duration: [],
        id: [],
        visibility: [],
        isFavorite: [],
        isImage: [],
        isTrashed: [],
        livePhotoVideoId: [],
        fileCreatedAt: [],
        localOffsetHours: [],
        ownerId: [],
        projectionType: [],
        ratio: [],
        status: [],
        thumbhash: [],
      });
    });

    // TODO enable date string validation while still accepting 5 digit years
    // it('should fail if time bucket is invalid', async () => {
    //   const { status, body } = await request(app)
    //     .get('/timeline/bucket')
    //     .set('Authorization', `Bearer ${user.accessToken}`)
    //     .query({ timeBucket: 'foo' });

    //   expect(status).toBe(400);
    //   expect(body).toEqual(errorDto.badRequest);
    // });

    it('should return time bucket', async () => {
      const { status, body } = await request(app)
        .get('/timeline/bucket')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ timeBucket: '1970-02-10' });

      expect(status).toBe(200);
      expect(body).toEqual({
        city: [],
        country: [],
        duration: [],
        id: [],
        visibility: [],
        isFavorite: [],
        isImage: [],
        isTrashed: [],
        livePhotoVideoId: [],
        fileCreatedAt: [],
        localOffsetHours: [],
        ownerId: [],
        projectionType: [],
        ratio: [],
        status: [],
        thumbhash: [],
      });
    });

    it('should return time bucket in trash', async () => {
      const { status, body } = await request(app)
        .get('/timeline/bucket')
        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
        .query({ timeBucket: '1970-02-01T00:00:00.000Z', isTrashed: true });

      expect(status).toBe(200);

      const timeBucket: TimeBucketAssetResponseDto = body;
      expect(timeBucket.isTrashed).toEqual([true]);
    });
  });
});
@@ -97,7 +97,7 @@ describe(`immich upload`, () => {
  });

  beforeEach(async () => {
-    await utils.resetDatabase(['assets', 'albums']);
+    await utils.resetDatabase(['asset', 'album']);
  });

  describe(`immich upload /path/to/file.jpg`, () => {

e2e/src/generate-date-tag-test-images.ts (new file, +178 lines)
@@ -0,0 +1,178 @@
#!/usr/bin/env node

/**
 * Script to generate test images with additional EXIF date tags
 * This creates actual JPEG images with embedded metadata for testing
 * Images are generated into e2e/test-assets/metadata/dates/
 */

import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import sharp from 'sharp';

interface TestImage {
  filename: string;
  description: string;
  exifTags: Record<string, string>;
}

const testImages: TestImage[] = [
  {
    filename: 'time-created.jpg',
    description: 'Image with TimeCreated tag',
    exifTags: {
      TimeCreated: '2023:11:15 14:30:00',
      Make: 'Canon',
      Model: 'EOS R5',
    },
  },
  {
    filename: 'gps-datetime.jpg',
    description: 'Image with GPSDateTime and coordinates',
    exifTags: {
      GPSDateTime: '2023:11:15 12:30:00Z',
      GPSLatitude: '37.7749',
      GPSLongitude: '-122.4194',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
  {
    filename: 'datetime-utc.jpg',
    description: 'Image with DateTimeUTC tag',
    exifTags: {
      DateTimeUTC: '2023:11:15 10:30:00',
      Make: 'Nikon',
      Model: 'D850',
    },
  },
  {
    filename: 'gps-datestamp.jpg',
    description: 'Image with GPSDateStamp and GPSTimeStamp',
    exifTags: {
      GPSDateStamp: '2023:11:15',
      GPSTimeStamp: '08:30:00',
      GPSLatitude: '51.5074',
      GPSLongitude: '-0.1278',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
  {
    filename: 'sony-datetime2.jpg',
    description: 'Sony camera image with SonyDateTime2 tag',
    exifTags: {
      SonyDateTime2: '2023:11:15 06:30:00',
      Make: 'SONY',
      Model: 'ILCE-7RM5',
    },
  },
  {
    filename: 'date-priority-test.jpg',
    description: 'Image with multiple date tags to test priority',
    exifTags: {
      SubSecDateTimeOriginal: '2023:01:01 01:00:00',
      DateTimeOriginal: '2023:02:02 02:00:00',
      SubSecCreateDate: '2023:03:03 03:00:00',
      CreateDate: '2023:04:04 04:00:00',
      CreationDate: '2023:05:05 05:00:00',
      DateTimeCreated: '2023:06:06 06:00:00',
      TimeCreated: '2023:07:07 07:00:00',
      GPSDateTime: '2023:08:08 08:00:00',
      DateTimeUTC: '2023:09:09 09:00:00',
      GPSDateStamp: '2023:10:10',
      SonyDateTime2: '2023:11:11 11:00:00',
    },
  },
  {
    filename: 'new-tags-only.jpg',
    description: 'Image with only additional date tags (no standard tags)',
    exifTags: {
      TimeCreated: '2023:12:01 15:45:30',
      GPSDateTime: '2023:12:01 13:45:30Z',
      DateTimeUTC: '2023:12:01 13:45:30',
      GPSDateStamp: '2023:12:01',
      SonyDateTime2: '2023:12:01 08:45:30',
      GPSLatitude: '40.7128',
      GPSLongitude: '-74.0060',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
];

const generateTestImages = async (): Promise<void> => {
  // Target directory: e2e/test-assets/metadata/dates/
  // Current file is in: e2e/src/
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = dirname(__filename);
  const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');

  console.log('Generating test images with additional EXIF date tags...');
  console.log(`Target directory: ${targetDir}`);

  for (const image of testImages) {
    try {
      const imagePath = join(targetDir, image.filename);

      // Create unique JPEG file using Sharp
      const r = Math.floor(Math.random() * 256);
      const g = Math.floor(Math.random() * 256);
      const b = Math.floor(Math.random() * 256);

      const jpegData = await sharp({
        create: {
          width: 100,
          height: 100,
          channels: 3,
          background: { r, g, b },
        },
      })
        .jpeg({ quality: 90 })
        .toBuffer();

      writeFileSync(imagePath, jpegData);

      // Build exiftool command to add EXIF data
      const exifArgs = Object.entries(image.exifTags)
        .map(([tag, value]) => `-${tag}="${value}"`)
        .join(' ');

      const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;

      console.log(`Creating ${image.filename}: ${image.description}`);
      execSync(command, { stdio: 'pipe' });

      // Verify the tags were written
      const verifyCommand = `exiftool -json "${imagePath}"`;
      const result = execSync(verifyCommand, { encoding: 'utf8' });
      const metadata = JSON.parse(result)[0];

      console.log(`  ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);

      // Log first date tag found for verification
      const firstDateTag = Object.keys(image.exifTags).find(
        (tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
      );
      if (firstDateTag && metadata[firstDateTag]) {
        console.log(`  ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
      }
    } catch (error) {
      console.error(`Failed to create ${image.filename}:`, (error as Error).message);
    }
  }

  console.log('\nTest image generation complete!');
  console.log('Files created in:', targetDir);
  console.log('\nTo test these images:');
  console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
};

export { generateTestImages };

// Run the generator if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
  generateTestImages().catch(console.error);
}
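
// Likely invocation, shown as an assumption since this diff defines no
// package.json script for the generator; it requires exiftool on PATH and
// the sharp dependency installed:
//
//   cd e2e
//   npx tsx src/generate-date-tag-test-images.ts
//   cd test-assets/metadata/dates && exiftool -time:all -gps:all *.jpg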
@@ -116,6 +116,7 @@ export const deviceDto = {
    createdAt: expect.any(String),
    updatedAt: expect.any(String),
    current: true,
+   isPendingSyncReset: false,
    deviceOS: '',
    deviceType: '',
  },