Mirror of https://github.com/immich-app/immich.git (synced 2025-12-06 09:13:13 +03:00)

Compare commits: v2.1.0...chore/orig (392 commits)
Commits (SHA1):

173904e387 42854cad56 1bcf28c062 62628dfcfa b11aecd184 116012f6f8 7594136050 bb341cc774
af1d4afb95 75b1ef2c57 1e37f7c8c8 a32f450059 b452ab463b 79bed80226 6249996cdb a3f281caa3
7c19b0591f 95c29a8aea d8ca210641 ab35afd3b1 65e4fdf98d fa43fae2a5 46afd6a101 46e1967760
922282b2b4 e3ab16a5bd 08f320c801 e36261b552 c0a3b58bba f12f609038 1f6eb662e5 0c1fe35f2f
e98a33cf9d d38305360c 3e3ca4c104 93ec8b7ecf bf1d409be1 81edf0749f 01f83ae964 5eec0dc981
ca4fd07656 7ce43b3824 ce00119926 fffee80e2f 64cd4e96e3 955a3bfaa6 e699d8f170 13104d49cd
2d5ec528d5 5226898184 dd4169876c 8321c275b8 3d6c26350a db15e5e423 35d18da14a cb56a11f0b
104fa09f69 66ae07ee39 939d2c8b27 2801a6e672 4742360469 b56fa62b32 ddbe485074 01310c6d86
512327ef69 8755cd59fd 7694b342ed 78553a0258 c1198b99b7 8b7b9ee394 d6b39a464d 75d23fe135
c860809aa1 0498f6cb9d 24e5dabb51 aecf064ec9 57be3ff8c7 99505f987e 1e1c4ac9d2 d952b62053
9f3eeed091 1dbc20fd77 ba8df712c4 741d838f56 ec2fa6e308 b974ed5735 78457d9b89 5d043b435e
9a403d5886 1a31faf1a2 edbdc14178 e7261a04e1 acded69adf 45a0315606 3856d4053c 8175b3b75b
56e431226f f59417cc77 11cec56e80 810f22057c 2152f20b6c a6c76e78d6 644a3bf090 42dd3315f8
3a694219bf d9fd52ea18 2a281e7906 5f987a95f5 edf577d7f7 5e482dabc6 76c73549ae 271a42ac7f
4462952564 38d4d1a573 d310c6f3cd c086a65fa8 7134dd29ca 3e08953a43 58c3c7e26b 237ddcb648
fbaeffd65c d64c339b4f 69880ee165 15e00f82f0 ce82e27f4b eeee5147cc af22f9b014 1086f22166
e94eb5012f 4dcc049465 d784d431d0 1200bfad13 f11bfb9581 074fdb2b96 f1f203719d f73ca9d9c0
ad3f4fb434 8001dedcbf 07a39226c5 88e7e21683 2cefbf8ca3 4a6c50cd81 e0535e20e6 62580455af
0e7e67efe1 2c54b506b3 8969b8bdb2 5186092faa 4c9142308f bea5d4fd37 74c24bfa88 95834c68d9
09024c3558 137cb043ef edf21bae41 c958f9856d 70ab8bc657 edde0f93ae 896665bca9 e8e9e7830e
4fd9e42ce5 337e3a8dac 2dc81e28fc f915d4cc90 905f4375b0 0b3633db4f 2f40f5aad8 2611e2ec20
433a3cd339 0b487897a4 d5c5bdffcb dea95ac2e6 9e2208b8dd 6922a92b69 7a2c8e0662 787158247f
b0a0b7c2e1 cb6d81771d 8de6ec1a1b d27c01ef70 d6307b262f b2cbefe41e da5a72f6de 45304f1211
a4e65a7ea8 dd393c8346 493cde9d55 7705c84b04 ce0172b8c1 718b3a7b52 8a73de018c d92df63f84
6c6b00067b 9cc88ed2a6 4905bba694 853d19dc2d c935ae47d0 93ab42fa24 6913697ad1 a4ae86ce29
2c50f2e244 365abd8906 25fb43bbe3 125e8cee01 c15e9bfa72 35e188e6e7 3cc9dd126c aa69d89b9f
29c14a3f58 0df70365d7 c34be73d81 f396e9e374 821a9d4691 cad654586f 28eb1bc13c 1e4779cf48
0647c22956 b8087b4fa2 d94cb9641b 517c3e1d4c 619de2a5e4 79d0e3e1ed f5ff36a1f8 b5efc9c16e
1036076b0d c76324c611 0ddb92e1ec d08a520aa2 7bdf0f6c50 2b33a58448 b35f00f768 86cc7c3c73
5854cbbe97 ceb36a304d f5d7e5acca be15a84f9b 32791e98c2 7ea443b3a9 c69786b039 5c7d5539ea
3531856d1c 4abaad548a 857816bccc 61a2c3ace3 e9038193db e19467eddd 0cb96837d0 cbdfe08344
6229f9feb4 20f5a14d03 3f5cd48a59 4cb094e7ae 57c8378ca7 b073f9b802 1a2e7d06cb 217d719b0b
cf75ad2f26 2286444158 b489bdf8d3 5e6087ea28 4ae7cadeae fdfb04d83c 8273c822d7 12bb39a111
9098717c55 8d25f81bec 52596255c8 106effca2e 9676da27c9 3edcb180eb 9f0b5790af e0c2cdddd4
74f2c10a5a fb97d9f4d9 f72bcc8a8f 46a4dce16b 62ed5fe27f 8e3f6cdbbf d51b8c1cdf 698531d6e0
44149d187f 9e3b4ef3db ac0d646401 664a8fa499 3194538817 b0d427f8f9 02b29046b3 c666dc6c67
382481735a 6bb1a9e083 3f03a88767 328380cfda 65f29afb0f f721a62776 c73e3dacea 78fb815cdb
d9cddeb0f1 c4ff2ea6d5 b91b855473 7773d6d44f 2129f889f5 221e0ef02f 0a6b2ad26e 719bf763e4
34bad1ce71 6164b027e2 d9a13dc8ac 722dbfa11f f8afef0f9d 3c8df55986 47436ad0ce 9b58d5663a
b6cebb3ece cb7e68a287 e196cac6f4 351c0d2a4d f4969694cd b334288529 834e52fda6 8c27ba3e52
cd8d66f5dd 446f738c7d f19ad9726f 65cac118ca efac8c6667 a70843e2b4 0b941d78c4 fc5fc58759
9bb2fc238a 76f5036026 032de9ff2f c3a533ab40 dbd6dcb786 9dffbaea98 70bda45551 d9452e485c
85e9ced68d 04e2e42c88 bcfdb2f9df 23a34bee6f 6f31f27218 b102f94e97 becb56e1b1 05f174a180
476bb1cacd 24fe62ff9d a390e44402 08f81eb3c6 13d33f834f 58f9659cf6 e14d5fb277 06151ad173
0700758621 f26db8053b 4836047e50 0979528a05 24a6757630 67f093f75b 3174a27902 e7d6a066f8
73da80394e 471cc74ff2 ca745d00ee 3ea8d140a2 8b8012f89d 4b7f851428 cc1cd299f3 3163afd24a
95889a69c9 81554e5ad1 505e16c37c 24bfdf3263 a23dfff6cf 2919ee4c65 d0eae97037 9d639607c7
74a9be4a0e 26e877cba7 7b7d91a5e1 b3055d2e94 f1e03d0022 9b5855f848 7d0228a159 c18df7ae25
72f5ca4420 02beb85642 1b62c2ef55 7ade8ad69a 6b91b31dbc 65f63be564 430af9a145 f3b0e8a5e6
@@ -29,6 +29,12 @@
      ]
    }
  },
  "features": {
    "ghcr.io/devcontainers/features/docker-in-docker:2": {
      // https://github.com/devcontainers/features/issues/1466
      "moby": false
    }
  },
  "forwardPorts": [3000, 9231, 9230, 2283],
  "portsAttributes": {
    "3000": {
@@ -21,6 +21,7 @@ services:
      - app-node_modules:/usr/src/app/node_modules
      - sveltekit:/usr/src/app/web/.svelte-kit
      - coverage:/usr/src/app/web/coverage
      - ../plugins:/build/corePlugin
  immich-web:
    env_file: !reset []
  immich-machine-learning:
2 .github/.nvmrc vendored
@@ -1 +1 @@
22.20.0
24.11.1
2 .github/labeler.yml vendored
@@ -31,7 +31,7 @@ documentation:
🧠machine-learning:
  - changed-files:
      - any-glob-to-any-file:
          - machine-learning/app/**
          - machine-learning/**

changelog:translation:
  - head-branch: ['^chore/translations$']
10 .github/mise.toml vendored (new file)
@@ -0,0 +1,10 @@
[tasks.install]
run = "pnpm install --filter github --frozen-lockfile"

[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."

[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
197 .github/workflows/build-mobile.yml vendored
@@ -1,12 +1,16 @@
name: Build Mobile

on:
  workflow_dispatch:
  workflow_call:
    inputs:
      ref:
        required: false
        type: string
      environment:
        description: 'Target environment'
        required: true
        default: 'development'
        type: string
    secrets:
      KEY_JKS:
        required: true
@@ -16,6 +20,30 @@ on:
        required: true
      ANDROID_STORE_PASSWORD:
        required: true
      APP_STORE_CONNECT_API_KEY_ID:
        required: true
      APP_STORE_CONNECT_API_KEY_ISSUER_ID:
        required: true
      APP_STORE_CONNECT_API_KEY:
        required: true
      IOS_CERTIFICATE_P12:
        required: true
      IOS_CERTIFICATE_PASSWORD:
        required: true
      IOS_PROVISIONING_PROFILE:
        required: true
      IOS_PROVISIONING_PROFILE_SHARE_EXTENSION:
        required: true
      IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION:
        required: true
      IOS_DEVELOPMENT_PROVISIONING_PROFILE:
        required: true
      IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION:
        required: true
      IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION:
        required: true
      FASTLANE_TEAM_ID:
        required: true
  pull_request:
  push:
    branches: [main]
@@ -34,10 +62,17 @@ jobs:
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Check what should run
        id: check
        uses: immich-app/devtools/actions/pre-job@5f91b52dfbb92b8d96ca411ab59c896cd59714ca # pre-job-action-v1.1.0
        uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          filters: |
            mobile:
              - 'mobile/**'
@@ -55,10 +90,17 @@ jobs:
    runs-on: mich

    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          ref: ${{ inputs.ref || github.sha }}
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Create the Keystore
        env:
@@ -123,7 +165,7 @@ jobs:
          fi

      - name: Publish Android Artifact
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: release-apk-signed
          path: mobile/build/app/outputs/flutter-apk/*.apk
@@ -140,3 +182,150 @@ jobs:
            mobile/android/.gradle
            mobile/.dart_tool
          key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}

  build-sign-ios:
    name: Build and sign iOS
    needs: pre-job
    permissions:
      contents: read
    # Run on main branch or workflow_dispatch, or on PRs/other branches (build only, no upload)
    if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
    runs-on: macos-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
        with:
          ref: ${{ inputs.ref || github.sha }}
          persist-credentials: false

      - name: Setup Flutter SDK
        uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2
        with:
          channel: 'stable'
          flutter-version-file: ./mobile/pubspec.yaml
          cache: true

      - name: Install Flutter dependencies
        working-directory: ./mobile
        run: flutter pub get

      - name: Generate translation files
        run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
        working-directory: ./mobile

      - name: Generate platform APIs
        run: make pigeon
        working-directory: ./mobile

      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: '3.3'
          working-directory: ./mobile/ios

      - name: Install CocoaPods dependencies
        working-directory: ./mobile/ios
        run: |
          pod install

      - name: Install Fastlane
        working-directory: ./mobile/ios
        run: |
          gem install bundler
          bundle config set --local path 'vendor/bundle'
          bundle install

      - name: Create API Key
        env:
          API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
          API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
          API_KEY_CONTENT: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
        working-directory: ./mobile/ios
        run: |
          mkdir -p ~/.appstoreconnect/private_keys
          echo "$API_KEY_CONTENT" | base64 --decode > ~/.appstoreconnect/private_keys/AuthKey_${API_KEY_ID}.p8

      - name: Import Certificate and Provisioning Profiles
        env:
          IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
          IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
          IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
          IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
          IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
          IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
          IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
          IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
          ENVIRONMENT: ${{ inputs.environment || 'development' }}
        working-directory: ./mobile/ios
        run: |
          # Decode certificate
          echo "$IOS_CERTIFICATE_P12" | base64 --decode > certificate.p12

          # Decode provisioning profiles based on environment
          if [[ "$ENVIRONMENT" == "development" ]]; then
            echo "$IOS_DEVELOPMENT_PROVISIONING_PROFILE" | base64 --decode > profile_dev.mobileprovision
            echo "$IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION" | base64 --decode > profile_dev_share.mobileprovision
            echo "$IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION" | base64 --decode > profile_dev_widget.mobileprovision
            ls -lh profile_dev*.mobileprovision
          else
            echo "$IOS_PROVISIONING_PROFILE" | base64 --decode > profile.mobileprovision
            echo "$IOS_PROVISIONING_PROFILE_SHARE_EXTENSION" | base64 --decode > profile_share.mobileprovision
            echo "$IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION" | base64 --decode > profile_widget.mobileprovision
            ls -lh profile*.mobileprovision
          fi

      - name: Create keychain and import certificate
        env:
          KEYCHAIN_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
          CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
        working-directory: ./mobile/ios
        run: |
          # Create keychain
          security create-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
          security default-keychain -s build.keychain
          security unlock-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
          security set-keychain-settings -t 3600 -u build.keychain

          # Import certificate
          security import certificate.p12 -k build.keychain -P "$CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
          security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" build.keychain

          # Verify certificate was imported
          security find-identity -v -p codesigning build.keychain

      - name: Build and deploy to TestFlight
        env:
          FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
          IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
          KEYCHAIN_NAME: build.keychain
          KEYCHAIN_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
          APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
          APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
          ENVIRONMENT: ${{ inputs.environment || 'development' }}
          BUNDLE_ID_SUFFIX: ${{ inputs.environment == 'production' && '' || 'development' }}
          GITHUB_REF: ${{ github.ref }}
        working-directory: ./mobile/ios
        run: |
          # Only upload to TestFlight on main branch
          if [[ "$GITHUB_REF" == "refs/heads/main" ]]; then
            if [[ "$ENVIRONMENT" == "development" ]]; then
              bundle exec fastlane gha_testflight_dev
            else
              bundle exec fastlane gha_release_prod
            fi
          else
            # Build only, no TestFlight upload for non-main branches
            bundle exec fastlane gha_build_only
          fi

      - name: Clean up keychain
        if: always()
        run: |
          security delete-keychain build.keychain || true

      - name: Upload IPA artifact
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ios-release-ipa
          path: mobile/ios/Runner.ipa
11 .github/workflows/cache-cleanup.yml vendored
@@ -18,14 +18,21 @@ jobs:
      contents: read
      actions: write
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Check out code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Cleanup
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_TOKEN: ${{ steps.token.outputs.token }}
          REF: ${{ github.ref }}
        run: |
          gh extension install actions/gh-actions-cache
26 .github/workflows/cli.yml vendored
@@ -29,15 +29,22 @@ jobs:
        working-directory: ./cli

    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './cli/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
@@ -64,13 +71,20 @@ jobs:
    needs: publish

    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
        uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
@@ -91,7 +105,7 @@ jobs:

      - name: Generate docker image tags
        id: metadata
        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
        with:
          flavor: |
            latest=false
2 .github/workflows/close-duplicates.yml vendored
@@ -35,7 +35,7 @@ jobs:
    needs: [get_body, should_run]
    if: ${{ needs.should_run.outputs.should_run == 'true' }}
    container:
      image: ghcr.io/immich-app/mdq:main@sha256:d8ae47cf2e6cf4e2559bd57a60b73674fe44f897cba2c2bddff2987a05be10a4
      image: ghcr.io/immich-app/mdq:main@sha256:73a05fc805dfd3bd29bebc08442aedfec5c419c5ad3421ec73edc5647233891a
    outputs:
      checked: ${{ steps.get_checkbox.outputs.checked }}
    steps:
15 .github/workflows/codeql-analysis.yml vendored
@@ -43,14 +43,21 @@ jobs:
    # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
        uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +70,7 @@ jobs:
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
        uses: github/codeql-action/autobuild@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +83,6 @@ jobs:
      #   ./location_of_script_within_repo/buildscript.sh

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
        uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
        with:
          category: '/language:${{matrix.language}}'
28 .github/workflows/docker.yml vendored
@@ -22,10 +22,17 @@ jobs:
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Check what should run
        id: check
        uses: immich-app/devtools/actions/pre-job@5f91b52dfbb92b8d96ca411ab59c896cd59714ca # pre-job-action-v1.1.0
        uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          filters: |
            server:
              - 'server/**'
@@ -58,6 +65,7 @@ jobs:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Re-tag image
        env:
          REGISTRY_NAME: 'ghcr.io'
@@ -87,6 +95,7 @@ jobs:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Re-tag image
        env:
          REGISTRY_NAME: 'ghcr.io'
@@ -107,24 +116,23 @@ jobs:
      matrix:
        include:
          - device: cpu
            tag-suffix: ''
          - device: cuda
            tag-suffix: '-cuda'
            suffixes: '-cuda'
            platforms: linux/amd64
          - device: openvino
            tag-suffix: '-openvino'
            suffixes: '-openvino'
            platforms: linux/amd64
          - device: armnn
            tag-suffix: '-armnn'
            suffixes: '-armnn'
            platforms: linux/arm64
          - device: rknn
            tag-suffix: '-rknn'
            suffixes: '-rknn'
            platforms: linux/arm64
          - device: rocm
            tag-suffix: '-rocm'
            suffixes: '-rocm'
            platforms: linux/amd64
            runner-mapping: '{"linux/amd64": "mich"}'
    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@0477486d82313fba68f7c82c034120a4b8981297 # multi-runner-build-workflow-v2.1.0
    permissions:
      contents: read
      actions: read
@@ -138,7 +146,7 @@ jobs:
      dockerfile: machine-learning/Dockerfile
      platforms: ${{ matrix.platforms }}
      runner-mapping: ${{ matrix.runner-mapping }}
      tag-suffix: ${{ matrix.tag-suffix }}
      suffixes: ${{ matrix.suffixes }}
      dockerhub-push: ${{ github.event_name == 'release' }}
      build-args: |
        DEVICE=${{ matrix.device }}
@@ -147,7 +155,7 @@ jobs:
    name: Build and Push Server
    needs: pre-job
    if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@0477486d82313fba68f7c82c034120a4b8981297 # multi-runner-build-workflow-v2.1.0
    permissions:
      contents: read
      actions: read
24 .github/workflows/docs-build.yml vendored
@@ -20,10 +20,17 @@ jobs:
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Check what should run
        id: check
        uses: immich-app/devtools/actions/pre-job@5f91b52dfbb92b8d96ca411ab59c896cd59714ca # pre-job-action-v1.1.0
        uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          filters: |
            docs:
              - 'docs/**'
@@ -46,16 +53,23 @@ jobs:
        working-directory: ./docs

    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './docs/.nvmrc'
          cache: 'pnpm'
@@ -71,7 +85,7 @@ jobs:
        run: pnpm build

      - name: Upload build output
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: docs-build-output
          path: docs/build/
64 .github/workflows/docs-deploy.yml vendored
@@ -5,6 +5,9 @@ on:
    types:
      - completed

env:
  TG_NON_INTERACTIVE: 'true'

jobs:
  checks:
    name: Docs Deploy Checks
@@ -16,12 +19,19 @@ jobs:
      parameters: ${{ steps.parameters.outputs.result }}
      artifact: ${{ steps.get-artifact.outputs.result }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - if: ${{ github.event.workflow_run.conclusion != 'success' }}
        run: echo 'The triggering workflow did not succeed' && exit 1
      - name: Get artifact
        id: get-artifact
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          script: |
            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
              owner: context.repo.owner,
@@ -42,6 +52,7 @@ jobs:
        env:
          HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
        with:
          github-token: ${{ steps.token.outputs.token }}
          script: |
            const eventType = context.payload.workflow_run.event;
            const isFork = context.payload.workflow_run.repository.fork;
@@ -107,10 +118,20 @@ jobs:
      pull-requests: write
    if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Setup Mise
        uses: immich-app/devtools/actions/use-mise@cd24790a7f5f6439ac32cc94f5523cb2de8bfa8c # use-mise-action-v1.1.0

      - name: Load parameters
        id: parameters
@@ -118,6 +139,7 @@ jobs:
        env:
          PARAM_JSON: ${{ needs.checks.outputs.parameters }}
        with:
          github-token: ${{ steps.token.outputs.token }}
          script: |
            const parameters = JSON.parse(process.env.PARAM_JSON);
            core.setOutput("event", parameters.event);
@@ -129,6 +151,7 @@ jobs:
        env:
          ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
        with:
          github-token: ${{ steps.token.outputs.token }}
          script: |
            let artifact = JSON.parse(process.env.ARTIFACT_JSON);
            let download = await github.rest.actions.downloadArtifact({
@@ -150,12 +173,8 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
        with:
          tg_version: '0.58.12'
          tofu_version: '1.7.1'
          tg_dir: 'deployment/modules/cloudflare/docs'
          tg_command: 'apply'
        working-directory: 'deployment/modules/cloudflare/docs'
        run: 'mise run //deployment:tf apply'

      - name: Deploy Docs Subdomain Output
        id: docs-output
@@ -165,20 +184,12 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
        with:
          tg_version: '0.58.12'
          tofu_version: '1.7.1'
          tg_dir: 'deployment/modules/cloudflare/docs'
          tg_command: 'output -json'

      - name: Output Cleaning
        id: clean
        env:
          TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
        working-directory: 'deployment/modules/cloudflare/docs'
        run: |
          CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
          echo "output=$CLEANED" >> $GITHUB_OUTPUT
          mise run //deployment:tf output -- -json | jq -r '
            "projectName=\(.pages_project_name.value)",
            "subdomain=\(.immich_app_branch_subdomain.value)"
          ' >> $GITHUB_OUTPUT

      - name: Publish to Cloudflare Pages
        # TODO: Action is deprecated
@@ -186,7 +197,7 @@ jobs:
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
          projectName: ${{ steps.docs-output.outputs.projectName }}
          workingDirectory: 'docs'
          directory: 'build'
          branch: ${{ steps.parameters.outputs.name }}
@@ -199,19 +210,16 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
        with:
          tg_version: '0.58.12'
          tofu_version: '1.7.1'
          tg_dir: 'deployment/modules/cloudflare/docs-release'
          tg_command: 'apply'
        working-directory: 'deployment/modules/cloudflare/docs-release'
        run: 'mise run //deployment:tf apply'

      - name: Comment
        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
        if: ${{ steps.parameters.outputs.event == 'pr' }}
        with:
          token: ${{ steps.token.outputs.token }}
          number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
          body: |
            📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
            📖 Documentation deployed to [${{ steps.docs-output.outputs.subdomain }}](https://${{ steps.docs-output.outputs.subdomain }})
          emojis: 'rocket'
          body-include: '<!-- Docs PR URL -->'
24 .github/workflows/docs-destroy.yml vendored
@@ -5,6 +5,9 @@ on:

permissions: {}

env:
  TG_NON_INTERACTIVE: 'true'

jobs:
  deploy:
    name: Docs Destroy
@@ -13,10 +16,20 @@ jobs:
      contents: read
      pull-requests: write
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Setup Mise
        uses: immich-app/devtools/actions/use-mise@cd24790a7f5f6439ac32cc94f5523cb2de8bfa8c # use-mise-action-v1.1.0

      - name: Destroy Docs Subdomain
        env:
@@ -25,16 +38,13 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
        with:
          tg_version: '0.58.12'
          tofu_version: '1.7.1'
          tg_dir: 'deployment/modules/cloudflare/docs'
          tg_command: 'destroy -refresh=false'
        working-directory: 'deployment/modules/cloudflare/docs'
        run: 'mise run //deployment:tf destroy -- -refresh=false'

      - name: Comment
        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
        with:
          token: ${{ steps.token.outputs.token }}
          number: ${{ github.event.number }}
          delete: true
          body-include: '<!-- Docs PR URL -->'
11 .github/workflows/fix-format.yml vendored
@@ -16,30 +16,30 @@ jobs:
    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        uses: actions/create-github-app-token@7e473efe3cb98aa54f8d4bac15400b15fad77d94 # v2.2.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: 'Checkout'
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          token: ${{ steps.generate-token.outputs.token }}
          persist-credentials: true

      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './server/.nvmrc'
          cache: 'pnpm'
          cache-dependency-path: '**/pnpm-lock.yaml'

      - name: Fix formatting
        run: make install-all && make format-all
        run: pnpm --recursive install && pnpm run --recursive --parallel fix:format

      - name: Commit and push
        uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
@@ -51,6 +51,7 @@ jobs:
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        if: always()
        with:
          github-token: ${{ steps.generate-token.outputs.token }}
          script: |
            github.rest.issues.removeLabel({
              issue_number: context.payload.pull_request.number,
18 .github/workflows/merge-translations.yml vendored
@@ -28,11 +28,19 @@ jobs:
    permissions:
      pull-requests: write
    steps:
      - name: Generate a token
        id: generate_token
        if: ${{ inputs.skip != true }}
        uses: actions/create-github-app-token@7e473efe3cb98aa54f8d4bac15400b15fad77d94 # v2.2.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Find translation PR
        id: find_pr
        if: ${{ inputs.skip != true }}
        env:
          GH_TOKEN: ${{ github.token }}
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
        run: |
          set -euo pipefail
@@ -55,14 +63,6 @@ jobs:
            exit 1
          fi

      - name: Generate a token
        id: generate_token
        if: ${{ inputs.skip != true }}
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Lock weblate
        if: ${{ inputs.skip != true }}
        env:
7 .github/workflows/pr-label-validation.yml vendored
@@ -13,9 +13,16 @@ jobs:
      issues: write
      pull-requests: write
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Require PR to have a changelog label
        uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
        with:
          token: ${{ steps.token.outputs.token }}
          mode: exactly
          count: 1
          use_regex: true
8 .github/workflows/pr-labeler.yml vendored
@@ -11,4 +11,12 @@ jobs:
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
        with:
          repo-token: ${{ steps.token.outputs.token }}
34 .github/workflows/prepare-release.yml vendored
@@ -49,26 +49,26 @@ jobs:
    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        uses: actions/create-github-app-token@7e473efe3cb98aa54f8d4bac15400b15fad77d94 # v2.2.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          token: ${{ steps.generate-token.outputs.token }}
          persist-credentials: true
          ref: main

      - name: Install uv
        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4

      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './server/.nvmrc'
          cache: 'pnpm'
@@ -99,8 +99,23 @@ jobs:
      ALIAS: ${{ secrets.ALIAS }}
      ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
      ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
      # iOS secrets
      APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
      APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
      APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
      IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
      IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
      IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
      IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
      IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
      IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
      IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
      IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
      FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}

    with:
      ref: ${{ needs.bump_version.outputs.ref }}
      environment: production

  prepare_release:
    runs-on: ubuntu-latest
@@ -111,24 +126,25 @@ jobs:
    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        uses: actions/create-github-app-token@7e473efe3cb98aa54f8d4bac15400b15fad77d94 # v2.2.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          token: ${{ steps.generate-token.outputs.token }}
          persist-credentials: false

      - name: Download APK
        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: release-apk-signed
          github-token: ${{ steps.generate-token.outputs.token }}

      - name: Create draft release
        uses: softprops/action-gh-release@aec2ec56f94eb8180ceec724245f64ef008b89f5 # v2.4.0
        uses: softprops/action-gh-release@5be0e66d93ac7ed76da52eca8bb058f665c3a5fe # v2.4.2
        with:
          draft: true
          tag_name: ${{ env.IMMICH_VERSION }}
18 .github/workflows/preview-label.yaml vendored
@@ -13,10 +13,17 @@ jobs:
    permissions:
      pull-requests: write
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
        with:
          github-token: ${{ steps.token.outputs.token }}
          message-id: 'preview-status'
          message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
          message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.build/'

  remove-label:
    runs-on: ubuntu-latest
@@ -24,8 +31,15 @@ jobs:
    permissions:
      pull-requests: write
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          script: |
            github.rest.issues.removeLabel({
              issue_number: context.payload.pull_request.number,
@@ -37,11 +51,13 @@ jobs:
      - uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
        if: ${{ github.event.pull_request.head.repo.fork }}
        with:
          github-token: ${{ steps.token.outputs.token }}
          message-id: 'preview-status'
          message: 'PRs from forks cannot have preview environments.'

      - uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
        if: ${{ !github.event.pull_request.head.repo.fork }}
        with:
          github-token: ${{ steps.token.outputs.token }}
          message-id: 'preview-status'
          message: 'Preview environment has been removed.'
170 .github/workflows/release-pr.yml vendored (new file)
@@ -0,0 +1,170 @@
name: Manage release PR
on:
  workflow_dispatch:
  push:
    branches:
      - main

concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: true

permissions: {}

jobs:
  bump:
    runs-on: ubuntu-latest
    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@7e473efe3cb98aa54f8d4bac15400b15fad77d94 # v2.2.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          token: ${{ steps.generate-token.outputs.token }}
          persist-credentials: true
          ref: main

      - name: Install uv
        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4

      - name: Setup pnpm
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

      - name: Setup Node
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './server/.nvmrc'
          cache: 'pnpm'
          cache-dependency-path: '**/pnpm-lock.yaml'

      - name: Determine release type
        id: bump-type
        uses: ietf-tools/semver-action@c90370b2958652d71c06a3484129a4d423a6d8a8 # v1.11.0
        with:
          token: ${{ steps.generate-token.outputs.token }}

      - name: Bump versions
        env:
          TYPE: ${{ steps.bump-type.outputs.bump }}
        run: |
          if [ "$TYPE" == "none" ]; then
            exit 1 # TODO: Is there a cleaner way to abort the workflow?
          fi
          misc/release/pump-version.sh -s $TYPE -m true

      - name: Manage Outline release document
        id: outline
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        env:
          OUTLINE_API_KEY: ${{ secrets.OUTLINE_API_KEY }}
          NEXT_VERSION: ${{ steps.bump-type.outputs.next }}
        with:
          github-token: ${{ steps.generate-token.outputs.token }}
          script: |
            const fs = require('fs');

            const outlineKey = process.env.OUTLINE_API_KEY;
            const parentDocumentId = 'da856355-0844-43df-bd71-f8edce5382d9'
            const collectionId = 'e2910656-714c-4871-8721-447d9353bd73';
            const baseUrl = 'https://outline.immich.cloud';

            const listResponse = await fetch(`${baseUrl}/api/documents.list`, {
              method: 'POST',
              headers: {
                'Authorization': `Bearer ${outlineKey}`,
                'Content-Type': 'application/json'
              },
              body: JSON.stringify({ parentDocumentId })
            });

            if (!listResponse.ok) {
              throw new Error(`Outline list failed: ${listResponse.statusText}`);
            }

            const listData = await listResponse.json();
            const allDocuments = listData.data || [];

            const document = allDocuments.find(doc => doc.title === 'next');

            let documentId;
            let documentUrl;
            let documentText;

            if (!document) {
              // Create new document
              console.log('No existing document found. Creating new one...');
              const notesTmpl = fs.readFileSync('misc/release/notes.tmpl', 'utf8');
              const createResponse = await fetch(`${baseUrl}/api/documents.create`, {
                method: 'POST',
                headers: {
                  'Authorization': `Bearer ${outlineKey}`,
                  'Content-Type': 'application/json'
                },
                body: JSON.stringify({
                  title: 'next',
                  text: notesTmpl,
                  collectionId: collectionId,
                  parentDocumentId: parentDocumentId,
                  publish: true
                })
              });

              if (!createResponse.ok) {
                throw new Error(`Failed to create document: ${createResponse.statusText}`);
              }

              const createData = await createResponse.json();
              documentId = createData.data.id;
              const urlId = createData.data.urlId;
              documentUrl = `${baseUrl}/doc/next-${urlId}`;
              documentText = createData.data.text || '';
              console.log(`Created new document: ${documentUrl}`);
            } else {
              documentId = document.id;
              const docPath = document.url;
              documentUrl = `${baseUrl}${docPath}`;
              documentText = document.text || '';
              console.log(`Found existing document: ${documentUrl}`);
            }

            // Generate GitHub release notes
            console.log('Generating GitHub release notes...');
            const releaseNotesResponse = await github.rest.repos.generateReleaseNotes({
              owner: context.repo.owner,
              repo: context.repo.repo,
              tag_name: `${process.env.NEXT_VERSION}`,
            });

            // Combine the content
            const changelog = `
            # ${process.env.NEXT_VERSION}

            ${documentText}

            ${releaseNotesResponse.data.body}

            ---

            `

            const existingChangelog = fs.existsSync('CHANGELOG.md') ? fs.readFileSync('CHANGELOG.md', 'utf8') : '';
            fs.writeFileSync('CHANGELOG.md', changelog + existingChangelog, 'utf8');

            core.setOutput('document_url', documentUrl);

      - name: Create PR
        id: create-pr
        uses: peter-evans/create-pull-request@84ae59a2cdc2258d6fa0732dd66352dddae2a412 # v7.0.9
        with:
          token: ${{ steps.generate-token.outputs.token }}
          commit-message: 'chore: release ${{ steps.bump-type.outputs.next }}'
          title: 'chore: release ${{ steps.bump-type.outputs.next }}'
          body: 'Release notes: ${{ steps.outline.outputs.document_url }}'
          labels: 'changelog:skip'
          branch: 'release/next'
          draft: true
148 .github/workflows/release.yml vendored Normal file
@@ -0,0 +1,148 @@
name: release.yml
on:
  pull_request:
    types: [closed]
    paths:
      - CHANGELOG.md

jobs:
  # Maybe double check PR source branch?

  merge_translations:
    uses: ./.github/workflows/merge-translations.yml
    permissions:
      pull-requests: write
    secrets:
      PUSH_O_MATIC_APP_ID: ${{ secrets.PUSH_O_MATIC_APP_ID }}
      PUSH_O_MATIC_APP_KEY: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
      WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}

  build_mobile:
    uses: ./.github/workflows/build-mobile.yml
    needs: merge_translations
    permissions:
      contents: read
    secrets:
      KEY_JKS: ${{ secrets.KEY_JKS }}
      ALIAS: ${{ secrets.ALIAS }}
      ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
      ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
      # iOS secrets
      APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
      APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
      APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
      IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
      IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
      IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
      IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
      IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
      IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
      IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
      IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
      FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
    with:
      ref: main
      environment: production

  prepare_release:
    runs-on: ubuntu-latest
    needs: build_mobile
    permissions:
      actions: read # To download the app artifact
    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@7e473efe3cb98aa54f8d4bac15400b15fad77d94 # v2.2.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          token: ${{ steps.generate-token.outputs.token }}
          persist-credentials: false
          ref: main

      - name: Extract changelog
        id: changelog
        run: |
          CHANGELOG_PATH=$RUNNER_TEMP/changelog.md
          # Print everything up to the first '---' separator, then drop the separator itself
          sed -n '1,/^---$/p' CHANGELOG.md | head -n -1 > $CHANGELOG_PATH
          echo "path=$CHANGELOG_PATH" >> $GITHUB_OUTPUT
          VERSION=$(sed -n 's/^# //p' $CHANGELOG_PATH)
          echo "version=$VERSION" >> $GITHUB_OUTPUT

      - name: Download APK
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: release-apk-signed
          github-token: ${{ steps.generate-token.outputs.token }}

      - name: Create draft release
        uses: softprops/action-gh-release@5be0e66d93ac7ed76da52eca8bb058f665c3a5fe # v2.4.2
        with:
          tag_name: ${{ steps.changelog.outputs.version }}
          token: ${{ steps.generate-token.outputs.token }}
          body_path: ${{ steps.changelog.outputs.path }}
          draft: true
          files: |
            docker/docker-compose.yml
            docker/example.env
            docker/hwaccel.ml.yml
            docker/hwaccel.transcoding.yml
            docker/prometheus.yml
            *.apk

      - name: Rename Outline document
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        continue-on-error: true
        env:
          OUTLINE_API_KEY: ${{ secrets.OUTLINE_API_KEY }}
          VERSION: ${{ steps.changelog.outputs.version }}
        with:
          github-token: ${{ steps.generate-token.outputs.token }}
          script: |
            const outlineKey = process.env.OUTLINE_API_KEY;
            const version = process.env.VERSION;
            const parentDocumentId = 'da856355-0844-43df-bd71-f8edce5382d9';
            const baseUrl = 'https://outline.immich.cloud';

            const listResponse = await fetch(`${baseUrl}/api/documents.list`, {
              method: 'POST',
              headers: {
                'Authorization': `Bearer ${outlineKey}`,
                'Content-Type': 'application/json'
              },
              body: JSON.stringify({ parentDocumentId })
            });

            if (!listResponse.ok) {
              throw new Error(`Outline list failed: ${listResponse.statusText}`);
            }

            const listData = await listResponse.json();
            const allDocuments = listData.data || [];
            const document = allDocuments.find(doc => doc.title === 'next');

            if (document) {
              console.log(`Found document 'next', renaming to '${version}'...`);

              const updateResponse = await fetch(`${baseUrl}/api/documents.update`, {
                method: 'POST',
                headers: {
                  'Authorization': `Bearer ${outlineKey}`,
                  'Content-Type': 'application/json'
                },
                body: JSON.stringify({
                  id: document.id,
                  title: version
                })
              });

              if (!updateResponse.ok) {
                throw new Error(`Failed to rename document: ${updateResponse.statusText}`);
              }
            } else {
              console.log('No document titled "next" found to rename');
            }
13 .github/workflows/sdk.yml vendored
@@ -16,15 +16,22 @@ jobs:
    run:
      working-directory: ./open-api/typescript-sdk
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

      # Setup .npmrc file to publish to npm
      - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './open-api/typescript-sdk/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
20 .github/workflows/static_analysis.yml vendored
@@ -19,10 +19,17 @@ jobs:
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Check what should run
        id: check
        uses: immich-app/devtools/actions/pre-job@5f91b52dfbb92b8d96ca411ab59c896cd59714ca # pre-job-action-v1.1.0
        uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          filters: |
            mobile:
              - 'mobile/**'
@@ -41,10 +48,17 @@ jobs:
    run:
      working-directory: ./mobile
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Setup Flutter SDK
        uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
@@ -58,7 +72,7 @@ jobs:
      - name: Install DCM
        uses: CQLabs/setup-dcm@8697ae0790c0852e964a6ef1d768d62a6675481a # v2.0.1
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          github-token: ${{ steps.token.outputs.token }}
          version: auto
          working-directory: ./mobile
217 .github/workflows/test.yml vendored
@@ -16,10 +16,17 @@ jobs:
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Check what should run
        id: check
        uses: immich-app/devtools/actions/pre-job@5f91b52dfbb92b8d96ca411ab59c896cd59714ca # pre-job-action-v1.1.0
        uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          filters: |
            i18n:
              - 'i18n/**'
@@ -55,14 +62,22 @@ jobs:
    run:
      working-directory: ./server
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}

      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './server/.nvmrc'
          cache: 'pnpm'
@@ -92,14 +107,21 @@ jobs:
    run:
      working-directory: ./cli
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './cli/.nvmrc'
          cache: 'pnpm'
@@ -132,14 +154,21 @@ jobs:
    run:
      working-directory: ./cli
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './cli/.nvmrc'
          cache: 'pnpm'
@@ -167,14 +196,21 @@ jobs:
    run:
      working-directory: ./web
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './web/.nvmrc'
          cache: 'pnpm'
@@ -204,14 +240,21 @@ jobs:
    run:
      working-directory: ./web
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './web/.nvmrc'
          cache: 'pnpm'
@@ -235,14 +278,21 @@ jobs:
    permissions:
      contents: read
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './web/.nvmrc'
          cache: 'pnpm'
@@ -276,14 +326,21 @@ jobs:
    run:
      working-directory: ./e2e
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './e2e/.nvmrc'
          cache: 'pnpm'
@@ -315,14 +372,22 @@ jobs:
    run:
      working-directory: ./server
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          submodules: 'recursive'
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './server/.nvmrc'
          cache: 'pnpm'
@@ -346,15 +411,22 @@ jobs:
    matrix:
      runner: [ubuntu-latest, ubuntu-24.04-arm]
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          submodules: 'recursive'
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './e2e/.nvmrc'
          cache: 'pnpm'
@@ -394,15 +466,22 @@ jobs:
    matrix:
      runner: [ubuntu-latest, ubuntu-24.04-arm]
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          submodules: 'recursive'
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './e2e/.nvmrc'
          cache: 'pnpm'
@@ -421,8 +500,16 @@ jobs:
        run: docker compose build
        if: ${{ !cancelled() }}
      - name: Run e2e tests (web)
        env:
          CI: true
        run: npx playwright test
        if: ${{ !cancelled() }}
      - name: Archive test results
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: success() || failure()
        with:
          name: e2e-web-test-results-${{ matrix.runner }}
          path: e2e/playwright-report/
  success-check-e2e:
    name: End-to-End Tests Success
    needs: [e2e-tests-server-cli, e2e-tests-web]
@@ -441,9 +528,16 @@ jobs:
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup Flutter SDK
        uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
        with:
@@ -466,11 +560,18 @@ jobs:
    run:
      working-directory: ./machine-learning
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Install uv
        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
        # TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
        # with:
@@ -502,14 +603,21 @@ jobs:
    run:
      working-directory: ./.github
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './.github/.nvmrc'
          cache: 'pnpm'
@@ -525,9 +633,16 @@ jobs:
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Run ShellCheck
        uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
        with:
@@ -539,14 +654,21 @@ jobs:
    permissions:
      contents: read
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './server/.nvmrc'
          cache: 'pnpm'
@@ -594,14 +716,21 @@ jobs:
    run:
      working-directory: ./server
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
        with:
          persist-credentials: false
          token: ${{ steps.token.outputs.token }}
      - name: Setup pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
      - name: Setup Node
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version-file: './server/.nvmrc'
          cache: 'pnpm'
20 .github/workflows/weblate-lock.yml vendored
@@ -23,14 +23,20 @@ jobs:
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Check what should run
        id: check
        uses: immich-app/devtools/actions/pre-job@5f91b52dfbb92b8d96ca411ab59c896cd59714ca # pre-job-action-v1.1.0
        uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          filters: |
            i18n:
              - 'i18n/!(en)**\.json'
          exclude-branches: 'chore/translations'
              - modified: 'i18n/!(en)**\.json'
          skip-force-logic: 'true'

  enforce-lock:
@@ -40,10 +46,16 @@ jobs:
    permissions: {}
    if: ${{ fromJSON(needs.pre-job.outputs.should_run).i18n == true }}
    steps:
      - id: token
        uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Bot review status
        env:
          PR_NUMBER: ${{ github.event.pull_request.number || github.event.pull_request_review.pull_request.number }}
          GH_TOKEN: ${{ github.token }}
          GH_TOKEN: ${{ steps.token.outputs.token }}
        run: |
          # Then check for APPROVED by the bot, if absent fail
          gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json reviews | jq -e '.reviews | map(select(.author.login == env.BOT_NAME and .state == "APPROVED")) | length > 0' \
2 .vscode/settings.json vendored
@@ -52,7 +52,7 @@
  },
  "cSpell.words": ["immich"],
  "editor.formatOnSave": true,
  "eslint.validate": ["javascript", "svelte"],
  "eslint.validate": ["javascript", "typescript", "svelte"],
  "explorer.fileNesting.enabled": true,
  "explorer.fileNesting.patterns": {
    "*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
3 Makefile
@@ -17,6 +17,9 @@ dev-docs:
e2e:
	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --remove-orphans

e2e-dev:
	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.dev.yml up --remove-orphans

e2e-update:
	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
10 README.md
@@ -118,16 +118,16 @@ Read more about translations [here](https://docs.immich.app/developer/translatio

## Star history

<a href="https://star-history.com/#immich-app/immich&Date">
<a href="https://star-history.com/#immich-app/immich&type=date&legend=top-left">
  <picture>
    <source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
    <source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
    <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
    <source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=date&theme=dark" />
    <source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=date" />
    <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=date" width="100%" />
  </picture>
</a>

## Contributors

<a href="https://github.com/alextran1502/immich/graphs/contributors">
<a href="https://github.com/immich-app/immich/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>
@@ -1 +1 @@
22.20.0
24.11.1
@@ -1,4 +1,4 @@
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core
FROM node:24.1.0-alpine3.20@sha256:8fe019e0d57dbdce5f5c27c0b63d2775cf34b00e3755a7dea969802d7e0c2b25 AS core

WORKDIR /usr/src/app
COPY package* pnpm* .pnpmfile.cjs ./
29 cli/mise.toml Normal file
@@ -0,0 +1,29 @@
[tasks.install]
run = "pnpm install --filter @immich/cli --frozen-lockfile"

[tasks.build]
env._.path = "./node_modules/.bin"
run = "vite build"

[tasks.test]
env._.path = "./node_modules/.bin"
run = "vite"

[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"

[tasks."lint-fix"]
run = { task = "lint --fix" }

[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."

[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."

[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"
@@ -1,6 +1,6 @@
{
  "name": "@immich/cli",
  "version": "2.2.97",
  "version": "2.2.103",
  "description": "Command Line Interface (CLI) for Immich",
  "type": "module",
  "exports": "./dist/index.js",
@@ -20,7 +20,7 @@
    "@types/lodash-es": "^4.17.12",
    "@types/micromatch": "^4.0.9",
    "@types/mock-fs": "^4.13.1",
    "@types/node": "^22.18.8",
    "@types/node": "^24.10.1",
    "@vitest/coverage-v8": "^3.0.0",
    "byte-size": "^9.0.0",
    "cli-progress": "^3.12.0",
@@ -28,7 +28,7 @@
    "eslint": "^9.14.0",
    "eslint-config-prettier": "^10.1.8",
    "eslint-plugin-prettier": "^5.1.3",
    "eslint-plugin-unicorn": "^60.0.0",
    "eslint-plugin-unicorn": "^62.0.0",
    "globals": "^16.0.0",
    "mock-fs": "^5.2.0",
    "prettier": "^3.2.5",
@@ -69,6 +69,6 @@
    "micromatch": "^4.0.8"
  },
  "volta": {
    "node": "22.20.0"
    "node": "24.11.1"
  }
}
@@ -271,7 +271,7 @@ describe('startWatch', () => {
    });
  });

  it('should filger out ignored patterns', async () => {
  it('should filter out ignored patterns', async () => {
    const testFilePath = path.join(testFolder, 'test.jpg');
    const ignoredPattern = 'ignored';
    const ignoredFolder = path.join(testFolder, ignoredPattern);
@@ -37,6 +37,7 @@ export interface UploadOptionsDto {
  dryRun?: boolean;
  skipHash?: boolean;
  delete?: boolean;
  deleteDuplicates?: boolean;
  album?: boolean;
  albumName?: string;
  includeHidden?: boolean;
@@ -70,10 +71,8 @@ const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
    console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
  }
  await updateAlbums([...newAssets, ...duplicates], options);
  await deleteFiles(
    newAssets.map(({ filepath }) => filepath),
    options,
  );

  await deleteFiles(newAssets, duplicates, options);
};

export const startWatch = async (
@@ -406,28 +405,46 @@ const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaRespon
  return response.json();
};

const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<void> => {
  if (!options.delete) {
    return;
const deleteFiles = async (uploaded: Asset[], duplicates: Asset[], options: UploadOptionsDto): Promise<void> => {
  let fileCount = 0;
  if (options.delete) {
    fileCount += uploaded.length;
  }

  if (options.deleteDuplicates) {
    fileCount += duplicates.length;
  }

  if (options.dryRun) {
    console.log(`Would have deleted ${files.length} local asset${s(files.length)}`);
    console.log(`Would have deleted ${fileCount} local asset${s(fileCount)}`);
    return;
  }

  if (fileCount === 0) {
    return;
  }

  console.log('Deleting assets that have been uploaded...');

  const deletionProgress = new SingleBar(
    { format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
    Presets.shades_classic,
  );
  deletionProgress.start(files.length, 0);
  deletionProgress.start(fileCount, 0);

  const chunkDelete = async (files: Asset[]) => {
    for (const assetBatch of chunk(files, options.concurrency)) {
      await Promise.all(assetBatch.map((input: Asset) => unlink(input.filepath)));
      deletionProgress.update(assetBatch.length);
    }
  };

  try {
    for (const assetBatch of chunk(files, options.concurrency)) {
      await Promise.all(assetBatch.map((input: string) => unlink(input)));
      deletionProgress.update(assetBatch.length);
    if (options.delete) {
      await chunkDelete(uploaded);
    }

    if (options.deleteDuplicates) {
      await chunkDelete(duplicates);
    }
  } finally {
    deletionProgress.stop();
||||
@@ -8,6 +8,7 @@ import { serverInfo } from 'src/commands/server-info';
|
||||
import { version } from '../package.json';
|
||||
|
||||
const defaultConfigDirectory = path.join(os.homedir(), '.config/immich/');
|
||||
const defaultConcurrency = Math.max(1, os.cpus().length - 1);
|
||||
|
||||
const program = new Command()
|
||||
.name('immich')
|
||||
@@ -66,7 +67,7 @@ program
  .addOption(
    new Option('-c, --concurrency <number>', 'Number of assets to upload at the same time')
      .env('IMMICH_UPLOAD_CONCURRENCY')
      .default(4),
      .default(defaultConcurrency),
  )
  .addOption(
    new Option('-j, --json-output', 'Output detailed information in json format')
@@ -74,6 +75,11 @@
      .default(false),
  )
  .addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
  .addOption(
    new Option('--delete-duplicates', 'Delete local assets that are duplicates (already exist on server)').env(
      'IMMICH_DELETE_DUPLICATES',
    ),
  )
  .addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
  .addOption(
    new Option('--watch', 'Watch for changes and upload automatically')
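For context, a hedged usage sketch of the new option (hypothetical paths; the flag and the `IMMICH_DELETE_DUPLICATES` variable come straight from the options above):

```bash
# Upload a folder, then delete local files the server already had copies of.
immich upload --delete-duplicates /path/to/photos

# The same behavior via the environment variable wired to the option above.
IMMICH_DELETE_DUPLICATES=true immich upload /path/to/photos
```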
@@ -299,7 +299,7 @@ describe('crawl', () => {
    .map(([file]) => file);

  // Compare file's content instead of path since a file can be represented in multiple ways.
  expect(actual.map((path) => readContent(path)).sort()).toEqual(expected.sort());
  expect(actual.map((path) => readContent(path)).toSorted()).toEqual(expected.toSorted());
  });
}
});
@@ -160,7 +160,7 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
    ignore: [`**/${exclusionPattern}`],
  });
  globbedFiles.push(...crawledFiles);
  return globbedFiles.sort();
  return globbedFiles.toSorted();
};

export const sha1 = (filepath: string) => {
@@ -9,7 +9,7 @@
    "experimentalDecorators": true,
    "allowSyntheticDefaultImports": true,
    "resolveJsonModule": true,
    "target": "es2022",
    "target": "es2023",
    "sourceMap": true,
    "outDir": "./dist",
    "incremental": true,
20 deployment/mise.toml Normal file
@@ -0,0 +1,20 @@
[tools]
terragrunt = "0.93.10"
opentofu = "1.10.7"

[tasks."tg:fmt"]
run = "terragrunt hclfmt"
description = "Format terragrunt files"

[tasks.tf]
run = "terragrunt run --all"
description = "Wrapper for terragrunt run-all"
dir = "{{cwd}}"

[tasks."tf:fmt"]
run = "tofu fmt -recursive tf/"
description = "Format terraform files"

[tasks."tf:init"]
run = { task = "tf init -- -reconfigure" }
dir = "{{cwd}}"
@@ -41,6 +41,7 @@ services:
      - app-node_modules:/usr/src/app/node_modules
      - sveltekit:/usr/src/app/web/.svelte-kit
      - coverage:/usr/src/app/web/coverage
      - ../plugins:/build/corePlugin
    env_file:
      - .env
    environment:
@@ -122,7 +123,7 @@ services:
    ports:
      - 3003:3003
    volumes:
      - ../machine-learning:/usr/src/app
      - ../machine-learning/immich_ml:/usr/src/immich_ml
      - model-cache:/cache
    env_file:
      - .env
@@ -134,7 +135,7 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:8-bookworm@sha256:fea8b3e67b15729d4bb70589eb03367bab9ad1ee89c876f54327fc7c6e618571
    image: docker.io/valkey/valkey:9@sha256:4503e204c900a00ad393bec83c8c7c4c76b0529cd629e23b34b52011aefd1d27
    healthcheck:
      test: redis-cli ping || exit 1
@@ -56,7 +56,7 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:8-bookworm@sha256:fea8b3e67b15729d4bb70589eb03367bab9ad1ee89c876f54327fc7c6e618571
    image: docker.io/valkey/valkey:9@sha256:4503e204c900a00ad393bec83c8c7c4c76b0529cd629e23b34b52011aefd1d27
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always
@@ -83,7 +83,7 @@ services:
    container_name: immich_prometheus
    ports:
      - 9090:9090
    image: prom/prometheus@sha256:63805ebb8d2b3920190daf1cb14a60871b16fd38bed42b857a3182bc621f4996
    image: prom/prometheus@sha256:49214755b6153f90a597adcbff0252cc61069f8ab69ce8411285cd4a560e8038
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus-data:/prometheus
@@ -95,7 +95,7 @@ services:
    command: ['./run.sh', '-disable-reporting']
    ports:
      - 3000:3000
    image: grafana/grafana:12.1.1-ubuntu@sha256:d1da838234ff2de93e0065ee1bf0e66d38f948dcc5d718c25fa6237e14b4424a
    image: grafana/grafana:12.3.0-ubuntu@sha256:cee936306135e1925ab21dffa16f8a411535d16ab086bef2309339a8e74d62df
    volumes:
      - grafana-data:/var/lib/grafana
@@ -49,7 +49,7 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:8-bookworm@sha256:fea8b3e67b15729d4bb70589eb03367bab9ad1ee89c876f54327fc7c6e618571
    image: docker.io/valkey/valkey:9@sha256:4503e204c900a00ad393bec83c8c7c4c76b0529cd629e23b34b52011aefd1d27
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always
@@ -9,8 +9,8 @@ DB_DATA_LOCATION=./postgres
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
# TZ=Etc/UTC

# The Immich version to use. You can pin this to a specific version like "v1.71.0"
IMMICH_VERSION=release
# The Immich version to use. You can pin this to a specific version like "v2.1.0"
IMMICH_VERSION=v2

# Connection secret for postgres. You should change it to a random password
# Please use only the characters `A-Za-z0-9`, without special characters or spaces
@@ -1 +1 @@
22.20.0
24.11.1
@@ -133,9 +133,9 @@ There are a few different scenarios that can lead to this situation. The solutio
The job is only automatically run once per asset after upload. If metadata extraction originally failed, the jobs were cleared/canceled, etc.,
the job may not have run automatically the first time.

### How can I hide photos from the timeline?
### How can I hide a photo or video from the timeline?

You can _archive_ them.
You can _archive_ them. This will hide the asset from the main timeline and folder view, but it will still show up in searches. All archived assets can be found in the _Archive_ view.

### How can I backup data from Immich?
@@ -57,6 +57,7 @@ Then please follow the steps in the following section for restoring the database
<TabItem value="Linux system" label="Linux system" default>

```bash title='Backup'
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
```
@@ -69,16 +70,18 @@ docker compose create # Create Docker containers for Immich apps witho
docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
# Check the database user if you deviated from the default
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.
gunzip --stdout "/path/to/backup/dump.sql.gz" \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| docker exec -i immich_postgres psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup
docker compose up -d # Start remainder of Immich apps
```

</TabItem>
</TabItem>
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">

```powershell title='Backup'
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>))
```
@@ -92,13 +95,15 @@ docker compose create # Create Docker containers for
docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
docker exec -it immich_postgres bash # Enter the Docker shell and run the following command
# Check the database user if you deviated from the default. If your backup ends in `.gz`, replace `cat` with `gunzip --stdout`
# If your backup ends in `.gz`, replace `cat` with `gunzip --stdout`
# Replace <DB_USERNAME> with the database username - usually postgres unless you have changed it.

cat "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=postgres --username=<DB_USERNAME>
exit # Exit the Docker shell
docker compose up -d # Start remainder of Immich apps
```

</TabItem>
</TabItem>
</Tabs>

Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.), in which case you need to delete the `DB_DATA_LOCATION` folder to reset the database.
18 docs/docs/administration/maintenance-mode.md Normal file
@@ -0,0 +1,18 @@
# Maintenance Mode

Maintenance mode is used to perform administrative tasks such as restoring backups to Immich.

You can enter maintenance mode by either:

- Selecting "enable maintenance mode" in system settings in administration.
- Running the enable maintenance mode [administration command](./server-commands.md).

## Logging in during maintenance

Maintenance mode uses a separate login system which is handled automatically behind the scenes in most cases. Enabling maintenance mode in settings will automatically log you into maintenance mode when the server comes back up.

If you find that you've been logged out, you can:

- Open the logs for the Immich server and look for _"🚧 Immich is in maintenance mode, you can log in using the following URL:"_
- Run the enable maintenance mode [administration command](./server-commands.md) again; this will give you a new URL to login with (see the sketch after this list).
- Run the disable maintenance mode [administration command](./server-commands.md), then re-enter through system settings.
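A minimal sketch of that recovery path, assuming the default `immich_server` container name from the docker-compose setup:

```bash
# Re-running the enable command prints a fresh maintenance login URL.
docker exec -it immich_server immich-admin enable-maintenance-mode
# Maintenance mode has been enabled.
#
# Log in using the following URL:
# https://<your-instance>/maintenance?token=<token>
```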
@@ -10,16 +10,19 @@ Running with a pre-existing Postgres server can unlock powerful administrative f

## Prerequisites

You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
You must install pgvector as it is a prerequisite for VectorChord.
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).

You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.

:::note
Immich is known to work with Postgres versions `>= 14, < 18`.
:::note Supported versions
Immich is known to work with Postgres versions `>= 14, < 19`.

Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
VectorChord is known to work with pgvector versions `>= 0.7, < 0.9`.

The Immich server will check the VectorChord version on startup to ensure compatibility, and refuse to start if a compatible version is not found.
The current accepted range for VectorChord is `>= 0.3, < 0.6`.
:::
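A quick sanity check for the setup above (a sketch; it assumes you can run `psql` as a superuser and that the extensions have already been created):

```bash
# vchord.so should appear in the preload list configured in postgresql.conf.
psql -U postgres -c "SHOW shared_preload_libraries;"

# The installed extension versions should fall within the accepted ranges above.
psql -U postgres -c "SELECT extname, extversion FROM pg_extension WHERE extname IN ('vector', 'vchord');"
```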
## Specifying the connection URL
@@ -6,6 +6,10 @@ Users can deploy a custom reverse proxy that forwards requests to Immich. This w
Immich does not support being served on a sub-path such as `location /immich {`. It has to be served on the root path of a (sub)domain.
:::

:::info
If your reverse proxy uses the [Let's Encrypt](https://letsencrypt.org/) [http-01 challenge](https://letsencrypt.org/docs/challenge-types/#http-01-challenge), you may want to verify that the Immich well-known endpoint (`/.well-known/immich`) gets correctly routed to Immich; otherwise it will likely be routed elsewhere, and the mobile app may run into connection issues.
:::

### Nginx example config

Below is an example config for nginx. Make sure to set `public_url` to the front-facing URL of your instance, and `backend_url` to the path of the Immich server.
@@ -37,29 +41,14 @@ server {
  location / {
    proxy_pass http://<backend_url>:2283;
  }

  # useful when using Let's Encrypt http-01 challenge
  # location = /.well-known/immich {
  #   proxy_pass http://<backend_url>:2283;
  # }
}
```

#### Compatibility with Let's Encrypt

In the event that your nginx configuration includes a section for Let's Encrypt, it's likely that you have a segment similar to the following:

```nginx
location ~ /.well-known {
  ...
}
```

This particular `location` directive can inadvertently prevent mobile clients from reaching the `/.well-known/immich` path, which is crucial for discovery. The usual error message for this case is: "Your app major version is not compatible with the server". To remedy this, you should introduce an additional location block specifically for this path, ensuring that requests are correctly proxied to the Immich server:

```nginx
location = /.well-known/immich {
  proxy_pass http://<backend_url>:2283;
}
```

By doing so, you'll maintain the functionality of Let's Encrypt while allowing mobile clients to access the necessary Immich path without obstruction.

### Caddy example config

As an alternative to nginx, you can also use [Caddy](https://caddyserver.com/) as a reverse proxy (with automatic HTTPS configuration). Below is an example config.
@@ -2,17 +2,19 @@

The `immich-server` docker image comes preinstalled with an administrative CLI (`immich-admin`) that supports the following commands:

| Command                  | Description                                                    |
| ------------------------ | -------------------------------------------------------------- |
| `help`                   | Display help                                                   |
| `reset-admin-password`   | Reset the password for the admin user                          |
| `disable-password-login` | Disable password login                                         |
| `enable-password-login`  | Enable password login                                          |
| `enable-oauth-login`     | Enable OAuth login                                             |
| `disable-oauth-login`    | Disable OAuth login                                            |
| `list-users`             | List Immich users                                              |
| `version`                | Print Immich version                                           |
| `change-media-location`  | Change database file paths to align with a new media location  |
| Command                    | Description                                                    |
| -------------------------- | -------------------------------------------------------------- |
| `help`                     | Display help                                                   |
| `reset-admin-password`     | Reset the password for the admin user                          |
| `disable-password-login`   | Disable password login                                         |
| `enable-password-login`    | Enable password login                                          |
| `disable-maintenance-mode` | Disable maintenance mode                                       |
| `enable-maintenance-mode`  | Enable maintenance mode                                        |
| `enable-oauth-login`       | Enable OAuth login                                             |
| `disable-oauth-login`      | Disable OAuth login                                            |
| `list-users`               | List Immich users                                              |
| `version`                  | Print Immich version                                           |
| `change-media-location`    | Change database file paths to align with a new media location  |

## How to run a command

@@ -47,6 +49,23 @@
immich-admin enable-password-login
Password login has been enabled.
```

Disable Maintenance Mode

```
immich-admin disable-maintenance-mode
Maintenance mode has been disabled.
```

Enable Maintenance Mode

```
immich-admin enable-maintenance-mode
Maintenance mode has been enabled.

Log in using the following URL:
https://my.immich.app/maintenance?token=<token>
```

Enable OAuth login

```
@@ -1,12 +0,0 @@
# Community Guides

This page lists community guides that are written around Immich, but not officially supported by the development team.

:::warning
This list comes with no guarantees about security, performance, reliability, or accuracy. Use at your own risk.
:::

import CommunityGuides from '../src/components/community-guides.tsx';
import React from 'react';

<CommunityGuides />
@@ -1,12 +0,0 @@
# Community Projects

This page lists community projects that are built around Immich, but not officially supported by the development team.

:::warning
This list comes with no guarantees about security, performance, reliability, or accuracy. Use at your own risk.
:::

import CommunityProjects from '../src/components/community-projects.tsx';
import React from 'react';

<CommunityProjects />
@@ -12,3 +12,13 @@ pnpm run migrations:generate <migration-name>
|
||||
3. Move the migration file to the folder `./server/src/schema/migrations` in your code editor.
|
||||
|
||||
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they are applied immediately.
|
||||
|
||||
## Reverting a Migration
|
||||
|
||||
If you need to undo the most recently applied migration, for example while developing or testing schema changes, run:
|
||||
|
||||
```bash
|
||||
pnpm run migrations:revert
|
||||
```
|
||||
|
||||
This command rolls back the latest migration and brings the database schema back to its previous state.
|
||||
|
||||
@@ -256,7 +256,7 @@ The Dev Container supports multiple ways to run tests:
|
||||
|
||||
```bash
|
||||
# Run tests for specific components
|
||||
make test-server # Server unit tests
|
||||
make test-web # Web unit tests
|
||||
make test-e2e # End-to-end tests
|
||||
make test-cli # CLI tests
|
||||
@@ -268,12 +268,13 @@ make test-all # Runs tests for all components
|
||||
make test-medium-dev # End-to-end tests
|
||||
```
|
||||
|
||||
#### Using NPM Directly
|
||||
#### Using PNPM Directly
|
||||
|
||||
```bash
|
||||
# Server tests
|
||||
cd /workspaces/immich/server
|
||||
pnpm test # Run all tests
|
||||
pnpm run test:medium # Medium tests (integration tests)
|
||||
pnpm run test:watch # Watch mode
|
||||
pnpm run test:cov # Coverage report
|
||||
|
||||
@@ -293,21 +294,21 @@ pnpm run test:web # Run web UI tests
|
||||
```bash
|
||||
# Linting
|
||||
make lint-server # Lint server code
|
||||
make lint-web # Lint web code
|
||||
make lint-all # Lint all components
|
||||
|
||||
# Formatting
|
||||
make format-server # Format server code
|
||||
make format-web # Format web code
|
||||
make format-all # Format all code
|
||||
|
||||
# Type checking
|
||||
make check-server # Type check server
|
||||
make check-web # Type check web
|
||||
make check-all # Check all components
|
||||
|
||||
# Complete hygiene check
|
||||
make hygiene-all # Runs lint, format, check, SQL sync, and audit
|
||||
make hygiene-all # Run lint, format, check, SQL sync, and audit
|
||||
```
|
||||
|
||||
### Additional Make Commands
|
||||
@@ -315,21 +316,21 @@ make hygiene-all # Runs lint, format, check, SQL sync, and audit
|
||||
```bash
|
||||
# Build commands
|
||||
make build-server # Build server
|
||||
make build-web # Build web app
|
||||
make build-all # Build everything
|
||||
|
||||
# API generation
|
||||
make open-api # Generate OpenAPI specs
|
||||
make open-api-typescript # Generate TypeScript SDK
|
||||
make open-api-dart # Generate Dart SDK
|
||||
|
||||
# Database
|
||||
make sql # Sync database schema
|
||||
|
||||
# Dependencies
|
||||
make install-server # Install server dependencies
|
||||
make install-web # Install web dependencies
|
||||
make install-all # Install all dependencies
|
||||
```
|
||||
|
||||
### Debugging
|
||||
|
||||
@@ -14,15 +14,15 @@ When contributing code through a pull request, please check the following:
|
||||
- [ ] `pnpm run check:typescript` (check typescript)
|
||||
- [ ] `pnpm test` (unit tests)
|
||||
|
||||
:::tip AIO
|
||||
Run all web checks with `pnpm run check:all`
|
||||
:::
|
||||
|
||||
## Documentation
|
||||
|
||||
- [ ] `pnpm run format` (formatting via Prettier)
|
||||
- [ ] Update the `_redirects` file if you have renamed a page or removed it from the documentation.
|
||||
|
||||
:::tip AIO
|
||||
Run all web checks with `pnpm run check:all`
|
||||
:::
|
||||
|
||||
## Server Checks
|
||||
|
||||
- [ ] `pnpm run lint` (linting via ESLint)
|
||||
|
||||
@@ -5,7 +5,7 @@ sidebar_position: 2
|
||||
# Setup
|
||||
|
||||
:::note
|
||||
If there's a feature you're planning to work on, just give us a heads up in [Discord](https://discord.com/channels/979116623879368755/1071165397228855327) so we can:
|
||||
If there's a feature you're planning to work on, just give us a heads up in [#contributing](https://discord.com/channels/979116623879368755/1071165397228855327) on [our Discord](https://discord.immich.app) so we can:
|
||||
|
||||
1. Let you know if it's something we would accept into Immich
|
||||
2. Provide any guidance on how something like that would ideally be implemented
|
||||
|
||||
@@ -18,6 +18,7 @@ make e2e
|
||||
Before you can run the tests, you need to run the following commands _once_:
|
||||
|
||||
- `pnpm install` (in `e2e/`)
|
||||
- `pnpm run build` (in `cli/`)
|
||||
- `make open-api` (in the project root `/`)
|
||||
|
||||
Once the test environment is running, the e2e tests can be run via:
|
||||
|
||||
@@ -103,6 +103,7 @@ Options:
|
||||
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
|
||||
-j, --json-output Output detailed information in json format (default: false, env: IMMICH_JSON_OUTPUT)
|
||||
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
||||
--delete-duplicates Delete local assets that are duplicates (already exist on server) (env: IMMICH_DELETE_DUPLICATES)
|
||||
--no-progress Hide progress bars (env: IMMICH_PROGRESS_BAR)
|
||||
--watch Watch for changes and upload automatically (default: false, env: IMMICH_WATCH_CHANGES)
|
||||
--help display help for command
|
||||
@@ -182,7 +183,7 @@ For example to get a list of files that would be uploaded for further
|
||||
processing:
|
||||
|
||||
```bash
|
||||
immich upload --dry-run . | tail -n +4 | jq .newFiles[]
|
||||
immich upload --dry-run . | tail -n +6 | jq .newFiles[]
|
||||
```
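Alternatively, the `--json-output` flag listed above should make the `tail` offset unnecessary; a sketch, assuming the JSON payload exposes the same `newFiles` array:

```bash
immich upload --dry-run --json-output . | jq '.newFiles[]'
```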
|
||||
|
||||
### Obtain the API Key
|
||||
|
||||
@@ -54,9 +54,25 @@ You do not need to redo any machine learning jobs after enabling hardware accele
|
||||
#### OpenVINO
|
||||
|
||||
- Integrated GPUs are more likely to experience issues than discrete GPUs, especially for older processors or servers with low RAM.
|
||||
- Ensure the server's kernel version is new enough to use the device for hardware accceleration.
|
||||
- Ensure the server's kernel version is new enough to use the device for hardware acceleration.
|
||||
- Expect higher RAM usage when using OpenVINO compared to CPU processing.
|
||||
|
||||
#### OpenVINO-WSL
|
||||
|
||||
- Ensure your container can access the `/dev/dri` directory. You can verify this by running `docker exec -t immich_machine_learning ls -la /dev/dri`. If this is not the case, execute `getent group render` and `getent group video` on the WSL host, then add those groups to `hwaccel.ml.yaml`:
|
||||
```yaml
|
||||
openvino-wsl:
|
||||
devices:
|
||||
- /dev/dri:/dev/dri
|
||||
- /dev/dxg:/dev/dxg
|
||||
volumes:
|
||||
- /dev/bus/usb:/dev/bus/usb
|
||||
- /usr/lib/wsl:/usr/lib/wsl
|
||||
group_add:
|
||||
- 44 # Replace this number with the number you found with getent group video
|
||||
- 992 # Replace this number with the number you found with getent group render
|
||||
```
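For example, `getent group video` prints something like `video:x:44:`; the third field is the GID that belongs in the `group_add` list above.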
|
||||
|
||||
#### RKNN
|
||||
|
||||
- You must have a supported Rockchip SoC: only RK3566, RK3568, RK3576 and RK3588 are supported at this moment.
|
||||
|
||||
@@ -3,7 +3,6 @@ import { mdiCloudOffOutline, mdiCloudCheckOutline } from '@mdi/js';
|
||||
import MobileAppDownload from '/docs/partials/_mobile-app-download.md';
|
||||
import MobileAppLogin from '/docs/partials/_mobile-app-login.md';
|
||||
import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';
|
||||
import { cloudDonePath, cloudOffPath } from '@site/src/components/svg-paths';
|
||||
|
||||
# Mobile App
|
||||
|
||||
@@ -11,6 +10,16 @@ import { cloudDonePath, cloudOffPath } from '@site/src/components/svg-paths';
|
||||
|
||||
<MobileAppDownload />
|
||||
|
||||
:::info Android verification
|
||||
Below are the SHA-256 fingerprints of the certificates used to sign the Android applications.
|
||||
|
||||
- Play Store / GitHub releases:
|
||||
`86:C5:C4:55:DF:AF:49:85:92:3A:8F:35:AD:B3:1D:0C:9E:0B:95:7D:7F:94:C2:D2:AF:6A:24:38:AA:96:00:20`
|
||||
- F-Droid releases:
|
||||
`FA:8B:43:95:F4:A6:47:71:A0:53:D1:C7:57:73:5F:A2:30:13:74:F5:3D:58:0D:D1:75:AA:F7:A1:35:72:9C:BF`
|
||||
|
||||
:::
|
||||
|
||||
:::info Beta Program
|
||||
The beta release channel allows users to test upcoming changes before they are officially released. To join the channel, use the links below.
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ You can read this guide to learn more about [partner sharing](/features/partner-
|
||||
|
||||
## Public sharing
|
||||
|
||||
You can create a public link to share a group of photos or videos, or an album, with anyone. The public link can be shared via email, social media, or any other method. There are a varierity of options to customize the public link, such as setting an expiration date, password protection, and more. Public shared link is handy when you want to share a group of photos or videos with someone who doesn't have an Immich account and allow the shared user to upload their photos or videos to your account.
|
||||
You can create a public link to share a group of photos or videos, or an album, with anyone. The public link can be shared via email, social media, or any other method. There are a variety of options to customize the public link, such as setting an expiration date, password protection, and more. A public shared link is handy when you want to share a group of photos or videos with someone who doesn't have an Immich account and allow the shared user to upload their photos or videos to your account.
|
||||
|
||||
The public shared link is generated with a random URL, which acts as a secret to prevent the link from being guessed by unwanted parties.
|
||||
|
||||
|
||||
@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"
|
||||
|
||||
```sql title="Count by tag"
|
||||
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
|
||||
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
|
||||
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
|
||||
WHERE "a"."visibility" != 'hidden'
|
||||
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
|
||||
```
|
||||
|
||||
```sql title="Count by tag (per user)"
|
||||
SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
|
||||
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
|
||||
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
|
||||
WHERE "a"."visibility" != 'hidden'
|
||||
GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
|
||||
```
|
||||
|
||||
@@ -37,7 +37,7 @@ In the Immich web UI:
|
||||
<img src={require('./img/create-external-library.webp').default} width="50%" title="Create Library button" />
|
||||
|
||||
- In the dialog, select which user should own the new library
|
||||
<img src={require('./img/library-owner.webp').default} width="50%" title="Library owner diaglog" />
|
||||
<img src={require('./img/library-owner.webp').default} width="50%" title="Library owner dialog" />
|
||||
|
||||
- Click the three-dots menu and select **Edit Import Paths**
|
||||
<img src={require('./img/edit-import-paths.webp').default} width="50%" title="Edit Import Paths menu option" />
|
||||
|
||||
@@ -16,48 +16,76 @@ The default configuration looks like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"ffmpeg": {
|
||||
"crf": 23,
|
||||
"threads": 0,
|
||||
"preset": "ultrafast",
|
||||
"targetVideoCodec": "h264",
|
||||
"acceptedVideoCodecs": ["h264"],
|
||||
"targetAudioCodec": "aac",
|
||||
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
|
||||
"acceptedContainers": ["mov", "ogg", "webm"],
|
||||
"targetResolution": "720",
|
||||
"maxBitrate": "0",
|
||||
"bframes": -1,
|
||||
"refs": 0,
|
||||
"gopSize": 0,
|
||||
"temporalAQ": false,
|
||||
"cqMode": "auto",
|
||||
"twoPass": false,
|
||||
"preferredHwDevice": "auto",
|
||||
"transcode": "required",
|
||||
"tonemap": "hable",
|
||||
"accel": "disabled",
|
||||
"accelDecode": false
|
||||
},
|
||||
"backup": {
|
||||
"database": {
|
||||
"enabled": true,
|
||||
"cronExpression": "0 02 * * *",
|
||||
"enabled": true,
|
||||
"keepLastAmount": 14
|
||||
}
|
||||
},
|
||||
"ffmpeg": {
|
||||
"accel": "disabled",
|
||||
"accelDecode": false,
|
||||
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
|
||||
"acceptedContainers": ["mov", "ogg", "webm"],
|
||||
"acceptedVideoCodecs": ["h264"],
|
||||
"bframes": -1,
|
||||
"cqMode": "auto",
|
||||
"crf": 23,
|
||||
"gopSize": 0,
|
||||
"maxBitrate": "0",
|
||||
"preferredHwDevice": "auto",
|
||||
"preset": "ultrafast",
|
||||
"refs": 0,
|
||||
"targetAudioCodec": "aac",
|
||||
"targetResolution": "720",
|
||||
"targetVideoCodec": "h264",
|
||||
"temporalAQ": false,
|
||||
"threads": 0,
|
||||
"tonemap": "hable",
|
||||
"transcode": "required",
|
||||
"twoPass": false
|
||||
},
|
||||
"image": {
|
||||
"colorspace": "p3",
|
||||
"extractEmbedded": false,
|
||||
"fullsize": {
|
||||
"enabled": false,
|
||||
"format": "jpeg",
|
||||
"quality": 80
|
||||
},
|
||||
"preview": {
|
||||
"format": "jpeg",
|
||||
"quality": 80,
|
||||
"size": 1440
|
||||
},
|
||||
"thumbnail": {
|
||||
"format": "webp",
|
||||
"quality": 80,
|
||||
"size": 250
|
||||
}
|
||||
},
|
||||
"job": {
|
||||
"backgroundTask": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"smartSearch": {
|
||||
"faceDetection": {
|
||||
"concurrency": 2
|
||||
},
|
||||
"library": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"metadataExtraction": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"faceDetection": {
|
||||
"concurrency": 2
|
||||
"migration": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"notifications": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"ocr": {
|
||||
"concurrency": 1
|
||||
},
|
||||
"search": {
|
||||
"concurrency": 5
|
||||
@@ -65,20 +93,23 @@ The default configuration looks like this:
|
||||
"sidecar": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"library": {
|
||||
"concurrency": 5
|
||||
},
|
||||
"migration": {
|
||||
"concurrency": 5
|
||||
"smartSearch": {
|
||||
"concurrency": 2
|
||||
},
|
||||
"thumbnailGeneration": {
|
||||
"concurrency": 3
|
||||
},
|
||||
"videoConversion": {
|
||||
"concurrency": 1
|
||||
}
|
||||
},
|
||||
"library": {
|
||||
"scan": {
|
||||
"cronExpression": "0 0 * * *",
|
||||
"enabled": true
|
||||
},
|
||||
"notifications": {
|
||||
"concurrency": 5
|
||||
"watch": {
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
"logging": {
|
||||
@@ -86,8 +117,11 @@ The default configuration looks like this:
|
||||
"level": "log"
|
||||
},
|
||||
"machineLearning": {
|
||||
"enabled": true,
|
||||
"urls": ["http://immich-machine-learning:3003"],
|
||||
"availabilityChecks": {
|
||||
"enabled": true,
|
||||
"interval": 30000,
|
||||
"timeout": 2000
|
||||
},
|
||||
"clip": {
|
||||
"enabled": true,
|
||||
"modelName": "ViT-B-32__openai"
|
||||
@@ -96,27 +130,59 @@ The default configuration looks like this:
|
||||
"enabled": true,
|
||||
"maxDistance": 0.01
|
||||
},
|
||||
"enabled": true,
|
||||
"facialRecognition": {
|
||||
"enabled": true,
|
||||
"modelName": "buffalo_l",
|
||||
"minScore": 0.7,
|
||||
"maxDistance": 0.5,
|
||||
"minFaces": 3
|
||||
}
|
||||
"minFaces": 3,
|
||||
"minScore": 0.7,
|
||||
"modelName": "buffalo_l"
|
||||
},
|
||||
"ocr": {
|
||||
"enabled": true,
|
||||
"maxResolution": 736,
|
||||
"minDetectionScore": 0.5,
|
||||
"minRecognitionScore": 0.8,
|
||||
"modelName": "PP-OCRv5_mobile"
|
||||
},
|
||||
"urls": ["http://immich-machine-learning:3003"]
|
||||
},
|
||||
"map": {
|
||||
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
|
||||
"enabled": true,
|
||||
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json",
|
||||
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
|
||||
},
|
||||
"reverseGeocoding": {
|
||||
"enabled": true
|
||||
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
|
||||
},
|
||||
"metadata": {
|
||||
"faces": {
|
||||
"import": false
|
||||
}
|
||||
},
|
||||
"newVersionCheck": {
|
||||
"enabled": true
|
||||
},
|
||||
"nightlyTasks": {
|
||||
"clusterNewFaces": true,
|
||||
"databaseCleanup": true,
|
||||
"generateMemories": true,
|
||||
"missingThumbnails": true,
|
||||
"startTime": "00:00",
|
||||
"syncQuotaUsage": true
|
||||
},
|
||||
"notifications": {
|
||||
"smtp": {
|
||||
"enabled": false,
|
||||
"from": "",
|
||||
"replyTo": "",
|
||||
"transport": {
|
||||
"host": "",
|
||||
"ignoreCert": false,
|
||||
"password": "",
|
||||
"port": 587,
|
||||
"secure": false,
|
||||
"username": ""
|
||||
}
|
||||
}
|
||||
},
|
||||
"oauth": {
|
||||
"autoLaunch": false,
|
||||
"autoRegister": true,
|
||||
@@ -128,70 +194,44 @@ The default configuration looks like this:
|
||||
"issuerUrl": "",
|
||||
"mobileOverrideEnabled": false,
|
||||
"mobileRedirectUri": "",
|
||||
"profileSigningAlgorithm": "none",
|
||||
"roleClaim": "immich_role",
|
||||
"scope": "openid email profile",
|
||||
"signingAlgorithm": "RS256",
|
||||
"profileSigningAlgorithm": "none",
|
||||
"storageLabelClaim": "preferred_username",
|
||||
"storageQuotaClaim": "immich_quota"
|
||||
"storageQuotaClaim": "immich_quota",
|
||||
"timeout": 30000,
|
||||
"tokenEndpointAuthMethod": "client_secret_post"
|
||||
},
|
||||
"passwordLogin": {
|
||||
"enabled": true
|
||||
},
|
||||
"reverseGeocoding": {
|
||||
"enabled": true
|
||||
},
|
||||
"server": {
|
||||
"externalDomain": "",
|
||||
"loginPageMessage": "",
|
||||
"publicUsers": true
|
||||
},
|
||||
"storageTemplate": {
|
||||
"enabled": false,
|
||||
"hashVerificationEnabled": true,
|
||||
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
|
||||
},
|
||||
"image": {
|
||||
"thumbnail": {
|
||||
"format": "webp",
|
||||
"size": 250,
|
||||
"quality": 80
|
||||
},
|
||||
"preview": {
|
||||
"format": "jpeg",
|
||||
"size": 1440,
|
||||
"quality": 80
|
||||
},
|
||||
"colorspace": "p3",
|
||||
"extractEmbedded": false
|
||||
},
|
||||
"newVersionCheck": {
|
||||
"enabled": true
|
||||
},
|
||||
"trash": {
|
||||
"enabled": true,
|
||||
"days": 30
|
||||
"templates": {
|
||||
"email": {
|
||||
"albumInviteTemplate": "",
|
||||
"albumUpdateTemplate": "",
|
||||
"welcomeTemplate": ""
|
||||
}
|
||||
},
|
||||
"theme": {
|
||||
"customCss": ""
|
||||
},
|
||||
"library": {
|
||||
"scan": {
|
||||
"enabled": true,
|
||||
"cronExpression": "0 0 * * *"
|
||||
},
|
||||
"watch": {
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
"server": {
|
||||
"externalDomain": "",
|
||||
"loginPageMessage": ""
|
||||
},
|
||||
"notifications": {
|
||||
"smtp": {
|
||||
"enabled": false,
|
||||
"from": "",
|
||||
"replyTo": "",
|
||||
"transport": {
|
||||
"ignoreCert": false,
|
||||
"host": "",
|
||||
"port": 587,
|
||||
"username": "",
|
||||
"password": ""
|
||||
}
|
||||
}
|
||||
"trash": {
|
||||
"days": 30,
|
||||
"enabled": true
|
||||
},
|
||||
"user": {
|
||||
"deleteDelay": 7
|
||||
|
||||
@@ -62,10 +62,10 @@ Information on the current workers can be found [here](/administration/jobs-work
|
||||
|
||||
## Ports
|
||||
|
||||
| Variable | Description | Default |
|
||||
| :------------ | :------------- | :----------------------------------------: |
|
||||
| `IMMICH_HOST` | Listening host | `0.0.0.0` |
|
||||
| `IMMICH_PORT` | Listening port | `2283` (server), `3003` (machine learning) |
|
||||
| Variable | Description | Default | Containers |
|
||||
| :------------ | :------------- | :----------------------------------------: | :----------------------- |
|
||||
| `IMMICH_HOST` | Listening host | `0.0.0.0` | server, machine learning |
|
||||
| `IMMICH_PORT` | Listening port | `2283` (server), `3003` (machine learning) | server, machine learning |
|
||||
|
||||
## Database
|
||||
|
||||
@@ -80,7 +80,7 @@ Information on the current workers can be found [here](/administration/jobs-work
|
||||
| `DB_SSL_MODE` | Database SSL mode | | server |
|
||||
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
|
||||
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
|
||||
| `DB_STORAGE_TYPE` | Optimize concurrent IO on SSDs or sequential IO on HDDs ([`SSD`, `HDD`])<sup>\*3</sup> | `SSD` | server |
|
||||
| `DB_STORAGE_TYPE` | Optimize concurrent IO on SSDs or sequential IO on HDDs ([`SSD`, `HDD`])<sup>\*3</sup> | `SSD` | database |
|
||||
|
||||
\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
|
||||
|
||||
@@ -93,7 +93,7 @@ Information on the current workers can be found [here](/administration/jobs-work
|
||||
All `DB_` variables must be provided to all Immich workers, including `api` and `microservices`.
|
||||
|
||||
`DB_URL` must be in the format `postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename`.
|
||||
You can require SSL by adding `?sslmode=require` to the end of the `DB_URL` string, or require SSL and skip certificate verification by adding `?sslmode=require&sslmode=no-verify`.
|
||||
You can require SSL by adding `?sslmode=require` to the end of the `DB_URL` string, or require SSL and skip certificate verification by adding `?sslmode=require&uselibpqcompat=true`. This allows both Immich and `pg_dumpall` (the utility used for database backups) to [properly connect](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string#tcp-connections) to your database.
|
||||
|
||||
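For example, with the compose service name `database` and placeholder credentials, a complete value might look like:

```
DB_URL=postgresql://immich:somepassword@database:5432/immich?sslmode=require
```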
When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSWORD` and `DB_DATABASE_NAME` database variables are ignored.
|
||||
|
||||
@@ -149,28 +149,31 @@ Redis (Sentinel) URL example JSON before encoding:
|
||||
|
||||
## Machine Learning
|
||||
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
|
||||
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
|
||||
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
|
||||
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spinned up while inferencing. | `1` | machine learning |
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
|
||||
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
|
||||
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
|
||||
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
|
||||
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
|
||||
| `MACHINE_LEARNING_MAX_BATCH_SIZE__OCR` | Set the maximum number of boxes that will be processed at once by the OCR model | `6` | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_RKNN_THREADS`                              | Number of RKNN runtime threads to spin up during inference                                                                                                     | `1`                             | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_ARENA`                               | Pre-allocate CPU memory to avoid memory fragmentation                                                                                                          | `True`                          | machine learning |
|
||||
| `MACHINE_LEARNING_OPENVINO_PRECISION` | If set to FP16, uses half-precision floating-point operations for faster inference with reduced accuracy (one of [`FP16`, `FP32`], applies only to OpenVINO) | `FP32` | machine learning |
|
||||
|
||||
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
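As an illustration of the double-underscore preload variables in the table above (the model name here is the default CLIP model from the system config, used purely as an example):

```
MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL=ViT-B-32__openai
MACHINE_LEARNING_PRELOAD__CLIP__VISUAL=ViT-B-32__openai
```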
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@ In the settings of your new project, set "**Project name**" to a name you'll rem
|
||||
|
||||

|
||||
|
||||
The following screen will give you the option to further customize your `docker-compose.yml` file. Take note of `DB_STORAGE_TYPE: 'HDD'`and uncomment if applicable for your Synology setup.
|
||||
The following screen will give you the option to further customize your `docker-compose.yml` file. Take note of `DB_STORAGE_TYPE: 'HDD'` and uncomment if applicable for your Synology setup.
|
||||
|
||||

|
||||
|
||||
|
||||
@@ -87,7 +87,7 @@ After making a backup, please modify your `docker-compose.yml` file with the fol
|
||||
If you deviated from the defaults of pg14 or pgvectors0.2.0, you must adjust the pg major version and pgvecto.rs version. If you are still using the default `docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0` image, you can just follow the changes above. For example, if the previous image is `docker.io/tensorchord/pgvecto-rs:pg16-v0.3.0`, the new image should be `ghcr.io/immich-app/postgres:16-vectorchord0.3.0-pgvectors0.3.0` instead of the image specified in the diff.
|
||||
:::
|
||||
|
||||
After making these changes, you can start Immich as normal. Immich will make some changes to the DB during startup, which can take seconds to minutes to finish, depending on hardware and library size. In particular, it’s normal for the server logs to be seemingly stuck at `Reindexing clip_index` and `Reindexing face_index`for some time if you have over 100k assets in Immich and/or Immich is on a relatively weak server. If you see these logs and there are no errors, just give it time.
|
||||
After making these changes, you can start Immich as normal. Immich will make some changes to the DB during startup, which can take seconds to minutes to finish, depending on hardware and library size. In particular, it’s normal for the server logs to be seemingly stuck at `Reindexing clip_index` and `Reindexing face_index` for some time if you have over 100k assets in Immich and/or Immich is on a relatively weak server. If you see these logs and there are no errors, just give it time.
|
||||
|
||||
:::danger
|
||||
After switching to VectorChord, you should not downgrade Immich below 1.133.0.
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
The mobile app can be downloaded from the following places:
|
||||
|
||||
- Obtainium: You can get your Obtainium config link from the [Utilities page of your Immich server](https://my.immich.app/utilities).
|
||||
- [Google Play Store](https://play.google.com/store/apps/details?id=app.alextran.immich)
|
||||
- [Apple App Store](https://apps.apple.com/us/app/immich/id1613945652)
|
||||
- [F-Droid](https://f-droid.org/packages/app.alextran.immich)
|
||||
|
||||
25
docs/mise.toml
Normal file
@@ -0,0 +1,25 @@
|
||||
[tasks.install]
|
||||
run = "pnpm install --filter documentation --frozen-lockfile"
|
||||
|
||||
[tasks.start]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "docusaurus --port 3005"
|
||||
|
||||
[tasks.build]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = [
|
||||
"jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
|
||||
"docusaurus build",
|
||||
]
|
||||
|
||||
[tasks.preview]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "docusaurus serve"
|
||||
|
||||
[tasks.format]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."format-fix"]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "prettier --write ."
|
||||
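With these tasks defined, the docs site can be installed and served through mise's task runner:

```bash
mise run install
mise run start   # serves the site on port 3005
```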
@@ -57,6 +57,6 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.20.0"
|
||||
"node": "24.11.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
import Link from '@docusaurus/Link';
|
||||
import React from 'react';
|
||||
|
||||
interface CommunityGuidesProps {
|
||||
title: string;
|
||||
description: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
const guides: CommunityGuidesProps[] = [
|
||||
{
|
||||
title: 'Cloudflare Tunnels with SSO/OAuth',
|
||||
description: `Setting up Cloudflare Tunnels and a SaaS App for Immich.`,
|
||||
url: 'https://github.com/immich-app/immich/discussions/8299',
|
||||
},
|
||||
{
|
||||
title: 'Database backup in TrueNAS',
|
||||
description: `Create a database backup with pgAdmin in TrueNAS.`,
|
||||
url: 'https://github.com/immich-app/immich/discussions/8809',
|
||||
},
|
||||
{
|
||||
title: 'Unraid backup scripts',
|
||||
description: `Back up your assets in Unraid with a pre-prepared script.`,
|
||||
url: 'https://github.com/immich-app/immich/discussions/8416',
|
||||
},
|
||||
{
|
||||
title: 'Sync folders with albums',
|
||||
description: `Synchronize folders in an imported library with albums named after the folders.`,
|
||||
url: 'https://github.com/immich-app/immich/discussions/3382',
|
||||
},
|
||||
{
|
||||
title: 'Immich Podman Quadlets Handbook',
|
||||
description:
|
||||
'A rewrite of the original Immich Docker Compose file using Podman Quadlets, with a set of extra guides in the repository’s wiki.',
|
||||
url: 'https://github.com/linux-universe/immich-podman-quadlets/blob/main/README.md',
|
||||
},
|
||||
{
|
||||
title: 'Podman/Quadlets Install',
|
||||
description: 'Documentation for simple podman setup using quadlets.',
|
||||
url: 'https://github.com/tbelway/immich-podman-quadlets/blob/main/docs/install/podman-quadlet.md',
|
||||
},
|
||||
{
|
||||
title: 'Google Photos import + albums',
|
||||
description: 'Import your Google Photos files into Immich and add your albums.',
|
||||
url: 'https://github.com/immich-app/immich/discussions/1340',
|
||||
},
|
||||
{
|
||||
title: 'Access Immich with custom domain',
|
||||
description: 'Access your local Immich installation over the internet using your own domain.',
|
||||
url: 'https://github.com/ppr88/immich-guides/blob/main/open-immich-custom-domain.md',
|
||||
},
|
||||
{
|
||||
title: 'Nginx caching map server',
|
||||
description: 'Increase privacy by using nginx as a caching proxy in front of a map tile server.',
|
||||
url: 'https://github.com/pcouy/pcouy.github.io/blob/main/_posts/2024-08-30-proxying-a-map-tile-server-for-increased-privacy.md',
|
||||
},
|
||||
{
|
||||
title: 'fail2ban setup instructions',
|
||||
description: 'How to configure an existing fail2ban installation to block incorrect login attempts.',
|
||||
url: 'https://github.com/immich-app/immich/discussions/3243#discussioncomment-6681948',
|
||||
},
|
||||
{
|
||||
title: 'Immich remote access with NordVPN Meshnet',
|
||||
description: 'Access Immich with an end-to-end encrypted connection.',
|
||||
url: 'https://meshnet.nordvpn.com/how-to/remote-files-media-access/immich-remote-access',
|
||||
},
|
||||
{
|
||||
title: 'Trust Self Signed Certificates with Immich - OAuth Setup',
|
||||
description:
|
||||
'Set up Certificate Authority trust with Immich and your private OAuth2/OpenID service, while using a private CA for HTTPS communication.',
|
||||
url: 'https://github.com/immich-app/immich/discussions/18614',
|
||||
},
|
||||
];
|
||||
|
||||
function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {
|
||||
return (
|
||||
<section className="flex flex-col gap-4 justify-between dark:bg-immich-dark-gray bg-immich-gray dark:border-0 border-gray-200 border border-solid rounded-2xl px-4 py-6">
|
||||
<div className="flex flex-col gap-2">
|
||||
<p className="m-0 items-start flex gap-2 text-2xl font-bold text-immich-primary dark:text-immich-dark-primary">
|
||||
<span>{title}</span>
|
||||
</p>
|
||||
|
||||
<p className="m-0 text-sm text-gray-600 dark:text-gray-300">{description}</p>
|
||||
<p className="m-0 text-sm text-gray-600 dark:text-gray-300 my-4">
|
||||
<a href={url}>{url}</a>
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex">
|
||||
<Link
|
||||
className="px-4 py-2 bg-immich-primary/10 dark:bg-gray-300 rounded-xl text-sm hover:no-underline text-immich-primary dark:text-immich-dark-bg font-semibold"
|
||||
to={url}
|
||||
>
|
||||
View Guide
|
||||
</Link>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
|
||||
export default function CommunityGuides(): JSX.Element {
|
||||
return (
|
||||
<div className="grid grid-cols-1 xl:grid-cols-2 gap-4">
|
||||
{guides.map((guide) => (
<CommunityGuide {...guide} />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,158 +0,0 @@
|
||||
import Link from '@docusaurus/Link';
|
||||
import React from 'react';
|
||||
|
||||
interface CommunityProjectProps {
|
||||
title: string;
|
||||
description: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
const projects: CommunityProjectProps[] = [
|
||||
{
|
||||
title: 'immich-go',
|
||||
description: `An alternative to the immich-CLI that doesn't depend on nodejs. It specializes in importing Google Photos Takeout archives.`,
|
||||
url: 'https://github.com/simulot/immich-go',
|
||||
},
|
||||
{
|
||||
title: 'ImmichFrame',
|
||||
description: 'Run an Immich slideshow in a photo frame.',
|
||||
url: 'https://github.com/3rob3/ImmichFrame',
|
||||
},
|
||||
{
|
||||
title: 'API Album Sync',
|
||||
description: 'A Python script to sync folders as albums.',
|
||||
url: 'https://git.orenit.solutions/open/immichalbumpull',
|
||||
},
|
||||
{
|
||||
title: 'Immich-Tools',
|
||||
description: 'Provides scripts for handling problems on the repair page.',
|
||||
url: 'https://github.com/clumsyCoder00/Immich-Tools',
|
||||
},
|
||||
{
|
||||
title: 'Lightroom Publisher: mi.Immich.Publisher',
|
||||
description: 'Lightroom plugin to publish photos from Lightroom collections to Immich albums.',
|
||||
url: 'https://github.com/midzelis/mi.Immich.Publisher',
|
||||
},
|
||||
{
|
||||
title: 'Lightroom Immich Plugin: lrc-immich-plugin',
|
||||
description:
|
||||
'Lightroom plugin to publish, export photos from Lightroom to Immich. Import from Immich to Lightroom is also supported.',
|
||||
url: 'https://blog.fokuspunk.de/lrc-immich-plugin/',
|
||||
},
|
||||
{
|
||||
title: 'Immich-Tiktok-Remover',
|
||||
description: 'Script to search for and remove TikTok videos from your Immich library.',
|
||||
url: 'https://github.com/mxc2/immich-tiktok-remover',
|
||||
},
|
||||
{
|
||||
title: 'Immich Android TV',
|
||||
description: 'Unofficial Immich Android TV app.',
|
||||
url: 'https://github.com/giejay/Immich-Android-TV',
|
||||
},
|
||||
{
|
||||
title: 'Create albums from folders',
|
||||
description: 'A Python script to create albums based on the folder structure of an external library.',
|
||||
url: 'https://github.com/Salvoxia/immich-folder-album-creator',
|
||||
},
|
||||
{
|
||||
title: 'Powershell Module PSImmich',
|
||||
description: 'Powershell Module for the Immich API',
|
||||
url: 'https://github.com/hanpq/PSImmich',
|
||||
},
|
||||
{
|
||||
title: 'Immich Distribution',
|
||||
description: 'Snap package for easy install and zero-care auto updates of Immich. Self-hosted photo management.',
|
||||
url: 'https://immich-distribution.nsg.cc',
|
||||
},
|
||||
{
|
||||
title: 'Immich Kiosk',
|
||||
description: 'Lightweight slideshow to run on kiosk devices and browsers.',
|
||||
url: 'https://github.com/damongolding/immich-kiosk',
|
||||
},
|
||||
{
|
||||
title: 'Immich Power Tools',
|
||||
description: 'Power tools for organizing your immich library.',
|
||||
url: 'https://github.com/varun-raj/immich-power-tools',
|
||||
},
|
||||
{
|
||||
title: 'Immich Public Proxy',
|
||||
description:
|
||||
'Share your Immich photos and albums in a safe way without exposing your Immich instance to the public.',
|
||||
url: 'https://github.com/alangrainger/immich-public-proxy',
|
||||
},
|
||||
{
|
||||
title: 'Immich Kodi',
|
||||
description: 'Unofficial Kodi plugin for Immich.',
|
||||
url: 'https://github.com/vladd11/immich-kodi',
|
||||
},
|
||||
{
|
||||
title: 'Immich Downloader',
|
||||
description: 'Downloads a configurable number of random photos based on people or album ID.',
|
||||
url: 'https://github.com/jon6fingrs/immich-dl',
|
||||
},
|
||||
{
|
||||
title: 'Immich Upload Optimizer',
|
||||
description: 'Automatically optimize files uploaded to Immich in order to save storage space',
|
||||
url: 'https://github.com/miguelangel-nubla/immich-upload-optimizer',
|
||||
},
|
||||
{
|
||||
title: 'Immich Machine Learning Load Balancer',
|
||||
description: 'Speed up your machine learning by load balancing your requests to multiple computers',
|
||||
url: 'https://github.com/apetersson/immich_ml_balancer',
|
||||
},
|
||||
{
|
||||
title: 'Immich Drop Uploader',
|
||||
description: 'A tiny, zero-login web app for collecting photos/videos from anyone into your Immich server.',
|
||||
url: 'https://github.com/Nasogaa/immich-drop',
|
||||
},
|
||||
{
|
||||
title: 'Immich Birthday Sync',
|
||||
description: 'Bulk-upload and -download birthdays, with CardDAV sync support',
|
||||
url: 'https://github.com/sid3windr/immich-birthday',
|
||||
},
|
||||
{
|
||||
title: 'Immich Stack',
|
||||
description: 'Auto-stack photos with identical filenames and differing extensions (i.e. JPG+RAW)',
|
||||
url: 'https://github.com/sid3windr/immich-stack',
|
||||
},
|
||||
{
|
||||
title: 'Immich Stack',
|
||||
description: 'Automatically groups similar photos into stacks within the Immich photo management system.',
|
||||
url: 'https://github.com/Majorfi/immich-stack/',
|
||||
},
|
||||
];
|
||||
|
||||
function CommunityProject({ title, description, url }: CommunityProjectProps): JSX.Element {
|
||||
return (
|
||||
<section className="flex flex-col gap-4 justify-between dark:bg-immich-dark-gray bg-immich-gray dark:border-0 border-gray-200 border border-solid rounded-2xl px-4 py-6">
|
||||
<div className="flex flex-col gap-2">
|
||||
<p className="m-0 items-start flex gap-2 text-2xl font-bold text-immich-primary dark:text-immich-dark-primary">
|
||||
<span>{title}</span>
|
||||
</p>
|
||||
|
||||
<p className="m-0 text-sm text-gray-600 dark:text-gray-300">{description}</p>
|
||||
<p className="m-0 text-sm text-gray-600 dark:text-gray-300 my-4">
|
||||
<a href={url}>{url}</a>
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex">
|
||||
<Link
|
||||
className="px-4 py-2 bg-immich-primary/10 dark:bg-gray-300 rounded-xl text-sm hover:no-underline text-immich-primary dark:text-immich-dark-bg font-semibold"
|
||||
to={url}
|
||||
>
|
||||
View Link
|
||||
</Link>
|
||||
</div>
|
||||
</section>
|
||||
);
|
||||
}
|
||||
|
||||
export default function CommunityProjects(): JSX.Element {
|
||||
return (
|
||||
<div className="grid grid-cols-1 xl:grid-cols-2 gap-4">
|
||||
{projects.map((project) => (
|
||||
<CommunityProject {...project} />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
export const discordPath =
|
||||
'M81.15,0c-1.2376,2.1973-2.3489,4.4704-3.3591,6.794-9.5975-1.4396-19.3718-1.4396-28.9945,0-.985-2.3236-2.1216-4.5967-3.3591-6.794-9.0166,1.5407-17.8059,4.2431-26.1405,8.0568C2.779,32.5304-1.6914,56.3725.5312,79.8863c9.6732,7.1476,20.5083,12.603,32.0505,16.0884,2.6014-3.4854,4.8998-7.1981,6.8698-11.0623-3.738-1.3891-7.3497-3.1318-10.8098-5.1523.9092-.6567,1.7932-1.3386,2.6519-1.9953,20.281,9.547,43.7696,9.547,64.0758,0,.8587.7072,1.7427,1.3891,2.6519,1.9953-3.4601,2.0457-7.0718,3.7632-10.835,5.1776,1.97,3.8642,4.2683,7.5769,6.8698,11.0623,11.5419-3.4854,22.3769-8.9156,32.0509-16.0631,2.626-27.2771-4.496-50.9172-18.817-71.8548C98.9811,4.2684,90.1918,1.5659,81.1752.0505l-.0252-.0505ZM42.2802,65.4144c-6.2383,0-11.4159-5.6575-11.4159-12.6535s4.9755-12.6788,11.3907-12.6788,11.5169,5.708,11.4159,12.6788c-.101,6.9708-5.026,12.6535-11.3907,12.6535ZM84.3576,65.4144c-6.2637,0-11.3907-5.6575-11.3907-12.6535s4.9755-12.6788,11.3907-12.6788,11.4917,5.708,11.3906,12.6788c-.101,6.9708-5.026,12.6535-11.3906,12.6535Z';
|
||||
export const discordViewBox = '0 0 126.644 96';
|
||||
6
docs/static/_redirects
vendored
@@ -27,8 +27,10 @@
|
||||
/administration/password-login /administration/system-settings 307
|
||||
/features/search /features/searching 307
|
||||
/features/smart-search /features/searching 307
|
||||
/guides/api-album-sync /community-projects 307
|
||||
/guides/remove-offline-files /community-projects 307
|
||||
/guides/api-album-sync https://awesome.immich.app/ 307
|
||||
/guides/remove-offline-files https://awesome.immich.app/ 307
|
||||
/community-guides https://awesome.immich.app/ 307
|
||||
/community-projects https://awesome.immich.app/ 307
|
||||
/overview/introduction /overview/quick-start 307
|
||||
/overview/welcome /overview/quick-start 307
|
||||
/docs/* /:splat 307
|
||||
|
||||
24
docs/static/archived-versions.json
vendored
@@ -1,4 +1,28 @@
|
||||
[
|
||||
{
|
||||
"label": "v2.3.1",
|
||||
"url": "https://docs.v2.3.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v2.3.0",
|
||||
"url": "https://docs.v2.3.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v2.2.3",
|
||||
"url": "https://docs.v2.2.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v2.2.2",
|
||||
"url": "https://docs.v2.2.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v2.2.1",
|
||||
"url": "https://docs.v2.2.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v2.2.0",
|
||||
"url": "https://docs.v2.2.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v2.1.0",
|
||||
"url": "https://docs.v2.1.0.archive.immich.app"
|
||||
|
||||
1
e2e/.gitignore
vendored
@@ -4,3 +4,4 @@ node_modules/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
/dist
|
||||
.env
|
||||
|
||||
@@ -1 +1 @@
|
||||
22.20.0
|
||||
24.11.1
|
||||
|
||||
105
e2e/docker-compose.dev.yml
Normal file
@@ -0,0 +1,105 @@
|
||||
name: immich-e2e
|
||||
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich-e2e-server
|
||||
command: ['immich-dev']
|
||||
image: immich-server-dev:latest
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile.dev
|
||||
target: dev
|
||||
environment:
|
||||
- DB_HOSTNAME=database
|
||||
- DB_USERNAME=postgres
|
||||
- DB_PASSWORD=postgres
|
||||
- DB_DATABASE_NAME=immich
|
||||
- IMMICH_MACHINE_LEARNING_ENABLED=false
|
||||
- IMMICH_TELEMETRY_INCLUDE=all
|
||||
- IMMICH_ENV=testing
|
||||
- IMMICH_PORT=2285
|
||||
- IMMICH_IGNORE_MOUNT_CHECK_ERRORS=true
|
||||
volumes:
|
||||
- ./test-assets:/test-assets
|
||||
- ..:/usr/src/app
|
||||
- ${UPLOAD_LOCATION}/photos:/data
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
- pnpm-store:/usr/src/app/.pnpm-store
|
||||
- server-node_modules:/usr/src/app/server/node_modules
|
||||
- web-node_modules:/usr/src/app/web/node_modules
|
||||
- github-node_modules:/usr/src/app/.github/node_modules
|
||||
- cli-node_modules:/usr/src/app/cli/node_modules
|
||||
- docs-node_modules:/usr/src/app/docs/node_modules
|
||||
- e2e-node_modules:/usr/src/app/e2e/node_modules
|
||||
- sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
|
||||
- app-node_modules:/usr/src/app/node_modules
|
||||
- sveltekit:/usr/src/app/web/.svelte-kit
|
||||
- coverage:/usr/src/app/web/coverage
|
||||
- ../plugins:/build/corePlugin
|
||||
depends_on:
|
||||
redis:
|
||||
condition: service_started
|
||||
database:
|
||||
condition: service_healthy
|
||||
|
||||
immich-web:
|
||||
container_name: immich-e2e-web
|
||||
image: immich-web-dev:latest
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile.dev
|
||||
target: dev
|
||||
command: ['immich-web']
|
||||
ports:
|
||||
- 2285:3000
|
||||
environment:
|
||||
- IMMICH_SERVER_URL=http://immich-server:2285/
|
||||
volumes:
|
||||
- ..:/usr/src/app
|
||||
- pnpm-store:/usr/src/app/.pnpm-store
|
||||
- server-node_modules:/usr/src/app/server/node_modules
|
||||
- web-node_modules:/usr/src/app/web/node_modules
|
||||
- github-node_modules:/usr/src/app/.github/node_modules
|
||||
- cli-node_modules:/usr/src/app/cli/node_modules
|
||||
- docs-node_modules:/usr/src/app/docs/node_modules
|
||||
- e2e-node_modules:/usr/src/app/e2e/node_modules
|
||||
- sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
|
||||
- app-node_modules:/usr/src/app/node_modules
|
||||
- sveltekit:/usr/src/app/web/.svelte-kit
|
||||
- coverage:/usr/src/app/web/coverage
|
||||
restart: unless-stopped
|
||||
|
||||
redis:
|
||||
image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb
|
||||
|
||||
database:
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
|
||||
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
|
||||
environment:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: immich
|
||||
ports:
|
||||
- 5435:5432
|
||||
healthcheck:
|
||||
test: ['CMD-SHELL', 'pg_isready -U postgres -d immich']
|
||||
interval: 1s
|
||||
timeout: 5s
|
||||
retries: 30
|
||||
start_period: 10s
|
||||
|
||||
volumes:
|
||||
model-cache:
|
||||
prometheus-data:
|
||||
grafana-data:
|
||||
pnpm-store:
|
||||
server-node_modules:
|
||||
web-node_modules:
|
||||
github-node_modules:
|
||||
cli-node_modules:
|
||||
docs-node_modules:
|
||||
e2e-node_modules:
|
||||
sdk-node_modules:
|
||||
app-node_modules:
|
||||
sveltekit:
|
||||
coverage:
|
||||
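A plausible way to bring this development stack up from the repository root (a sketch; `UPLOAD_LOCATION` must point at a writable directory on the host):

```bash
UPLOAD_LOCATION=/tmp/immich-e2e docker compose -f e2e/docker-compose.dev.yml up --build
```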
@@ -7,6 +7,9 @@ services:
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
cache_from:
|
||||
- type=registry,ref=ghcr.io/immich-app/immich-server-build-cache:linux-amd64-cc099f297acd18c924b35ece3245215b53d106eb2518e3af6415931d055746cd-main
|
||||
- type=registry,ref=ghcr.io/immich-app/immich-server-build-cache:linux-arm64-cc099f297acd18c924b35ece3245215b53d106eb2518e3af6415931d055746cd-main
|
||||
args:
|
||||
- BUILD_ID=1234567890
|
||||
- BUILD_IMAGE=e2e
|
||||
@@ -35,7 +38,7 @@ services:
|
||||
- 2285:2285
|
||||
|
||||
redis:
|
||||
image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc
|
||||
image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb
|
||||
|
||||
database:
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
|
||||
|
||||
29
e2e/mise.toml
Normal file
@@ -0,0 +1,29 @@
|
||||
[tasks.install]
|
||||
run = "pnpm install --filter immich-e2e --frozen-lockfile"
|
||||
|
||||
[tasks.test]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "vitest --run"
|
||||
|
||||
[tasks."test-web"]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "playwright test"
|
||||
|
||||
[tasks.format]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "prettier --check ."
|
||||
|
||||
[tasks."format-fix"]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "prettier --write ."
|
||||
|
||||
[tasks.lint]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "eslint \"src/**/*.ts\" --max-warnings 0"
|
||||
|
||||
[tasks."lint-fix"]
|
||||
run = { task = "lint --fix" }
|
||||
|
||||
[tasks.check]
|
||||
env._.path = "./node_modules/.bin"
|
||||
run = "tsc --noEmit"
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "immich-e2e",
|
||||
"version": "2.1.0",
|
||||
"version": "2.3.1",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
@@ -20,21 +20,23 @@
|
||||
"license": "GNU Affero General Public License version 3",
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.8.0",
|
||||
"@faker-js/faker": "^10.1.0",
|
||||
"@immich/cli": "file:../cli",
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@socket.io/component-emitter": "^3.1.2",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^22.18.8",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/oidc-provider": "^9.0.0",
|
||||
"@types/pg": "^8.15.1",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"dotenv": "^17.2.3",
|
||||
"eslint": "^9.14.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^60.0.0",
|
||||
"exiftool-vendored": "^28.3.1",
|
||||
"eslint-plugin-unicorn": "^62.0.0",
|
||||
"exiftool-vendored": "^33.0.0",
|
||||
"globals": "^16.0.0",
|
||||
"jose": "^5.6.3",
|
||||
"luxon": "^3.4.4",
|
||||
@@ -43,7 +45,7 @@
|
||||
"pngjs": "^7.0.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prettier-plugin-organize-imports": "^4.0.0",
|
||||
"sharp": "^0.34.4",
|
||||
"sharp": "^0.34.5",
|
||||
"socket.io-client": "^4.7.4",
|
||||
"supertest": "^7.0.0",
|
||||
"typescript": "^5.3.3",
|
||||
@@ -52,6 +54,6 @@
|
||||
"vitest": "^3.0.0"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.20.0"
|
||||
"node": "24.11.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +1,50 @@
|
||||
import { defineConfig, devices } from '@playwright/test';
|
||||
import { defineConfig, devices, PlaywrightTestConfig } from '@playwright/test';
|
||||
import dotenv from 'dotenv';
|
||||
import { cpus } from 'node:os';
|
||||
import { resolve } from 'node:path';
|
||||
|
||||
export default defineConfig({
|
||||
dotenv.config({ path: resolve(import.meta.dirname, '.env') });
|
||||
|
||||
export const playwrightHost = process.env.PLAYWRIGHT_HOST ?? '127.0.0.1';
|
||||
export const playwrightDbHost = process.env.PLAYWRIGHT_DB_HOST ?? '127.0.0.1';
|
||||
export const playwriteBaseUrl = process.env.PLAYWRIGHT_BASE_URL ?? `http://${playwrightHost}:2285`;
|
||||
export const playwriteSlowMo = parseInt(process.env.PLAYWRIGHT_SLOW_MO ?? '0');
|
||||
export const playwrightDisableWebserver = process.env.PLAYWRIGHT_DISABLE_WEBSERVER;
|
||||
|
||||
process.env.PW_EXPERIMENTAL_SERVICE_WORKER_NETWORK_EVENTS = '1';
|
||||
|
||||
const config: PlaywrightTestConfig = {
|
||||
testDir: './src/web/specs',
|
||||
fullyParallel: false,
|
||||
forbidOnly: !!process.env.CI,
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
workers: 1,
|
||||
retries: process.env.CI ? 4 : 0,
|
||||
reporter: 'html',
|
||||
use: {
|
||||
baseURL: 'http://127.0.0.1:2285',
|
||||
baseURL: playwriteBaseUrl,
|
||||
trace: 'on-first-retry',
|
||||
screenshot: 'only-on-failure',
|
||||
launchOptions: {
|
||||
slowMo: playwriteSlowMo,
|
||||
},
|
||||
},
|
||||
|
||||
testMatch: /.*\.e2e-spec\.ts/,
|
||||
|
||||
workers: process.env.CI ? 4 : Math.round(cpus().length * 0.75),
|
||||
|
||||
projects: [
|
||||
{
|
||||
name: 'chromium',
|
||||
use: { ...devices['Desktop Chrome'] },
|
||||
testMatch: /.*\.e2e-spec\.ts/,
|
||||
workers: 1,
|
||||
},
|
||||
{
|
||||
name: 'parallel tests',
|
||||
use: { ...devices['Desktop Chrome'] },
|
||||
testMatch: /.*\.parallel-e2e-spec\.ts/,
|
||||
fullyParallel: true,
|
||||
workers: process.env.CI ? 3 : Math.max(1, Math.round(cpus().length * 0.75) - 1),
|
||||
},
|
||||
|
||||
// {
|
||||
@@ -59,4 +86,8 @@ export default defineConfig({
|
||||
stderr: 'pipe',
|
||||
reuseExistingServer: true,
|
||||
},
|
||||
});
|
||||
};
|
||||
if (playwrightDisableWebserver) {
|
||||
delete config.webServer;
|
||||
}
|
||||
export default defineConfig(config);
|
||||
|
||||
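Given the environment switches defined at the top of this config, pointing the suite at an already-running server might look like (a sketch using the variables above):

```bash
PLAYWRIGHT_DISABLE_WEBSERVER=1 PLAYWRIGHT_BASE_URL=http://127.0.0.1:2285 pnpm exec playwright test
```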
@@ -136,6 +136,7 @@ describe('/albums', () => {
|
||||
expect(body).toEqual({
|
||||
...user1Albums[0],
|
||||
assets: [expect.objectContaining({ isFavorite: false })],
|
||||
contributorCounts: [{ userId: user1.userId, assetCount: 1 }],
|
||||
lastModifiedAssetTimestamp: expect.any(String),
|
||||
startDate: expect.any(String),
|
||||
endDate: expect.any(String),
|
||||
@@ -310,6 +311,7 @@ describe('/albums', () => {
|
||||
expect(body).toEqual({
|
||||
...user1Albums[0],
|
||||
assets: [expect.objectContaining({ id: user1Albums[0].assets[0].id })],
|
||||
contributorCounts: [{ userId: user1.userId, assetCount: 1 }],
|
||||
lastModifiedAssetTimestamp: expect.any(String),
|
||||
startDate: expect.any(String),
|
||||
endDate: expect.any(String),
|
||||
@@ -345,6 +347,7 @@ describe('/albums', () => {
|
||||
expect(body).toEqual({
|
||||
...user1Albums[0],
|
||||
assets: [expect.objectContaining({ id: user1Albums[0].assets[0].id })],
|
||||
contributorCounts: [{ userId: user1.userId, assetCount: 1 }],
|
||||
lastModifiedAssetTimestamp: expect.any(String),
|
||||
startDate: expect.any(String),
|
||||
endDate: expect.any(String),
|
||||
@@ -362,6 +365,7 @@ describe('/albums', () => {
|
||||
expect(body).toEqual({
|
||||
...user1Albums[0],
|
||||
assets: [],
|
||||
contributorCounts: [{ userId: user1.userId, assetCount: 1 }],
|
||||
assetCount: 1,
|
||||
lastModifiedAssetTimestamp: expect.any(String),
|
||||
endDate: expect.any(String),
|
||||
@@ -382,6 +386,7 @@ describe('/albums', () => {
|
||||
expect(body).toEqual({
|
||||
...user2Albums[0],
|
||||
assets: [],
|
||||
contributorCounts: [{ userId: user1.userId, assetCount: 1 }],
|
||||
assetCount: 1,
|
||||
lastModifiedAssetTimestamp: expect.any(String),
|
||||
endDate: expect.any(String),
|
||||
|
||||
@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import sharp from 'sharp';
|
||||
import { Socket } from 'socket.io-client';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { makeRandomImage } from 'src/generators';
|
||||
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
|
||||
|
||||
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
|
||||
// Generate unique color to ensure different checksums for each image
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
// Create a 100x100 solid color JPEG using Sharp
|
||||
const imageBytes = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
// Add random suffix to filename to avoid collisions
|
||||
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
|
||||
const filepath = join(tempDir, uniqueFilename);
|
||||
await writeFile(filepath, imageBytes);
|
||||
|
||||
// Filter out undefined values before writing EXIF
|
||||
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
|
||||
|
||||
await exiftool.write(filepath, cleanExifData);
|
||||
|
||||
// Re-read the image bytes after EXIF has been written
|
||||
const finalImageBytes = await readFile(filepath);
|
||||
|
||||
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
|
||||
};
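For reference, the helper above is consumed as follows in the tests below (a minimal sketch; the filename and tag values are illustrative). Entries set to undefined are filtered out before the EXIF write, which is how the tests suppress unwanted date tags:

// Sketch: build a JPEG whose only usable date tag is DateTimeOriginal.
const { imageBytes, filename } = await createTestImageWithExif('sketch.jpg', {
  DateTimeOriginal: '2024:01:02 03:04:05',
  CreateDate: undefined, // filtered out, never written
  GPSDateTime: undefined, // filtered out, never written
});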
|
||||
|
||||
describe('/asset', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let websocket: Socket;
|
||||
@@ -1249,411 +1214,6 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('EXIF metadata extraction', () => {
|
||||
describe('Additional date tag extraction', () => {
|
||||
describe('Date-time vs time-only tag handling', () => {
|
||||
it('should fall back to file timestamps when only time-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
|
||||
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
|
||||
// Exclude all date-time tags to force fallback to file timestamps
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
SonyDateTime2: undefined,
|
||||
GPSDateStamp: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should prefer DateTimeOriginal over time-only tags', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
|
||||
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
|
||||
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal, not TimeCreated
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateTime tag extraction', () => {
|
||||
it('should extract GPSDateTime with GPS coordinates', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: 37.7749,
|
||||
GPSLongitude: -122.4194,
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T12:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('CreateDate tag extraction', () => {
|
||||
it('should extract CreateDate when available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
|
||||
CreateDate: '2023:11:15 10:30:00',
|
||||
// Exclude other higher priority date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-11-15T10:30:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GPSDateStamp tag extraction', () => {
|
||||
it('should fall back to file timestamps when only date-only tags are available', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
|
||||
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
|
||||
GPSLatitude: 51.5074,
|
||||
GPSLongitude: -0.1278,
|
||||
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
|
||||
DateTimeOriginal: undefined,
|
||||
CreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
GPSDateTime: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
|
||||
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
/*
|
||||
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
|
||||
*
|
||||
* NOT WRITABLE to JPEG:
|
||||
* - MediaCreateDate: Can be read from video files but not written to JPEG
|
||||
* - DateTimeCreated: Read-only tag in JPEG format
|
||||
* - DateTimeUTC: Cannot be written to JPEG files
|
||||
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
|
||||
* - SubSecMediaCreateDate: Tag not defined for JPEG format
|
||||
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
|
||||
*
|
||||
* WRITABLE but NOT READABLE from JPEG:
|
||||
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
|
||||
* - SubSecCreateDate: Can be written but not read back from JPEG
|
||||
*
|
||||
* EFFECTIVELY TESTABLE TAGS (writable and readable):
|
||||
* - DateTimeOriginal ✓
|
||||
* - CreateDate ✓
|
||||
* - CreationDate ✓
|
||||
* - GPSDateTime ✓
|
||||
*
|
||||
* The metadata service correctly handles non-readable tags and will fall back to
|
||||
* file timestamps when only non-readable tags are present.
|
||||
*/
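Condensed from the cases below, the effective ordering among the four testable tags is as follows (a summary sketch, not the server's actual constant):

// Highest priority first, as exercised by the 'Date tag priority order' cases below.
// The real list (EXIF_DATE_TAGS in the metadata service) also contains the
// non-testable tags documented in the note above.
const TESTABLE_DATE_TAG_PRIORITY = ['DateTimeOriginal', 'CreationDate', 'CreateDate', 'GPSDateTime'] as const;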
|
||||
|
||||
describe('Date tag priority order', () => {
|
||||
it('should respect the complete date tag priority order', async () => {
|
||||
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
|
||||
const testCases = [
|
||||
{
|
||||
name: 'DateTimeOriginal has highest priority among testable tags',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-04-04T04:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when DateTimeOriginal missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreateDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-05-05T05:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'CreationDate when standard EXIF tags missing',
|
||||
exifData: {
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
},
|
||||
expectedDate: '2023-07-07T07:00:00.000Z',
|
||||
},
|
||||
{
|
||||
name: 'GPSDateTime when no other testable date tags present',
|
||||
exifData: {
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
Make: 'SONY',
|
||||
},
|
||||
expectedDate: '2023-10-10T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
const { imageBytes, filename } = await createTestImageWithExif(
|
||||
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
|
||||
testCase.exifData,
|
||||
);
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
|
||||
expect(
|
||||
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
|
||||
`Date mismatch for: ${testCase.name}`,
|
||||
).toBe(new Date(testCase.expectedDate).getTime());
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Edge cases for date tag handling', () => {
|
||||
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
|
||||
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
|
||||
// Intentionally no GPSTimeStamp
|
||||
// Exclude all other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
SubSecMediaCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
MediaCreateDate: undefined,
|
||||
CreationDate: undefined,
|
||||
DateTimeCreated: undefined,
|
||||
TimeCreated: undefined,
|
||||
GPSDateTime: undefined,
|
||||
DateTimeUTC: undefined,
|
||||
});
|
||||
|
||||
const oldDate = new Date('2020-01-01T00:00:00.000Z');
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
fileCreatedAt: oldDate.toISOString(),
|
||||
fileModifiedAt: oldDate.toISOString(),
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should fall back to file timestamps, which we set to 2020-01-01
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2020-01-01T00:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle all testable date tags present to verify complete priority order', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
|
||||
// All TESTABLE date tags to JPEG format (writable AND readable)
|
||||
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
|
||||
CreateDate: '2023:05:05 05:00:00', // TESTABLE
|
||||
CreationDate: '2023:07:07 07:00:00', // TESTABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
|
||||
// Note: Excluded non-testable tags:
|
||||
// SubSec tags: writable but not readable from JPEG
|
||||
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
|
||||
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use DateTimeOriginal as it has the highest priority among testable tags
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-04-04T04:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use CreationDate when SubSec tags are missing', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
|
||||
CreationDate: '2023:07:07 07:00:00', // WRITABLE
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
|
||||
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
|
||||
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
|
||||
// Exclude SubSec and standard EXIF tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should use CreationDate when available
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-07-07T07:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip invalid date formats and use next valid tag', async () => {
|
||||
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
|
||||
// Note: Testing invalid date handling with only WRITABLE tags
|
||||
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
|
||||
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Invalid date (month 13), should be skipped
|
||||
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
|
||||
// Exclude other date tags
|
||||
SubSecDateTimeOriginal: undefined,
|
||||
DateTimeOriginal: undefined,
|
||||
SubSecCreateDate: undefined,
|
||||
CreateDate: undefined,
|
||||
});
|
||||
|
||||
const asset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: imageBytes,
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
|
||||
|
||||
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
|
||||
// Should skip invalid dates and use the first valid one (GPSDateTime)
|
||||
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
|
||||
new Date('2023-10-10T10:00:00.000Z').getTime(),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets/exist', () => {
|
||||
it('ignores invalid deviceAssetIds', async () => {
|
||||
const response = await utils.checkExistingAssets(user1.accessToken, {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { JobCommand, JobName, LoginResponseDto, updateConfig } from '@immich/sdk';
|
||||
import { LoginResponseDto, QueueCommand, QueueName, updateConfig } from '@immich/sdk';
|
||||
import { cpSync, rmSync } from 'node:fs';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { basename } from 'node:path';
|
||||
@@ -17,28 +17,28 @@ describe('/jobs', () => {
|
||||
|
||||
describe('PUT /jobs', () => {
|
||||
afterEach(async () => {
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.FaceDetection, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.FaceDetection, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.SmartSearch, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.SmartSearch, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.DuplicateDetection, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.DuplicateDetection, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -59,8 +59,8 @@ describe('/jobs', () => {
|
||||
it('should queue metadata extraction for missing assets', async () => {
|
||||
const path = `${testAssetDir}/formats/raw/Nikon/D700/philadelphia.nef`;
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Pause,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Pause,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -77,20 +77,20 @@ describe('/jobs', () => {
|
||||
expect(asset.exifInfo?.make).toBeNull();
|
||||
}
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Empty,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Empty,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -124,8 +124,8 @@ describe('/jobs', () => {
|
||||
|
||||
cpSync(`${testAssetDir}/formats/raw/Nikon/D80/glarus.nef`, path);
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -144,8 +144,8 @@ describe('/jobs', () => {
|
||||
it('should queue thumbnail extraction for assets missing thumbs', async () => {
|
||||
const path = `${testAssetDir}/albums/nature/tanners_ridge.jpg`;
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Pause,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Pause,
|
||||
force: false,
|
||||
});
|
||||
|
||||
@@ -153,32 +153,32 @@ describe('/jobs', () => {
|
||||
assetData: { bytes: await readFile(path), filename: basename(path) },
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
|
||||
expect(assetBefore.thumbhash).toBeNull();
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Empty,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Empty,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
|
||||
expect(assetAfter.thumbhash).not.toBeNull();
|
||||
@@ -193,26 +193,26 @@ describe('/jobs', () => {
|
||||
assetData: { bytes: await readFile(path), filename: basename(path) },
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetBefore = await utils.getAssetInfo(admin.accessToken, id);
|
||||
|
||||
cpSync(`${testAssetDir}/albums/nature/notocactus_minimus.jpg`, path);
|
||||
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Resume,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Resume,
|
||||
force: false,
|
||||
});
|
||||
|
||||
// This runs the missing thumbnail job
|
||||
await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
|
||||
command: JobCommand.Start,
|
||||
await utils.queueCommand(admin.accessToken, QueueName.ThumbnailGeneration, {
|
||||
command: QueueCommand.Start,
|
||||
force: false,
|
||||
});
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, JobName.ThumbnailGeneration);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.MetadataExtraction);
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.ThumbnailGeneration);
|
||||
|
||||
const assetAfter = await utils.getAssetInfo(admin.accessToken, id);
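The whole file follows one mechanical substitution: `JobName`/`JobCommand` become `QueueName`/`QueueCommand` and `utils.jobCommand` becomes `utils.queueCommand`, with the call shape otherwise unchanged. The before/after pattern in one place:

// before
await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
  command: JobCommand.Resume,
  force: false,
});
// after
await utils.queueCommand(admin.accessToken, QueueName.MetadataExtraction, {
  command: QueueCommand.Resume,
  force: false,
});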
|
||||
|
||||
|
||||
@@ -1006,7 +1006,7 @@ describe('/libraries', () => {
|
||||
rmSync(`${testAssetDir}/temp/xmp`, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should switch from using file metadata to file.xmp metadata when asset refreshes', async () => {
|
||||
it('should switch from using file metadata to file.ext.xmp metadata when asset refreshes', async () => {
|
||||
const library = await utils.createLibrary(admin.accessToken, {
|
||||
ownerId: admin.userId,
|
||||
importPaths: [`${testAssetDirInternal}/temp/xmp`],
|
||||
|
||||
e2e/src/api/specs/maintenance.e2e-spec.ts (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
import { LoginResponseDto } from '@immich/sdk';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
describe('/admin/maintenance', () => {
|
||||
let cookie: string | undefined;
|
||||
let admin: LoginResponseDto;
|
||||
let nonAdmin: LoginResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
admin = await utils.adminSetup();
|
||||
nonAdmin = await utils.userSetup(admin.accessToken, createUserDto.user1);
|
||||
});
|
||||
|
||||
// => outside of maintenance mode
|
||||
|
||||
describe('GET ~/server/config', async () => {
|
||||
it('should indicate we are out of maintenance mode', async () => {
|
||||
const { status, body } = await request(app).get('/server/config');
|
||||
expect(status).toBe(200);
|
||||
expect(body.maintenanceMode).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /login', async () => {
|
||||
it('should not work out of maintenance mode', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance/login').send({ token: 'token' });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('Not in maintenance mode'));
|
||||
});
|
||||
});
|
||||
|
||||
// => enter maintenance mode
|
||||
|
||||
describe.sequential('POST /', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance').send({
|
||||
action: 'end',
|
||||
});
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should only work for admins', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('Authorization', `Bearer ${nonAdmin.accessToken}`)
|
||||
.send({ action: 'end' });
|
||||
expect(status).toBe(403);
|
||||
expect(body).toEqual(errorDto.forbidden);
|
||||
});
|
||||
|
||||
it('should be a no-op when trying to exit maintenance mode', async () => {
|
||||
const { status } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ action: 'end' });
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
|
||||
it('should enter maintenance mode', async () => {
|
||||
const { status, headers } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({
|
||||
action: 'start',
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
|
||||
cookie = headers['set-cookie'][0].split(';')[0];
|
||||
expect(cookie).toEqual(
|
||||
expect.stringMatching(/^immich_maintenance_token=[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*\.[A-Za-z0-9-_]*$/),
|
||||
);
|
||||
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const { body } = await request(app).get('/server/config');
|
||||
return body.maintenanceMode;
|
||||
},
|
||||
{
|
||||
interval: 5e2,
|
||||
timeout: 1e4,
|
||||
},
|
||||
)
|
||||
.toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
// => in maintenance mode
|
||||
|
||||
describe.sequential('in maintenance mode', () => {
|
||||
describe('GET ~/server/config', async () => {
|
||||
it('should indicate we are in maintenance mode', async () => {
|
||||
const { status, body } = await request(app).get('/server/config');
|
||||
expect(status).toBe(200);
|
||||
expect(body.maintenanceMode).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /login', async () => {
|
||||
it('should fail without cookie or token in body', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance/login').send({});
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorizedWithMessage('Missing JWT Token'));
|
||||
});
|
||||
|
||||
it('should succeed with cookie', async () => {
|
||||
const { status, body } = await request(app).post('/admin/maintenance/login').set('cookie', cookie!).send({});
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(
|
||||
expect.objectContaining({
|
||||
username: 'Immich Admin',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should succeed with token', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/admin/maintenance/login')
|
||||
.send({
|
||||
token: cookie!.split('=')[1].trim(),
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(
|
||||
expect.objectContaining({
|
||||
username: 'Immich Admin',
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /', async () => {
|
||||
it('should be a no-op when trying to enter maintenance mode', async () => {
|
||||
const { status } = await request(app)
|
||||
.post('/admin/maintenance')
|
||||
.set('cookie', cookie!)
|
||||
.send({ action: 'start' });
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// => exit maintenance mode
|
||||
|
||||
describe.sequential('POST /', () => {
|
||||
it('should exit maintenance mode', async () => {
|
||||
const { status } = await request(app).post('/admin/maintenance').set('cookie', cookie!).send({
|
||||
action: 'end',
|
||||
});
|
||||
|
||||
expect(status).toBe(201);
|
||||
|
||||
await expect
|
||||
.poll(
|
||||
async () => {
|
||||
const { body } = await request(app).get('/server/config');
|
||||
return body.maintenanceMode;
|
||||
},
|
||||
{
|
||||
interval: 5e2,
|
||||
timeout: 1e4,
|
||||
},
|
||||
)
|
||||
.toBeFalsy();
|
||||
});
|
||||
});
|
||||
});
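For orientation, the round trip these tests exercise looks roughly like the following client-side sketch; the `baseUrl`, `adminToken`, and `maintenanceJwt` bindings are placeholders:

// Enter maintenance mode; the response sets the immich_maintenance_token cookie (a JWT).
await fetch(`${baseUrl}/admin/maintenance`, {
  method: 'POST',
  headers: { Authorization: `Bearer ${adminToken}`, 'Content-Type': 'application/json' },
  body: JSON.stringify({ action: 'start' }),
});

// While in maintenance mode, either the cookie or its raw JWT value in the
// request body authenticates against the maintenance login endpoint.
await fetch(`${baseUrl}/admin/maintenance/login`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ token: maintenanceJwt }),
});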
|
||||
@@ -113,6 +113,7 @@ describe('/server', () => {
|
||||
importFaces: false,
|
||||
oauth: false,
|
||||
oauthAutoLaunch: false,
|
||||
ocr: false,
|
||||
passwordLogin: true,
|
||||
search: true,
|
||||
sidecar: true,
|
||||
@@ -135,6 +136,7 @@ describe('/server', () => {
|
||||
externalDomain: '',
|
||||
publicUsers: true,
|
||||
isOnboarded: false,
|
||||
maintenanceMode: false,
|
||||
mapDarkStyleUrl: 'https://tiles.immich.cloud/v1/style/dark.json',
|
||||
mapLightStyleUrl: 'https://tiles.immich.cloud/v1/style/light.json',
|
||||
});
|
||||
|
||||
@@ -582,7 +582,7 @@ describe('/tags', () => {
|
||||
expect(body).toEqual([expect.objectContaining({ id: userAsset.id, success: true })]);
|
||||
});
|
||||
|
||||
it('should remove duplicate assets only once', async () => {
|
||||
it.skip('should remove duplicate assets only once', async () => {
|
||||
const tagA = await create(user.accessToken, { name: 'TagA' });
|
||||
await tagAssets(
|
||||
{ id: tagA.id, bulkIdsDto: { ids: [userAsset.id] } },
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import {
|
||||
LoginResponseDto,
|
||||
QueueName,
|
||||
createStack,
|
||||
deleteUserAdmin,
|
||||
getMyUser,
|
||||
@@ -327,6 +328,8 @@ describe('/admin/users', () => {
|
||||
{ headers: asBearerAuth(user.accessToken) },
|
||||
);
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, QueueName.BackgroundTask);
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/admin/users/${user.userId}`)
|
||||
.send({ force: true })
|
||||
|
||||
@@ -442,6 +442,176 @@ describe(`immich upload`, () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('immich upload --delete-duplicates', () => {
|
||||
it('should delete local duplicate files', async () => {
|
||||
const {
|
||||
stderr: firstStderr,
|
||||
stdout: firstStdout,
|
||||
exitCode: firstExitCode,
|
||||
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
|
||||
expect(firstStderr).toContain('{message}');
|
||||
expect(firstStdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
|
||||
);
|
||||
expect(firstExitCode).toBe(0);
|
||||
|
||||
await mkdir(`/tmp/albums/nature`, { recursive: true });
|
||||
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`);
|
||||
|
||||
// Upload with --delete-duplicates flag
|
||||
const { stderr, stdout, exitCode } = await immichCli([
|
||||
'upload',
|
||||
`/tmp/albums/nature/silver_fir.jpg`,
|
||||
'--delete-duplicates',
|
||||
]);
|
||||
|
||||
// Check that the duplicate file was deleted
|
||||
const files = await readdir(`/tmp/albums/nature`);
|
||||
await rm(`/tmp/albums/nature`, { recursive: true });
|
||||
expect(files.length).toBe(0);
|
||||
|
||||
expect(stdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.stringContaining('Found 0 new files and 1 duplicate'),
|
||||
expect.stringContaining('All assets were already uploaded, nothing to do'),
|
||||
]),
|
||||
);
|
||||
expect(stderr).toContain('{message}');
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Verify no new assets were uploaded
|
||||
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
|
||||
expect(assets.total).toBe(1);
|
||||
});
|
||||
|
||||
it('should have accurate dry run with --delete-duplicates', async () => {
|
||||
const {
|
||||
stderr: firstStderr,
|
||||
stdout: firstStdout,
|
||||
exitCode: firstExitCode,
|
||||
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
|
||||
expect(firstStderr).toContain('{message}');
|
||||
expect(firstStdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
|
||||
);
|
||||
expect(firstExitCode).toBe(0);
|
||||
|
||||
await mkdir(`/tmp/albums/nature`, { recursive: true });
|
||||
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`);
|
||||
|
||||
// Upload with --delete-duplicates and --dry-run flags
|
||||
const { stderr, stdout, exitCode } = await immichCli([
|
||||
'upload',
|
||||
`/tmp/albums/nature/silver_fir.jpg`,
|
||||
'--delete-duplicates',
|
||||
'--dry-run',
|
||||
]);
|
||||
|
||||
// Check that the duplicate file was NOT deleted in dry run mode
|
||||
const files = await readdir(`/tmp/albums/nature`);
|
||||
await rm(`/tmp/albums/nature`, { recursive: true });
|
||||
expect(files.length).toBe(1);
|
||||
|
||||
expect(stdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.stringContaining('Found 0 new files and 1 duplicate'),
|
||||
expect.stringContaining('Would have deleted 1 local asset'),
|
||||
]),
|
||||
);
|
||||
expect(stderr).toContain('{message}');
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Verify no new assets were uploaded
|
||||
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
|
||||
expect(assets.total).toBe(1);
|
||||
});
|
||||
|
||||
it('should work with both --delete and --delete-duplicates flags', async () => {
|
||||
// First, upload a file to create a duplicate on the server
|
||||
const {
|
||||
stderr: firstStderr,
|
||||
stdout: firstStdout,
|
||||
exitCode: firstExitCode,
|
||||
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
|
||||
expect(firstStderr).toContain('{message}');
|
||||
expect(firstStdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
|
||||
);
|
||||
expect(firstExitCode).toBe(0);
|
||||
|
||||
// Both new and duplicate files
|
||||
await mkdir(`/tmp/albums/nature`, { recursive: true });
|
||||
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`); // duplicate
|
||||
await symlink(`${testAssetDir}/albums/nature/el_torcal_rocks.jpg`, `/tmp/albums/nature/el_torcal_rocks.jpg`); // new
|
||||
|
||||
// Upload with both --delete and --delete-duplicates flags
|
||||
const { stderr, stdout, exitCode } = await immichCli([
|
||||
'upload',
|
||||
`/tmp/albums/nature`,
|
||||
'--delete',
|
||||
'--delete-duplicates',
|
||||
]);
|
||||
|
||||
// Check that both files were deleted (new file due to --delete, duplicate due to --delete-duplicates)
|
||||
const files = await readdir(`/tmp/albums/nature`);
|
||||
await rm(`/tmp/albums/nature`, { recursive: true });
|
||||
expect(files.length).toBe(0);
|
||||
|
||||
expect(stdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.stringContaining('Found 1 new files and 1 duplicate'),
|
||||
expect.stringContaining('Successfully uploaded 1 new asset'),
|
||||
expect.stringContaining('Deleting assets that have been uploaded'),
|
||||
]),
|
||||
);
|
||||
expect(stderr).toContain('{message}');
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Verify one new asset was uploaded (total should be 2 now)
|
||||
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
|
||||
expect(assets.total).toBe(2);
|
||||
});
|
||||
|
||||
it('should only delete duplicates when --delete-duplicates is used without --delete', async () => {
|
||||
const {
|
||||
stderr: firstStderr,
|
||||
stdout: firstStdout,
|
||||
exitCode: firstExitCode,
|
||||
} = await immichCli(['upload', `${testAssetDir}/albums/nature/silver_fir.jpg`]);
|
||||
expect(firstStderr).toContain('{message}');
|
||||
expect(firstStdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([expect.stringContaining('Successfully uploaded 1 new asset')]),
|
||||
);
|
||||
expect(firstExitCode).toBe(0);
|
||||
|
||||
// Both new and duplicate files
|
||||
await mkdir(`/tmp/albums/nature`, { recursive: true });
|
||||
await symlink(`${testAssetDir}/albums/nature/silver_fir.jpg`, `/tmp/albums/nature/silver_fir.jpg`); // duplicate
|
||||
await symlink(`${testAssetDir}/albums/nature/el_torcal_rocks.jpg`, `/tmp/albums/nature/el_torcal_rocks.jpg`); // new
|
||||
|
||||
// Upload with only --delete-duplicates flag
|
||||
const { stderr, stdout, exitCode } = await immichCli(['upload', `/tmp/albums/nature`, '--delete-duplicates']);
|
||||
|
||||
// Check that only the duplicate was deleted, new file should remain
|
||||
const files = await readdir(`/tmp/albums/nature`);
|
||||
await rm(`/tmp/albums/nature`, { recursive: true });
|
||||
expect(files).toEqual(['el_torcal_rocks.jpg']);
|
||||
|
||||
expect(stdout.split('\n')).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.stringContaining('Found 1 new files and 1 duplicate'),
|
||||
expect.stringContaining('Successfully uploaded 1 new asset'),
|
||||
]),
|
||||
);
|
||||
expect(stderr).toContain('{message}');
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Verify one new asset was uploaded (total should be 2 now)
|
||||
const assets = await getAssetStatistics({}, { headers: asKeyAuth(key) });
|
||||
expect(assets.total).toBe(2);
|
||||
});
|
||||
});
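Taken together, the four tests above pin down the flag semantics; a hedged summary (not the CLI's own help text):

// --delete            removes every local file that was uploaded in this run
// --delete-duplicates removes only local files the server already has
// --dry-run           reports what would be deleted without touching disk
// The flags compose: with both delete flags set, new and duplicate files are removed.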
|
||||
|
||||
describe('immich upload --skip-hash', () => {
|
||||
it('should skip hashing', async () => {
|
||||
const filename = `albums/nature/silver_fir.jpg`;
|
||||
|
||||
@@ -1,178 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Script to generate test images with additional EXIF date tags
|
||||
* This creates actual JPEG images with embedded metadata for testing
|
||||
* Images are generated into e2e/test-assets/metadata/dates/
|
||||
*/
|
||||
|
||||
import { execSync } from 'node:child_process';
|
||||
import { writeFileSync } from 'node:fs';
|
||||
import { dirname, join } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import sharp from 'sharp';
|
||||
|
||||
interface TestImage {
|
||||
filename: string;
|
||||
description: string;
|
||||
exifTags: Record<string, string>;
|
||||
}
|
||||
|
||||
const testImages: TestImage[] = [
|
||||
{
|
||||
filename: 'time-created.jpg',
|
||||
description: 'Image with TimeCreated tag',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:11:15 14:30:00',
|
||||
Make: 'Canon',
|
||||
Model: 'EOS R5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datetime.jpg',
|
||||
description: 'Image with GPSDateTime and coordinates',
|
||||
exifTags: {
|
||||
GPSDateTime: '2023:11:15 12:30:00Z',
|
||||
GPSLatitude: '37.7749',
|
||||
GPSLongitude: '-122.4194',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'datetime-utc.jpg',
|
||||
description: 'Image with DateTimeUTC tag',
|
||||
exifTags: {
|
||||
DateTimeUTC: '2023:11:15 10:30:00',
|
||||
Make: 'Nikon',
|
||||
Model: 'D850',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'gps-datestamp.jpg',
|
||||
description: 'Image with GPSDateStamp and GPSTimeStamp',
|
||||
exifTags: {
|
||||
GPSDateStamp: '2023:11:15',
|
||||
GPSTimeStamp: '08:30:00',
|
||||
GPSLatitude: '51.5074',
|
||||
GPSLongitude: '-0.1278',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'sony-datetime2.jpg',
|
||||
description: 'Sony camera image with SonyDateTime2 tag',
|
||||
exifTags: {
|
||||
SonyDateTime2: '2023:11:15 06:30:00',
|
||||
Make: 'SONY',
|
||||
Model: 'ILCE-7RM5',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'date-priority-test.jpg',
|
||||
description: 'Image with multiple date tags to test priority',
|
||||
exifTags: {
|
||||
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
|
||||
DateTimeOriginal: '2023:02:02 02:00:00',
|
||||
SubSecCreateDate: '2023:03:03 03:00:00',
|
||||
CreateDate: '2023:04:04 04:00:00',
|
||||
CreationDate: '2023:05:05 05:00:00',
|
||||
DateTimeCreated: '2023:06:06 06:00:00',
|
||||
TimeCreated: '2023:07:07 07:00:00',
|
||||
GPSDateTime: '2023:08:08 08:00:00',
|
||||
DateTimeUTC: '2023:09:09 09:00:00',
|
||||
GPSDateStamp: '2023:10:10',
|
||||
SonyDateTime2: '2023:11:11 11:00:00',
|
||||
},
|
||||
},
|
||||
{
|
||||
filename: 'new-tags-only.jpg',
|
||||
description: 'Image with only additional date tags (no standard tags)',
|
||||
exifTags: {
|
||||
TimeCreated: '2023:12:01 15:45:30',
|
||||
GPSDateTime: '2023:12:01 13:45:30Z',
|
||||
DateTimeUTC: '2023:12:01 13:45:30',
|
||||
GPSDateStamp: '2023:12:01',
|
||||
SonyDateTime2: '2023:12:01 08:45:30',
|
||||
GPSLatitude: '40.7128',
|
||||
GPSLongitude: '-74.0060',
|
||||
GPSLatitudeRef: 'N',
|
||||
GPSLongitudeRef: 'W',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const generateTestImages = async (): Promise<void> => {
|
||||
// Target directory: e2e/test-assets/metadata/dates/
|
||||
// Current file is in: e2e/src/
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
|
||||
|
||||
console.log('Generating test images with additional EXIF date tags...');
|
||||
console.log(`Target directory: ${targetDir}`);
|
||||
|
||||
for (const image of testImages) {
|
||||
try {
|
||||
const imagePath = join(targetDir, image.filename);
|
||||
|
||||
// Create unique JPEG file using Sharp
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
const jpegData = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
writeFileSync(imagePath, jpegData);
|
||||
|
||||
// Build exiftool command to add EXIF data
|
||||
const exifArgs = Object.entries(image.exifTags)
|
||||
.map(([tag, value]) => `-${tag}="${value}"`)
|
||||
.join(' ');
|
||||
|
||||
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
|
||||
|
||||
console.log(`Creating ${image.filename}: ${image.description}`);
|
||||
execSync(command, { stdio: 'pipe' });
|
||||
|
||||
// Verify the tags were written
|
||||
const verifyCommand = `exiftool -json "${imagePath}"`;
|
||||
const result = execSync(verifyCommand, { encoding: 'utf8' });
|
||||
const metadata = JSON.parse(result)[0];
|
||||
|
||||
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
|
||||
|
||||
// Log first date tag found for verification
|
||||
const firstDateTag = Object.keys(image.exifTags).find(
|
||||
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
|
||||
);
|
||||
if (firstDateTag && metadata[firstDateTag]) {
|
||||
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\nTest image generation complete!');
|
||||
console.log('Files created in:', targetDir);
|
||||
console.log('\nTo test these images:');
|
||||
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
|
||||
};
|
||||
|
||||
export { generateTestImages };
|
||||
|
||||
// Run the generator if this file is executed directly
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
generateTestImages().catch(console.error);
|
||||
}
|
||||
e2e/src/generators/timeline.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
export { generateTimelineData } from './timeline/model-objects';
|
||||
|
||||
export { createDefaultTimelineConfig, validateTimelineConfig } from './timeline/timeline-config';
|
||||
|
||||
export type {
|
||||
MockAlbum,
|
||||
MonthSpec,
|
||||
SerializedTimelineData,
|
||||
MockTimelineAsset as TimelineAssetConfig,
|
||||
TimelineConfig,
|
||||
MockTimelineData as TimelineData,
|
||||
} from './timeline/timeline-config';
|
||||
|
||||
export {
|
||||
getAlbum,
|
||||
getAsset,
|
||||
getTimeBucket,
|
||||
getTimeBuckets,
|
||||
toAssetResponseDto,
|
||||
toColumnarFormat,
|
||||
} from './timeline/rest-response';
|
||||
|
||||
export type { Changes } from './timeline/rest-response';
|
||||
|
||||
export { randomImage, randomImageFromString, randomPreview, randomThumbnail } from './timeline/images';
|
||||
|
||||
export {
|
||||
SeededRandom,
|
||||
getMockAsset,
|
||||
parseTimeBucketKey,
|
||||
selectRandom,
|
||||
selectRandomDays,
|
||||
selectRandomMultiple,
|
||||
} from './timeline/utils';
|
||||
|
||||
export { ASSET_DISTRIBUTION, DAY_DISTRIBUTION } from './timeline/distribution-patterns';
|
||||
export type { DayPattern, MonthDistribution } from './timeline/distribution-patterns';
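A consumer would then import through the barrel rather than the individual modules (an illustrative import list, matching the path alias used elsewhere in these specs):

import {
  generateTimelineData,
  SeededRandom,
  type TimelineConfig,
} from 'src/generators/timeline';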
|
||||
e2e/src/generators/timeline/distribution-patterns.ts (new file, 183 lines)
@@ -0,0 +1,183 @@
|
||||
import { generateConsecutiveDays, generateDayAssets } from 'src/generators/timeline/model-objects';
|
||||
import { SeededRandom, selectRandomDays } from 'src/generators/timeline/utils';
|
||||
import type { MockTimelineAsset } from './timeline-config';
|
||||
import { GENERATION_CONSTANTS } from './timeline-config';
|
||||
|
||||
type AssetDistributionStrategy = (rng: SeededRandom) => number;
|
||||
|
||||
type DayDistributionStrategy = (
|
||||
year: number,
|
||||
month: number,
|
||||
daysInMonth: number,
|
||||
totalAssets: number,
|
||||
ownerId: string,
|
||||
rng: SeededRandom,
|
||||
) => MockTimelineAsset[];
|
||||
|
||||
/**
|
||||
* Strategies for determining total asset count per month
|
||||
*/
|
||||
export const ASSET_DISTRIBUTION: Record<MonthDistribution, AssetDistributionStrategy | null> = {
|
||||
empty: null, // Special case - handled separately
|
||||
sparse: (rng) => rng.nextInt(3, 9), // 3-8 assets
|
||||
medium: (rng) => rng.nextInt(15, 31), // 15-30 assets
|
||||
dense: (rng) => rng.nextInt(50, 81), // 50-80 assets
|
||||
'very-dense': (rng) => rng.nextInt(80, 151), // 80-150 assets
|
||||
};
|
||||
|
||||
/**
|
||||
* Strategies for distributing assets across days within a month
|
||||
*/
|
||||
export const DAY_DISTRIBUTION: Record<DayPattern, DayDistributionStrategy> = {
|
||||
'single-day': (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// All assets on one day in the middle of the month
|
||||
const day = Math.floor(daysInMonth / 2);
|
||||
return generateDayAssets(year, month, day, totalAssets, ownerId, rng);
|
||||
},
|
||||
|
||||
'consecutive-large': (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// 3-5 consecutive days with evenly distributed assets
|
||||
const numDays = Math.min(5, Math.floor(totalAssets / 15));
|
||||
const startDay = rng.nextInt(1, daysInMonth - numDays + 2);
|
||||
return generateConsecutiveDays(year, month, startDay, numDays, totalAssets, ownerId, rng);
|
||||
},
|
||||
|
||||
'consecutive-small': (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// Multiple consecutive days with 1-3 assets each (side-by-side layout)
|
||||
const assets: MockTimelineAsset[] = [];
|
||||
const numDays = Math.min(totalAssets, Math.floor(daysInMonth / 2));
|
||||
const startDay = rng.nextInt(1, daysInMonth - numDays + 2);
|
||||
let assetIndex = 0;
|
||||
|
||||
for (let i = 0; i < numDays && assetIndex < totalAssets; i++) {
|
||||
const dayAssets = Math.min(3, rng.nextInt(1, 4));
|
||||
const actualAssets = Math.min(dayAssets, totalAssets - assetIndex);
|
||||
// Create a new RNG for this day
|
||||
const dayRng = new SeededRandom(rng.nextInt(0, 1_000_000));
|
||||
assets.push(...generateDayAssets(year, month, startDay + i, actualAssets, ownerId, dayRng));
|
||||
assetIndex += actualAssets;
|
||||
}
|
||||
return assets;
|
||||
},
|
||||
|
||||
alternating: (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// Alternate between large (15-25) and small (1-3) days
|
||||
const assets: MockTimelineAsset[] = [];
|
||||
let day = 1;
|
||||
let isLarge = true;
|
||||
let assetIndex = 0;
|
||||
|
||||
while (assetIndex < totalAssets && day <= daysInMonth) {
|
||||
const dayAssets = isLarge ? Math.min(25, rng.nextInt(15, 26)) : rng.nextInt(1, 4);
|
||||
|
||||
const actualAssets = Math.min(dayAssets, totalAssets - assetIndex);
|
||||
// Create a new RNG for this day
|
||||
const dayRng = new SeededRandom(rng.nextInt(0, 1_000_000));
|
||||
assets.push(...generateDayAssets(year, month, day, actualAssets, ownerId, dayRng));
|
||||
assetIndex += actualAssets;
|
||||
|
||||
day += 1; // could add gaps here (e.g. advance further after small days)
|
||||
isLarge = !isLarge;
|
||||
}
|
||||
return assets;
|
||||
},
|
||||
|
||||
'sparse-scattered': (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// Spread assets across random days with gaps
|
||||
const assets: MockTimelineAsset[] = [];
|
||||
const numDays = Math.min(totalAssets, Math.floor(daysInMonth * GENERATION_CONSTANTS.SPARSE_DAY_COVERAGE));
|
||||
const daysWithPhotos = selectRandomDays(daysInMonth, numDays, rng);
|
||||
let assetIndex = 0;
|
||||
|
||||
for (let i = 0; i < daysWithPhotos.length && assetIndex < totalAssets; i++) {
|
||||
const dayAssets =
|
||||
Math.floor(totalAssets / numDays) + (i === daysWithPhotos.length - 1 ? totalAssets % numDays : 0);
|
||||
// Create a new RNG for this day
|
||||
const dayRng = new SeededRandom(rng.nextInt(0, 1_000_000));
|
||||
assets.push(...generateDayAssets(year, month, daysWithPhotos[i], dayAssets, ownerId, dayRng));
|
||||
assetIndex += dayAssets;
|
||||
}
|
||||
return assets;
|
||||
},
|
||||
|
||||
'start-heavy': (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// Most assets in first week
|
||||
const assets: MockTimelineAsset[] = [];
|
||||
const firstWeekAssets = Math.floor(totalAssets * 0.7);
|
||||
const remainingAssets = totalAssets - firstWeekAssets;
|
||||
|
||||
// First 7 days
|
||||
assets.push(...generateConsecutiveDays(year, month, 1, 7, firstWeekAssets, ownerId, rng));
|
||||
|
||||
// Remaining scattered
|
||||
if (remainingAssets > 0) {
|
||||
const midDay = Math.floor(daysInMonth / 2);
|
||||
// Create a new RNG for the remaining assets
|
||||
const remainingRng = new SeededRandom(rng.nextInt(0, 1_000_000));
|
||||
assets.push(...generateDayAssets(year, month, midDay, remainingAssets, ownerId, remainingRng));
|
||||
}
|
||||
return assets;
|
||||
},
|
||||
|
||||
'end-heavy': (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// Most assets in last week
|
||||
const assets: MockTimelineAsset[] = [];
|
||||
const lastWeekAssets = Math.floor(totalAssets * 0.7);
|
||||
const remainingAssets = totalAssets - lastWeekAssets;
|
||||
|
||||
// Remaining at start
|
||||
if (remainingAssets > 0) {
|
||||
// Create a new RNG for the start assets
|
||||
const startRng = new SeededRandom(rng.nextInt(0, 1_000_000));
|
||||
assets.push(...generateDayAssets(year, month, 2, remainingAssets, ownerId, startRng));
|
||||
}
|
||||
|
||||
// Last 7 days
|
||||
const startDay = daysInMonth - 6;
|
||||
assets.push(...generateConsecutiveDays(year, month, startDay, 7, lastWeekAssets, ownerId, rng));
|
||||
return assets;
|
||||
},
|
||||
|
||||
'mid-heavy': (year, month, daysInMonth, totalAssets, ownerId, rng) => {
|
||||
// Most assets in middle of month
|
||||
const assets: MockTimelineAsset[] = [];
|
||||
const midAssets = Math.floor(totalAssets * 0.7);
|
||||
const sideAssets = Math.floor((totalAssets - midAssets) / 2);
|
||||
|
||||
// Start
|
||||
if (sideAssets > 0) {
|
||||
// Create a new RNG for the start assets
|
||||
const startRng = new SeededRandom(rng.nextInt(0, 1_000_000));
|
||||
assets.push(...generateDayAssets(year, month, 2, sideAssets, ownerId, startRng));
|
||||
}
|
||||
|
||||
// Middle
|
||||
const midStart = Math.floor(daysInMonth / 2) - 3;
|
||||
assets.push(...generateConsecutiveDays(year, month, midStart, 7, midAssets, ownerId, rng));
|
||||
|
||||
// End
|
||||
const endAssets = totalAssets - midAssets - sideAssets;
|
||||
if (endAssets > 0) {
|
||||
// Create a new RNG for the end assets
|
||||
const endRng = new SeededRandom(rng.nextInt(0, 1_000_000));
|
||||
assets.push(...generateDayAssets(year, month, daysInMonth - 1, endAssets, ownerId, endRng));
|
||||
}
|
||||
return assets;
|
||||
},
|
||||
};
|
||||
export type MonthDistribution =
|
||||
| 'empty' // 0 assets
|
||||
| 'sparse' // 3-8 assets
|
||||
| 'medium' // 15-30 assets
|
||||
| 'dense' // 50-80 assets
|
||||
| 'very-dense'; // 80-150 assets
|
||||
|
||||
export type DayPattern =
|
||||
| 'single-day' // All images in one day
|
||||
| 'consecutive-large' // Multiple days with 15-25 images each
|
||||
| 'consecutive-small' // Multiple days with 1-3 images each (side-by-side)
|
||||
| 'alternating' // Alternating large/small days
|
||||
| 'sparse-scattered' // Few images scattered across month
|
||||
| 'start-heavy' // Most images at start of month
|
||||
| 'end-heavy' // Most images at end of month
|
||||
| 'mid-heavy'; // Most images in middle of month
|
||||
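For orientation, a minimal sketch of invoking one of these strategies directly; the seed, month, asset count, and 'owner-1' ID are illustrative values, not fixtures from this repo:

import { DAY_DISTRIBUTION } from 'src/generators/timeline/distribution-patterns';
import { SeededRandom } from 'src/generators/timeline/utils';

// 40 assets for June 2024 (30 days), alternating large/small days.
const rng = new SeededRandom(42); // arbitrary seed for this example
const assets = DAY_DISTRIBUTION['alternating'](2024, 6, 30, 40, 'owner-1', rng);
console.log(assets.length); // 40, spread over alternating large/small days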
e2e/src/generators/timeline/images.ts (new file, 111 lines)
@@ -0,0 +1,111 @@
import sharp from 'sharp';
import { SeededRandom } from 'src/generators/timeline/utils';

export const randomThumbnail = async (seed: string, ratio: number) => {
  const height = 235;
  const width = Math.round(height * ratio);
  return randomImageFromString(seed, { width, height });
};

export const randomPreview = async (seed: string, ratio: number) => {
  const height = 500;
  const width = Math.round(height * ratio);
  return randomImageFromString(seed, { width, height });
};

export const randomImageFromString = async (
  seed: string = '',
  { width = 100, height = 100 }: { width: number; height: number },
) => {
  // Convert string to number for seeding (simple 31x rolling hash)
  let seedNumber = 0;
  for (let i = 0; i < seed.length; i++) {
    seedNumber = (seedNumber << 5) - seedNumber + (seed.codePointAt(i) ?? 0);
    seedNumber = seedNumber & seedNumber; // Convert to 32bit integer
  }
  return randomImage(new SeededRandom(Math.abs(seedNumber)), { width, height });
};

export const randomImage = async (rng: SeededRandom, { width, height }: { width: number; height: number }) => {
  const r1 = rng.nextInt(0, 256);
  const g1 = rng.nextInt(0, 256);
  const b1 = rng.nextInt(0, 256);
  const r2 = rng.nextInt(0, 256);
  const g2 = rng.nextInt(0, 256);
  const b2 = rng.nextInt(0, 256);
  const patternType = rng.nextInt(0, 5);

  let svgPattern = '';

  switch (patternType) {
    case 0: {
      // Solid color
      svgPattern = `<svg width="${width}" height="${height}">
        <rect x="0" y="0" width="${width}" height="${height}" fill="rgb(${r1},${g1},${b1})"/>
      </svg>`;
      break;
    }

    case 1: {
      // Horizontal stripes (Math.ceil so a partial stripe still covers the bottom edge)
      const stripeHeight = 10;
      svgPattern = `<svg width="${width}" height="${height}">
        ${Array.from(
          { length: Math.ceil(height / stripeHeight) },
          (_, i) =>
            `<rect x="0" y="${i * stripeHeight}" width="${width}" height="${stripeHeight}"
             fill="rgb(${i % 2 ? r1 : r2},${i % 2 ? g1 : g2},${i % 2 ? b1 : b2})"/>`,
        ).join('')}
      </svg>`;
      break;
    }

    case 2: {
      // Vertical stripes (Math.ceil so a partial stripe still covers the right edge)
      const stripeWidth = 10;
      svgPattern = `<svg width="${width}" height="${height}">
        ${Array.from(
          { length: Math.ceil(width / stripeWidth) },
          (_, i) =>
            `<rect x="${i * stripeWidth}" y="0" width="${stripeWidth}" height="${height}"
             fill="rgb(${i % 2 ? r1 : r2},${i % 2 ? g1 : g2},${i % 2 ? b1 : b2})"/>`,
        ).join('')}
      </svg>`;
      break;
    }

    case 3: {
      // Checkerboard
      const squareSize = 10;
      svgPattern = `<svg width="${width}" height="${height}">
        ${Array.from({ length: Math.ceil(height / squareSize) }, (_, row) =>
          Array.from({ length: Math.ceil(width / squareSize) }, (_, col) => {
            const isEven = (row + col) % 2 === 0;
            return `<rect x="${col * squareSize}" y="${row * squareSize}"
              width="${squareSize}" height="${squareSize}"
              fill="rgb(${isEven ? r1 : r2},${isEven ? g1 : g2},${isEven ? b1 : b2})"/>`;
          }).join(''),
        ).join('')}
      </svg>`;
      break;
    }

    case 4: {
      // Diagonal stripes
      svgPattern = `<svg width="${width}" height="${height}">
        <defs>
          <pattern id="diagonal" x="0" y="0" width="20" height="20" patternUnits="userSpaceOnUse">
            <rect x="0" y="0" width="10" height="20" fill="rgb(${r1},${g1},${b1})"/>
            <rect x="10" y="0" width="10" height="20" fill="rgb(${r2},${g2},${b2})"/>
          </pattern>
        </defs>
        <rect x="0" y="0" width="${width}" height="${height}" fill="url(#diagonal)" transform="rotate(45 50 50)"/>
      </svg>`;
      break;
    }
  }

  const svgBuffer = Buffer.from(svgPattern);
  const jpegData = await sharp(svgBuffer).jpeg({ quality: 50 }).toBuffer();
  return jpegData;
};
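A small usage sketch for the image helpers; the seed string, 3:2 ratio, and output path are placeholders:

import { writeFileSync } from 'node:fs';
import { randomThumbnail } from 'src/generators/timeline/images';

const main = async () => {
  // Same seed string -> same JPEG bytes, so fixtures stay stable across runs.
  const jpeg = await randomThumbnail('asset-1234', 3 / 2); // placeholder seed, 3:2 landscape
  writeFileSync('/tmp/thumb.jpg', jpeg);
};
void main();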
e2e/src/generators/timeline/model-objects.ts (new file, 265 lines)
@@ -0,0 +1,265 @@
/**
 * Generator functions for timeline model objects
 */

import { faker } from '@faker-js/faker';
import { AssetVisibility } from '@immich/sdk';
import { DateTime } from 'luxon';
import { writeFileSync } from 'node:fs';
import { SeededRandom } from 'src/generators/timeline/utils';
import type { DayPattern, MonthDistribution } from './distribution-patterns';
import { ASSET_DISTRIBUTION, DAY_DISTRIBUTION } from './distribution-patterns';
import type { MockTimelineAsset, MockTimelineData, SerializedTimelineData, TimelineConfig } from './timeline-config';
import { ASPECT_RATIO_WEIGHTS, GENERATION_CONSTANTS, validateTimelineConfig } from './timeline-config';

/**
 * Generate a random aspect ratio based on weighted probabilities
 */
export function generateAspectRatio(rng: SeededRandom): string {
  const random = rng.next();
  let cumulative = 0;

  for (const [ratio, weight] of Object.entries(ASPECT_RATIO_WEIGHTS)) {
    cumulative += weight;
    if (random < cumulative) {
      return ratio;
    }
  }
  return '16:9'; // Default fallback
}

export function generateThumbhash(rng: SeededRandom): string {
  return Array.from({ length: 10 }, () => rng.nextInt(0, 256).toString(16).padStart(2, '0')).join('');
}

export function generateDuration(rng: SeededRandom): string {
  return `${rng.nextInt(GENERATION_CONSTANTS.MIN_VIDEO_DURATION_SECONDS, GENERATION_CONSTANTS.MAX_VIDEO_DURATION_SECONDS)}.${rng.nextInt(0, 1000).toString().padStart(3, '0')}`;
}

export function generateUUID(): string {
  return faker.string.uuid();
}

export function generateAsset(
  year: number,
  month: number,
  day: number,
  ownerId: string,
  rng: SeededRandom,
): MockTimelineAsset {
  // Anchor the day in UTC so generated timestamps do not depend on the machine timezone
  const from = DateTime.fromObject({ year, month, day }, { zone: 'utc' });
  const to = from.endOf('day');
  const date = faker.date.between({ from: from.toJSDate(), to: to.toJSDate() });
  const isVideo = rng.next() < GENERATION_CONSTANTS.VIDEO_PROBABILITY;

  const assetId = generateUUID();
  const hasGPS = rng.next() < GENERATION_CONSTANTS.GPS_PERCENTAGE;

  const ratio = generateAspectRatio(rng);

  const asset: MockTimelineAsset = {
    id: assetId,
    ownerId,
    ratio: Number.parseFloat(ratio.split(':')[0]) / Number.parseFloat(ratio.split(':')[1]),
    thumbhash: generateThumbhash(rng),
    localDateTime: date.toISOString(),
    fileCreatedAt: date.toISOString(),
    isFavorite: rng.next() < GENERATION_CONSTANTS.FAVORITE_PROBABILITY,
    isTrashed: false,
    isVideo,
    isImage: !isVideo,
    duration: isVideo ? generateDuration(rng) : null,
    projectionType: null,
    livePhotoVideoId: null,
    city: hasGPS ? faker.location.city() : null,
    country: hasGPS ? faker.location.country() : null,
    people: null,
    latitude: hasGPS ? faker.location.latitude() : null,
    longitude: hasGPS ? faker.location.longitude() : null,
    visibility: AssetVisibility.Timeline,
    stack: null,
    fileSizeInByte: faker.number.int({ min: 510, max: 5_000_000 }),
    checksum: faker.string.alphanumeric({ length: 5 }),
  };

  return asset;
}

/**
 * Generate assets for a specific day
 */
export function generateDayAssets(
  year: number,
  month: number,
  day: number,
  assetCount: number,
  ownerId: string,
  rng: SeededRandom,
): MockTimelineAsset[] {
  return Array.from({ length: assetCount }, () => generateAsset(year, month, day, ownerId, rng));
}

/**
 * Distribute assets evenly across consecutive days
 *
 * @returns Array of generated timeline assets
 */
export function generateConsecutiveDays(
  year: number,
  month: number,
  startDay: number,
  numDays: number,
  totalAssets: number,
  ownerId: string,
  rng: SeededRandom,
): MockTimelineAsset[] {
  const assets: MockTimelineAsset[] = [];
  const assetsPerDay = Math.floor(totalAssets / numDays);

  for (let i = 0; i < numDays; i++) {
    const dayAssets =
      i === numDays - 1
        ? totalAssets - assetsPerDay * (numDays - 1) // Remainder on last day
        : assetsPerDay;
    // Create a new RNG with a different seed for each day
    const dayRng = new SeededRandom(rng.nextInt(0, 1_000_000) + i * 100);
    assets.push(...generateDayAssets(year, month, startDay + i, dayAssets, ownerId, dayRng));
  }

  return assets;
}

/**
 * Generate assets for a month with specified distribution pattern
 */
export function generateMonthAssets(
  year: number,
  month: number,
  ownerId: string,
  distribution: MonthDistribution = 'medium',
  pattern: DayPattern = 'consecutive-large',
  rng: SeededRandom,
): MockTimelineAsset[] {
  const daysInMonth = new Date(year, month, 0).getDate();

  if (distribution === 'empty') {
    return [];
  }

  // Fall back to the medium distribution for unknown values
  const distributionStrategy = ASSET_DISTRIBUTION[distribution] ?? ASSET_DISTRIBUTION.medium;
  if (!ASSET_DISTRIBUTION[distribution]) {
    console.warn(`Unknown distribution: ${distribution}, defaulting to medium`);
  }
  const totalAssets = distributionStrategy(rng);

  // Fall back to the consecutive-large strategy for unknown patterns
  const dayStrategy = DAY_DISTRIBUTION[pattern] ?? DAY_DISTRIBUTION['consecutive-large'];
  if (!DAY_DISTRIBUTION[pattern]) {
    console.warn(`Unknown pattern: ${pattern}, defaulting to consecutive-large`);
  }

  const assets = dayStrategy(year, month, daysInMonth, totalAssets, ownerId, rng);
  assets.sort((a, b) => DateTime.fromISO(b.localDateTime).diff(DateTime.fromISO(a.localDateTime)).milliseconds);
  return assets;
}

/**
 * Main generator function for timeline data
 */
export function generateTimelineData(config: TimelineConfig): MockTimelineData {
  validateTimelineConfig(config);

  const buckets = new Map<string, MockTimelineAsset[]>();
  const monthStats: Record<string, { count: number; distribution: MonthDistribution; pattern: DayPattern }> = {};

  const globalRng = new SeededRandom(config.seed || GENERATION_CONSTANTS.DEFAULT_SEED);
  faker.seed(globalRng.nextInt(0, 1_000_000));

  // Resolve the owner once so every month shares the same owner ID
  const ownerId = config.ownerId || generateUUID();

  for (const monthConfig of config.months) {
    const { year, month, distribution, pattern } = monthConfig;

    const monthSeed = globalRng.nextInt(0, 1_000_000);
    const monthRng = new SeededRandom(monthSeed);

    const monthAssets = generateMonthAssets(year, month, ownerId, distribution, pattern, monthRng);

    if (monthAssets.length > 0) {
      const monthKey = `${year}-${month.toString().padStart(2, '0')}`;
      monthStats[monthKey] = {
        count: monthAssets.length,
        distribution,
        pattern,
      };

      // Create bucket key (YYYY-MM-01)
      const bucketKey = `${year}-${month.toString().padStart(2, '0')}-01`;
      buckets.set(bucketKey, monthAssets);
    }
  }

  // Create a mock album from random assets
  const allAssets = [...buckets.values()].flat();

  // Select 10-30 random assets for the album (or all assets if fewer than 10)
  const albumSize = Math.min(allAssets.length, globalRng.nextInt(10, 31));
  const selectedAssetConfigs: MockTimelineAsset[] = [];
  const usedIndices = new Set<number>();

  while (selectedAssetConfigs.length < albumSize && usedIndices.size < allAssets.length) {
    const randomIndex = globalRng.nextInt(0, allAssets.length);
    if (!usedIndices.has(randomIndex)) {
      usedIndices.add(randomIndex);
      selectedAssetConfigs.push(allAssets[randomIndex]);
    }
  }

  // Sort selected assets by date (newest first)
  selectedAssetConfigs.sort(
    (a, b) => DateTime.fromISO(b.localDateTime).diff(DateTime.fromISO(a.localDateTime)).milliseconds,
  );

  const selectedAssets = selectedAssetConfigs.map((asset) => asset.id);

  const now = new Date().toISOString();
  const album = {
    id: generateUUID(),
    albumName: 'Test Album',
    description: 'A mock album for testing',
    assetIds: selectedAssets,
    thumbnailAssetId: selectedAssets.length > 0 ? selectedAssets[0] : null,
    createdAt: now,
    updatedAt: now,
  };

  // Write to file if configured
  if (config.writeToFile) {
    const outputPath = config.outputPath || '/tmp/timeline-data.json';

    // Convert Map to object for serialization
    const serializedData: SerializedTimelineData = {
      buckets: Object.fromEntries(buckets),
      album,
    };

    try {
      writeFileSync(outputPath, JSON.stringify(serializedData, null, 2));
      console.log(`Timeline data written to ${outputPath}`);
    } catch (error) {
      console.error(`Failed to write timeline data to ${outputPath}:`, error);
    }
  }

  return { buckets, album };
}
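A minimal end-to-end sketch of driving the generator; the seed and two-month config below are illustrative, not shipped fixtures:

import { generateTimelineData } from 'src/generators/timeline/model-objects';

const data = generateTimelineData({
  seed: 123, // arbitrary; any fixed seed makes the output reproducible
  months: [
    { year: 2024, month: 6, distribution: 'dense', pattern: 'start-heavy' },
    { year: 2024, month: 5, distribution: 'sparse', pattern: 'single-day' },
  ],
});
console.log([...data.buckets.keys()]); // ['2024-06-01', '2024-05-01']
console.log(data.album.assetIds.length); // 10-30, capped by the number of generated assets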
e2e/src/generators/timeline/rest-response.ts (new file, 436 lines)
@@ -0,0 +1,436 @@
/**
 * REST API output functions for converting timeline data to API response formats
 */

import {
  AssetTypeEnum,
  AssetVisibility,
  UserAvatarColor,
  type AlbumResponseDto,
  type AssetResponseDto,
  type ExifResponseDto,
  type TimeBucketAssetResponseDto,
  type TimeBucketsResponseDto,
  type UserResponseDto,
} from '@immich/sdk';
import { DateTime } from 'luxon';
import { signupDto } from 'src/fixtures';
import { parseTimeBucketKey } from 'src/generators/timeline/utils';
import type { MockTimelineAsset, MockTimelineData } from './timeline-config';

/**
 * Convert timeline/asset models to columnar format (parallel arrays)
 */
export function toColumnarFormat(assets: MockTimelineAsset[]): TimeBucketAssetResponseDto {
  const result: TimeBucketAssetResponseDto = {
    id: [],
    ownerId: [],
    ratio: [],
    thumbhash: [],
    fileCreatedAt: [],
    localOffsetHours: [],
    isFavorite: [],
    isTrashed: [],
    isImage: [],
    duration: [],
    projectionType: [],
    livePhotoVideoId: [],
    city: [],
    country: [],
    visibility: [],
  };

  for (const asset of assets) {
    result.id.push(asset.id);
    result.ownerId.push(asset.ownerId);
    result.ratio.push(asset.ratio);
    result.thumbhash.push(asset.thumbhash);
    result.fileCreatedAt.push(asset.fileCreatedAt);
    result.localOffsetHours.push(0); // Assuming UTC for mocks
    result.isFavorite.push(asset.isFavorite);
    result.isTrashed.push(asset.isTrashed);
    result.isImage.push(asset.isImage);
    result.duration.push(asset.duration);
    result.projectionType.push(asset.projectionType);
    result.livePhotoVideoId.push(asset.livePhotoVideoId);
    result.city.push(asset.city);
    result.country.push(asset.country);
    result.visibility.push(asset.visibility);
  }

  if (assets.some((a) => a.latitude !== null || a.longitude !== null)) {
    result.latitude = assets.map((a) => a.latitude);
    result.longitude = assets.map((a) => a.longitude);
  }

  result.stack = assets.map(() => null);
  return result;
}

/**
 * Extract a single bucket from timeline data (mimics getTimeBucket API)
 * Automatically handles both ISO timestamp and simple month formats
 * Returns data in columnar format matching the actual API
 * When albumId is provided, only returns assets from that album
 */
export function getTimeBucket(
  timelineData: MockTimelineData,
  timeBucket: string,
  isTrashed: boolean | undefined,
  isArchived: boolean | undefined,
  isFavorite: boolean | undefined,
  albumId: string | undefined,
  changes: Changes,
): TimeBucketAssetResponseDto {
  const bucketKey = parseTimeBucketKey(timeBucket);
  let assets = timelineData.buckets.get(bucketKey);

  if (!assets) {
    return toColumnarFormat([]);
  }

  // Create sets for quick lookups
  const deletedAssetIds = new Set(changes.assetDeletions);
  const archivedAssetIds = new Set(changes.assetArchivals);
  const favoritedAssetIds = new Set(changes.assetFavorites);

  // Filter assets based on trashed/archived/favorite status
  assets = assets.filter((asset) =>
    shouldIncludeAsset(asset, isTrashed, isArchived, isFavorite, deletedAssetIds, archivedAssetIds, favoritedAssetIds),
  );

  // Filter to only include assets from the specified album
  if (albumId) {
    const album = timelineData.album;
    if (!album || album.id !== albumId) {
      return toColumnarFormat([]);
    }

    // Create a Set for faster lookup
    const albumAssetIds = new Set([...album.assetIds, ...changes.albumAdditions]);
    assets = assets.filter((asset) => albumAssetIds.has(asset.id));
  }

  // Override properties for assets in changes arrays
  const assetsWithOverrides = assets.map((asset) => {
    if (deletedAssetIds.has(asset.id) || archivedAssetIds.has(asset.id) || favoritedAssetIds.has(asset.id)) {
      return {
        ...asset,
        isFavorite: favoritedAssetIds.has(asset.id) ? true : asset.isFavorite,
        isTrashed: deletedAssetIds.has(asset.id) ? true : asset.isTrashed,
        visibility: archivedAssetIds.has(asset.id) ? AssetVisibility.Archive : asset.visibility,
      };
    }
    return asset;
  });

  return toColumnarFormat(assetsWithOverrides);
}

export type Changes = {
  // ids of assets that are newly added to the album
  albumAdditions: string[];
  // ids of assets that are newly deleted
  assetDeletions: string[];
  // ids of assets that are newly archived
  assetArchivals: string[];
  // ids of assets that are newly favorited
  assetFavorites: string[];
};

/**
 * Helper function to determine if an asset should be included based on filter criteria
 * @param asset - The asset to check
 * @param isTrashed - Filter for trashed status (undefined means no filter)
 * @param isArchived - Filter for archived status (undefined means no filter)
 * @param isFavorite - Filter for favorite status (undefined means no filter)
 * @param deletedAssetIds - Set of IDs for assets that have been deleted
 * @param archivedAssetIds - Set of IDs for assets that have been archived
 * @param favoritedAssetIds - Set of IDs for assets that have been favorited
 * @returns true if the asset matches all filter criteria
 */
function shouldIncludeAsset(
  asset: MockTimelineAsset,
  isTrashed: boolean | undefined,
  isArchived: boolean | undefined,
  isFavorite: boolean | undefined,
  deletedAssetIds: Set<string>,
  archivedAssetIds: Set<string>,
  favoritedAssetIds: Set<string>,
): boolean {
  // Determine actual status (property or in changes)
  const actuallyTrashed = asset.isTrashed || deletedAssetIds.has(asset.id);
  const actuallyArchived = asset.visibility === AssetVisibility.Archive || archivedAssetIds.has(asset.id);
  const actuallyFavorited = asset.isFavorite || favoritedAssetIds.has(asset.id);

  // Apply filters
  if (isTrashed !== undefined && actuallyTrashed !== isTrashed) {
    return false;
  }
  if (isArchived !== undefined && actuallyArchived !== isArchived) {
    return false;
  }
  if (isFavorite !== undefined && actuallyFavorited !== isFavorite) {
    return false;
  }

  return true;
}

/**
 * Get summary for all buckets (mimics getTimeBuckets API)
 * When albumId is provided, only includes buckets that contain assets from that album
 */
export function getTimeBuckets(
  timelineData: MockTimelineData,
  isTrashed: boolean | undefined,
  isArchived: boolean | undefined,
  isFavorite: boolean | undefined,
  albumId: string | undefined,
  changes: Changes,
): TimeBucketsResponseDto[] {
  const summary: TimeBucketsResponseDto[] = [];

  // Create sets for quick lookups
  const deletedAssetIds = new Set(changes.assetDeletions);
  const archivedAssetIds = new Set(changes.assetArchivals);
  const favoritedAssetIds = new Set(changes.assetFavorites);

  // With an albumId, only include buckets containing that album's assets; otherwise summarize all assets
  if (albumId) {
    const album = timelineData.album;
    if (!album || album.id !== albumId) {
      return [];
    }

    // Create a Set for faster lookup
    const albumAssetIds = new Set([...album.assetIds, ...changes.albumAdditions]);
    for (const removed of changes.assetDeletions) {
      albumAssetIds.delete(removed);
    }
    for (const [bucketKey, assets] of timelineData.buckets) {
      // Count how many assets in this bucket are in the album and match the filters
      const albumAssetsInBucket = assets.filter((asset) => {
        // Must be in the album
        if (!albumAssetIds.has(asset.id)) {
          return false;
        }

        return shouldIncludeAsset(
          asset,
          isTrashed,
          isArchived,
          isFavorite,
          deletedAssetIds,
          archivedAssetIds,
          favoritedAssetIds,
        );
      });

      if (albumAssetsInBucket.length > 0) {
        summary.push({
          timeBucket: bucketKey,
          count: albumAssetsInBucket.length,
        });
      }
    }
  } else {
    for (const [bucketKey, assets] of timelineData.buckets) {
      // Filter assets based on trashed/archived/favorite status
      const filteredAssets = assets.filter((asset) =>
        shouldIncludeAsset(
          asset,
          isTrashed,
          isArchived,
          isFavorite,
          deletedAssetIds,
          archivedAssetIds,
          favoritedAssetIds,
        ),
      );

      if (filteredAssets.length > 0) {
        summary.push({
          timeBucket: bucketKey,
          count: filteredAssets.length,
        });
      }
    }
  }

  // Sort summary by date (newest first) using luxon
  summary.sort((a, b) => {
    const dateA = DateTime.fromISO(a.timeBucket);
    const dateB = DateTime.fromISO(b.timeBucket);
    return dateB.diff(dateA).milliseconds;
  });

  return summary;
}

const createDefaultOwner = (ownerId: string) => {
  const defaultOwner: UserResponseDto = {
    id: ownerId,
    email: signupDto.admin.email,
    name: signupDto.admin.name,
    profileImagePath: '',
    profileChangedAt: new Date().toISOString(),
    avatarColor: UserAvatarColor.Blue,
  };
  return defaultOwner;
};

/**
 * Convert a TimelineAssetConfig to a full AssetResponseDto
 * This matches the response from GET /api/assets/:id
 */
export function toAssetResponseDto(asset: MockTimelineAsset, owner?: UserResponseDto): AssetResponseDto {
  const now = new Date().toISOString();

  // Default owner if not provided
  const defaultOwner = createDefaultOwner(asset.ownerId);

  const exifInfo: ExifResponseDto = {
    make: null,
    model: null,
    exifImageWidth: asset.ratio > 1 ? 4000 : 3000,
    // height = width / (w:h ratio), so portrait assets come out taller than wide
    exifImageHeight: Math.round((asset.ratio > 1 ? 4000 : 3000) / asset.ratio),
    fileSizeInByte: asset.fileSizeInByte,
    orientation: '1',
    dateTimeOriginal: asset.fileCreatedAt,
    modifyDate: asset.fileCreatedAt,
    timeZone: asset.latitude === null ? null : 'UTC',
    lensModel: null,
    fNumber: null,
    focalLength: null,
    iso: null,
    exposureTime: null,
    latitude: asset.latitude,
    longitude: asset.longitude,
    city: asset.city,
    country: asset.country,
    state: null,
    description: null,
  };

  return {
    id: asset.id,
    deviceAssetId: `device-${asset.id}`,
    ownerId: asset.ownerId,
    owner: owner || defaultOwner,
    libraryId: `library-${asset.ownerId}`,
    deviceId: `device-${asset.ownerId}`,
    type: asset.isVideo ? AssetTypeEnum.Video : AssetTypeEnum.Image,
    originalPath: `/original/${asset.id}.${asset.isVideo ? 'mp4' : 'jpg'}`,
    originalFileName: `${asset.id}.${asset.isVideo ? 'mp4' : 'jpg'}`,
    originalMimeType: asset.isVideo ? 'video/mp4' : 'image/jpeg',
    thumbhash: asset.thumbhash,
    fileCreatedAt: asset.fileCreatedAt,
    fileModifiedAt: asset.fileCreatedAt,
    localDateTime: asset.localDateTime,
    updatedAt: now,
    createdAt: asset.fileCreatedAt,
    isFavorite: asset.isFavorite,
    isArchived: false,
    isTrashed: asset.isTrashed,
    visibility: asset.visibility,
    duration: asset.duration || '0:00:00.00000',
    exifInfo,
    livePhotoVideoId: asset.livePhotoVideoId,
    tags: [],
    people: [],
    unassignedFaces: [],
    stack: asset.stack,
    isOffline: false,
    hasMetadata: true,
    duplicateId: null,
    resized: true,
    checksum: asset.checksum,
  };
}

/**
 * Get a single asset by ID from timeline data
 * This matches the response from GET /api/assets/:id
 */
export function getAsset(
  timelineData: MockTimelineData,
  assetId: string,
  owner?: UserResponseDto,
): AssetResponseDto | undefined {
  // Search through all buckets for the asset
  for (const assets of timelineData.buckets.values()) {
    const asset = assets.find((a) => a.id === assetId);
    if (asset) {
      return toAssetResponseDto(asset, owner);
    }
  }
  return undefined;
}

/**
 * Get a mock album from timeline data
 * This matches the response from GET /api/albums/:id
 */
export function getAlbum(
  timelineData: MockTimelineData,
  ownerId: string,
  albumId: string | undefined,
  changes: Changes,
): AlbumResponseDto | undefined {
  if (!timelineData.album) {
    return undefined;
  }

  // If albumId is provided and doesn't match, return undefined
  if (albumId && albumId !== timelineData.album.id) {
    return undefined;
  }

  const album = timelineData.album;
  const albumOwner = createDefaultOwner(ownerId);

  // Get the actual asset objects from the timeline data
  const albumAssets: AssetResponseDto[] = [];
  const allAssets = [...timelineData.buckets.values()].flat();

  for (const assetId of album.assetIds) {
    const assetConfig = allAssets.find((a) => a.id === assetId);
    if (assetConfig) {
      albumAssets.push(toAssetResponseDto(assetConfig, albumOwner));
    }
  }
  for (const assetId of changes.albumAdditions ?? []) {
    const assetConfig = allAssets.find((a) => a.id === assetId);
    if (assetConfig) {
      albumAssets.push(toAssetResponseDto(assetConfig, albumOwner));
    }
  }

  albumAssets.sort((a, b) => DateTime.fromISO(b.localDateTime).diff(DateTime.fromISO(a.localDateTime)).milliseconds);

  // For a basic mock album, we don't include any albumUsers (shared users)
  // The owner is represented by the owner field, not in albumUsers
  const response: AlbumResponseDto = {
    id: album.id,
    albumName: album.albumName,
    description: album.description,
    albumThumbnailAssetId: album.thumbnailAssetId,
    createdAt: album.createdAt,
    updatedAt: album.updatedAt,
    ownerId: albumOwner.id,
    owner: albumOwner,
    albumUsers: [], // Empty array for non-shared album
    shared: false,
    hasSharedLink: false,
    isActivityEnabled: true,
    assetCount: albumAssets.length,
    assets: albumAssets,
    startDate: albumAssets.length > 0 ? albumAssets.at(-1)?.fileCreatedAt : undefined,
    endDate: albumAssets.length > 0 ? albumAssets[0].fileCreatedAt : undefined,
    lastModifiedAssetTimestamp: albumAssets.length > 0 ? albumAssets[0].fileCreatedAt : undefined,
  };

  return response;
}
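Continuing the generateTimelineData sketch above, this is how a test double might answer the bucket endpoints; `noChanges` is a hypothetical empty change-set and `data` is the value produced in that earlier sketch:

import { getTimeBucket, getTimeBuckets, type Changes } from 'src/generators/timeline/rest-response';

const noChanges: Changes = { albumAdditions: [], assetDeletions: [], assetArchivals: [], assetFavorites: [] };
const summary = getTimeBuckets(data, undefined, undefined, undefined, undefined, noChanges);
const bucket = getTimeBucket(data, summary[0].timeBucket, undefined, undefined, undefined, undefined, noChanges);
console.log(bucket.id.length === summary[0].count); // true: columnar arrays match the summary count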
e2e/src/generators/timeline/timeline-config.ts (new file, 200 lines)
@@ -0,0 +1,200 @@
import type { AssetVisibility } from '@immich/sdk';
import type { DayPattern, MonthDistribution } from 'src/generators/timeline/distribution-patterns';

// Constants for generation parameters
export const GENERATION_CONSTANTS = {
  VIDEO_PROBABILITY: 0.15, // 15% of assets are videos
  GPS_PERCENTAGE: 0.7, // 70% of assets have GPS data
  FAVORITE_PROBABILITY: 0.1, // 10% of assets are favorited
  MIN_VIDEO_DURATION_SECONDS: 5,
  MAX_VIDEO_DURATION_SECONDS: 300,
  DEFAULT_SEED: 12_345,
  DEFAULT_OWNER_ID: 'user-1',
  MAX_SELECT_ATTEMPTS: 10,
  SPARSE_DAY_COVERAGE: 0.4, // 40% of days have photos in sparse pattern
} as const;

// Aspect ratio distribution weights (must sum to 1)
export const ASPECT_RATIO_WEIGHTS = {
  '4:3': 0.35, // 35% 4:3 landscape
  '3:2': 0.25, // 25% 3:2 landscape
  '16:9': 0.2, // 20% 16:9 landscape
  '2:3': 0.1, // 10% 2:3 portrait
  '1:1': 0.09, // 9% 1:1 square
  '3:1': 0.01, // 1% 3:1 panorama
} as const;

export type AspectRatio = {
  width: number;
  height: number;
  ratio: number;
  name: string;
};

// Mock configuration for asset generation - will be transformed to API response formats
export type MockTimelineAsset = {
  id: string;
  ownerId: string;
  ratio: number;
  thumbhash: string | null;
  localDateTime: string;
  fileCreatedAt: string;
  isFavorite: boolean;
  isTrashed: boolean;
  isVideo: boolean;
  isImage: boolean;
  duration: string | null;
  projectionType: string | null;
  livePhotoVideoId: string | null;
  city: string | null;
  country: string | null;
  people: string[] | null;
  latitude: number | null;
  longitude: number | null;
  visibility: AssetVisibility;
  stack: null;
  checksum: string;
  fileSizeInByte: number;
};

export type MonthSpec = {
  year: number;
  month: number; // 1-12
  distribution: MonthDistribution;
  pattern: DayPattern;
};

/**
 * Configuration for timeline data generation
 */
export type TimelineConfig = {
  ownerId?: string;
  months: MonthSpec[];
  seed?: number;
  writeToFile?: boolean;
  outputPath?: string;
};

export type MockAlbum = {
  id: string;
  albumName: string;
  description: string;
  assetIds: string[]; // IDs of assets in the album
  thumbnailAssetId: string | null;
  createdAt: string;
  updatedAt: string;
};

export type MockTimelineData = {
  buckets: Map<string, MockTimelineAsset[]>;
  album: MockAlbum; // Mock album created from random assets
};

export type SerializedTimelineData = {
  buckets: Record<string, MockTimelineAsset[]>;
  album: MockAlbum;
};

/**
 * Validates a TimelineConfig object to ensure all values are within expected ranges
 */
export function validateTimelineConfig(config: TimelineConfig): void {
  if (!config.months || config.months.length === 0) {
    throw new Error('TimelineConfig must contain at least one month');
  }

  const seenMonths = new Set<string>();

  for (const month of config.months) {
    if (month.month < 1 || month.month > 12) {
      throw new Error(`Invalid month: ${month.month}. Must be between 1 and 12`);
    }

    if (month.year < 1900 || month.year > 2100) {
      throw new Error(`Invalid year: ${month.year}. Must be between 1900 and 2100`);
    }

    const monthKey = `${month.year}-${month.month}`;
    if (seenMonths.has(monthKey)) {
      throw new Error(`Duplicate month found: ${monthKey}`);
    }
    seenMonths.add(monthKey);

    // Validate distribution if provided
    if (month.distribution && !['empty', 'sparse', 'medium', 'dense', 'very-dense'].includes(month.distribution)) {
      throw new Error(
        `Invalid distribution: ${month.distribution}. Must be one of: empty, sparse, medium, dense, very-dense`,
      );
    }

    const validPatterns = [
      'single-day',
      'consecutive-large',
      'consecutive-small',
      'alternating',
      'sparse-scattered',
      'start-heavy',
      'end-heavy',
      'mid-heavy',
    ];
    if (month.pattern && !validPatterns.includes(month.pattern)) {
      throw new Error(`Invalid pattern: ${month.pattern}. Must be one of: ${validPatterns.join(', ')}`);
    }
  }

  // Validate seed if provided
  if (config.seed !== undefined && (config.seed < 0 || !Number.isInteger(config.seed))) {
    throw new Error('Seed must be a non-negative integer');
  }

  // Validate ownerId if provided
  if (config.ownerId !== undefined && config.ownerId.trim() === '') {
    throw new Error('Owner ID cannot be an empty string');
  }
}

/**
 * Create a default timeline configuration
 */
export function createDefaultTimelineConfig(): TimelineConfig {
  const months: MonthSpec[] = [
    // 2024 - Mix of patterns
    { year: 2024, month: 12, distribution: 'very-dense', pattern: 'alternating' },
    { year: 2024, month: 11, distribution: 'dense', pattern: 'consecutive-large' },
    { year: 2024, month: 10, distribution: 'medium', pattern: 'mid-heavy' },
    { year: 2024, month: 9, distribution: 'sparse', pattern: 'consecutive-small' },
    { year: 2024, month: 8, distribution: 'empty', pattern: 'single-day' },
    { year: 2024, month: 7, distribution: 'dense', pattern: 'start-heavy' },
    { year: 2024, month: 6, distribution: 'medium', pattern: 'sparse-scattered' },
    { year: 2024, month: 5, distribution: 'sparse', pattern: 'single-day' },
    { year: 2024, month: 4, distribution: 'very-dense', pattern: 'consecutive-large' },
    { year: 2024, month: 3, distribution: 'empty', pattern: 'single-day' },
    { year: 2024, month: 2, distribution: 'medium', pattern: 'end-heavy' },
    { year: 2024, month: 1, distribution: 'dense', pattern: 'alternating' },

    // 2023 - Testing year boundaries and more patterns
    { year: 2023, month: 12, distribution: 'very-dense', pattern: 'end-heavy' },
    { year: 2023, month: 11, distribution: 'sparse', pattern: 'consecutive-small' },
    { year: 2023, month: 10, distribution: 'empty', pattern: 'single-day' },
    { year: 2023, month: 9, distribution: 'medium', pattern: 'alternating' },
    { year: 2023, month: 8, distribution: 'dense', pattern: 'mid-heavy' },
    { year: 2023, month: 7, distribution: 'sparse', pattern: 'sparse-scattered' },
    { year: 2023, month: 6, distribution: 'medium', pattern: 'consecutive-large' },
    { year: 2023, month: 5, distribution: 'empty', pattern: 'single-day' },
    { year: 2023, month: 4, distribution: 'sparse', pattern: 'single-day' },
    { year: 2023, month: 3, distribution: 'dense', pattern: 'start-heavy' },
    { year: 2023, month: 2, distribution: 'medium', pattern: 'alternating' },
    { year: 2023, month: 1, distribution: 'very-dense', pattern: 'consecutive-large' },
  ];

  for (let year = 2022; year >= 2000; year--) {
    for (let month = 12; month >= 1; month--) {
      months.push({ year, month, distribution: 'medium', pattern: 'sparse-scattered' });
    }
  }

  return {
    months,
    seed: 42,
  };
}
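A quick sketch of the validation failing fast; the duplicate month below is deliberately invalid:

import { validateTimelineConfig } from 'src/generators/timeline/timeline-config';

try {
  validateTimelineConfig({
    months: [
      { year: 2024, month: 1, distribution: 'medium', pattern: 'single-day' },
      { year: 2024, month: 1, distribution: 'dense', pattern: 'alternating' }, // duplicate on purpose
    ],
  });
} catch (error) {
  console.error(error); // Error: Duplicate month found: 2024-1
}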
e2e/src/generators/timeline/utils.ts (new file, 186 lines)
@@ -0,0 +1,186 @@
import { DateTime } from 'luxon';
import { GENERATION_CONSTANTS, type MockTimelineAsset } from 'src/generators/timeline/timeline-config';

/**
 * Linear Congruential Generator for deterministic pseudo-random numbers
 */
export class SeededRandom {
  private seed: number;

  constructor(seed: number) {
    this.seed = seed;
  }

  /**
   * Generate next random number in range [0, 1)
   */
  next(): number {
    // LCG multiplier/increment from Numerical Recipes; modulus 2^31 - 1 keeps the state float-safe
    this.seed = (this.seed * 1_664_525 + 1_013_904_223) % 2_147_483_647;
    return this.seed / 2_147_483_647;
  }

  /**
   * Generate random integer in range [min, max)
   */
  nextInt(min: number, max: number): number {
    return Math.floor(this.next() * (max - min)) + min;
  }

  /**
   * Generate random boolean with given probability
   */
  nextBoolean(probability = 0.5): boolean {
    return this.next() < probability;
  }
}

/**
 * Select distinct random days, retrying on collisions up to a safety limit
 * and falling back to sequential days if needed.
 *
 * @param daysInMonth - Total number of days in the month
 * @param numDays - Number of days to select
 * @param rng - Random number generator instance
 * @returns Array of selected day numbers, sorted in descending order
 */
export function selectRandomDays(daysInMonth: number, numDays: number, rng: SeededRandom): number[] {
  const selectedDays = new Set<number>();
  const maxAttempts = numDays * GENERATION_CONSTANTS.MAX_SELECT_ATTEMPTS; // Safety limit
  let attempts = 0;

  while (selectedDays.size < numDays && attempts < maxAttempts) {
    const day = rng.nextInt(1, daysInMonth + 1);
    selectedDays.add(day);
    attempts++;
  }

  // Fallback: if we couldn't select enough random days, fill with sequential days
  if (selectedDays.size < numDays) {
    for (let day = 1; day <= daysInMonth && selectedDays.size < numDays; day++) {
      selectedDays.add(day);
    }
  }

  return [...selectedDays].toSorted((a, b) => b - a);
}

/**
 * Select item from array using seeded random
 */
export function selectRandom<T>(arr: T[], rng: SeededRandom): T {
  if (arr.length === 0) {
    throw new Error('Cannot select from empty array');
  }
  const index = rng.nextInt(0, arr.length);
  return arr[index];
}

/**
 * Select multiple random items from array using seeded random without duplicates
 */
export function selectRandomMultiple<T>(arr: T[], count: number, rng: SeededRandom): T[] {
  if (arr.length === 0) {
    throw new Error('Cannot select from empty array');
  }
  if (count < 0) {
    throw new Error('Count must be non-negative');
  }
  if (count > arr.length) {
    throw new Error('Count cannot exceed array length');
  }

  const result: T[] = [];
  const selectedIndices = new Set<number>();

  while (result.length < count) {
    const index = rng.nextInt(0, arr.length);
    if (!selectedIndices.has(index)) {
      selectedIndices.add(index);
      result.push(arr[index]);
    }
  }

  return result;
}

/**
 * Parse timeBucket parameter to extract year-month key
 * Handles both formats:
 * - ISO timestamp: "2024-12-01T00:00:00.000Z" -> "2024-12-01"
 * - Simple format: "2024-12-01" -> "2024-12-01"
 */
export function parseTimeBucketKey(timeBucket: string): string {
  if (!timeBucket) {
    throw new Error('timeBucket parameter cannot be empty');
  }

  const dt = DateTime.fromISO(timeBucket, { zone: 'utc' });

  if (!dt.isValid) {
    // Fallback to regex if not a valid ISO string
    const match = timeBucket.match(/^(\d{4}-\d{2}-\d{2})/);
    return match ? match[1] : timeBucket;
  }

  // Format as YYYY-MM-01 (first day of month)
  return `${dt.year}-${String(dt.month).padStart(2, '0')}-01`;
}

export function getMockAsset(
  asset: MockTimelineAsset,
  sortedDescendingAssets: MockTimelineAsset[],
  direction: 'next' | 'previous',
  unit: 'day' | 'month' | 'year' = 'day',
): MockTimelineAsset | null {
  const currentDateTime = DateTime.fromISO(asset.localDateTime, { zone: 'utc' });

  const currentIndex = sortedDescendingAssets.findIndex((a) => a.id === asset.id);

  if (currentIndex === -1) {
    return null;
  }

  const step = direction === 'next' ? 1 : -1;
  const startIndex = currentIndex + step;

  if (direction === 'next' && currentIndex >= sortedDescendingAssets.length - 1) {
    return null;
  }
  if (direction === 'previous' && currentIndex <= 0) {
    return null;
  }

  const isInDifferentPeriod = (date1: DateTime, date2: DateTime): boolean => {
    if (unit === 'day') {
      return !date1.startOf('day').equals(date2.startOf('day'));
    } else if (unit === 'month') {
      return date1.year !== date2.year || date1.month !== date2.month;
    } else {
      return date1.year !== date2.year;
    }
  };

  if (direction === 'next') {
    // Search forward in the array (backwards in time, since assets are sorted newest first)
    for (let i = startIndex; i < sortedDescendingAssets.length; i++) {
      const nextAsset = sortedDescendingAssets[i];
      const nextDate = DateTime.fromISO(nextAsset.localDateTime, { zone: 'utc' });

      if (isInDifferentPeriod(nextDate, currentDateTime)) {
        return nextAsset;
      }
    }
  } else {
    // Search backward in the array (forwards in time)
    for (let i = startIndex; i >= 0; i--) {
      const prevAsset = sortedDescendingAssets[i];
      const prevDate = DateTime.fromISO(prevAsset.localDateTime, { zone: 'utc' });

      if (isInDifferentPeriod(prevDate, currentDateTime)) {
        return prevAsset;
      }
    }
  }

  return null;
}
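Finally, a small determinism check for the RNG, which is what keeps all of the mock data above reproducible; the seed value is arbitrary:

import { SeededRandom } from 'src/generators/timeline/utils';

const a = new SeededRandom(42);
const b = new SeededRandom(42);
console.log(a.nextInt(0, 100) === b.nextInt(0, 100)); // true: same seed, same sequence
console.log(a.next() === b.next()); // true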
Some files were not shown because too many files have changed in this diff.