Mirror of https://github.com/immich-app/immich.git (synced 2025-12-06 09:13:13 +03:00)
Compare commits: v2.2.1 ... fix/restri (13 commits)
| SHA1 |
|---|
| 9029ec5bb6 |
| 02456a148e |
| 517c3e1d4c |
| 619de2a5e4 |
| 79d0e3e1ed |
| f5ff36a1f8 |
| b5efc9c16e |
| 1036076b0d |
| c76324c611 |
| 0ddb92e1ec |
| d08a520aa2 |
| 7bdf0f6c50 |
| 2b33a58448 |
.github/workflows/build-mobile.yml (vendored): 24 changed lines

@@ -20,6 +20,30 @@ on:
         required: true
       ANDROID_STORE_PASSWORD:
         required: true
+      APP_STORE_CONNECT_API_KEY_ID:
+        required: true
+      APP_STORE_CONNECT_API_KEY_ISSUER_ID:
+        required: true
+      APP_STORE_CONNECT_API_KEY:
+        required: true
+      IOS_CERTIFICATE_P12:
+        required: true
+      IOS_CERTIFICATE_PASSWORD:
+        required: true
+      IOS_PROVISIONING_PROFILE:
+        required: true
+      IOS_PROVISIONING_PROFILE_SHARE_EXTENSION:
+        required: true
+      IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION:
+        required: true
+      IOS_DEVELOPMENT_PROVISIONING_PROFILE:
+        required: true
+      IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION:
+        required: true
+      IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION:
+        required: true
+      FASTLANE_TEAM_ID:
+        required: true
   pull_request:
   push:
     branches: [main]
.github/workflows/fix-format.yml (vendored): 2 changed lines

@@ -39,7 +39,7 @@ jobs:
           cache-dependency-path: '**/pnpm-lock.yaml'

       - name: Fix formatting
-        run: make install-all && make format-all
+        run: pnpm --recursive install && pnpm run --recursive --parallel fix:format

       - name: Commit and push
         uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
.github/workflows/prepare-release.yml (vendored): 14 changed lines

@@ -99,6 +99,20 @@ jobs:
       ALIAS: ${{ secrets.ALIAS }}
       ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
       ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
+      # iOS secrets
+      APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
+      APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
+      APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
+      IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
+      IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
+      IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
+      IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
+      IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
+      IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
+      IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
+      IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
+      FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}

     with:
       ref: ${{ needs.bump_version.outputs.ref }}
       environment: production
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.99",
+  "version": "2.2.100",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
docs/static/archived-versions.json (vendored): 4 changed lines

@@ -1,4 +1,8 @@
 [
+  {
+    "label": "v2.2.2",
+    "url": "https://docs.v2.2.2.archive.immich.app"
+  },
   {
     "label": "v2.2.1",
     "url": "https://docs.v2.2.1.archive.immich.app"
@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "2.2.1",
+  "version": "2.2.2",
   "description": "",
   "main": "index.js",
   "type": "module",
@@ -1140,6 +1140,16 @@ describe('/asset', () => {
         },
       },
     },
+    {
+      input: 'metadata/gps-position/empty_gps.jpg',
+      expected: {
+        type: AssetTypeEnum.Image,
+        exifInfo: {
+          latitude: null,
+          longitude: null,
+        },
+      },
+    },
   ];

   it.each(tests)(`should upload and generate a thumbnail for different file types`, async ({ input, expected }) => {
Submodule e2e/test-assets updated: 37f60ea537...68e8b5853c
@@ -1,8 +1,10 @@
 from typing import Any

+import cv2
 import numpy as np
+from numpy.typing import NDArray
 from PIL import Image
-from rapidocr.ch_ppocr_det import TextDetector as RapidTextDetector
+from rapidocr.ch_ppocr_det.utils import DBPostProcess
 from rapidocr.inference_engine.base import FileInfo, InferSession
 from rapidocr.utils import DownloadFile, DownloadFileInput
 from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
@@ -10,11 +12,10 @@ from rapidocr.utils.typings import ModelType as RapidModelType

 from immich_ml.config import log
 from immich_ml.models.base import InferenceModel
-from immich_ml.models.transforms import decode_cv2
 from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
 from immich_ml.sessions.ort import OrtSession

-from .schemas import OcrOptions, TextDetectionOutput
+from .schemas import TextDetectionOutput


 class TextDetector(InferenceModel):
@@ -24,13 +25,20 @@
     def __init__(self, model_name: str, **model_kwargs: Any) -> None:
         super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
         self.max_resolution = 736
-        self.min_score = 0.5
-        self.score_mode = "fast"
+        self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
+        self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)
         self._empty: TextDetectionOutput = {
-            "image": np.empty(0, dtype=np.float32),
             "boxes": np.empty(0, dtype=np.float32),
             "scores": np.empty(0, dtype=np.float32),
         }
+        self.postprocess = DBPostProcess(
+            thresh=0.3,
+            box_thresh=model_kwargs.get("minScore", 0.5),
+            max_candidates=1000,
+            unclip_ratio=1.6,
+            use_dilation=True,
+            score_mode="fast",
+        )

     def _download(self) -> None:
         model_info = InferSession.get_model_url(
@@ -52,35 +60,65 @@ class TextDetector(InferenceModel):

     def _load(self) -> ModelSession:
         # TODO: support other runtime sessions
-        session = OrtSession(self.model_path)
-        self.model = RapidTextDetector(
-            OcrOptions(
-                session=session.session,
-                limit_side_len=self.max_resolution,
-                limit_type="min",
-                box_thresh=self.min_score,
-                score_mode=self.score_mode,
-            )
-        )
-        return session
+        return OrtSession(self.model_path)

-    def _predict(self, inputs: bytes | Image.Image) -> TextDetectionOutput:
-        results = self.model(decode_cv2(inputs))
-        if results.boxes is None or results.scores is None or results.img is None:
+    # partly adapted from RapidOCR
+    def _predict(self, inputs: Image.Image) -> TextDetectionOutput:
+        w, h = inputs.size
+        if w < 32 or h < 32:
             return self._empty
+        out = self.session.run(None, {"x": self._transform(inputs)})[0]
+        boxes, scores = self.postprocess(out, (h, w))
+        if len(boxes) == 0:
+            return self._empty
         return {
-            "image": results.img,
-            "boxes": np.array(results.boxes, dtype=np.float32),
-            "scores": np.array(results.scores, dtype=np.float32),
+            "boxes": self.sorted_boxes(boxes),
+            "scores": np.array(scores, dtype=np.float32),
         }

+    # adapted from RapidOCR
+    def _transform(self, img: Image.Image) -> NDArray[np.float32]:
+        if img.height < img.width:
+            ratio = float(self.max_resolution) / img.height
+        else:
+            ratio = float(self.max_resolution) / img.width
+
+        resize_h = int(img.height * ratio)
+        resize_w = int(img.width * ratio)
+
+        resize_h = int(round(resize_h / 32) * 32)
+        resize_w = int(round(resize_w / 32) * 32)
+        resized_img = img.resize((int(resize_w), int(resize_h)), resample=Image.Resampling.LANCZOS)
+
+        img_np: NDArray[np.float32] = cv2.cvtColor(np.array(resized_img, dtype=np.float32), cv2.COLOR_RGB2BGR)  # type: ignore
+        img_np -= self.mean
+        img_np *= self.std_inv
+        img_np = np.transpose(img_np, (2, 0, 1))
+        return np.expand_dims(img_np, axis=0)
+
+    def sorted_boxes(self, dt_boxes: NDArray[np.float32]) -> NDArray[np.float32]:
+        if len(dt_boxes) == 0:
+            return dt_boxes
+
+        # Sort by y, then identify lines, then sort by (line, x)
+        y_order = np.argsort(dt_boxes[:, 0, 1], kind="stable")
+        sorted_y = dt_boxes[y_order, 0, 1]
+
+        line_ids = np.empty(len(dt_boxes), dtype=np.int32)
+        line_ids[0] = 0
+        np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])
+
+        # Create composite sort key for final ordering
+        # Shift line_ids by large factor, add x for tie-breaking
+        sort_key = line_ids[y_order] * 1e6 + dt_boxes[y_order, 0, 0]
+        final_order = np.argsort(sort_key, kind="stable")
+        sorted_boxes: NDArray[np.float32] = dt_boxes[y_order[final_order]]
+        return sorted_boxes
+
     def configure(self, **kwargs: Any) -> None:
         if (max_resolution := kwargs.get("maxResolution")) is not None:
             self.max_resolution = max_resolution
-            self.model.limit_side_len = max_resolution
         if (min_score := kwargs.get("minScore")) is not None:
             self.min_score = min_score
-            self.model.postprocess_op.box_thresh = min_score
+            self.postprocess.box_thresh = min_score
         if (score_mode := kwargs.get("scoreMode")) is not None:
             self.score_mode = score_mode
-            self.model.postprocess_op.score_mode = score_mode
+            self.postprocess.score_mode = score_mode
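The new `sorted_boxes` helper above puts detected boxes into reading order: boxes are sorted by the y coordinate of their top-left corner, boxes whose sorted y values differ by less than 10 px are grouped into the same line, and each line is then ordered by x. Below is a minimal standalone sketch of that grouping idea; the box coordinates are made up for illustration and are not taken from the repository.

```python
import numpy as np

# Three fake 4-point boxes of shape (N, 4, 2); only the top-left corner
# boxes[:, 0, :] matters for ordering. The first two share a line, the third is lower.
boxes = np.array([
    [[200, 12], [260, 12], [260, 40], [200, 40]],    # first line, right box
    [[ 10, 15], [ 90, 15], [ 90, 43], [ 10, 43]],    # first line, left box
    [[ 50, 80], [120, 80], [120, 108], [ 50, 108]],  # second line
], dtype=np.float32)

# Sort by y, start a new line whenever the gap between consecutive sorted
# y values is >= 10 px, then order by (line id, x).
y_order = np.argsort(boxes[:, 0, 1], kind="stable")
sorted_y = boxes[y_order, 0, 1]
line_ids = np.empty(len(boxes), dtype=np.int32)
line_ids[0] = 0
np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])

sort_key = line_ids * 1e6 + boxes[y_order, 0, 0]  # line id dominates, x breaks ties
reading_order = y_order[np.argsort(sort_key, kind="stable")]
print(boxes[reading_order][:, 0])
# first line left corner, first line right corner, then the second line
```

The composite key mirrors the `line_ids * 1e6 + x` trick in the diff: the factor only has to exceed any realistic x coordinate so the line id always dominates the ordering.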
@@ -1,9 +1,8 @@
 from typing import Any

-import cv2
 import numpy as np
 from numpy.typing import NDArray
-from PIL.Image import Image
+from PIL import Image
 from rapidocr.ch_ppocr_rec import TextRecInput
 from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
 from rapidocr.inference_engine.base import FileInfo, InferSession
@@ -14,6 +13,7 @@ from rapidocr.utils.vis_res import VisRes

 from immich_ml.config import log, settings
 from immich_ml.models.base import InferenceModel
+from immich_ml.models.transforms import pil_to_cv2
 from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
 from immich_ml.sessions.ort import OrtSession
@@ -65,17 +65,16 @@ class TextRecognizer(InferenceModel):
         )
         return session

-    def _predict(self, _: Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
-        boxes, img, box_scores = texts["boxes"], texts["image"], texts["scores"]
+    def _predict(self, img: Image.Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
+        boxes, box_scores = texts["boxes"], texts["scores"]
         if boxes.shape[0] == 0:
             return self._empty
         rec = self.model(TextRecInput(img=self.get_crop_img_list(img, boxes)))
         if rec.txts is None:
             return self._empty

-        height, width = img.shape[0:2]
-        boxes[:, :, 0] /= width
-        boxes[:, :, 1] /= height
+        boxes[:, :, 0] /= img.width
+        boxes[:, :, 1] /= img.height

         text_scores = np.array(rec.scores)
         valid_text_score_idx = text_scores > self.min_score
@@ -87,7 +86,7 @@ class TextRecognizer(InferenceModel):
             "textScore": text_scores[valid_text_score_idx],
         }

-    def get_crop_img_list(self, img: NDArray[np.float32], boxes: NDArray[np.float32]) -> list[NDArray[np.float32]]:
+    def get_crop_img_list(self, img: Image.Image, boxes: NDArray[np.float32]) -> list[NDArray[np.uint8]]:
         img_crop_width = np.maximum(
             np.linalg.norm(boxes[:, 1] - boxes[:, 0], axis=1), np.linalg.norm(boxes[:, 2] - boxes[:, 3], axis=1)
         ).astype(np.int32)
@@ -98,22 +97,55 @@ class TextRecognizer(InferenceModel):
         pts_std[:, 1:3, 0] = img_crop_width[:, None]
         pts_std[:, 2:4, 1] = img_crop_height[:, None]

-        img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1).tolist()
-        imgs: list[NDArray[np.float32]] = []
-        for box, pts_std, dst_size in zip(list(boxes), list(pts_std), img_crop_sizes):
-            M = cv2.getPerspectiveTransform(box, pts_std)
-            dst_img: NDArray[np.float32] = cv2.warpPerspective(
-                img,
-                M,
-                dst_size,
-                borderMode=cv2.BORDER_REPLICATE,
-                flags=cv2.INTER_CUBIC,
-            )  # type: ignore
-            dst_height, dst_width = dst_img.shape[0:2]
+        img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1)
+        all_coeffs = self._get_perspective_transform(pts_std, boxes)
+        imgs: list[NDArray[np.uint8]] = []
+        for coeffs, dst_size in zip(all_coeffs, img_crop_sizes):
+            dst_img = img.transform(
+                size=tuple(dst_size),
+                method=Image.Transform.PERSPECTIVE,
+                data=tuple(coeffs),
+                resample=Image.Resampling.BICUBIC,
+            )
+
+            dst_width, dst_height = dst_img.size
             if dst_height * 1.0 / dst_width >= 1.5:
-                dst_img = np.rot90(dst_img)
-            imgs.append(dst_img)
+                dst_img = dst_img.rotate(90, expand=True)
+            imgs.append(pil_to_cv2(dst_img))

         return imgs

+    def _get_perspective_transform(self, src: NDArray[np.float32], dst: NDArray[np.float32]) -> NDArray[np.float32]:
+        N = src.shape[0]
+        x, y = src[:, :, 0], src[:, :, 1]
+        u, v = dst[:, :, 0], dst[:, :, 1]
+        A = np.zeros((N, 8, 9), dtype=np.float32)
+
+        # Fill even rows (0, 2, 4, 6): [x, y, 1, 0, 0, 0, -u*x, -u*y, -u]
+        A[:, ::2, 0] = x
+        A[:, ::2, 1] = y
+        A[:, ::2, 2] = 1
+        A[:, ::2, 6] = -u * x
+        A[:, ::2, 7] = -u * y
+        A[:, ::2, 8] = -u
+
+        # Fill odd rows (1, 3, 5, 7): [0, 0, 0, x, y, 1, -v*x, -v*y, -v]
+        A[:, 1::2, 3] = x
+        A[:, 1::2, 4] = y
+        A[:, 1::2, 5] = 1
+        A[:, 1::2, 6] = -v * x
+        A[:, 1::2, 7] = -v * y
+        A[:, 1::2, 8] = -v
+
+        # Solve using SVD for all matrices at once
+        _, _, Vt = np.linalg.svd(A)
+        H = Vt[:, -1, :].reshape(N, 3, 3)
+        H = H / H[:, 2:3, 2:3]
+
+        # Extract the 8 coefficients for each transformation
+        return np.column_stack(
+            [H[:, 0, 0], H[:, 0, 1], H[:, 0, 2], H[:, 1, 0], H[:, 1, 1], H[:, 1, 2], H[:, 2, 0], H[:, 2, 1]]
+        )  # pyright: ignore[reportReturnType]
+
     def configure(self, **kwargs: Any) -> None:
         self.min_score = kwargs.get("minScore", self.min_score)
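`_get_perspective_transform` above replaces the per-box `cv2.getPerspectiveTransform` call with one batched direct linear transform: for every box it stacks the eight DLT constraints into an (N, 8, 9) system, takes the SVD null-space as the 3x3 homography, and returns the first eight coefficients, which is the layout `PIL.Image.transform` expects with `Image.Transform.PERSPECTIVE` (a mapping from output-rectangle coordinates back into the source image). Below is a small self-contained check of the same construction on a single made-up quad; the coordinates are illustrative only.

```python
import numpy as np

# Destination rectangle corners (what the crop will be sampled into) and the
# corresponding source quad in the original image, both of shape (1, 4, 2).
rect = np.array([[[0, 0], [100, 0], [100, 32], [0, 32]]], dtype=np.float32)
quad = np.array([[[10, 20], [110, 30], [105, 62], [5, 52]]], dtype=np.float32)

def perspective_coeffs(src, dst):
    # Same batched DLT construction as in the diff: 8 constraints per box,
    # homography taken from the SVD null-space, normalised so H[2, 2] == 1.
    n = src.shape[0]
    x, y = src[:, :, 0], src[:, :, 1]
    u, v = dst[:, :, 0], dst[:, :, 1]
    a = np.zeros((n, 8, 9), dtype=np.float32)
    a[:, ::2, 0], a[:, ::2, 1], a[:, ::2, 2] = x, y, 1
    a[:, ::2, 6], a[:, ::2, 7], a[:, ::2, 8] = -u * x, -u * y, -u
    a[:, 1::2, 3], a[:, 1::2, 4], a[:, 1::2, 5] = x, y, 1
    a[:, 1::2, 6], a[:, 1::2, 7], a[:, 1::2, 8] = -v * x, -v * y, -v
    _, _, vt = np.linalg.svd(a)
    h = vt[:, -1, :].reshape(n, 3, 3)
    return h / h[:, 2:3, 2:3]

h = perspective_coeffs(rect, quad)[0]
# Applying H to the rectangle corners should reproduce the quad corners.
corners = np.concatenate([rect[0], np.ones((4, 1), dtype=np.float32)], axis=1)
mapped = (h @ corners.T).T
mapped = mapped[:, :2] / mapped[:, 2:3]
print(np.allclose(mapped, quad[0], atol=0.1))  # True, up to float32 rounding
```

For a single quad this is the same transform `cv2.getPerspectiveTransform(rect[0], quad[0])` would compute (a 4-point homography is unique up to scale), which is why the per-crop OpenCV call could be dropped in favour of one vectorised solve.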
@@ -7,7 +7,6 @@ from typing_extensions import TypedDict


 class TextDetectionOutput(TypedDict):
-    image: npt.NDArray[np.float32]
     boxes: npt.NDArray[np.float32]
     scores: npt.NDArray[np.float32]
@@ -1,6 +1,6 @@
 [project]
 name = "immich-ml"
-version = "2.2.1"
+version = "2.2.2"
 description = ""
 authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
 requires-python = ">=3.10,<4.0"
@@ -22,7 +22,6 @@ dependencies = [
    "rich>=13.4.2",
    "tokenizers>=0.15.0,<1.0",
    "uvicorn[standard]>=0.22.0,<1.0",
    "setuptools>=78.1.0",
    "rapidocr>=3.1.0",
]
machine-learning/uv.lock (generated): 2 changed lines

@@ -1100,7 +1100,6 @@ dependencies = [
    { name = "python-multipart" },
    { name = "rapidocr" },
    { name = "rich" },
    { name = "setuptools" },
    { name = "tokenizers" },
    { name = "uvicorn", extra = ["standard"] },
]

@@ -1188,7 +1187,6 @@ requires-dist = [
    { name = "rapidocr", specifier = ">=3.1.0" },
    { name = "rich", specifier = ">=13.4.2" },
    { name = "rknn-toolkit-lite2", marker = "extra == 'rknn'", specifier = ">=2.3.0,<3" },
    { name = "setuptools", specifier = ">=78.1.0" },
    { name = "tokenizers", specifier = ">=0.15.0,<1.0" },
    { name = "uvicorn", extras = ["standard"], specifier = ">=0.22.0,<1.0" },
]
@@ -43,8 +43,8 @@ class BackgroundEngineLock(context: Context) : BackgroundWorkerLockApi, ImmichPl

  override fun onAttachedToEngine(binding: FlutterPlugin.FlutterPluginBinding) {
    super.onAttachedToEngine(binding)
-    checkAndEnforceBackgroundLock(binding.applicationContext)
    engineCount.incrementAndGet()
+    checkAndEnforceBackgroundLock(binding.applicationContext)
    Log.i(TAG, "Flutter engine attached. Attached Engines count: $engineCount")
  }
@@ -295,12 +295,12 @@ class BackgroundWorkerFlutterApi(private val binaryMessenger: BinaryMessenger, p
      }
    }
  }
-  fun onAndroidUpload(callback: (Result<Unit>) -> Unit)
+  fun onAndroidUpload(maxMinutesArg: Long?, callback: (Result<Unit>) -> Unit)
  {
    val separatedMessageChannelSuffix = if (messageChannelSuffix.isNotEmpty()) ".$messageChannelSuffix" else ""
    val channelName = "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFlutterApi.onAndroidUpload$separatedMessageChannelSuffix"
    val channel = BasicMessageChannel<Any?>(binaryMessenger, channelName, codec)
-    channel.send(null) {
+    channel.send(listOf(maxMinutesArg)) {
      if (it is List<*>) {
        if (it.size > 1) {
          callback(Result.failure(FlutterError(it[0] as String, it[1] as String, it[2] as String?)))
@@ -107,7 +107,7 @@ class BackgroundWorker(context: Context, params: WorkerParameters) :
   * This method acts as a bridge between the native Android background task system and Flutter.
   */
  override fun onInitialized() {
-    flutterApi?.onAndroidUpload { handleHostResult(it) }
+    flutterApi?.onAndroidUpload(maxMinutesArg = 20) { handleHostResult(it) }
  }

  // TODO: Move this to a separate NotificationManager class
@@ -5,8 +5,10 @@ import android.provider.MediaStore
 import android.util.Log
 import androidx.work.BackoffPolicy
 import androidx.work.Constraints
+import androidx.work.ExistingPeriodicWorkPolicy
 import androidx.work.ExistingWorkPolicy
-import androidx.work.OneTimeWorkRequest
+import androidx.work.OneTimeWorkRequestBuilder
+import androidx.work.PeriodicWorkRequestBuilder
 import androidx.work.WorkManager
 import io.flutter.embedding.engine.FlutterEngineCache
 import java.util.concurrent.TimeUnit
@@ -18,6 +20,7 @@ class BackgroundWorkerApiImpl(context: Context) : BackgroundWorkerFgHostApi {

  override fun enable() {
    enqueueMediaObserver(ctx)
+    enqueuePeriodicWorker(ctx)
  }

  override fun saveNotificationMessage(title: String, body: String) {
@@ -27,12 +30,14 @@
  override fun configure(settings: BackgroundWorkerSettings) {
    BackgroundWorkerPreferences(ctx).updateSettings(settings)
    enqueueMediaObserver(ctx)
+    enqueuePeriodicWorker(ctx)
  }

  override fun disable() {
    WorkManager.getInstance(ctx).apply {
      cancelUniqueWork(OBSERVER_WORKER_NAME)
      cancelUniqueWork(BACKGROUND_WORKER_NAME)
+      cancelUniqueWork(PERIODIC_WORKER_NAME)
    }
    Log.i(TAG, "Cancelled background upload tasks")
  }
@@ -40,6 +45,7 @@
  companion object {
    private const val BACKGROUND_WORKER_NAME = "immich/BackgroundWorkerV1"
    private const val OBSERVER_WORKER_NAME = "immich/MediaObserverV1"
+    private const val PERIODIC_WORKER_NAME = "immich/PeriodicBackgroundWorkerV1"
    const val ENGINE_CACHE_KEY = "immich::background_worker::engine"
@@ -55,7 +61,7 @@
      setRequiresCharging(settings.requiresCharging)
    }.build()

-    val work = OneTimeWorkRequest.Builder(MediaObserver::class.java)
+    val work = OneTimeWorkRequestBuilder<MediaObserver>()
      .setConstraints(constraints)
      .build()
    WorkManager.getInstance(ctx)
@@ -67,10 +73,30 @@
      )
  }

+  fun enqueuePeriodicWorker(ctx: Context) {
+    val settings = BackgroundWorkerPreferences(ctx).getSettings()
+    val constraints = Constraints.Builder().apply {
+      setRequiresCharging(settings.requiresCharging)
+    }.build()
+
+    val work =
+      PeriodicWorkRequestBuilder<PeriodicWorker>(
+        1,
+        TimeUnit.HOURS,
+        15,
+        TimeUnit.MINUTES
+      ).setConstraints(constraints)
+        .build()
+
+    WorkManager.getInstance(ctx)
+      .enqueueUniquePeriodicWork(PERIODIC_WORKER_NAME, ExistingPeriodicWorkPolicy.UPDATE, work)
+
+    Log.i(TAG, "Enqueued periodic background worker with name: $PERIODIC_WORKER_NAME")
+  }
+
  fun enqueueBackgroundWorker(ctx: Context) {
    val constraints = Constraints.Builder().setRequiresBatteryNotLow(true).build()

-    val work = OneTimeWorkRequest.Builder(BackgroundWorker::class.java)
+    val work = OneTimeWorkRequestBuilder<BackgroundWorker>()
      .setConstraints(constraints)
      .setBackoffCriteria(BackoffPolicy.EXPONENTIAL, 1, TimeUnit.MINUTES)
      .build()
@@ -0,0 +1,16 @@
+package app.alextran.immich.background
+
+import android.content.Context
+import android.util.Log
+import androidx.work.Worker
+import androidx.work.WorkerParameters
+
+class PeriodicWorker(context: Context, params: WorkerParameters) : Worker(context, params) {
+  private val ctx: Context = context.applicationContext
+
+  override fun doWork(): Result {
+    Log.i("PeriodicWorker", "Periodic worker triggered, starting background worker")
+    BackgroundWorkerApiImpl.enqueueBackgroundWorker(ctx)
+    return Result.success()
+  }
+}
@@ -35,8 +35,8 @@ platform :android do
      task: 'bundle',
      build_type: 'Release',
      properties: {
-        "android.injected.version.code" => 3024,
-        "android.injected.version.name" => "2.2.1",
+        "android.injected.version.code" => 3025,
+        "android.injected.version.name" => "2.2.2",
      }
    )
    upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')
@@ -133,11 +133,15 @@
 /* Begin PBXFileSystemSynchronizedRootGroup section */
   B231F52D2E93A44A00BC45D1 /* Core */ = {
     isa = PBXFileSystemSynchronizedRootGroup;
     exceptions = (
     );
     path = Core;
     sourceTree = "<group>";
   };
   B2CF7F8C2DDE4EBB00744BF6 /* Sync */ = {
     isa = PBXFileSystemSynchronizedRootGroup;
     exceptions = (
     );
     path = Sync;
     sourceTree = "<group>";
   };

@@ -526,14 +530,10 @@
   inputFileListPaths = (
     "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
   );
-  inputPaths = (
-  );
   name = "[CP] Copy Pods Resources";
   outputFileListPaths = (
     "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
   );
-  outputPaths = (
-  );
   runOnlyForDeploymentPostprocessing = 0;
   shellPath = /bin/sh;
   shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";

@@ -562,14 +562,10 @@
   inputFileListPaths = (
     "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
   );
-  inputPaths = (
-  );
   name = "[CP] Embed Pods Frameworks";
   outputFileListPaths = (
     "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
   );
-  outputPaths = (
-  );
   runOnlyForDeploymentPostprocessing = 0;
   shellPath = /bin/sh;
   shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";

@@ -718,7 +714,7 @@
   CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   CUSTOM_GROUP_ID = group.app.immich.share;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_BITCODE = NO;

@@ -862,7 +858,7 @@
   CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   CUSTOM_GROUP_ID = group.app.immich.share;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_BITCODE = NO;

@@ -892,7 +888,7 @@
   CODE_SIGN_ENTITLEMENTS = Runner/Runner.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   CUSTOM_GROUP_ID = group.app.immich.share;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_BITCODE = NO;

@@ -926,7 +922,7 @@
   CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_USER_SCRIPT_SANDBOXING = YES;
   GCC_C_LANGUAGE_STANDARD = gnu17;

@@ -969,7 +965,7 @@
   CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_USER_SCRIPT_SANDBOXING = YES;
   GCC_C_LANGUAGE_STANDARD = gnu17;

@@ -1009,7 +1005,7 @@
   CODE_SIGN_ENTITLEMENTS = WidgetExtension/WidgetExtension.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_USER_SCRIPT_SANDBOXING = YES;
   GCC_C_LANGUAGE_STANDARD = gnu17;

@@ -1048,7 +1044,7 @@
   CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   CUSTOM_GROUP_ID = group.app.immich.share;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_USER_SCRIPT_SANDBOXING = YES;

@@ -1092,7 +1088,7 @@
   CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   CUSTOM_GROUP_ID = group.app.immich.share;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_USER_SCRIPT_SANDBOXING = YES;

@@ -1133,7 +1129,7 @@
   CODE_SIGN_ENTITLEMENTS = ShareExtension/ShareExtension.entitlements;
   CODE_SIGN_IDENTITY = "Apple Development";
   CODE_SIGN_STYLE = Automatic;
-  CURRENT_PROJECT_VERSION = 231;
+  CURRENT_PROJECT_VERSION = 233;
   CUSTOM_GROUP_ID = group.app.immich.share;
   DEVELOPMENT_TEAM = 2F67MQ8R79;
   ENABLE_USER_SCRIPT_SANDBOXING = YES;
@@ -295,7 +295,7 @@ class BackgroundWorkerBgHostApiSetup {
 /// Generated protocol from Pigeon that represents Flutter messages that can be called from Swift.
 protocol BackgroundWorkerFlutterApiProtocol {
   func onIosUpload(isRefresh isRefreshArg: Bool, maxSeconds maxSecondsArg: Int64?, completion: @escaping (Result<Void, PigeonError>) -> Void)
-  func onAndroidUpload(completion: @escaping (Result<Void, PigeonError>) -> Void)
+  func onAndroidUpload(maxMinutes maxMinutesArg: Int64?, completion: @escaping (Result<Void, PigeonError>) -> Void)
   func cancel(completion: @escaping (Result<Void, PigeonError>) -> Void)
 }
 class BackgroundWorkerFlutterApi: BackgroundWorkerFlutterApiProtocol {

@@ -326,10 +326,10 @@ class BackgroundWorkerFlutterApi: BackgroundWorkerFlutterApiProtocol {
       }
     }
   }
-  func onAndroidUpload(completion: @escaping (Result<Void, PigeonError>) -> Void) {
+  func onAndroidUpload(maxMinutes maxMinutesArg: Int64?, completion: @escaping (Result<Void, PigeonError>) -> Void) {
     let channelName: String = "dev.flutter.pigeon.immich_mobile.BackgroundWorkerFlutterApi.onAndroidUpload\(messageChannelSuffix)"
     let channel = FlutterBasicMessageChannel(name: channelName, binaryMessenger: binaryMessenger, codec: codec)
-    channel.sendMessage(nil) { response in
+    channel.sendMessage([maxMinutesArg] as [Any?]) { response in
       guard let listResponse = response as? [Any?] else {
         completion(.failure(createConnectionError(withChannelName: channelName)))
         return
@@ -80,7 +80,7 @@
   <key>CFBundlePackageType</key>
   <string>APPL</string>
   <key>CFBundleShortVersionString</key>
-  <string>2.1.0</string>
+  <string>2.2.1</string>
   <key>CFBundleSignature</key>
   <string>????</string>
   <key>CFBundleURLTypes</key>

@@ -107,7 +107,7 @@
   </dict>
   </array>
   <key>CFBundleVersion</key>
-  <string>231</string>
+  <string>233</string>
   <key>FLTEnableImpeller</key>
   <true/>
   <key>ITSAppUsesNonExemptEncryption</key>
@@ -169,7 +169,7 @@ platform :ios do
      targets: ["Runner", "ShareExtension", "WidgetExtension"]
    )
    increment_version_number(
-      version_number: "2.2.1"
+      version_number: "2.2.2"
    )
    increment_build_number(
      build_number: latest_testflight_build_number + 1,
@@ -15,18 +15,18 @@ For _fastlane_ installation instructions, see [Installing _fastlane_](https://do

 ## iOS

-### ios release_dev
+### ios gha_testflight_dev

 ```sh
-[bundle exec] fastlane ios release_dev
+[bundle exec] fastlane ios gha_testflight_dev
 ```

 iOS Development Build to TestFlight (requires separate bundle ID)

-### ios release_ci
+### ios gha_release_prod

 ```sh
-[bundle exec] fastlane ios release_ci
+[bundle exec] fastlane ios gha_release_prod
 ```

 iOS Release to TestFlight
@@ -122,46 +122,54 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
  }

  @override
-  Future<void> onAndroidUpload() async {
-    _logger.info('Android background processing started');
-    final sw = Stopwatch()..start();
-    try {
-      if (!await _syncAssets(hashTimeout: Duration(minutes: _isBackupEnabled ? 3 : 6))) {
-        _logger.warning("Remote sync did not complete successfully, skipping backup");
-        return;
-      }
-      await _handleBackup();
-    } catch (error, stack) {
-      _logger.severe("Failed to complete Android background processing", error, stack);
-    } finally {
-      sw.stop();
-      _logger.info("Android background processing completed in ${sw.elapsed.inSeconds}s");
-      await _cleanup();
-    }
+  Future<void> onAndroidUpload(int? maxMinutes) async {
+    final hashTimeout = Duration(minutes: _isBackupEnabled ? 3 : 6);
+    final backupTimeout = maxMinutes != null ? Duration(minutes: maxMinutes - 1) : null;
+    return _backgroundLoop(
+      hashTimeout: hashTimeout,
+      backupTimeout: backupTimeout,
+      debugLabel: 'Android background upload',
+    );
  }

  @override
  Future<void> onIosUpload(bool isRefresh, int? maxSeconds) async {
-    _logger.info('iOS background upload started with maxSeconds: ${maxSeconds}s');
+    final hashTimeout = isRefresh ? const Duration(seconds: 5) : Duration(minutes: _isBackupEnabled ? 3 : 6);
+    final backupTimeout = maxSeconds != null ? Duration(seconds: maxSeconds - 1) : null;
+    return _backgroundLoop(hashTimeout: hashTimeout, backupTimeout: backupTimeout, debugLabel: 'iOS background upload');
+  }
+
+  Future<void> _backgroundLoop({
+    required Duration hashTimeout,
+    required Duration? backupTimeout,
+    required String debugLabel,
+  }) async {
+    _logger.info(
+      '$debugLabel started hashTimeout: ${hashTimeout.inSeconds}s, backupTimeout: ${backupTimeout?.inMinutes ?? '~'}m',
+    );
    final sw = Stopwatch()..start();
    try {
-      final timeout = isRefresh ? const Duration(seconds: 5) : Duration(minutes: _isBackupEnabled ? 3 : 6);
-      if (!await _syncAssets(hashTimeout: timeout)) {
+      if (!await _syncAssets(hashTimeout: hashTimeout)) {
        _logger.warning("Remote sync did not complete successfully, skipping backup");
        return;
      }

      final backupFuture = _handleBackup();
-      if (maxSeconds != null) {
-        await backupFuture.timeout(Duration(seconds: maxSeconds - 1), onTimeout: () {});
+      if (backupTimeout != null) {
+        await backupFuture.timeout(
+          backupTimeout,
+          onTimeout: () {
+            _cancellationToken.cancel();
+          },
+        );
      } else {
        await backupFuture;
      }
    } catch (error, stack) {
-      _logger.severe("Failed to complete iOS background upload", error, stack);
+      _logger.severe("Failed to complete $debugLabel", error, stack);
    } finally {
      sw.stop();
-      _logger.info("iOS background upload completed in ${sw.elapsed.inSeconds}s");
+      _logger.info("$debugLabel completed in ${sw.elapsed.inSeconds}s");
      await _cleanup();
    }
  }
@@ -132,7 +132,8 @@ class SyncStreamService {
        return;
      // SyncCompleteV1 is used to signal the completion of the sync process. Cleanup stale assets and signal completion
      case SyncEntityType.syncCompleteV1:
-        return _syncStreamRepository.pruneAssets();
+        return;
+      // return _syncStreamRepository.pruneAssets();
      // Request to reset the client state. Clear everything related to remote entities
      case SyncEntityType.syncResetV1:
        return _syncStreamRepository.reset();
@@ -14,7 +14,6 @@ import 'package:immich_mobile/providers/infrastructure/album.provider.dart';
 import 'package:immich_mobile/providers/infrastructure/memory.provider.dart';
 import 'package:immich_mobile/providers/infrastructure/people.provider.dart';
 import 'package:immich_mobile/providers/infrastructure/readonly_mode.provider.dart';
-import 'package:immich_mobile/providers/routes.provider.dart';
 import 'package:immich_mobile/providers/search/search_input_focus.provider.dart';
 import 'package:immich_mobile/providers/tab.provider.dart';
 import 'package:immich_mobile/providers/timeline/multiselect.provider.dart';
@@ -108,8 +107,6 @@ class _TabShellPageState extends ConsumerState<TabShellPage> {
  }

  void _onNavigationSelected(TabsRouter router, int index, WidgetRef ref) {
-    ref.read(currentTabIndexProvider.notifier).state = index;
-
    // On Photos page menu tapped
    if (router.activeIndex == kPhotoTabIndex && index == kPhotoTabIndex) {
      EventStream.shared.emit(const ScrollToTopEvent());
mobile/lib/platform/background_worker_api.g.dart (generated): 10 changed lines

@@ -273,7 +273,7 @@ abstract class BackgroundWorkerFlutterApi {

  Future<void> onIosUpload(bool isRefresh, int? maxSeconds);

-  Future<void> onAndroidUpload();
+  Future<void> onAndroidUpload(int? maxMinutes);

  Future<void> cancel();

@@ -327,8 +327,14 @@ abstract class BackgroundWorkerFlutterApi {
        pigeonVar_channel.setMessageHandler(null);
      } else {
        pigeonVar_channel.setMessageHandler((Object? message) async {
+          assert(
+            message != null,
+            'Argument for dev.flutter.pigeon.immich_mobile.BackgroundWorkerFlutterApi.onAndroidUpload was null.',
+          );
+          final List<Object?> args = (message as List<Object?>?)!;
+          final int? arg_maxMinutes = (args[0] as int?);
          try {
-            await api.onAndroidUpload();
+            await api.onAndroidUpload(arg_maxMinutes);
            return wrapResponse(empty: true);
          } on PlatformException catch (e) {
            return wrapResponse(error: e);
@@ -73,27 +73,29 @@ class DriftSearchPage extends HookConsumerWidget {
      );
    }

-    search() async {
-      if (filter.value.isEmpty) {
+    searchFilter(SearchFilter filter) async {
+      if (filter.isEmpty) {
        return;
      }

-      if (preFilter == null && filter.value == previousFilter.value) {
+      if (preFilter == null && filter == previousFilter.value) {
        return;
      }

      isSearching.value = true;
      ref.watch(paginatedSearchProvider.notifier).clear();
-      final hasResult = await ref.watch(paginatedSearchProvider.notifier).search(filter.value);
+      final hasResult = await ref.watch(paginatedSearchProvider.notifier).search(filter);

      if (!hasResult) {
        context.showSnackBar(searchInfoSnackBar('search_no_result'.t(context: context)));
      }

-      previousFilter.value = filter.value;
+      previousFilter.value = filter;
      isSearching.value = false;
    }

+    search() => searchFilter(filter.value);
+
    loadMoreSearchResult() async {
      isSearching.value = true;
      final hasResult = await ref.watch(paginatedSearchProvider.notifier).search(filter.value);

@@ -108,7 +110,7 @@ class DriftSearchPage extends HookConsumerWidget {
    searchPreFilter() {
      if (preFilter != null) {
        Future.delayed(Duration.zero, () {
-          search();
+          searchFilter(preFilter);

          if (preFilter.location.city != null) {
            locationCurrentFilterWidget.value = Text(preFilter.location.city!, style: context.textTheme.labelLarge);

@@ -122,7 +124,7 @@ class DriftSearchPage extends HookConsumerWidget {
      searchPreFilter();

      return null;
-    }, []);
+    }, [preFilter]);

    showPeoplePicker() {
      handleOnSelect(Set<PersonDto> value) {
@@ -3,14 +3,12 @@ import 'dart:async';
 import 'package:auto_route/auto_route.dart';
 import 'package:flutter/material.dart';
 import 'package:hooks_riverpod/hooks_riverpod.dart';
-import 'package:immich_mobile/constants/constants.dart';
 import 'package:immich_mobile/entities/asset.entity.dart';
 import 'package:immich_mobile/extensions/translate_extensions.dart';
 import 'package:immich_mobile/models/search/search_filter.model.dart';
 import 'package:immich_mobile/presentation/pages/search/paginated_search.provider.dart';
 import 'package:immich_mobile/presentation/widgets/action_buttons/base_action_button.widget.dart';
 import 'package:immich_mobile/presentation/widgets/asset_viewer/asset_viewer.state.dart';
-import 'package:immich_mobile/providers/routes.provider.dart';
 import 'package:immich_mobile/routing/router.dart';

 class SimilarPhotosActionButton extends ConsumerWidget {

@@ -38,20 +36,7 @@ class SimilarPhotosActionButton extends ConsumerWidget {
        ),
      );

-    /// Using and currentTabIndex to make sure we are using the correct
-    /// navigation behavior. We want to be able to navigate back to the
-    /// main timline using View In Timeline button without the need of
-    /// waiting for the timeline to be rebuild. At the same time, we want
-    /// to refresh the search page when tapping the Similar Photos button
-    /// while already in the Search tab.
-    final currentTabIndex = (ref.read(currentTabIndexProvider.notifier).state);
-
-    if (currentTabIndex != kSearchTabIndex) {
-      unawaited(context.router.navigate(const DriftSearchRoute()));
-      ref.read(currentTabIndexProvider.notifier).state = kSearchTabIndex;
-    } else {
-      unawaited(context.router.popAndPush(const DriftSearchRoute()));
-    }
+    unawaited(context.navigateTo(const DriftSearchRoute()));
  }

  @override
@@ -5,4 +5,3 @@ final inLockedViewProvider = StateProvider<bool>((ref) => false);
 final currentRouteNameProvider = StateProvider<String?>((ref) => null);
 final previousRouteNameProvider = StateProvider<String?>((ref) => null);
 final previousRouteDataProvider = StateProvider<RouteSettings?>((ref) => null);
-final currentTabIndexProvider = StateProvider<int>((ref) => 0);
mobile/openapi/README.md (generated): 2 changed lines

@@ -3,7 +3,7 @@ Immich API

 This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

-- API version: 2.2.1
+- API version: 2.2.2
 - Generator version: 7.8.0
 - Build package: org.openapitools.codegen.languages.DartClientCodegen
@@ -47,7 +47,7 @@ abstract class BackgroundWorkerFlutterApi {

  // Android Only: Called when the Android background upload is triggered
  @async
-  void onAndroidUpload();
+  void onAndroidUpload(int? maxMinutes);

  @async
  void cancel();
@@ -2,7 +2,7 @@ name: immich_mobile
 description: Immich - selfhosted backup media file on mobile phone

 publish_to: 'none'
-version: 2.2.1+3024
+version: 2.2.2+3025

 environment:
   sdk: '>=3.8.0 <4.0.0'
@@ -10006,7 +10006,7 @@
   "info": {
     "title": "Immich",
     "description": "Immich API",
-    "version": "2.2.1",
+    "version": "2.2.2",
     "contact": {}
   },
   "tags": [],
@@ -1,6 +1,6 @@
 {
   "name": "@immich/sdk",
-  "version": "2.2.1",
+  "version": "2.2.2",
   "description": "Auto-generated TypeScript SDK for the Immich API",
   "type": "module",
   "main": "./build/index.js",
@@ -1,6 +1,6 @@
 /**
  * Immich
- * 2.2.1
+ * 2.2.2
  * DO NOT MODIFY - This file has been generated using oazapfts.
  * See https://www.npmjs.com/package/oazapfts
  */
@@ -1,6 +1,6 @@
 {
   "name": "immich",
-  "version": "2.2.1",
+  "version": "2.2.2",
   "description": "",
   "author": "",
   "private": true,
@@ -236,8 +236,8 @@ export class MetadataService extends BaseService {
    latitude: number | null = null,
      longitude: number | null = null;
    if (this.hasGeo(exifTags)) {
-      latitude = exifTags.GPSLatitude;
-      longitude = exifTags.GPSLongitude;
+      latitude = Number(exifTags.GPSLatitude);
+      longitude = Number(exifTags.GPSLongitude);
      if (reverseGeocoding.enabled) {
        geo = await this.mapRepository.reverseGeocode({ latitude, longitude });
      }
@@ -894,12 +894,10 @@ export class MetadataService extends BaseService {
    };
  }

-  private hasGeo(tags: ImmichTags): tags is ImmichTags & { GPSLatitude: number; GPSLongitude: number } {
-    return (
-      tags.GPSLatitude !== undefined &&
-      tags.GPSLongitude !== undefined &&
-      (tags.GPSLatitude !== 0 || tags.GPSLatitude !== 0)
-    );
+  private hasGeo(tags: ImmichTags) {
+    const lat = Number(tags.GPSLatitude);
+    const lng = Number(tags.GPSLongitude);
+    return !Number.isNaN(lat) && !Number.isNaN(lng) && (lat !== 0 || lng !== 0);
  }

  private getAutoStackId(tags: ImmichTags | null): string | null {
@@ -1,6 +1,6 @@
 {
   "name": "immich-web",
-  "version": "2.2.1",
+  "version": "2.2.2",
   "license": "GNU Affero General Public License version 3",
   "type": "module",
   "scripts": {
@@ -30,10 +30,10 @@
  let showSuggestions = $state(false);
  let isSearchSuggestions = $state(false);
  let selectedId: string | undefined = $state();
-  let isFocus = $state(false);
  let close: (() => Promise<void>) | undefined;

  const listboxId = generateId();
+  const searchTypeId = generateId();

  onDestroy(() => {
    searchStore.isSearchEnabled = false;
@@ -161,12 +161,10 @@

  const openDropdown = () => {
    showSuggestions = true;
-    isFocus = true;
  };

  const closeDropdown = () => {
    showSuggestions = false;
-    isFocus = false;
    searchHistoryBox?.clearSelection();
  };
@@ -251,6 +249,7 @@
    aria-activedescendant={selectedId ?? ''}
    aria-expanded={showSuggestions && isSearchSuggestions}
    aria-autocomplete="list"
+    aria-describedby={searchTypeId}
    use:shortcuts={[
      { shortcut: { key: 'Escape' }, onShortcut: onEscape },
      { shortcut: { ctrl: true, shift: true, key: 'k' }, onShortcut: onFilterClick },
@@ -287,12 +286,12 @@
  />
 </div>

-{#if isFocus}
+{#if searchStore.isSearchEnabled}
  <div
-    class="absolute inset-y-0 flex items-center"
+    id={searchTypeId}
+    class="absolute inset-y-0 flex items-center end-16"
    class:max-md:hidden={value}
-    class:end-16={isFocus}
-    class:end-28={isFocus && value.length > 0}
+    class:end-28={value.length > 0}
  >
    <p
      class="bg-immich-primary text-white dark:bg-immich-dark-primary/90 dark:text-black/75 rounded-full px-3 py-1 text-xs"
@@ -234,6 +234,7 @@ export class TimelineManager extends VirtualScrollManager {
    await this.initTask.reset();
    await this.#init(options);
    this.updateViewportGeometry(false);
+    this.#createScrubberMonths();
  }

  async #init(options: TimelineManagerOptions) {