mirror of
https://github.com/immich-app/immich.git
synced 2025-12-06 17:23:10 +03:00
Compare commits
2 Commits
1c64d21148
...
feat/mobil
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8fe932d891 | ||
|
|
c70146c4a8 |
@@ -105,6 +105,7 @@ dependencies {
|
||||
def serialization_version = '1.8.1'
|
||||
def compose_version = '1.1.1'
|
||||
def gson_version = '2.10.1'
|
||||
def room_version = "2.8.3"
|
||||
|
||||
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
|
||||
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:$kotlin_coroutines_version"
|
||||
@@ -113,6 +114,8 @@ dependencies {
|
||||
implementation "com.google.guava:guava:$guava_version"
|
||||
implementation "com.github.bumptech.glide:glide:$glide_version"
|
||||
implementation "org.jetbrains.kotlinx:kotlinx-serialization-json:$serialization_version"
|
||||
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-guava:1.10.2"
|
||||
implementation "com.squareup.okhttp3:okhttp:5.3.1"
|
||||
|
||||
ksp "com.github.bumptech.glide:ksp:$glide_version"
|
||||
coreLibraryDesugaring 'com.android.tools:desugar_jdk_libs:2.1.2'
|
||||
@@ -127,6 +130,10 @@ dependencies {
|
||||
implementation "androidx.compose.ui:ui-tooling:$compose_version"
|
||||
implementation "androidx.compose.material3:material3:1.2.1"
|
||||
implementation "androidx.lifecycle:lifecycle-runtime-ktx:2.6.2"
|
||||
|
||||
// Room Database
|
||||
implementation "androidx.room:room-runtime:$room_version"
|
||||
ksp "androidx.room:room-compiler:$room_version"
|
||||
}
|
||||
|
||||
// This is uncommented in F-Droid build script
|
||||
|
||||
@@ -7,11 +7,13 @@ import androidx.work.Configuration
|
||||
import androidx.work.WorkManager
|
||||
import app.alextran.immich.background.BackgroundEngineLock
|
||||
import app.alextran.immich.background.BackgroundWorkerApiImpl
|
||||
import app.alextran.immich.upload.NetworkMonitor
|
||||
|
||||
class ImmichApp : Application() {
|
||||
override fun onCreate() {
|
||||
super.onCreate()
|
||||
val config = Configuration.Builder().build()
|
||||
NetworkMonitor.initialize(this)
|
||||
WorkManager.initialize(this, config)
|
||||
// always start BackupWorker after WorkManager init; this fixes the following bug:
|
||||
// After the process is killed (by user or system), the first trigger (taking a new picture) is lost.
|
||||
|
||||
@@ -15,6 +15,8 @@ import app.alextran.immich.images.ThumbnailsImpl
|
||||
import app.alextran.immich.sync.NativeSyncApi
|
||||
import app.alextran.immich.sync.NativeSyncApiImpl26
|
||||
import app.alextran.immich.sync.NativeSyncApiImpl30
|
||||
import app.alextran.immich.upload.UploadApi
|
||||
import app.alextran.immich.upload.UploadTaskImpl
|
||||
import io.flutter.embedding.android.FlutterFragmentActivity
|
||||
import io.flutter.embedding.engine.FlutterEngine
|
||||
|
||||
@@ -39,6 +41,7 @@ class MainActivity : FlutterFragmentActivity() {
|
||||
ThumbnailApi.setUp(messenger, ThumbnailsImpl(ctx))
|
||||
BackgroundWorkerFgHostApi.setUp(messenger, BackgroundWorkerApiImpl(ctx))
|
||||
ConnectivityApi.setUp(messenger, ConnectivityApiImpl(ctx))
|
||||
UploadApi.setUp(messenger, UploadTaskImpl(ctx))
|
||||
|
||||
flutterEngine.plugins.add(BackgroundServicePlugin())
|
||||
flutterEngine.plugins.add(HttpSSLOptionsPlugin())
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
package app.alextran.immich.schema
|
||||
|
||||
import androidx.room.TypeConverter
|
||||
import com.google.gson.Gson
|
||||
import com.google.gson.reflect.TypeToken
|
||||
import java.net.URL
|
||||
import java.util.Date
|
||||
|
||||
class Converters {
|
||||
private val gson = Gson()
|
||||
|
||||
@TypeConverter
|
||||
fun fromTimestamp(value: Long?): Date? = value?.let { Date(it * 1000) }
|
||||
|
||||
@TypeConverter
|
||||
fun dateToTimestamp(date: Date?): Long? = date?.let { it.time / 1000 }
|
||||
|
||||
@TypeConverter
|
||||
fun fromUrl(value: String?): URL? = value?.let { URL(it) }
|
||||
|
||||
@TypeConverter
|
||||
fun urlToString(url: URL?): String? = url?.toString()
|
||||
|
||||
@TypeConverter
|
||||
fun fromStoreKey(value: Int?): StoreKey? = value?.let { StoreKey.fromInt(it) }
|
||||
|
||||
@TypeConverter
|
||||
fun storeKeyToInt(storeKey: StoreKey?): Int? = storeKey?.rawValue
|
||||
|
||||
@TypeConverter
|
||||
fun fromTaskStatus(value: Int?): TaskStatus? = value?.let { TaskStatus.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun taskStatusToInt(status: TaskStatus?): Int? = status?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromBackupSelection(value: Int?): BackupSelection? = value?.let { BackupSelection.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun backupSelectionToInt(selection: BackupSelection?): Int? = selection?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromAvatarColor(value: Int?): AvatarColor? = value?.let { AvatarColor.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun avatarColorToInt(color: AvatarColor?): Int? = color?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromAlbumUserRole(value: Int?): AlbumUserRole? = value?.let { AlbumUserRole.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun albumUserRoleToInt(role: AlbumUserRole?): Int? = role?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromMemoryType(value: Int?): MemoryType? = value?.let { MemoryType.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun memoryTypeToInt(type: MemoryType?): Int? = type?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromAssetVisibility(value: Int?): AssetVisibility? = value?.let { AssetVisibility.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun assetVisibilityToInt(visibility: AssetVisibility?): Int? = visibility?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromSourceType(value: String?): SourceType? = value?.let { SourceType.fromString(it) }
|
||||
|
||||
@TypeConverter
|
||||
fun sourceTypeToString(type: SourceType?): String? = type?.value
|
||||
|
||||
@TypeConverter
|
||||
fun fromUploadMethod(value: Int?): UploadMethod? = value?.let { UploadMethod.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun uploadMethodToInt(method: UploadMethod?): Int? = method?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromUploadErrorCode(value: Int?): UploadErrorCode? = value?.let { UploadErrorCode.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun uploadErrorCodeToInt(code: UploadErrorCode?): Int? = code?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromAssetType(value: Int?): AssetType? = value?.let { AssetType.entries[it] }
|
||||
|
||||
@TypeConverter
|
||||
fun assetTypeToInt(type: AssetType?): Int? = type?.ordinal
|
||||
|
||||
@TypeConverter
|
||||
fun fromStringMap(value: String?): Map<String, String>? {
|
||||
val type = object : TypeToken<Map<String, String>>() {}.type
|
||||
return gson.fromJson(value, type)
|
||||
}
|
||||
|
||||
@TypeConverter
|
||||
fun stringMapToString(map: Map<String, String>?): String? = gson.toJson(map)
|
||||
|
||||
@TypeConverter
|
||||
fun fromEndpointStatus(value: String?): EndpointStatus? = value?.let { EndpointStatus.fromString(it) }
|
||||
|
||||
@TypeConverter
|
||||
fun endpointStatusToString(status: EndpointStatus?): String? = status?.value
|
||||
|
||||
@TypeConverter
|
||||
fun fromEndpointList(value: String?): List<Endpoint>? {
|
||||
val type = object : TypeToken<List<Endpoint>>() {}.type
|
||||
return gson.fromJson(value, type)
|
||||
}
|
||||
|
||||
@TypeConverter
|
||||
fun endpointListToString(list: List<Endpoint>?): String? = gson.toJson(list)
|
||||
}
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
package app.alextran.immich.schema
|
||||
|
||||
import android.content.Context
|
||||
import androidx.room.Database
|
||||
import androidx.room.Room
|
||||
import androidx.room.RoomDatabase
|
||||
import androidx.room.TypeConverters
|
||||
|
||||
|
||||
@Database(
|
||||
entities = [
|
||||
AssetFace::class,
|
||||
AuthUser::class,
|
||||
LocalAlbum::class,
|
||||
LocalAlbumAsset::class,
|
||||
LocalAsset::class,
|
||||
MemoryAsset::class,
|
||||
Memory::class,
|
||||
Partner::class,
|
||||
Person::class,
|
||||
RemoteAlbum::class,
|
||||
RemoteAlbumAsset::class,
|
||||
RemoteAlbumUser::class,
|
||||
RemoteAsset::class,
|
||||
RemoteExif::class,
|
||||
Stack::class,
|
||||
Store::class,
|
||||
UploadTask::class,
|
||||
UploadTaskStat::class,
|
||||
User::class,
|
||||
UserMetadata::class
|
||||
],
|
||||
version = 1,
|
||||
exportSchema = false
|
||||
)
|
||||
@TypeConverters(Converters::class)
|
||||
abstract class AppDatabase : RoomDatabase() {
|
||||
abstract fun localAssetDao(): LocalAssetDao
|
||||
abstract fun storeDao(): StoreDao
|
||||
abstract fun uploadTaskDao(): UploadTaskDao
|
||||
abstract fun uploadTaskStatDao(): UploadTaskStatDao
|
||||
|
||||
companion object {
|
||||
@Volatile
|
||||
private var INSTANCE: AppDatabase? = null
|
||||
|
||||
fun getDatabase(context: Context): AppDatabase {
|
||||
return INSTANCE ?: synchronized(this) {
|
||||
val instance = Room.databaseBuilder(
|
||||
context.applicationContext,
|
||||
AppDatabase::class.java,
|
||||
"app_database"
|
||||
).build()
|
||||
INSTANCE = instance
|
||||
instance
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,267 @@
|
||||
package app.alextran.immich.schema
|
||||
|
||||
import java.net.URL
|
||||
import java.util.Date
|
||||
|
||||
enum class StoreKey(val rawValue: Int) {
|
||||
VERSION(0),
|
||||
DEVICE_ID_HASH(3),
|
||||
BACKUP_TRIGGER_DELAY(8),
|
||||
TILES_PER_ROW(103),
|
||||
GROUP_ASSETS_BY(105),
|
||||
UPLOAD_ERROR_NOTIFICATION_GRACE_PERIOD(106),
|
||||
THUMBNAIL_CACHE_SIZE(110),
|
||||
IMAGE_CACHE_SIZE(111),
|
||||
ALBUM_THUMBNAIL_CACHE_SIZE(112),
|
||||
SELECTED_ALBUM_SORT_ORDER(113),
|
||||
LOG_LEVEL(115),
|
||||
MAP_RELATIVE_DATE(119),
|
||||
MAP_THEME_MODE(124),
|
||||
|
||||
ASSET_ETAG(1),
|
||||
CURRENT_USER(2),
|
||||
DEVICE_ID(4),
|
||||
ACCESS_TOKEN(11),
|
||||
SERVER_ENDPOINT(12),
|
||||
SSL_CLIENT_CERT_DATA(15),
|
||||
SSL_CLIENT_PASSWD(16),
|
||||
THEME_MODE(102),
|
||||
CUSTOM_HEADERS(127),
|
||||
PRIMARY_COLOR(128),
|
||||
PREFERRED_WIFI_NAME(133),
|
||||
|
||||
EXTERNAL_ENDPOINT_LIST(135),
|
||||
|
||||
LOCAL_ENDPOINT(134),
|
||||
SERVER_URL(10),
|
||||
|
||||
BACKUP_FAILED_SINCE(5),
|
||||
|
||||
BACKUP_REQUIRE_WIFI(6),
|
||||
BACKUP_REQUIRE_CHARGING(7),
|
||||
AUTO_BACKUP(13),
|
||||
BACKGROUND_BACKUP(14),
|
||||
LOAD_PREVIEW(100),
|
||||
LOAD_ORIGINAL(101),
|
||||
DYNAMIC_LAYOUT(104),
|
||||
BACKGROUND_BACKUP_TOTAL_PROGRESS(107),
|
||||
BACKGROUND_BACKUP_SINGLE_PROGRESS(108),
|
||||
STORAGE_INDICATOR(109),
|
||||
ADVANCED_TROUBLESHOOTING(114),
|
||||
PREFER_REMOTE_IMAGE(116),
|
||||
LOOP_VIDEO(117),
|
||||
MAP_SHOW_FAVORITE_ONLY(118),
|
||||
SELF_SIGNED_CERT(120),
|
||||
MAP_INCLUDE_ARCHIVED(121),
|
||||
IGNORE_ICLOUD_ASSETS(122),
|
||||
SELECTED_ALBUM_SORT_REVERSE(123),
|
||||
MAP_WITH_PARTNERS(125),
|
||||
ENABLE_HAPTIC_FEEDBACK(126),
|
||||
DYNAMIC_THEME(129),
|
||||
COLORFUL_INTERFACE(130),
|
||||
SYNC_ALBUMS(131),
|
||||
AUTO_ENDPOINT_SWITCHING(132),
|
||||
LOAD_ORIGINAL_VIDEO(136),
|
||||
MANAGE_LOCAL_MEDIA_ANDROID(137),
|
||||
READONLY_MODE_ENABLED(138),
|
||||
AUTO_PLAY_VIDEO(139),
|
||||
PHOTO_MANAGER_CUSTOM_FILTER(1000),
|
||||
BETA_PROMPT_SHOWN(1001),
|
||||
BETA_TIMELINE(1002),
|
||||
ENABLE_BACKUP(1003),
|
||||
USE_WIFI_FOR_UPLOAD_VIDEOS(1004),
|
||||
USE_WIFI_FOR_UPLOAD_PHOTOS(1005),
|
||||
NEED_BETA_MIGRATION(1006),
|
||||
SHOULD_RESET_SYNC(1007);
|
||||
|
||||
companion object {
|
||||
fun fromInt(value: Int): StoreKey? = entries.find { it.rawValue == value }
|
||||
|
||||
// Int keys
|
||||
val version = TypedStoreKey<Int>(VERSION)
|
||||
val deviceIdHash = TypedStoreKey<Int>(DEVICE_ID_HASH)
|
||||
val backupTriggerDelay = TypedStoreKey<Int>(BACKUP_TRIGGER_DELAY)
|
||||
val tilesPerRow = TypedStoreKey<Int>(TILES_PER_ROW)
|
||||
val groupAssetsBy = TypedStoreKey<Int>(GROUP_ASSETS_BY)
|
||||
val uploadErrorNotificationGracePeriod = TypedStoreKey<Int>(UPLOAD_ERROR_NOTIFICATION_GRACE_PERIOD)
|
||||
val thumbnailCacheSize = TypedStoreKey<Int>(THUMBNAIL_CACHE_SIZE)
|
||||
val imageCacheSize = TypedStoreKey<Int>(IMAGE_CACHE_SIZE)
|
||||
val albumThumbnailCacheSize = TypedStoreKey<Int>(ALBUM_THUMBNAIL_CACHE_SIZE)
|
||||
val selectedAlbumSortOrder = TypedStoreKey<Int>(SELECTED_ALBUM_SORT_ORDER)
|
||||
val logLevel = TypedStoreKey<Int>(LOG_LEVEL)
|
||||
val mapRelativeDate = TypedStoreKey<Int>(MAP_RELATIVE_DATE)
|
||||
val mapThemeMode = TypedStoreKey<Int>(MAP_THEME_MODE)
|
||||
|
||||
// String keys
|
||||
val assetETag = TypedStoreKey<String>(ASSET_ETAG)
|
||||
val currentUser = TypedStoreKey<String>(CURRENT_USER)
|
||||
val deviceId = TypedStoreKey<String>(DEVICE_ID)
|
||||
val accessToken = TypedStoreKey<String>(ACCESS_TOKEN)
|
||||
val sslClientCertData = TypedStoreKey<String>(SSL_CLIENT_CERT_DATA)
|
||||
val sslClientPasswd = TypedStoreKey<String>(SSL_CLIENT_PASSWD)
|
||||
val themeMode = TypedStoreKey<String>(THEME_MODE)
|
||||
val customHeaders = TypedStoreKey<Map<String, String>>(CUSTOM_HEADERS)
|
||||
val primaryColor = TypedStoreKey<String>(PRIMARY_COLOR)
|
||||
val preferredWifiName = TypedStoreKey<String>(PREFERRED_WIFI_NAME)
|
||||
|
||||
// Endpoint keys
|
||||
val externalEndpointList = TypedStoreKey<List<Endpoint>>(EXTERNAL_ENDPOINT_LIST)
|
||||
|
||||
// URL keys
|
||||
val localEndpoint = TypedStoreKey<URL>(LOCAL_ENDPOINT)
|
||||
val serverEndpoint = TypedStoreKey<URL>(SERVER_ENDPOINT)
|
||||
val serverUrl = TypedStoreKey<URL>(SERVER_URL)
|
||||
|
||||
// Date keys
|
||||
val backupFailedSince = TypedStoreKey<Date>(BACKUP_FAILED_SINCE)
|
||||
|
||||
// Bool keys
|
||||
val backupRequireWifi = TypedStoreKey<Boolean>(BACKUP_REQUIRE_WIFI)
|
||||
val backupRequireCharging = TypedStoreKey<Boolean>(BACKUP_REQUIRE_CHARGING)
|
||||
val autoBackup = TypedStoreKey<Boolean>(AUTO_BACKUP)
|
||||
val backgroundBackup = TypedStoreKey<Boolean>(BACKGROUND_BACKUP)
|
||||
val loadPreview = TypedStoreKey<Boolean>(LOAD_PREVIEW)
|
||||
val loadOriginal = TypedStoreKey<Boolean>(LOAD_ORIGINAL)
|
||||
val dynamicLayout = TypedStoreKey<Boolean>(DYNAMIC_LAYOUT)
|
||||
val backgroundBackupTotalProgress = TypedStoreKey<Boolean>(BACKGROUND_BACKUP_TOTAL_PROGRESS)
|
||||
val backgroundBackupSingleProgress = TypedStoreKey<Boolean>(BACKGROUND_BACKUP_SINGLE_PROGRESS)
|
||||
val storageIndicator = TypedStoreKey<Boolean>(STORAGE_INDICATOR)
|
||||
val advancedTroubleshooting = TypedStoreKey<Boolean>(ADVANCED_TROUBLESHOOTING)
|
||||
val preferRemoteImage = TypedStoreKey<Boolean>(PREFER_REMOTE_IMAGE)
|
||||
val loopVideo = TypedStoreKey<Boolean>(LOOP_VIDEO)
|
||||
val mapShowFavoriteOnly = TypedStoreKey<Boolean>(MAP_SHOW_FAVORITE_ONLY)
|
||||
val selfSignedCert = TypedStoreKey<Boolean>(SELF_SIGNED_CERT)
|
||||
val mapIncludeArchived = TypedStoreKey<Boolean>(MAP_INCLUDE_ARCHIVED)
|
||||
val ignoreIcloudAssets = TypedStoreKey<Boolean>(IGNORE_ICLOUD_ASSETS)
|
||||
val selectedAlbumSortReverse = TypedStoreKey<Boolean>(SELECTED_ALBUM_SORT_REVERSE)
|
||||
val mapwithPartners = TypedStoreKey<Boolean>(MAP_WITH_PARTNERS)
|
||||
val enableHapticFeedback = TypedStoreKey<Boolean>(ENABLE_HAPTIC_FEEDBACK)
|
||||
val dynamicTheme = TypedStoreKey<Boolean>(DYNAMIC_THEME)
|
||||
val colorfulInterface = TypedStoreKey<Boolean>(COLORFUL_INTERFACE)
|
||||
val syncAlbums = TypedStoreKey<Boolean>(SYNC_ALBUMS)
|
||||
val autoEndpointSwitching = TypedStoreKey<Boolean>(AUTO_ENDPOINT_SWITCHING)
|
||||
val loadOriginalVideo = TypedStoreKey<Boolean>(LOAD_ORIGINAL_VIDEO)
|
||||
val manageLocalMediaAndroid = TypedStoreKey<Boolean>(MANAGE_LOCAL_MEDIA_ANDROID)
|
||||
val readonlyModeEnabled = TypedStoreKey<Boolean>(READONLY_MODE_ENABLED)
|
||||
val autoPlayVideo = TypedStoreKey<Boolean>(AUTO_PLAY_VIDEO)
|
||||
val photoManagerCustomFilter = TypedStoreKey<Boolean>(PHOTO_MANAGER_CUSTOM_FILTER)
|
||||
val betaPromptShown = TypedStoreKey<Boolean>(BETA_PROMPT_SHOWN)
|
||||
val betaTimeline = TypedStoreKey<Boolean>(BETA_TIMELINE)
|
||||
val enableBackup = TypedStoreKey<Boolean>(ENABLE_BACKUP)
|
||||
val useWifiForUploadVideos = TypedStoreKey<Boolean>(USE_WIFI_FOR_UPLOAD_VIDEOS)
|
||||
val useWifiForUploadPhotos = TypedStoreKey<Boolean>(USE_WIFI_FOR_UPLOAD_PHOTOS)
|
||||
val needBetaMigration = TypedStoreKey<Boolean>(NEED_BETA_MIGRATION)
|
||||
val shouldResetSync = TypedStoreKey<Boolean>(SHOULD_RESET_SYNC)
|
||||
}
|
||||
}
|
||||
|
||||
enum class TaskStatus {
|
||||
DOWNLOAD_PENDING,
|
||||
DOWNLOAD_QUEUED,
|
||||
DOWNLOAD_FAILED,
|
||||
UPLOAD_PENDING,
|
||||
UPLOAD_QUEUED,
|
||||
UPLOAD_FAILED,
|
||||
UPLOAD_COMPLETE
|
||||
}
|
||||
|
||||
enum class BackupSelection {
|
||||
SELECTED,
|
||||
NONE,
|
||||
EXCLUDED
|
||||
}
|
||||
|
||||
enum class AvatarColor {
|
||||
PRIMARY,
|
||||
PINK,
|
||||
RED,
|
||||
YELLOW,
|
||||
BLUE,
|
||||
GREEN,
|
||||
PURPLE,
|
||||
ORANGE,
|
||||
GRAY,
|
||||
AMBER
|
||||
}
|
||||
|
||||
enum class AlbumUserRole {
|
||||
EDITOR,
|
||||
VIEWER
|
||||
}
|
||||
|
||||
enum class MemoryType {
|
||||
ON_THIS_DAY
|
||||
}
|
||||
|
||||
enum class AssetVisibility {
|
||||
TIMELINE,
|
||||
HIDDEN,
|
||||
ARCHIVE,
|
||||
LOCKED
|
||||
}
|
||||
|
||||
enum class SourceType(val value: String) {
|
||||
MACHINE_LEARNING("machine-learning"),
|
||||
EXIF("exif"),
|
||||
MANUAL("manual");
|
||||
|
||||
companion object {
|
||||
fun fromString(value: String): SourceType? = entries.find { it.value == value }
|
||||
}
|
||||
}
|
||||
|
||||
enum class UploadMethod {
|
||||
MULTIPART,
|
||||
RESUMABLE
|
||||
}
|
||||
|
||||
enum class UploadErrorCode {
|
||||
UNKNOWN,
|
||||
ASSET_NOT_FOUND,
|
||||
FILE_NOT_FOUND,
|
||||
RESOURCE_NOT_FOUND,
|
||||
INVALID_RESOURCE,
|
||||
ENCODING_FAILED,
|
||||
WRITE_FAILED,
|
||||
NOT_ENOUGH_SPACE,
|
||||
NETWORK_ERROR,
|
||||
PHOTOS_INTERNAL_ERROR,
|
||||
PHOTOS_UNKNOWN_ERROR,
|
||||
NO_SERVER_URL,
|
||||
NO_DEVICE_ID,
|
||||
NO_ACCESS_TOKEN,
|
||||
INTERRUPTED,
|
||||
CANCELLED,
|
||||
DOWNLOAD_STALLED,
|
||||
FORCE_QUIT,
|
||||
OUT_OF_RESOURCES,
|
||||
BACKGROUND_UPDATES_DISABLED,
|
||||
UPLOAD_TIMEOUT,
|
||||
ICLOUD_RATE_LIMIT,
|
||||
ICLOUD_THROTTLED,
|
||||
INVALID_SERVER_RESPONSE,
|
||||
}
|
||||
|
||||
enum class AssetType {
|
||||
OTHER,
|
||||
IMAGE,
|
||||
VIDEO,
|
||||
AUDIO
|
||||
}
|
||||
|
||||
enum class EndpointStatus(val value: String) {
|
||||
LOADING("loading"),
|
||||
VALID("valid"),
|
||||
ERROR("error"),
|
||||
UNKNOWN("unknown");
|
||||
|
||||
companion object {
|
||||
fun fromString(value: String): EndpointStatus? = entries.find { it.value == value }
|
||||
}
|
||||
}
|
||||
|
||||
// Endpoint data class
|
||||
data class Endpoint(
|
||||
val url: String,
|
||||
val status: EndpointStatus
|
||||
)
|
||||
@@ -0,0 +1,168 @@
|
||||
package app.alextran.immich.schema
|
||||
|
||||
import androidx.room.Dao
|
||||
import androidx.room.Insert
|
||||
import androidx.room.OnConflictStrategy
|
||||
import androidx.room.Query
|
||||
import app.alextran.immich.upload.TaskConfig
|
||||
import java.util.Date
|
||||
|
||||
@Dao
|
||||
interface LocalAssetDao {
|
||||
@Query("""
|
||||
SELECT a.id, a.type FROM local_asset_entity a
|
||||
WHERE EXISTS (
|
||||
SELECT 1 FROM local_album_asset_entity laa
|
||||
INNER JOIN local_album_entity la ON laa.album_id = la.id
|
||||
WHERE laa.asset_id = a.id
|
||||
AND la.backup_selection = 0 -- selected
|
||||
)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM local_album_asset_entity laa2
|
||||
INNER JOIN local_album_entity la2 ON laa2.album_id = la2.id
|
||||
WHERE laa2.asset_id = a.id
|
||||
AND la2.backup_selection = 2 -- excluded
|
||||
)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM remote_asset_entity ra
|
||||
WHERE ra.checksum = a.checksum
|
||||
AND ra.owner_id = (SELECT string_value FROM store_entity WHERE id = 14) -- current_user
|
||||
)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM upload_tasks ut
|
||||
WHERE ut.local_id = a.id
|
||||
)
|
||||
LIMIT :limit
|
||||
""")
|
||||
suspend fun getCandidatesForBackup(limit: Int): List<BackupCandidate>
|
||||
}
|
||||
|
||||
@Dao
|
||||
interface StoreDao {
|
||||
@Query("SELECT * FROM store_entity WHERE id = :key")
|
||||
suspend fun get(key: StoreKey): Store?
|
||||
|
||||
@Insert(onConflict = OnConflictStrategy.REPLACE)
|
||||
suspend fun insert(store: Store)
|
||||
|
||||
// Extension functions for type-safe access
|
||||
suspend fun <T> get(
|
||||
typedKey: TypedStoreKey<T>,
|
||||
storage: StorageType<T>
|
||||
): T? {
|
||||
val store = get(typedKey.key) ?: return null
|
||||
|
||||
return when (storage) {
|
||||
is StorageType.IntStorage,
|
||||
is StorageType.BoolStorage,
|
||||
is StorageType.DateStorage -> {
|
||||
store.intValue?.let { storage.fromDb(it) }
|
||||
}
|
||||
else -> {
|
||||
store.stringValue?.let { storage.fromDb(it) }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
suspend fun <T> set(
|
||||
typedKey: TypedStoreKey<T>,
|
||||
value: T,
|
||||
storage: StorageType<T>
|
||||
) {
|
||||
val dbValue = storage.toDb(value)
|
||||
|
||||
val store = when (storage) {
|
||||
is StorageType.IntStorage,
|
||||
is StorageType.BoolStorage,
|
||||
is StorageType.DateStorage -> {
|
||||
Store(
|
||||
id = typedKey.key,
|
||||
stringValue = null,
|
||||
intValue = dbValue as Int
|
||||
)
|
||||
}
|
||||
else -> {
|
||||
Store(
|
||||
id = typedKey.key,
|
||||
stringValue = dbValue as String,
|
||||
intValue = null
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
insert(store)
|
||||
}
|
||||
}
|
||||
|
||||
@Dao
|
||||
interface UploadTaskDao {
|
||||
@Insert(onConflict = OnConflictStrategy.IGNORE)
|
||||
suspend fun insertAll(tasks: List<UploadTask>)
|
||||
|
||||
@Query("""
|
||||
SELECT id FROM upload_tasks
|
||||
WHERE status IN (:statuses)
|
||||
""")
|
||||
suspend fun getTaskIdsByStatus(statuses: List<TaskStatus>): List<Long>
|
||||
|
||||
@Query("""
|
||||
UPDATE upload_tasks
|
||||
SET status = 3, -- upload_pending
|
||||
file_path = NULL,
|
||||
attempts = 0
|
||||
WHERE id IN (:taskIds)
|
||||
""")
|
||||
suspend fun resetOrphanedTasks(taskIds: List<Long>)
|
||||
|
||||
@Query("""
|
||||
SELECT
|
||||
t.attempts,
|
||||
a.checksum,
|
||||
a.created_at as createdAt,
|
||||
a.name as fileName,
|
||||
t.file_path as filePath,
|
||||
a.is_favorite as isFavorite,
|
||||
a.id as localId,
|
||||
t.priority,
|
||||
t.id as taskId,
|
||||
a.type,
|
||||
a.updated_at as updatedAt
|
||||
FROM upload_tasks t
|
||||
INNER JOIN local_asset_entity a ON t.local_id = a.id
|
||||
WHERE t.status = 3 -- upload_pending
|
||||
AND t.attempts < :maxAttempts
|
||||
AND a.checksum IS NOT NULL
|
||||
AND (t.retry_after IS NULL OR t.retry_after <= :currentTime)
|
||||
ORDER BY t.priority DESC, t.created_at ASC
|
||||
LIMIT :limit
|
||||
""")
|
||||
suspend fun getTasksForUpload(limit: Int, maxAttempts: Int = TaskConfig.MAX_ATTEMPTS, currentTime: Long = System.currentTimeMillis() / 1000): List<LocalAssetTaskData>
|
||||
|
||||
@Query("SELECT EXISTS(SELECT 1 FROM upload_tasks WHERE status = 3 LIMIT 1)") // upload_pending
|
||||
suspend fun hasPendingTasks(): Boolean
|
||||
|
||||
@Query("""
|
||||
UPDATE upload_tasks
|
||||
SET attempts = :attempts,
|
||||
last_error = :errorCode,
|
||||
status = :status,
|
||||
retry_after = :retryAfter
|
||||
WHERE id = :taskId
|
||||
""")
|
||||
suspend fun updateTaskAfterFailure(
|
||||
taskId: Long,
|
||||
attempts: Int,
|
||||
errorCode: UploadErrorCode,
|
||||
status: TaskStatus,
|
||||
retryAfter: Date?
|
||||
)
|
||||
|
||||
@Query("UPDATE upload_tasks SET status = :status WHERE id = :id")
|
||||
suspend fun updateStatus(id: Long, status: TaskStatus)
|
||||
}
|
||||
|
||||
@Dao
|
||||
interface UploadTaskStatDao {
|
||||
@Query("SELECT * FROM upload_task_stats")
|
||||
suspend fun getStats(): UploadTaskStat?
|
||||
}
|
||||
@@ -0,0 +1,93 @@
|
||||
package app.alextran.immich.schema
|
||||
|
||||
import com.google.gson.Gson
|
||||
import java.net.URL
|
||||
import java.util.Date
|
||||
|
||||
// Sealed interface representing storage types
|
||||
sealed interface StorageType<T> {
|
||||
fun toDb(value: T): Any
|
||||
fun fromDb(value: Any): T
|
||||
|
||||
data object IntStorage : StorageType<Int> {
|
||||
override fun toDb(value: Int) = value
|
||||
override fun fromDb(value: Any) = value as Int
|
||||
}
|
||||
|
||||
data object BoolStorage : StorageType<Boolean> {
|
||||
override fun toDb(value: Boolean) = if (value) 1 else 0
|
||||
override fun fromDb(value: Any) = (value as Int) == 1
|
||||
}
|
||||
|
||||
data object StringStorage : StorageType<String> {
|
||||
override fun toDb(value: String) = value
|
||||
override fun fromDb(value: Any) = value as String
|
||||
}
|
||||
|
||||
data object DateStorage : StorageType<Date> {
|
||||
override fun toDb(value: Date) = value.time / 1000
|
||||
override fun fromDb(value: Any) = Date((value as Long) * 1000)
|
||||
}
|
||||
|
||||
data object UrlStorage : StorageType<URL> {
|
||||
override fun toDb(value: URL) = value.toString()
|
||||
override fun fromDb(value: Any) = URL(value as String)
|
||||
}
|
||||
|
||||
class JsonStorage<T>(
|
||||
private val clazz: Class<T>,
|
||||
private val gson: Gson = Gson()
|
||||
) : StorageType<T> {
|
||||
override fun toDb(value: T) = gson.toJson(value)
|
||||
override fun fromDb(value: Any) = gson.fromJson(value as String, clazz)
|
||||
}
|
||||
}
|
||||
|
||||
// Typed key wrapper
|
||||
@JvmInline
|
||||
value class TypedStoreKey<T>(val key: StoreKey) {
|
||||
companion object {
|
||||
// Factory methods for type-safe key creation
|
||||
inline fun <reified T> of(key: StoreKey): TypedStoreKey<T> = TypedStoreKey(key)
|
||||
}
|
||||
}
|
||||
|
||||
// Registry mapping keys to their storage types
|
||||
object StoreRegistry {
|
||||
private val intKeys = setOf(
|
||||
StoreKey.VERSION,
|
||||
StoreKey.DEVICE_ID_HASH,
|
||||
StoreKey.BACKUP_TRIGGER_DELAY
|
||||
)
|
||||
|
||||
private val stringKeys = setOf(
|
||||
StoreKey.CURRENT_USER,
|
||||
StoreKey.DEVICE_ID,
|
||||
StoreKey.ACCESS_TOKEN
|
||||
)
|
||||
|
||||
fun usesIntStorage(key: StoreKey): Boolean = key in intKeys
|
||||
fun usesStringStorage(key: StoreKey): Boolean = key in stringKeys
|
||||
}
|
||||
|
||||
// Storage type registry for automatic selection
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
object StorageTypes {
|
||||
inline fun <reified T> get(): StorageType<T> = when (T::class) {
|
||||
Int::class -> StorageType.IntStorage as StorageType<T>
|
||||
Boolean::class -> StorageType.BoolStorage as StorageType<T>
|
||||
String::class -> StorageType.StringStorage as StorageType<T>
|
||||
Date::class -> StorageType.DateStorage as StorageType<T>
|
||||
URL::class -> StorageType.UrlStorage as StorageType<T>
|
||||
else -> StorageType.JsonStorage(T::class.java)
|
||||
}
|
||||
}
|
||||
|
||||
// Simplified extension functions with automatic storage
|
||||
suspend inline fun <reified T> StoreDao.get(typedKey: TypedStoreKey<T>): T? {
|
||||
return get(typedKey, StorageTypes.get<T>())
|
||||
}
|
||||
|
||||
suspend inline fun <reified T> StoreDao.set(typedKey: TypedStoreKey<T>, value: T) {
|
||||
set(typedKey, value, StorageTypes.get<T>())
|
||||
}
|
||||
@@ -0,0 +1,405 @@
|
||||
package app.alextran.immich.schema
|
||||
|
||||
import androidx.room.*
|
||||
import java.net.URL
|
||||
import java.util.Date
|
||||
|
||||
@Entity(tableName = "asset_face_entity")
|
||||
data class AssetFace(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
@ColumnInfo(name = "asset_id")
|
||||
val assetId: String,
|
||||
@ColumnInfo(name = "person_id")
|
||||
val personId: String?,
|
||||
@ColumnInfo(name = "image_width")
|
||||
val imageWidth: Int,
|
||||
@ColumnInfo(name = "image_height")
|
||||
val imageHeight: Int,
|
||||
@ColumnInfo(name = "bounding_box_x1")
|
||||
val boundingBoxX1: Int,
|
||||
@ColumnInfo(name = "bounding_box_y1")
|
||||
val boundingBoxY1: Int,
|
||||
@ColumnInfo(name = "bounding_box_x2")
|
||||
val boundingBoxX2: Int,
|
||||
@ColumnInfo(name = "bounding_box_y2")
|
||||
val boundingBoxY2: Int,
|
||||
@ColumnInfo(name = "source_type")
|
||||
val sourceType: SourceType
|
||||
)
|
||||
|
||||
@Entity(tableName = "auth_user_entity")
|
||||
data class AuthUser(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
val name: String,
|
||||
val email: String,
|
||||
@ColumnInfo(name = "is_admin")
|
||||
val isAdmin: Boolean,
|
||||
@ColumnInfo(name = "has_profile_image")
|
||||
val hasProfileImage: Boolean,
|
||||
@ColumnInfo(name = "profile_changed_at")
|
||||
val profileChangedAt: Date,
|
||||
@ColumnInfo(name = "avatar_color")
|
||||
val avatarColor: AvatarColor,
|
||||
@ColumnInfo(name = "quota_size_in_bytes")
|
||||
val quotaSizeInBytes: Int,
|
||||
@ColumnInfo(name = "quota_usage_in_bytes")
|
||||
val quotaUsageInBytes: Int,
|
||||
@ColumnInfo(name = "pin_code")
|
||||
val pinCode: String?
|
||||
)
|
||||
|
||||
@Entity(tableName = "local_album_entity")
|
||||
data class LocalAlbum(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
@ColumnInfo(name = "backup_selection")
|
||||
val backupSelection: BackupSelection,
|
||||
@ColumnInfo(name = "linked_remote_album_id")
|
||||
val linkedRemoteAlbumId: String?,
|
||||
@ColumnInfo(name = "marker")
|
||||
val marker: Boolean?,
|
||||
val name: String,
|
||||
@ColumnInfo(name = "is_ios_shared_album")
|
||||
val isIosSharedAlbum: Boolean,
|
||||
@ColumnInfo(name = "updated_at")
|
||||
val updatedAt: Date
|
||||
)
|
||||
|
||||
@Entity(
|
||||
tableName = "local_album_asset_entity",
|
||||
primaryKeys = ["asset_id", "album_id"]
|
||||
)
|
||||
data class LocalAlbumAsset(
|
||||
@ColumnInfo(name = "asset_id")
|
||||
val assetId: String,
|
||||
@ColumnInfo(name = "album_id")
|
||||
val albumId: String,
|
||||
@ColumnInfo(name = "marker")
|
||||
val marker: String?
|
||||
)
|
||||
|
||||
@Entity(tableName = "local_asset_entity")
|
||||
data class LocalAsset(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
val checksum: String?,
|
||||
@ColumnInfo(name = "created_at")
|
||||
val createdAt: Date,
|
||||
@ColumnInfo(name = "duration_in_seconds")
|
||||
val durationInSeconds: Int?,
|
||||
val height: Int?,
|
||||
@ColumnInfo(name = "is_favorite")
|
||||
val isFavorite: Boolean,
|
||||
val name: String,
|
||||
val orientation: String,
|
||||
val type: AssetType,
|
||||
@ColumnInfo(name = "updated_at")
|
||||
val updatedAt: Date,
|
||||
val width: Int?
|
||||
)
|
||||
|
||||
data class BackupCandidate(
|
||||
val id: String,
|
||||
val type: AssetType
|
||||
)
|
||||
|
||||
@Entity(
|
||||
tableName = "memory_asset_entity",
|
||||
primaryKeys = ["asset_id", "album_id"]
|
||||
)
|
||||
data class MemoryAsset(
|
||||
@ColumnInfo(name = "asset_id")
|
||||
val assetId: String,
|
||||
@ColumnInfo(name = "album_id")
|
||||
val albumId: String
|
||||
)
|
||||
|
||||
@Entity(tableName = "memory_entity")
|
||||
data class Memory(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
@ColumnInfo(name = "created_at")
|
||||
val createdAt: Date,
|
||||
@ColumnInfo(name = "updated_at")
|
||||
val updatedAt: Date,
|
||||
@ColumnInfo(name = "deleted_at")
|
||||
val deletedAt: Date?,
|
||||
@ColumnInfo(name = "owner_id")
|
||||
val ownerId: String,
|
||||
val type: MemoryType,
|
||||
val data: String,
|
||||
@ColumnInfo(name = "is_saved")
|
||||
val isSaved: Boolean,
|
||||
@ColumnInfo(name = "memory_at")
|
||||
val memoryAt: Date,
|
||||
@ColumnInfo(name = "seen_at")
|
||||
val seenAt: Date?,
|
||||
@ColumnInfo(name = "show_at")
|
||||
val showAt: Date?,
|
||||
@ColumnInfo(name = "hide_at")
|
||||
val hideAt: Date?
|
||||
)
|
||||
|
||||
@Entity(
|
||||
tableName = "partner_entity",
|
||||
primaryKeys = ["shared_by_id", "shared_with_id"]
|
||||
)
|
||||
data class Partner(
|
||||
@ColumnInfo(name = "shared_by_id")
|
||||
val sharedById: String,
|
||||
@ColumnInfo(name = "shared_with_id")
|
||||
val sharedWithId: String,
|
||||
@ColumnInfo(name = "in_timeline")
|
||||
val inTimeline: Boolean
|
||||
)
|
||||
|
||||
@Entity(tableName = "person_entity")
|
||||
data class Person(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
@ColumnInfo(name = "created_at")
|
||||
val createdAt: Date,
|
||||
@ColumnInfo(name = "updated_at")
|
||||
val updatedAt: Date,
|
||||
@ColumnInfo(name = "owner_id")
|
||||
val ownerId: String,
|
||||
val name: String,
|
||||
@ColumnInfo(name = "face_asset_id")
|
||||
val faceAssetId: String?,
|
||||
@ColumnInfo(name = "is_favorite")
|
||||
val isFavorite: Boolean,
|
||||
@ColumnInfo(name = "is_hidden")
|
||||
val isHidden: Boolean,
|
||||
val color: String?,
|
||||
@ColumnInfo(name = "birth_date")
|
||||
val birthDate: Date?
|
||||
)
|
||||
|
||||
@Entity(tableName = "remote_album_entity")
|
||||
data class RemoteAlbum(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
@ColumnInfo(name = "created_at")
|
||||
val createdAt: Date,
|
||||
val description: String?,
|
||||
@ColumnInfo(name = "is_activity_enabled")
|
||||
val isActivityEnabled: Boolean,
|
||||
val name: String,
|
||||
val order: Int,
|
||||
@ColumnInfo(name = "owner_id")
|
||||
val ownerId: String,
|
||||
@ColumnInfo(name = "thumbnail_asset_id")
|
||||
val thumbnailAssetId: String?,
|
||||
@ColumnInfo(name = "updated_at")
|
||||
val updatedAt: Date
|
||||
)
|
||||
|
||||
@Entity(
|
||||
tableName = "remote_album_asset_entity",
|
||||
primaryKeys = ["asset_id", "album_id"]
|
||||
)
|
||||
data class RemoteAlbumAsset(
|
||||
@ColumnInfo(name = "asset_id")
|
||||
val assetId: String,
|
||||
@ColumnInfo(name = "album_id")
|
||||
val albumId: String
|
||||
)
|
||||
|
||||
@Entity(
|
||||
tableName = "remote_album_user_entity",
|
||||
primaryKeys = ["album_id", "user_id"]
|
||||
)
|
||||
data class RemoteAlbumUser(
|
||||
@ColumnInfo(name = "album_id")
|
||||
val albumId: String,
|
||||
@ColumnInfo(name = "user_id")
|
||||
val userId: String,
|
||||
val role: AlbumUserRole
|
||||
)
|
||||
|
||||
@Entity(tableName = "remote_asset_entity")
|
||||
data class RemoteAsset(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
val checksum: String,
|
||||
@ColumnInfo(name = "is_favorite")
|
||||
val isFavorite: Boolean,
|
||||
@ColumnInfo(name = "deleted_at")
|
||||
val deletedAt: Date?,
|
||||
@ColumnInfo(name = "owner_id")
|
||||
val ownerId: String,
|
||||
@ColumnInfo(name = "local_date_time")
|
||||
val localDateTime: Date?,
|
||||
@ColumnInfo(name = "thumb_hash")
|
||||
val thumbHash: String?,
|
||||
@ColumnInfo(name = "library_id")
|
||||
val libraryId: String?,
|
||||
@ColumnInfo(name = "live_photo_video_id")
|
||||
val livePhotoVideoId: String?,
|
||||
@ColumnInfo(name = "stack_id")
|
||||
val stackId: String?,
|
||||
val visibility: AssetVisibility
|
||||
)
|
||||
|
||||
@Entity(tableName = "remote_exif_entity")
|
||||
data class RemoteExif(
|
||||
@PrimaryKey
|
||||
@ColumnInfo(name = "asset_id")
|
||||
val assetId: String,
|
||||
val city: String?,
|
||||
val state: String?,
|
||||
val country: String?,
|
||||
@ColumnInfo(name = "date_time_original")
|
||||
val dateTimeOriginal: Date?,
|
||||
val description: String?,
|
||||
val height: Int?,
|
||||
val width: Int?,
|
||||
@ColumnInfo(name = "exposure_time")
|
||||
val exposureTime: String?,
|
||||
@ColumnInfo(name = "f_number")
|
||||
val fNumber: Double?,
|
||||
@ColumnInfo(name = "file_size")
|
||||
val fileSize: Int?,
|
||||
@ColumnInfo(name = "focal_length")
|
||||
val focalLength: Double?,
|
||||
val latitude: Double?,
|
||||
val longitude: Double?,
|
||||
val iso: Int?,
|
||||
val make: String?,
|
||||
val model: String?,
|
||||
val lens: String?,
|
||||
val orientation: String?,
|
||||
@ColumnInfo(name = "time_zone")
|
||||
val timeZone: String?,
|
||||
val rating: Int?,
|
||||
@ColumnInfo(name = "projection_type")
|
||||
val projectionType: String?
|
||||
)
|
||||
|
||||
@Entity(tableName = "stack_entity")
|
||||
data class Stack(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
@ColumnInfo(name = "created_at")
|
||||
val createdAt: Date,
|
||||
@ColumnInfo(name = "updated_at")
|
||||
val updatedAt: Date,
|
||||
@ColumnInfo(name = "owner_id")
|
||||
val ownerId: String,
|
||||
@ColumnInfo(name = "primary_asset_id")
|
||||
val primaryAssetId: String
|
||||
)
|
||||
|
||||
@Entity(tableName = "store_entity")
|
||||
data class Store(
|
||||
@PrimaryKey
|
||||
val id: StoreKey,
|
||||
@ColumnInfo(name = "string_value")
|
||||
val stringValue: String?,
|
||||
@ColumnInfo(name = "int_value")
|
||||
val intValue: Int?
|
||||
)
|
||||
|
||||
@Entity(tableName = "upload_task_entity")
|
||||
data class UploadTask(
|
||||
@PrimaryKey(autoGenerate = true)
|
||||
val id: Long = 0,
|
||||
val attempts: Int,
|
||||
@ColumnInfo(name = "created_at")
|
||||
val createdAt: Date,
|
||||
@ColumnInfo(name = "file_path")
|
||||
val filePath: URL?,
|
||||
@ColumnInfo(name = "is_live_photo")
|
||||
val isLivePhoto: Boolean?,
|
||||
@ColumnInfo(name = "last_error")
|
||||
val lastError: UploadErrorCode?,
|
||||
@ColumnInfo(name = "live_photo_video_id")
|
||||
val livePhotoVideoId: String?,
|
||||
@ColumnInfo(name = "local_id")
|
||||
val localId: String,
|
||||
val method: UploadMethod,
|
||||
val priority: Float,
|
||||
@ColumnInfo(name = "retry_after")
|
||||
val retryAfter: Date?,
|
||||
val status: TaskStatus
|
||||
)
|
||||
|
||||
// Data class for query results
|
||||
data class LocalAssetTaskData(
|
||||
val attempts: Int,
|
||||
val checksum: String,
|
||||
val createdAt: Date,
|
||||
val fileName: String,
|
||||
val filePath: URL?,
|
||||
val isFavorite: Boolean,
|
||||
val localId: String,
|
||||
val priority: Float,
|
||||
val taskId: Long,
|
||||
val type: AssetType,
|
||||
val updatedAt: Date
|
||||
)
|
||||
|
||||
@Entity(tableName = "upload_task_stats")
|
||||
data class UploadTaskStat(
|
||||
@ColumnInfo(name = "pending_downloads")
|
||||
val pendingDownloads: Int,
|
||||
@ColumnInfo(name = "pending_uploads")
|
||||
val pendingUploads: Int,
|
||||
@ColumnInfo(name = "queued_downloads")
|
||||
val queuedDownloads: Int,
|
||||
@ColumnInfo(name = "queued_uploads")
|
||||
val queuedUploads: Int,
|
||||
@ColumnInfo(name = "failed_downloads")
|
||||
val failedDownloads: Int,
|
||||
@ColumnInfo(name = "failed_uploads")
|
||||
val failedUploads: Int,
|
||||
@ColumnInfo(name = "completed_uploads")
|
||||
val completedUploads: Int
|
||||
)
|
||||
|
||||
@Entity(tableName = "user_entity")
|
||||
data class User(
|
||||
@PrimaryKey
|
||||
val id: String,
|
||||
val name: String,
|
||||
val email: String,
|
||||
@ColumnInfo(name = "has_profile_image")
|
||||
val hasProfileImage: Boolean,
|
||||
@ColumnInfo(name = "profile_changed_at")
|
||||
val profileChangedAt: Date,
|
||||
@ColumnInfo(name = "avatar_color")
|
||||
val avatarColor: AvatarColor
|
||||
)
|
||||
|
||||
@Entity(
|
||||
tableName = "user_metadata_entity",
|
||||
primaryKeys = ["user_id", "key"]
|
||||
)
|
||||
data class UserMetadata(
|
||||
@ColumnInfo(name = "user_id")
|
||||
val userId: String,
|
||||
val key: Date,
|
||||
val value: ByteArray
|
||||
) {
|
||||
override fun equals(other: Any?): Boolean {
|
||||
if (this === other) return true
|
||||
if (javaClass != other?.javaClass) return false
|
||||
|
||||
other as UserMetadata
|
||||
|
||||
if (userId != other.userId) return false
|
||||
if (key != other.key) return false
|
||||
if (!value.contentEquals(other.value)) return false
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
override fun hashCode(): Int {
|
||||
var result = userId.hashCode()
|
||||
result = 31 * result + key.hashCode()
|
||||
result = 31 * result + value.contentHashCode()
|
||||
return result
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,54 @@
|
||||
package app.alextran.immich.upload
|
||||
|
||||
import android.content.Context
|
||||
import android.net.ConnectivityManager
|
||||
import android.net.Network
|
||||
import android.net.NetworkCapabilities
|
||||
import android.net.NetworkRequest
|
||||
|
||||
object NetworkMonitor {
|
||||
@Volatile
|
||||
private var isConnected = false
|
||||
|
||||
@Volatile
|
||||
private var isWifi = false
|
||||
|
||||
fun initialize(context: Context) {
|
||||
val connectivityManager = context.getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
|
||||
|
||||
val networkRequest = NetworkRequest.Builder()
|
||||
.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
|
||||
.build()
|
||||
|
||||
connectivityManager.registerNetworkCallback(networkRequest, object : ConnectivityManager.NetworkCallback() {
|
||||
override fun onAvailable(network: Network) {
|
||||
isConnected = true
|
||||
checkWifi(connectivityManager, network)
|
||||
}
|
||||
|
||||
override fun onLost(network: Network) {
|
||||
isConnected = false
|
||||
isWifi = false
|
||||
}
|
||||
|
||||
override fun onCapabilitiesChanged(network: Network, capabilities: NetworkCapabilities) {
|
||||
checkWifi(connectivityManager, network)
|
||||
}
|
||||
|
||||
private fun checkWifi(cm: ConnectivityManager, network: Network) {
|
||||
val capabilities = cm.getNetworkCapabilities(network)
|
||||
isWifi = capabilities?.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) == true
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fun isConnected(): Boolean = isConnected
|
||||
|
||||
fun isWifiConnected(context: Context): Boolean {
|
||||
if (!isConnected) return false
|
||||
|
||||
val connectivityManager = context.getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
|
||||
val capabilities = connectivityManager.getNetworkCapabilities(connectivityManager.activeNetwork)
|
||||
return capabilities?.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) == true
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
package app.alextran.immich.upload
|
||||
|
||||
object TaskConfig {
|
||||
const val MAX_ATTEMPTS = 3
|
||||
const val MAX_PENDING_DOWNLOADS = 10
|
||||
const val MAX_PENDING_UPLOADS = 10
|
||||
const val MAX_ACTIVE_UPLOADS = 3
|
||||
}
|
||||
@@ -0,0 +1,429 @@
|
||||
// Autogenerated from Pigeon (v26.0.2), do not edit directly.
|
||||
// See also: https://pub.dev/packages/pigeon
|
||||
@file:Suppress("UNCHECKED_CAST", "ArrayInDataClass")
|
||||
|
||||
package app.alextran.immich.upload
|
||||
|
||||
import android.util.Log
|
||||
import io.flutter.plugin.common.BasicMessageChannel
|
||||
import io.flutter.plugin.common.BinaryMessenger
|
||||
import io.flutter.plugin.common.EventChannel
|
||||
import io.flutter.plugin.common.MessageCodec
|
||||
import io.flutter.plugin.common.StandardMethodCodec
|
||||
import io.flutter.plugin.common.StandardMessageCodec
|
||||
import java.io.ByteArrayOutputStream
|
||||
import java.nio.ByteBuffer
|
||||
private object UploadTaskPigeonUtils {
|
||||
|
||||
fun wrapResult(result: Any?): List<Any?> {
|
||||
return listOf(result)
|
||||
}
|
||||
|
||||
fun wrapError(exception: Throwable): List<Any?> {
|
||||
return if (exception is FlutterError) {
|
||||
listOf(
|
||||
exception.code,
|
||||
exception.message,
|
||||
exception.details
|
||||
)
|
||||
} else {
|
||||
listOf(
|
||||
exception.javaClass.simpleName,
|
||||
exception.toString(),
|
||||
"Cause: " + exception.cause + ", Stacktrace: " + Log.getStackTraceString(exception)
|
||||
)
|
||||
}
|
||||
}
|
||||
fun deepEquals(a: Any?, b: Any?): Boolean {
|
||||
if (a is ByteArray && b is ByteArray) {
|
||||
return a.contentEquals(b)
|
||||
}
|
||||
if (a is IntArray && b is IntArray) {
|
||||
return a.contentEquals(b)
|
||||
}
|
||||
if (a is LongArray && b is LongArray) {
|
||||
return a.contentEquals(b)
|
||||
}
|
||||
if (a is DoubleArray && b is DoubleArray) {
|
||||
return a.contentEquals(b)
|
||||
}
|
||||
if (a is Array<*> && b is Array<*>) {
|
||||
return a.size == b.size &&
|
||||
a.indices.all{ deepEquals(a[it], b[it]) }
|
||||
}
|
||||
if (a is List<*> && b is List<*>) {
|
||||
return a.size == b.size &&
|
||||
a.indices.all{ deepEquals(a[it], b[it]) }
|
||||
}
|
||||
if (a is Map<*, *> && b is Map<*, *>) {
|
||||
return a.size == b.size && a.all {
|
||||
(b as Map<Any?, Any?>).containsKey(it.key) &&
|
||||
deepEquals(it.value, b[it.key])
|
||||
}
|
||||
}
|
||||
return a == b
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Error class for passing custom error details to Flutter via a thrown PlatformException.
|
||||
* @property code The error code.
|
||||
* @property message The error message.
|
||||
* @property details The error details. Must be a datatype supported by the api codec.
|
||||
*/
|
||||
class FlutterError (
|
||||
val code: String,
|
||||
override val message: String? = null,
|
||||
val details: Any? = null
|
||||
) : Throwable()
|
||||
|
||||
enum class UploadApiErrorCode(val raw: Int) {
|
||||
UNKNOWN(0),
|
||||
ASSET_NOT_FOUND(1),
|
||||
FILE_NOT_FOUND(2),
|
||||
RESOURCE_NOT_FOUND(3),
|
||||
INVALID_RESOURCE(4),
|
||||
ENCODING_FAILED(5),
|
||||
WRITE_FAILED(6),
|
||||
NOT_ENOUGH_SPACE(7),
|
||||
NETWORK_ERROR(8),
|
||||
PHOTOS_INTERNAL_ERROR(9),
|
||||
PHOTOS_UNKNOWN_ERROR(10),
|
||||
NO_SERVER_URL(11),
|
||||
NO_DEVICE_ID(12),
|
||||
NO_ACCESS_TOKEN(13),
|
||||
INTERRUPTED(14),
|
||||
CANCELLED(15),
|
||||
DOWNLOAD_STALLED(16),
|
||||
FORCE_QUIT(17),
|
||||
OUT_OF_RESOURCES(18),
|
||||
BACKGROUND_UPDATES_DISABLED(19),
|
||||
UPLOAD_TIMEOUT(20),
|
||||
I_CLOUD_RATE_LIMIT(21),
|
||||
I_CLOUD_THROTTLED(22);
|
||||
|
||||
companion object {
|
||||
fun ofRaw(raw: Int): UploadApiErrorCode? {
|
||||
return values().firstOrNull { it.raw == raw }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum class UploadApiStatus(val raw: Int) {
|
||||
DOWNLOAD_PENDING(0),
|
||||
DOWNLOAD_QUEUED(1),
|
||||
DOWNLOAD_FAILED(2),
|
||||
UPLOAD_PENDING(3),
|
||||
UPLOAD_QUEUED(4),
|
||||
UPLOAD_FAILED(5),
|
||||
UPLOAD_COMPLETE(6),
|
||||
UPLOAD_SKIPPED(7);
|
||||
|
||||
companion object {
|
||||
fun ofRaw(raw: Int): UploadApiStatus? {
|
||||
return values().firstOrNull { it.raw == raw }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Generated class from Pigeon that represents data sent in messages. */
|
||||
data class UploadApiTaskStatus (
|
||||
val id: String,
|
||||
val filename: String,
|
||||
val status: UploadApiStatus,
|
||||
val errorCode: UploadApiErrorCode? = null,
|
||||
val httpStatusCode: Long? = null
|
||||
)
|
||||
{
|
||||
companion object {
|
||||
fun fromList(pigeonVar_list: List<Any?>): UploadApiTaskStatus {
|
||||
val id = pigeonVar_list[0] as String
|
||||
val filename = pigeonVar_list[1] as String
|
||||
val status = pigeonVar_list[2] as UploadApiStatus
|
||||
val errorCode = pigeonVar_list[3] as UploadApiErrorCode?
|
||||
val httpStatusCode = pigeonVar_list[4] as Long?
|
||||
return UploadApiTaskStatus(id, filename, status, errorCode, httpStatusCode)
|
||||
}
|
||||
}
|
||||
fun toList(): List<Any?> {
|
||||
return listOf(
|
||||
id,
|
||||
filename,
|
||||
status,
|
||||
errorCode,
|
||||
httpStatusCode,
|
||||
)
|
||||
}
|
||||
override fun equals(other: Any?): Boolean {
|
||||
if (other !is UploadApiTaskStatus) {
|
||||
return false
|
||||
}
|
||||
if (this === other) {
|
||||
return true
|
||||
}
|
||||
return UploadTaskPigeonUtils.deepEquals(toList(), other.toList()) }
|
||||
|
||||
override fun hashCode(): Int = toList().hashCode()
|
||||
}
|
||||
|
||||
/** Generated class from Pigeon that represents data sent in messages. */
|
||||
data class UploadApiTaskProgress (
|
||||
val id: String,
|
||||
val progress: Double,
|
||||
val speed: Double? = null,
|
||||
val totalBytes: Long? = null
|
||||
)
|
||||
{
|
||||
companion object {
|
||||
fun fromList(pigeonVar_list: List<Any?>): UploadApiTaskProgress {
|
||||
val id = pigeonVar_list[0] as String
|
||||
val progress = pigeonVar_list[1] as Double
|
||||
val speed = pigeonVar_list[2] as Double?
|
||||
val totalBytes = pigeonVar_list[3] as Long?
|
||||
return UploadApiTaskProgress(id, progress, speed, totalBytes)
|
||||
}
|
||||
}
|
||||
fun toList(): List<Any?> {
|
||||
return listOf(
|
||||
id,
|
||||
progress,
|
||||
speed,
|
||||
totalBytes,
|
||||
)
|
||||
}
|
||||
override fun equals(other: Any?): Boolean {
|
||||
if (other !is UploadApiTaskProgress) {
|
||||
return false
|
||||
}
|
||||
if (this === other) {
|
||||
return true
|
||||
}
|
||||
return UploadTaskPigeonUtils.deepEquals(toList(), other.toList()) }
|
||||
|
||||
override fun hashCode(): Int = toList().hashCode()
|
||||
}
|
||||
private open class UploadTaskPigeonCodec : StandardMessageCodec() {
|
||||
override fun readValueOfType(type: Byte, buffer: ByteBuffer): Any? {
|
||||
return when (type) {
|
||||
129.toByte() -> {
|
||||
return (readValue(buffer) as Long?)?.let {
|
||||
UploadApiErrorCode.ofRaw(it.toInt())
|
||||
}
|
||||
}
|
||||
130.toByte() -> {
|
||||
return (readValue(buffer) as Long?)?.let {
|
||||
UploadApiStatus.ofRaw(it.toInt())
|
||||
}
|
||||
}
|
||||
131.toByte() -> {
|
||||
return (readValue(buffer) as? List<Any?>)?.let {
|
||||
UploadApiTaskStatus.fromList(it)
|
||||
}
|
||||
}
|
||||
132.toByte() -> {
|
||||
return (readValue(buffer) as? List<Any?>)?.let {
|
||||
UploadApiTaskProgress.fromList(it)
|
||||
}
|
||||
}
|
||||
else -> super.readValueOfType(type, buffer)
|
||||
}
|
||||
}
|
||||
override fun writeValue(stream: ByteArrayOutputStream, value: Any?) {
|
||||
when (value) {
|
||||
is UploadApiErrorCode -> {
|
||||
stream.write(129)
|
||||
writeValue(stream, value.raw)
|
||||
}
|
||||
is UploadApiStatus -> {
|
||||
stream.write(130)
|
||||
writeValue(stream, value.raw)
|
||||
}
|
||||
is UploadApiTaskStatus -> {
|
||||
stream.write(131)
|
||||
writeValue(stream, value.toList())
|
||||
}
|
||||
is UploadApiTaskProgress -> {
|
||||
stream.write(132)
|
||||
writeValue(stream, value.toList())
|
||||
}
|
||||
else -> super.writeValue(stream, value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val UploadTaskPigeonMethodCodec = StandardMethodCodec(UploadTaskPigeonCodec())
|
||||
|
||||
|
||||
/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
|
||||
interface UploadApi {
|
||||
fun initialize(callback: (Result<Unit>) -> Unit)
|
||||
fun refresh(callback: (Result<Unit>) -> Unit)
|
||||
fun cancelAll(callback: (Result<Unit>) -> Unit)
|
||||
fun enqueueAssets(localIds: List<String>, callback: (Result<Unit>) -> Unit)
|
||||
fun enqueueFiles(paths: List<String>, callback: (Result<Unit>) -> Unit)
|
||||
|
||||
companion object {
|
||||
/** The codec used by UploadApi. */
|
||||
val codec: MessageCodec<Any?> by lazy {
|
||||
UploadTaskPigeonCodec()
|
||||
}
|
||||
/** Sets up an instance of `UploadApi` to handle messages through the `binaryMessenger`. */
|
||||
@JvmOverloads
|
||||
fun setUp(binaryMessenger: BinaryMessenger, api: UploadApi?, messageChannelSuffix: String = "") {
|
||||
val separatedMessageChannelSuffix = if (messageChannelSuffix.isNotEmpty()) ".$messageChannelSuffix" else ""
|
||||
run {
|
||||
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.initialize$separatedMessageChannelSuffix", codec)
|
||||
if (api != null) {
|
||||
channel.setMessageHandler { _, reply ->
|
||||
api.initialize{ result: Result<Unit> ->
|
||||
val error = result.exceptionOrNull()
|
||||
if (error != null) {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapError(error))
|
||||
} else {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapResult(null))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
run {
|
||||
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.refresh$separatedMessageChannelSuffix", codec)
|
||||
if (api != null) {
|
||||
channel.setMessageHandler { _, reply ->
|
||||
api.refresh{ result: Result<Unit> ->
|
||||
val error = result.exceptionOrNull()
|
||||
if (error != null) {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapError(error))
|
||||
} else {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapResult(null))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
run {
|
||||
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.cancelAll$separatedMessageChannelSuffix", codec)
|
||||
if (api != null) {
|
||||
channel.setMessageHandler { _, reply ->
|
||||
api.cancelAll{ result: Result<Unit> ->
|
||||
val error = result.exceptionOrNull()
|
||||
if (error != null) {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapError(error))
|
||||
} else {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapResult(null))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
run {
|
||||
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.enqueueAssets$separatedMessageChannelSuffix", codec)
|
||||
if (api != null) {
|
||||
channel.setMessageHandler { message, reply ->
|
||||
val args = message as List<Any?>
|
||||
val localIdsArg = args[0] as List<String>
|
||||
api.enqueueAssets(localIdsArg) { result: Result<Unit> ->
|
||||
val error = result.exceptionOrNull()
|
||||
if (error != null) {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapError(error))
|
||||
} else {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapResult(null))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
run {
|
||||
val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.UploadApi.enqueueFiles$separatedMessageChannelSuffix", codec)
|
||||
if (api != null) {
|
||||
channel.setMessageHandler { message, reply ->
|
||||
val args = message as List<Any?>
|
||||
val pathsArg = args[0] as List<String>
|
||||
api.enqueueFiles(pathsArg) { result: Result<Unit> ->
|
||||
val error = result.exceptionOrNull()
|
||||
if (error != null) {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapError(error))
|
||||
} else {
|
||||
reply.reply(UploadTaskPigeonUtils.wrapResult(null))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
channel.setMessageHandler(null)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class UploadTaskPigeonStreamHandler<T>(
|
||||
val wrapper: UploadTaskPigeonEventChannelWrapper<T>
|
||||
) : EventChannel.StreamHandler {
|
||||
var pigeonSink: PigeonEventSink<T>? = null
|
||||
|
||||
override fun onListen(p0: Any?, sink: EventChannel.EventSink) {
|
||||
pigeonSink = PigeonEventSink<T>(sink)
|
||||
wrapper.onListen(p0, pigeonSink!!)
|
||||
}
|
||||
|
||||
override fun onCancel(p0: Any?) {
|
||||
pigeonSink = null
|
||||
wrapper.onCancel(p0)
|
||||
}
|
||||
}
|
||||
|
||||
interface UploadTaskPigeonEventChannelWrapper<T> {
|
||||
open fun onListen(p0: Any?, sink: PigeonEventSink<T>) {}
|
||||
|
||||
open fun onCancel(p0: Any?) {}
|
||||
}
|
||||
|
||||
class PigeonEventSink<T>(private val sink: EventChannel.EventSink) {
|
||||
fun success(value: T) {
|
||||
sink.success(value)
|
||||
}
|
||||
|
||||
fun error(errorCode: String, errorMessage: String?, errorDetails: Any?) {
|
||||
sink.error(errorCode, errorMessage, errorDetails)
|
||||
}
|
||||
|
||||
fun endOfStream() {
|
||||
sink.endOfStream()
|
||||
}
|
||||
}
|
||||
|
||||
abstract class StreamStatusStreamHandler : UploadTaskPigeonEventChannelWrapper<UploadApiTaskStatus> {
|
||||
companion object {
|
||||
fun register(messenger: BinaryMessenger, streamHandler: StreamStatusStreamHandler, instanceName: String = "") {
|
||||
var channelName: String = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamStatus"
|
||||
if (instanceName.isNotEmpty()) {
|
||||
channelName += ".$instanceName"
|
||||
}
|
||||
val internalStreamHandler = UploadTaskPigeonStreamHandler<UploadApiTaskStatus>(streamHandler)
|
||||
EventChannel(messenger, channelName, UploadTaskPigeonMethodCodec).setStreamHandler(internalStreamHandler)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
abstract class StreamProgressStreamHandler : UploadTaskPigeonEventChannelWrapper<UploadApiTaskProgress> {
|
||||
companion object {
|
||||
fun register(messenger: BinaryMessenger, streamHandler: StreamProgressStreamHandler, instanceName: String = "") {
|
||||
var channelName: String = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamProgress"
|
||||
if (instanceName.isNotEmpty()) {
|
||||
channelName += ".$instanceName"
|
||||
}
|
||||
val internalStreamHandler = UploadTaskPigeonStreamHandler<UploadApiTaskProgress>(streamHandler)
|
||||
EventChannel(messenger, channelName, UploadTaskPigeonMethodCodec).setStreamHandler(internalStreamHandler)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,175 @@
package app.alextran.immich.upload

import android.content.Context
import androidx.work.*
import app.alextran.immich.schema.AppDatabase
import app.alextran.immich.schema.AssetType
import app.alextran.immich.schema.StorageType
import app.alextran.immich.schema.StoreKey
import app.alextran.immich.schema.TaskStatus
import app.alextran.immich.schema.UploadMethod
import app.alextran.immich.schema.UploadTask
import kotlinx.coroutines.*
import kotlinx.coroutines.guava.await
import java.util.Date
import java.util.concurrent.TimeUnit

// TODO: this is almost entirely LLM-generated (ported from Swift), need to verify behavior
class UploadTaskImpl(context: Context) : UploadApi {
  private val ctx: Context = context.applicationContext
  private val db: AppDatabase = AppDatabase.getDatabase(ctx)
  private val workManager: WorkManager = WorkManager.getInstance(ctx)

  @Volatile
  private var isInitialized = false
  private val scope = CoroutineScope(SupervisorJob() + Dispatchers.Default)

  override fun initialize(callback: (Result<Unit>) -> Unit) {
    scope.launch {
      try {
        // Clean up orphaned tasks
        val activeWorkInfos = workManager.getWorkInfosByTag(UPLOAD_WORK_TAG).await()
        val activeTaskIds = activeWorkInfos
          .filter { it.state == WorkInfo.State.RUNNING || it.state == WorkInfo.State.ENQUEUED }
          .mapNotNull {
            it.tags.find { tag -> tag.startsWith("task_") }?.substringAfter("task_")?.toLongOrNull()
          }
          .toSet()

        db.uploadTaskDao().run {
          withContext(Dispatchers.IO) {
            // Find tasks marked as queued but not actually running
            val dbQueuedIds = getTaskIdsByStatus(
              listOf(
                TaskStatus.DOWNLOAD_QUEUED,
                TaskStatus.UPLOAD_QUEUED,
                TaskStatus.UPLOAD_PENDING
              )
            )

            val orphanIds = dbQueuedIds.filterNot { it in activeTaskIds }

            if (orphanIds.isNotEmpty()) {
              resetOrphanedTasks(orphanIds)
            }
          }
        }

        // Clean up temp files
        val tempDir = getTempDirectory()
        tempDir.deleteRecursively()

        isInitialized = true
        startBackup()

        withContext(Dispatchers.Main) {
          callback(Result.success(Unit))
        }
      } catch (e: Exception) {
        withContext(Dispatchers.Main) {
          callback(Result.failure(e))
        }
      }
    }
  }

  override fun refresh(callback: (Result<Unit>) -> Unit) {
    scope.launch {
      try {
        startBackup()
        withContext(Dispatchers.Main) {
          callback(Result.success(Unit))
        }
      } catch (e: Exception) {
        withContext(Dispatchers.Main) {
          callback(Result.failure(e))
        }
      }
    }
  }

  private suspend fun startBackup() {
    if (!isInitialized) return

    withContext(Dispatchers.IO) {
      try {
        // Check if backup is enabled
        val backupEnabled = db.storeDao().get(StoreKey.enableBackup, StorageType.BoolStorage)
        if (backupEnabled != true) return@withContext

        // Get upload statistics
        val stats = db.uploadTaskStatDao().getStats() ?: return@withContext
        val availableSlots = TaskConfig.MAX_PENDING_UPLOADS + TaskConfig.MAX_PENDING_DOWNLOADS -
          (stats.pendingDownloads + stats.queuedDownloads + stats.pendingUploads + stats.queuedUploads)

        if (availableSlots <= 0) return@withContext

        // Find candidate assets for backup
        val candidates = db.localAssetDao().getCandidatesForBackup(availableSlots)

        if (candidates.isEmpty()) return@withContext

        // Create upload tasks for candidates
        db.uploadTaskDao().insertAll(candidates.map { candidate ->
          UploadTask(
            attempts = 0,
            createdAt = Date(),
            filePath = null,
            isLivePhoto = null,
            lastError = null,
            livePhotoVideoId = null,
            localId = candidate.id,
            method = UploadMethod.MULTIPART,
            priority = when (candidate.type) {
              AssetType.IMAGE -> 0.5f
              else -> 0.3f
            },
            retryAfter = null,
            status = TaskStatus.UPLOAD_PENDING
          )
        })

        // Start upload workers
        enqueueUploadWorkers()
      } catch (e: Exception) {
        android.util.Log.e(TAG, "Backup queue error", e)
      }
    }
  }

  private fun enqueueUploadWorkers() {
    // Create constraints
    val constraints = Constraints.Builder()
      .setRequiredNetworkType(NetworkType.CONNECTED)
      .build()

    // Create work request
    val uploadWorkRequest = OneTimeWorkRequestBuilder<UploadWorker>()
      .setConstraints(constraints)
      .addTag(UPLOAD_WORK_TAG)
      .setBackoffCriteria(
        BackoffPolicy.EXPONENTIAL,
        WorkRequest.MIN_BACKOFF_MILLIS,
        TimeUnit.MILLISECONDS
      )
      .build()

    workManager.enqueueUniqueWork(
      UPLOAD_WORK_NAME,
      ExistingWorkPolicy.KEEP,
      uploadWorkRequest
    )
  }

  private fun getTempDirectory(): java.io.File {
    return java.io.File(ctx.cacheDir, "upload_temp").apply {
      if (!exists()) mkdirs()
    }
  }

  companion object {
    private const val TAG = "UploadTaskImpl"
    private const val UPLOAD_WORK_TAG = "immich_upload"
    private const val UPLOAD_WORK_NAME = "immich_upload_unique"
  }
}
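startBackup() only fills however many TaskConfig slots are currently free and then schedules a single unique WorkManager job; because enqueueUploadWorkers() uses ExistingWorkPolicy.KEEP, calling refresh() while a worker is already enqueued or running is a no-op rather than a pile-up. A minimal sketch of observing that unique job from elsewhere in the app (context and lifecycleOwner are assumed to be in scope; not code from this change):

// Observe the single upload job scheduled under UPLOAD_WORK_NAME ("immich_upload_unique").
val uploadWork = WorkManager.getInstance(context)
  .getWorkInfosForUniqueWorkLiveData("immich_upload_unique")
uploadWork.observe(lifecycleOwner) { infos ->
  // With ExistingWorkPolicy.KEEP there is at most one enqueued or running request here.
  android.util.Log.d("UploadTaskImpl", "upload worker state: ${infos.firstOrNull()?.state}")
}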
@@ -0,0 +1,265 @@
package app.alextran.immich.upload

import android.content.Context
import android.provider.MediaStore
import androidx.work.*
import app.alextran.immich.schema.AppDatabase
import app.alextran.immich.schema.AssetType
import app.alextran.immich.schema.LocalAssetTaskData
import app.alextran.immich.schema.StorageType
import app.alextran.immich.schema.StoreKey
import app.alextran.immich.schema.TaskStatus
import app.alextran.immich.schema.UploadErrorCode
import kotlinx.coroutines.*
import okhttp3.*
import okhttp3.MediaType.Companion.toMediaType
import java.io.File
import java.io.IOException
import java.net.URL
import java.util.*
import java.util.concurrent.TimeUnit

class UploadWorker(
  context: Context,
  params: WorkerParameters
) : CoroutineWorker(context, params) {

  private val db = AppDatabase.getDatabase(applicationContext)
  private val client = createOkHttpClient()

  override suspend fun doWork(): Result = withContext(Dispatchers.IO) {
    try {
      // Check if backup is enabled
      val backupEnabled = db.storeDao().get(StoreKey.enableBackup, StorageType.BoolStorage)
      if (backupEnabled != true) {
        return@withContext Result.success()
      }

      // Get pending upload tasks
      val tasks = db.uploadTaskDao().getTasksForUpload(TaskConfig.MAX_ACTIVE_UPLOADS)

      if (tasks.isEmpty()) {
        return@withContext Result.success()
      }

      // Process tasks concurrently
      val results = tasks.map { task ->
        async { processUploadTask(task) }
      }.awaitAll()

      // Check if we should continue processing
      val hasMore = db.uploadTaskDao().hasPendingTasks()

      if (hasMore) {
        // Schedule next batch
        enqueueNextBatch()
      }

      // Determine result based on processing outcomes
      when {
        results.all { it } -> Result.success()
        results.any { it } -> Result.success() // Partial success
        else -> Result.retry()
      }
    } catch (e: Exception) {
      android.util.Log.e(TAG, "Upload worker error", e)
      Result.retry()
    }
  }

  private suspend fun processUploadTask(task: LocalAssetTaskData): Boolean {
    return try {
      // Get asset from MediaStore
      val assetUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI
        .buildUpon()
        .appendPath(task.localId)
        .build()

      val cursor = applicationContext.contentResolver.query(
        assetUri,
        arrayOf(MediaStore.Images.Media.DATA),
        null,
        null,
        null
      ) ?: return handleFailure(task, UploadErrorCode.ASSET_NOT_FOUND)

      val filePath = cursor.use {
        if (it.moveToFirst()) {
          it.getString(it.getColumnIndexOrThrow(MediaStore.Images.Media.DATA))
        } else null
      } ?: return handleFailure(task, UploadErrorCode.ASSET_NOT_FOUND)

      val file = File(filePath)
      if (!file.exists()) {
        return handleFailure(task, UploadErrorCode.FILE_NOT_FOUND)
      }

      // Get server configuration
      val serverUrl = db.storeDao().get(StoreKey.serverEndpoint, StorageType.UrlStorage)
        ?: return handleFailure(task, UploadErrorCode.NO_SERVER_URL)
      val accessToken = db.storeDao().get(StoreKey.accessToken, StorageType.StringStorage)
        ?: return handleFailure(task, UploadErrorCode.NO_ACCESS_TOKEN)
      val deviceId = db.storeDao().get(StoreKey.deviceId, StorageType.StringStorage)
        ?: return handleFailure(task, UploadErrorCode.NO_DEVICE_ID)

      // Check network constraints
      val useWifiOnly = when (task.type) {
        AssetType.IMAGE -> db.storeDao().get(StoreKey.useWifiForUploadPhotos, StorageType.BoolStorage) ?: false
        AssetType.VIDEO -> db.storeDao().get(StoreKey.useWifiForUploadVideos, StorageType.BoolStorage) ?: false
        else -> false
      }

      if (useWifiOnly && !NetworkMonitor.isWifiConnected(applicationContext)) {
        // Wait for WiFi
        return true
      }

      // Update task status
      db.uploadTaskDao().updateStatus(task.taskId, TaskStatus.UPLOAD_QUEUED)

      // Perform upload
      uploadFile(task, file, serverUrl, accessToken, deviceId)

      // Mark as complete
      db.uploadTaskDao().updateStatus(task.taskId, TaskStatus.UPLOAD_COMPLETE)

      true
    } catch (e: Exception) {
      android.util.Log.e(TAG, "Upload task ${task.taskId} failed", e)
      handleFailure(task, UploadErrorCode.UNKNOWN)
    }
  }

  private suspend fun uploadFile(
    task: LocalAssetTaskData,
    file: File,
    serverUrl: URL,
    accessToken: String,
    deviceId: String
  ) {
    val requestBody = createMultipartBody(task, file, deviceId)

    val request = Request.Builder()
      .url("${serverUrl}/api/upload")
      .post(requestBody)
      .header("x-immich-user-token", accessToken)
      .tag(task.taskId)
      .build()

    client.newCall(request).execute().use { response ->
      if (!response.isSuccessful) {
        throw IOException("Upload failed: ${response.code}")
      }
    }
  }

  private fun createMultipartBody(
    task: LocalAssetTaskData,
    file: File,
    deviceId: String
  ): RequestBody {
    val boundary = "Boundary-${UUID.randomUUID()}"

    return object : RequestBody() {
      override fun contentType() = "multipart/form-data; boundary=$boundary".toMediaType()

      override fun writeTo(sink: okio.BufferedSink) {
        // Write form fields
        writeFormField(sink, boundary, "deviceAssetId", task.localId)
        writeFormField(sink, boundary, "deviceId", deviceId)
        writeFormField(sink, boundary, "fileCreatedAt", (task.createdAt.time / 1000).toString())
        writeFormField(sink, boundary, "fileModifiedAt", (task.updatedAt.time / 1000).toString())
        writeFormField(sink, boundary, "fileName", task.fileName)
        writeFormField(sink, boundary, "isFavorite", task.isFavorite.toString())

        // Write file
        sink.writeUtf8("--$boundary\r\n")
        sink.writeUtf8("Content-Disposition: form-data; name=\"assetData\"; filename=\"asset\"\r\n")
        sink.writeUtf8("Content-Type: application/octet-stream\r\n\r\n")

        file.inputStream().use { input ->
          val buffer = ByteArray(8192)
          var bytesRead: Int
          var totalBytesWritten = 0L
          while (input.read(buffer).also { bytesRead = it } != -1) {
            sink.write(buffer, 0, bytesRead)
            totalBytesWritten += bytesRead

            // Report cumulative bytes written so far (simplified - could be enhanced with listeners)
            setProgressAsync(
              workDataOf(
                PROGRESS_TASK_ID to task.taskId,
                PROGRESS_BYTES to totalBytesWritten
              )
            )
          }
        }

        sink.writeUtf8("\r\n--$boundary--\r\n")
      }

      private fun writeFormField(sink: okio.BufferedSink, boundary: String, name: String, value: String) {
        sink.writeUtf8("--$boundary\r\n")
        sink.writeUtf8("Content-Disposition: form-data; name=\"$name\"\r\n\r\n")
        sink.writeUtf8(value)
        sink.writeUtf8("\r\n")
      }
    }
  }

  private suspend fun handleFailure(task: LocalAssetTaskData, code: UploadErrorCode): Boolean {
    val newAttempts = task.attempts + 1
    val status = if (newAttempts >= TaskConfig.MAX_ATTEMPTS) {
      TaskStatus.UPLOAD_FAILED
    } else {
      TaskStatus.UPLOAD_PENDING
    }

    val retryAfter = if (status == TaskStatus.UPLOAD_PENDING) {
      Date(System.currentTimeMillis() + (Math.pow(3.0, newAttempts.toDouble()) * 1000).toLong())
    } else null

    db.uploadTaskDao().updateTaskAfterFailure(
      task.taskId,
      newAttempts,
      code,
      status,
      retryAfter
    )

    return false
  }

  private fun enqueueNextBatch() {
    val constraints = Constraints.Builder()
      .setRequiredNetworkType(NetworkType.CONNECTED)
      .build()

    val nextWorkRequest = OneTimeWorkRequestBuilder<UploadWorker>()
      .setConstraints(constraints)
      .addTag(UPLOAD_WORK_TAG)
      .setInitialDelay(1, TimeUnit.SECONDS)
      .build()

    WorkManager.getInstance(applicationContext)
      .enqueueUniqueWork(
        UPLOAD_WORK_NAME,
        ExistingWorkPolicy.KEEP,
        nextWorkRequest
      )
  }

  private fun createOkHttpClient(): OkHttpClient {
    return OkHttpClient.Builder()
      .connectTimeout(30, TimeUnit.SECONDS)
      .readTimeout(300, TimeUnit.SECONDS)
      .writeTimeout(300, TimeUnit.SECONDS)
      .build()
  }

  companion object {
    private const val TAG = "UploadWorker"
    private const val UPLOAD_WORK_TAG = "immich_upload"
    private const val UPLOAD_WORK_NAME = "immich_upload_unique"
    const val PROGRESS_TASK_ID = "progress_task_id"
    const val PROGRESS_BYTES = "progress_bytes"
  }
}
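handleFailure() keeps a failed task in the queue with retryAfter = now + 3^attempts seconds until TaskConfig.MAX_ATTEMPTS is reached, so retries back off at roughly 3 s, 9 s, 27 s, 81 s, and so on before the task is marked UPLOAD_FAILED. A one-line sketch of that schedule (illustrative only, not code from this change):

// delaySeconds(1) = 3, delaySeconds(2) = 9, delaySeconds(3) = 27, ...
fun delaySeconds(attempts: Int): Long = Math.pow(3.0, attempts.toDouble()).toLong()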
@@ -36,4 +36,3 @@ tasks.register("clean", Delete) {
tasks.named('wrapper') {
    distributionType = Wrapper.DistributionType.ALL
}
mobile/drift_schemas/main/drift_schema_v14.json (generated, new file)
File diff suppressed because one or more lines are too long
@@ -32,7 +32,6 @@ target 'Runner' do
  use_modular_headers!

  flutter_install_all_ios_pods File.dirname(File.realpath(__FILE__))

  # share_handler addition start
  target 'ShareExtension' do
    inherit! :search_paths
@@ -88,9 +88,9 @@ PODS:
|
||||
- Flutter
|
||||
- FlutterMacOS
|
||||
- SAMKeychain (1.5.3)
|
||||
- SDWebImage (5.21.0):
|
||||
- SDWebImage/Core (= 5.21.0)
|
||||
- SDWebImage/Core (5.21.0)
|
||||
- SDWebImage (5.21.3):
|
||||
- SDWebImage/Core (= 5.21.3)
|
||||
- SDWebImage/Core (5.21.3)
|
||||
- share_handler_ios (0.0.14):
|
||||
- Flutter
|
||||
- share_handler_ios/share_handler_ios_models (= 0.0.14)
|
||||
@@ -107,16 +107,16 @@ PODS:
|
||||
- sqflite_darwin (0.0.4):
|
||||
- Flutter
|
||||
- FlutterMacOS
|
||||
- sqlite3 (3.49.1):
|
||||
- sqlite3/common (= 3.49.1)
|
||||
- sqlite3/common (3.49.1)
|
||||
- sqlite3/dbstatvtab (3.49.1):
|
||||
- sqlite3 (3.49.2):
|
||||
- sqlite3/common (= 3.49.2)
|
||||
- sqlite3/common (3.49.2)
|
||||
- sqlite3/dbstatvtab (3.49.2):
|
||||
- sqlite3/common
|
||||
- sqlite3/fts5 (3.49.1):
|
||||
- sqlite3/fts5 (3.49.2):
|
||||
- sqlite3/common
|
||||
- sqlite3/perf-threadsafe (3.49.1):
|
||||
- sqlite3/perf-threadsafe (3.49.2):
|
||||
- sqlite3/common
|
||||
- sqlite3/rtree (3.49.1):
|
||||
- sqlite3/rtree (3.49.2):
|
||||
- sqlite3/common
|
||||
- sqlite3_flutter_libs (0.0.1):
|
||||
- Flutter
|
||||
@@ -275,18 +275,18 @@ SPEC CHECKSUMS:
|
||||
permission_handler_apple: 4ed2196e43d0651e8ff7ca3483a069d469701f2d
|
||||
photo_manager: 1d80ae07a89a67dfbcae95953a1e5a24af7c3e62
|
||||
SAMKeychain: 483e1c9f32984d50ca961e26818a534283b4cd5c
|
||||
SDWebImage: f84b0feeb08d2d11e6a9b843cb06d75ebf5b8868
|
||||
SDWebImage: 16309af6d214ba3f77a7c6f6fdda888cb313a50a
|
||||
share_handler_ios: e2244e990f826b2c8eaa291ac3831569438ba0fb
|
||||
share_handler_ios_models: fc638c9b4330dc7f082586c92aee9dfa0b87b871
|
||||
share_plus: 50da8cb520a8f0f65671c6c6a99b3617ed10a58a
|
||||
shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7
|
||||
sqflite_darwin: 20b2a3a3b70e43edae938624ce550a3cbf66a3d0
|
||||
sqlite3: fc1400008a9b3525f5914ed715a5d1af0b8f4983
|
||||
sqlite3: 3c950dc86011117c307eb0b28c4a7bb449dce9f1
|
||||
sqlite3_flutter_libs: f8fc13346870e73fe35ebf6dbb997fbcd156b241
|
||||
SwiftyGif: 706c60cf65fa2bc5ee0313beece843c8eb8194d4
|
||||
url_launcher_ios: 694010445543906933d732453a59da0a173ae33d
|
||||
wakelock_plus: e29112ab3ef0b318e58cfa5c32326458be66b556
|
||||
|
||||
PODFILE CHECKSUM: 7ce312f2beab01395db96f6969d90a447279cf45
|
||||
PODFILE CHECKSUM: 95621706d175fee669455a5946a602e2a775019c
|
||||
|
||||
COCOAPODS: 1.16.2
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
archiveVersion = 1;
|
||||
classes = {
|
||||
};
|
||||
objectVersion = 54;
|
||||
objectVersion = 77;
|
||||
objects = {
|
||||
|
||||
/* Begin PBXBuildFile section */
|
||||
@@ -29,6 +29,7 @@
|
||||
FAC6F89B2D287C890078CB2F /* ShareExtension.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = FAC6F8902D287C890078CB2F /* ShareExtension.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
|
||||
FAC6F8B72D287F120078CB2F /* ShareViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = FAC6F8B52D287F120078CB2F /* ShareViewController.swift */; };
|
||||
FAC6F8B92D287F120078CB2F /* MainInterface.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FAC6F8B32D287F120078CB2F /* MainInterface.storyboard */; };
|
||||
FE30A0D02ECF97B8007AFDD7 /* Algorithms in Frameworks */ = {isa = PBXBuildFile; productRef = FE30A0CF2ECF97B8007AFDD7 /* Algorithms */; };
|
||||
FEAFA8732E4D42F4001E47FE /* Thumbhash.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEAFA8722E4D42F4001E47FE /* Thumbhash.swift */; };
|
||||
FED3B1962E253E9B0030FD97 /* ThumbnailsImpl.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1942E253E9B0030FD97 /* ThumbnailsImpl.swift */; };
|
||||
FED3B1972E253E9B0030FD97 /* Thumbnails.g.swift in Sources */ = {isa = PBXBuildFile; fileRef = FED3B1932E253E9B0030FD97 /* Thumbnails.g.swift */; };
|
||||
@@ -77,6 +78,16 @@
|
||||
name = "Embed Foundation Extensions";
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
FE4C52462EAFE736009EEB47 /* Embed ExtensionKit Extensions */ = {
|
||||
isa = PBXCopyFilesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
dstPath = "$(EXTENSIONS_FOLDER_PATH)";
|
||||
dstSubfolderSpec = 16;
|
||||
files = (
|
||||
);
|
||||
name = "Embed ExtensionKit Extensions";
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
/* End PBXCopyFilesBuildPhase section */
|
||||
|
||||
/* Begin PBXFileReference section */
|
||||
@@ -136,15 +147,11 @@
|
||||
/* Begin PBXFileSystemSynchronizedRootGroup section */
|
||||
B231F52D2E93A44A00BC45D1 /* Core */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
exceptions = (
|
||||
);
|
||||
path = Core;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
B2CF7F8C2DDE4EBB00744BF6 /* Sync */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
exceptions = (
|
||||
);
|
||||
path = Sync;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
@@ -156,10 +163,18 @@
|
||||
path = WidgetExtension;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FE14355D2EC446E90009D5AC /* Upload */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
path = Upload;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FEB3BA112EBD52860081A5EB /* Schemas */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
path = Schemas;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FEE084F22EC172080045228E /* Schemas */ = {
|
||||
isa = PBXFileSystemSynchronizedRootGroup;
|
||||
exceptions = (
|
||||
);
|
||||
path = Schemas;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
@@ -173,6 +188,7 @@
|
||||
FEE084F82EC172460045228E /* SQLiteData in Frameworks */,
|
||||
FEE084FB2EC1725A0045228E /* RawStructuredFieldValues in Frameworks */,
|
||||
FEE084FD2EC1725A0045228E /* StructuredFieldValues in Frameworks */,
|
||||
FE30A0D02ECF97B8007AFDD7 /* Algorithms in Frameworks */,
|
||||
D218389C4A4C4693F141F7D1 /* Pods_Runner.framework in Frameworks */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
@@ -267,7 +283,9 @@
|
||||
97C146F01CF9000F007C117D /* Runner */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
FE14355D2EC446E90009D5AC /* Upload */,
|
||||
FEE084F22EC172080045228E /* Schemas */,
|
||||
FEB3BA112EBD52860081A5EB /* Schemas */,
|
||||
B231F52D2E93A44A00BC45D1 /* Core */,
|
||||
B25D37792E72CA15008B6CA7 /* Connectivity */,
|
||||
B21E34A62E5AF9760031FDB9 /* Background */,
|
||||
@@ -345,6 +363,7 @@
|
||||
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
|
||||
D218A34AEE62BC1EF119F5B0 /* [CP] Embed Pods Frameworks */,
|
||||
6724EEB7D74949FA08581154 /* [CP] Copy Pods Resources */,
|
||||
FE4C52462EAFE736009EEB47 /* Embed ExtensionKit Extensions */,
|
||||
);
|
||||
buildRules = (
|
||||
);
|
||||
@@ -355,6 +374,8 @@
|
||||
fileSystemSynchronizedGroups = (
|
||||
B231F52D2E93A44A00BC45D1 /* Core */,
|
||||
B2CF7F8C2DDE4EBB00744BF6 /* Sync */,
|
||||
FE14355D2EC446E90009D5AC /* Upload */,
|
||||
FEB3BA112EBD52860081A5EB /* Schemas */,
|
||||
FEE084F22EC172080045228E /* Schemas */,
|
||||
);
|
||||
name = Runner;
|
||||
@@ -407,7 +428,7 @@
|
||||
isa = PBXProject;
|
||||
attributes = {
|
||||
BuildIndependentTargetsInParallel = YES;
|
||||
LastSwiftUpdateCheck = 1640;
|
||||
LastSwiftUpdateCheck = 1620;
|
||||
LastUpgradeCheck = 1510;
|
||||
ORGANIZATIONNAME = "";
|
||||
TargetAttributes = {
|
||||
@@ -437,6 +458,7 @@
|
||||
packageReferences = (
|
||||
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */,
|
||||
FEE084F92EC1725A0045228E /* XCRemoteSwiftPackageReference "swift-http-structured-headers" */,
|
||||
FE30A0CE2ECF97B8007AFDD7 /* XCRemoteSwiftPackageReference "swift-algorithms" */,
|
||||
);
|
||||
preferredProjectObjectVersion = 77;
|
||||
productRefGroup = 97C146EF1CF9000F007C117D /* Products */;
|
||||
@@ -549,10 +571,14 @@
|
||||
inputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-input-files.xcfilelist",
|
||||
);
|
||||
inputPaths = (
|
||||
);
|
||||
name = "[CP] Copy Pods Resources";
|
||||
outputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources-${CONFIGURATION}-output-files.xcfilelist",
|
||||
);
|
||||
outputPaths = (
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
shellPath = /bin/sh;
|
||||
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
|
||||
@@ -581,10 +607,14 @@
|
||||
inputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
|
||||
);
|
||||
inputPaths = (
|
||||
);
|
||||
name = "[CP] Embed Pods Frameworks";
|
||||
outputFileListPaths = (
|
||||
"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
|
||||
);
|
||||
outputPaths = (
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
shellPath = /bin/sh;
|
||||
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
|
||||
@@ -735,7 +765,7 @@
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_BITCODE = NO;
|
||||
INFOPLIST_FILE = Runner/Info.plist;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
|
||||
@@ -744,7 +774,8 @@
|
||||
"@executable_path/Frameworks",
|
||||
);
|
||||
MARKETING_VERSION = 1.121.0;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.profile;
|
||||
OTHER_SWIFT_FLAGS = "$(inherited) -D COCOAPODS -D DEBUG -Xllvm -sil-disable-pass=performance-linker";
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.profile;
|
||||
PRODUCT_NAME = "Immich-Profile";
|
||||
PROVISIONING_PROFILE_SPECIFIER = "";
|
||||
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
|
||||
@@ -879,7 +910,7 @@
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_BITCODE = NO;
|
||||
INFOPLIST_FILE = Runner/Info.plist;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
|
||||
@@ -888,7 +919,8 @@
|
||||
"@executable_path/Frameworks",
|
||||
);
|
||||
MARKETING_VERSION = 1.121.0;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.vdebug;
|
||||
OTHER_SWIFT_FLAGS = "$(inherited) -D COCOAPODS -D DEBUG -Xllvm -sil-disable-pass=performance-linker";
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.vdebug;
|
||||
PRODUCT_NAME = "Immich-Debug";
|
||||
PROVISIONING_PROFILE_SPECIFIER = "";
|
||||
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
|
||||
@@ -909,7 +941,7 @@
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_BITCODE = NO;
|
||||
INFOPLIST_FILE = Runner/Info.plist;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 14.0;
|
||||
@@ -918,7 +950,8 @@
|
||||
"@executable_path/Frameworks",
|
||||
);
|
||||
MARKETING_VERSION = 1.121.0;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich;
|
||||
OTHER_SWIFT_FLAGS = "$(inherited) -D COCOAPODS -Xllvm -sil-disable-pass=performance-linker";
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich;
|
||||
PRODUCT_NAME = Immich;
|
||||
PROVISIONING_PROFILE_SPECIFIER = "";
|
||||
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
|
||||
@@ -942,7 +975,7 @@
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
@@ -959,7 +992,7 @@
|
||||
MARKETING_VERSION = 1.0;
|
||||
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
|
||||
MTL_FAST_MATH = YES;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.vdebug.Widget;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.vdebug.Widget;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SKIP_INSTALL = YES;
|
||||
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
|
||||
@@ -985,7 +1018,7 @@
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
@@ -1001,7 +1034,7 @@
|
||||
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
|
||||
MARKETING_VERSION = 1.0;
|
||||
MTL_FAST_MATH = YES;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.Widget;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.Widget;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SKIP_INSTALL = YES;
|
||||
SWIFT_EMIT_LOC_STRINGS = YES;
|
||||
@@ -1025,7 +1058,7 @@
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
@@ -1041,7 +1074,7 @@
|
||||
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
|
||||
MARKETING_VERSION = 1.0;
|
||||
MTL_FAST_MATH = YES;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.profile.Widget;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.profile.Widget;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SKIP_INSTALL = YES;
|
||||
SWIFT_EMIT_LOC_STRINGS = YES;
|
||||
@@ -1065,7 +1098,7 @@
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
@@ -1082,7 +1115,7 @@
|
||||
MARKETING_VERSION = 1.0;
|
||||
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
|
||||
MTL_FAST_MATH = YES;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.vdebug.ShareExtension;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.vdebug.ShareExtension;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
PROVISIONING_PROFILE_SPECIFIER = "";
|
||||
SKIP_INSTALL = YES;
|
||||
@@ -1109,7 +1142,7 @@
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
@@ -1125,7 +1158,7 @@
|
||||
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
|
||||
MARKETING_VERSION = 1.0;
|
||||
MTL_FAST_MATH = YES;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.ShareExtension;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertalev.immich.ShareExtension;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
PROVISIONING_PROFILE_SPECIFIER = "";
|
||||
SKIP_INSTALL = YES;
|
||||
@@ -1150,7 +1183,7 @@
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 233;
|
||||
CUSTOM_GROUP_ID = group.app.immich.share;
|
||||
DEVELOPMENT_TEAM = 2F67MQ8R79;
|
||||
DEVELOPMENT_TEAM = 33MF3D8ZGA;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu17;
|
||||
GENERATE_INFOPLIST_FILE = YES;
|
||||
@@ -1166,7 +1199,7 @@
|
||||
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
|
||||
MARKETING_VERSION = 1.0;
|
||||
MTL_FAST_MATH = YES;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.profile.ShareExtension;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = app.mertakev.immich.profile.ShareExtension;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
PROVISIONING_PROFILE_SPECIFIER = "";
|
||||
SKIP_INSTALL = YES;
|
||||
@@ -1222,6 +1255,14 @@
|
||||
/* End XCConfigurationList section */
|
||||
|
||||
/* Begin XCRemoteSwiftPackageReference section */
|
||||
FE30A0CE2ECF97B8007AFDD7 /* XCRemoteSwiftPackageReference "swift-algorithms" */ = {
|
||||
isa = XCRemoteSwiftPackageReference;
|
||||
repositoryURL = "https://github.com/apple/swift-algorithms.git";
|
||||
requirement = {
|
||||
kind = upToNextMajorVersion;
|
||||
minimumVersion = 1.2.1;
|
||||
};
|
||||
};
|
||||
FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */ = {
|
||||
isa = XCRemoteSwiftPackageReference;
|
||||
repositoryURL = "https://github.com/pointfreeco/sqlite-data";
|
||||
@@ -1241,6 +1282,11 @@
|
||||
/* End XCRemoteSwiftPackageReference section */
|
||||
|
||||
/* Begin XCSwiftPackageProductDependency section */
|
||||
FE30A0CF2ECF97B8007AFDD7 /* Algorithms */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FE30A0CE2ECF97B8007AFDD7 /* XCRemoteSwiftPackageReference "swift-algorithms" */;
|
||||
productName = Algorithms;
|
||||
};
|
||||
FEE084F72EC172460045228E /* SQLiteData */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = FEE084F62EC172460045228E /* XCRemoteSwiftPackageReference "sqlite-data" */;
|
||||
|
||||
@@ -28,15 +28,6 @@
|
||||
"version" : "1.3.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-case-paths",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-case-paths",
|
||||
"state" : {
|
||||
"revision" : "6989976265be3f8d2b5802c722f9ba168e227c71",
|
||||
"version" : "1.7.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-clocks",
|
||||
"kind" : "remoteSourceControl",
|
||||
@@ -132,8 +123,8 @@
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-structured-queries",
|
||||
"state" : {
|
||||
"revision" : "1447ea20550f6f02c4b48cc80931c3ed40a9c756",
|
||||
"version" : "0.25.0"
|
||||
"revision" : "9c84335373bae5f5c9f7b5f0adf3ae10f2cab5b9",
|
||||
"version" : "0.25.2"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -145,15 +136,6 @@
|
||||
"version" : "602.0.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-tagged",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/pointfreeco/swift-tagged",
|
||||
"state" : {
|
||||
"revision" : "3907a9438f5b57d317001dc99f3f11b46882272b",
|
||||
"version" : "0.10.0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "xctest-dynamic-overlay",
|
||||
"kind" : "remoteSourceControl",
|
||||
|
||||
@@ -1,11 +1,11 @@
import BackgroundTasks
import Flutter
import UIKit
import network_info_plus
import path_provider_foundation
import permission_handler_apple
import photo_manager
import shared_preferences_foundation
import UIKit

@main
@objc class AppDelegate: FlutterAppDelegate {
@@ -15,7 +15,7 @@ import UIKit
  ) -> Bool {
    // Required for flutter_local_notification
    if #available(iOS 10.0, *) {
      UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
      UNUserNotificationCenter.current().delegate = self
    }

    GeneratedPluginRegistrant.register(with: self)
@@ -36,7 +36,9 @@ import UIKit
    }

    if !registry.hasPlugin("org.cocoapods.shared-preferences-foundation") {
      SharedPreferencesPlugin.register(with: registry.registrar(forPlugin: "org.cocoapods.shared-preferences-foundation")!)
      SharedPreferencesPlugin.register(
        with: registry.registrar(forPlugin: "org.cocoapods.shared-preferences-foundation")!
      )
    }

    if !registry.hasPlugin("org.cocoapods.permission-handler-apple") {
@@ -50,14 +52,18 @@ import UIKit

    return super.application(application, didFinishLaunchingWithOptions: launchOptions)
  }

  public static func registerPlugins(with engine: FlutterEngine) {
    NativeSyncApiImpl.register(with: engine.registrar(forPlugin: NativeSyncApiImpl.name)!)
    ThumbnailApiSetup.setUp(binaryMessenger: engine.binaryMessenger, api: ThumbnailApiImpl())
    BackgroundWorkerFgHostApiSetup.setUp(binaryMessenger: engine.binaryMessenger, api: BackgroundWorkerApiImpl())
  }

  public static func cancelPlugins(with engine: FlutterEngine) {
    (engine.valuePublished(byPlugin: NativeSyncApiImpl.name) as? NativeSyncApiImpl)?.detachFromEngine()

    let statusListener = StatusEventListener()
    StreamStatusStreamHandler.register(with: engine.binaryMessenger, streamHandler: statusListener)
    let progressListener = ProgressEventListener()
    StreamProgressStreamHandler.register(with: engine.binaryMessenger, streamHandler: progressListener)
    UploadApiSetup.setUp(
      binaryMessenger: engine.binaryMessenger,
      api: UploadApiImpl(statusListener: statusListener, progressListener: progressListener)
    )
  }
}
@@ -350,16 +350,12 @@ class BackgroundServicePlugin: NSObject, FlutterPlugin {

    // If we have required Wi-Fi, we can check the isExpensive property
    let requireWifi = defaults.value(forKey: "require_wifi") as? Bool ?? false
    if (requireWifi) {
      let wifiMonitor = NWPathMonitor(requiredInterfaceType: .wifi)
      let isExpensive = wifiMonitor.currentPath.isExpensive
      if (isExpensive) {
        // The network is expensive and we have required Wi-Fi
        // Therefore, we will simply complete the task without
        // running it
        task.setTaskCompleted(success: true)
        return
      }

    // The network is expensive and we have required Wi-Fi
    // Therefore, we will simply complete the task without
    // running it
    if (requireWifi && NetworkMonitor.shared.isExpensive) {
      return task.setTaskCompleted(success: true)
    }

    // Schedule the next sync task so we can run this again later
@@ -1,17 +1,24 @@
class ImmichPlugin: NSObject {
  var detached: Bool

  override init() {
    detached = false
    super.init()
  }

  func detachFromEngine() {
    self.detached = true
  }

  func completeWhenActive<T>(for completion: @escaping (T) -> Void, with value: T) {
    guard !self.detached else { return }
    completion(value)
  }
}

@inline(__always)
func dPrint(_ item: Any) {
  #if DEBUG
    print(item)
  #endif
}
@@ -9,8 +9,6 @@
    <key>com.apple.developer.networking.wifi-info</key>
    <true/>
    <key>com.apple.security.application-groups</key>
    <array>
      <string>group.app.immich.share</string>
    </array>
    <array/>
  </dict>
</plist>

@@ -11,8 +11,6 @@
    <key>com.apple.developer.networking.wifi-info</key>
    <true/>
    <key>com.apple.security.application-groups</key>
    <array>
      <string>group.app.immich.share</string>
    </array>
    <array/>
  </dict>
</plist>
@@ -1,12 +1,22 @@
|
||||
import SQLiteData
|
||||
|
||||
struct Endpoint: Codable {
|
||||
let url: URL
|
||||
let status: Status
|
||||
extension Notification.Name {
|
||||
static let networkDidConnect = Notification.Name("networkDidConnect")
|
||||
}
|
||||
|
||||
enum Status: String, Codable {
|
||||
case loading, valid, error, unknown
|
||||
}
|
||||
enum TaskConfig {
|
||||
static let maxActiveDownloads = 3
|
||||
static let maxPendingDownloads = 50
|
||||
static let maxPendingUploads = 50
|
||||
static let maxRetries = 10
|
||||
static let sessionId = "app.mertalev.immich.upload"
|
||||
static let downloadCheckIntervalNs: UInt64 = 30_000_000_000 // 30 seconds
|
||||
static let downloadTimeoutS = TimeInterval(60)
|
||||
static let transferSpeedAlpha = 0.4
|
||||
static let originalsDir = FileManager.default.temporaryDirectory.appendingPathComponent(
|
||||
"originals",
|
||||
isDirectory: true
|
||||
)
|
||||
}
|
||||
|
||||
enum StoreKey: Int, CaseIterable, QueryBindable {
|
||||
@@ -47,8 +57,6 @@ enum StoreKey: Int, CaseIterable, QueryBindable {
|
||||
static let deviceId = Typed<String>(rawValue: ._deviceId)
|
||||
case _accessToken = 11
|
||||
static let accessToken = Typed<String>(rawValue: ._accessToken)
|
||||
case _serverEndpoint = 12
|
||||
static let serverEndpoint = Typed<String>(rawValue: ._serverEndpoint)
|
||||
case _sslClientCertData = 15
|
||||
static let sslClientCertData = Typed<String>(rawValue: ._sslClientCertData)
|
||||
case _sslClientPasswd = 16
|
||||
@@ -67,10 +75,12 @@ enum StoreKey: Int, CaseIterable, QueryBindable {
|
||||
static let externalEndpointList = Typed<[Endpoint]>(rawValue: ._externalEndpointList)
|
||||
|
||||
// MARK: - URL
|
||||
case _localEndpoint = 134
|
||||
static let localEndpoint = Typed<URL>(rawValue: ._localEndpoint)
|
||||
case _serverUrl = 10
|
||||
static let serverUrl = Typed<URL>(rawValue: ._serverUrl)
|
||||
case _serverEndpoint = 12
|
||||
static let serverEndpoint = Typed<URL>(rawValue: ._serverEndpoint)
|
||||
case _localEndpoint = 134
|
||||
static let localEndpoint = Typed<URL>(rawValue: ._localEndpoint)
|
||||
|
||||
// MARK: - Date
|
||||
case _backupFailedSince = 5
|
||||
@@ -160,6 +170,17 @@ enum StoreKey: Int, CaseIterable, QueryBindable {
|
||||
}
|
||||
}
|
||||
|
||||
enum UploadHeaders: String {
|
||||
case reprDigest = "Repr-Digest"
|
||||
case userToken = "X-Immich-User-Token"
|
||||
case assetData = "X-Immich-Asset-Data"
|
||||
}
|
||||
|
||||
enum TaskStatus: Int, QueryBindable {
|
||||
case downloadPending, downloadQueued, downloadFailed, uploadPending, uploadQueued, uploadFailed, uploadComplete,
|
||||
uploadSkipped
|
||||
}
|
||||
|
||||
enum BackupSelection: Int, QueryBindable {
|
||||
case selected, none, excluded
|
||||
}
|
||||
@@ -175,3 +196,77 @@ enum AlbumUserRole: Int, QueryBindable {
|
||||
enum MemoryType: Int, QueryBindable {
|
||||
case onThisDay
|
||||
}
|
||||
|
||||
enum AssetVisibility: Int, QueryBindable {
|
||||
case timeline, hidden, archive, locked
|
||||
}
|
||||
|
||||
enum SourceType: String, QueryBindable {
|
||||
case machineLearning = "machine-learning"
|
||||
case exif, manual
|
||||
}
|
||||
|
||||
enum UploadMethod: Int, QueryBindable {
|
||||
case multipart, resumable
|
||||
}
|
||||
|
||||
enum UploadError: Error {
|
||||
case fileCreationFailed
|
||||
case iCloudError(UploadErrorCode)
|
||||
case photosError(UploadErrorCode)
|
||||
}
|
||||
|
||||
enum UploadErrorCode: Int, QueryBindable {
|
||||
case unknown
|
||||
case assetNotFound
|
||||
case fileNotFound
|
||||
case resourceNotFound
|
||||
case invalidResource
|
||||
case encodingFailed
|
||||
case writeFailed
|
||||
case notEnoughSpace
|
||||
case networkError
|
||||
case photosInternalError
|
||||
case photosUnknownError
|
||||
case noServerUrl
|
||||
case noDeviceId
|
||||
case noAccessToken
|
||||
case interrupted
|
||||
case cancelled
|
||||
case downloadStalled
|
||||
case forceQuit
|
||||
case outOfResources
|
||||
case backgroundUpdatesDisabled
|
||||
case uploadTimeout
|
||||
case iCloudRateLimit
|
||||
case iCloudThrottled
|
||||
case invalidResponse
|
||||
case badRequest
|
||||
case internalServerError
|
||||
}
|
||||
|
||||
enum AssetType: Int, QueryBindable {
|
||||
case other, image, video, audio
|
||||
}
|
||||
|
||||
enum AssetMediaStatus: String, Codable {
|
||||
case created, replaced, duplicate
|
||||
}
|
||||
|
||||
struct Endpoint: Codable {
|
||||
let url: URL
|
||||
let status: Status
|
||||
|
||||
enum Status: String, Codable {
|
||||
case loading, valid, error, unknown
|
||||
}
|
||||
}
|
||||
|
||||
struct UploadSuccessResponse: Codable {
|
||||
let status: AssetMediaStatus
|
||||
let id: String
|
||||
}
|
||||
|
||||
struct UploadErrorResponse: Codable {
|
||||
let message: String
|
||||
}
|
||||
|
||||
@@ -4,35 +4,75 @@ enum StoreError: Error {
|
||||
case invalidJSON(String)
|
||||
case invalidURL(String)
|
||||
case encodingFailed
|
||||
case notFound
|
||||
}
|
||||
|
||||
protocol StoreConvertible {
|
||||
static var cacheKeyPath: ReferenceWritableKeyPath<StoreCache, [StoreKey: Self]> { get }
|
||||
associatedtype StorageType
|
||||
static func fromValue(_ value: StorageType) throws(StoreError) -> Self
|
||||
static func toValue(_ value: Self) throws(StoreError) -> StorageType
|
||||
}
|
||||
|
||||
extension StoreConvertible {
|
||||
static func get(_ cache: StoreCache, key: StoreKey) -> Self? {
|
||||
os_unfair_lock_lock(&cache.lock)
|
||||
defer { os_unfair_lock_unlock(&cache.lock) }
|
||||
return cache[keyPath: cacheKeyPath][key]
|
||||
}
|
||||
|
||||
static func set(_ cache: StoreCache, key: StoreKey, value: Self?) {
|
||||
os_unfair_lock_lock(&cache.lock)
|
||||
defer { os_unfair_lock_unlock(&cache.lock) }
|
||||
cache[keyPath: cacheKeyPath][key] = value
|
||||
}
|
||||
}
|
||||
|
||||
final class StoreCache {
|
||||
fileprivate var lock = os_unfair_lock()
|
||||
fileprivate var intCache: [StoreKey: Int] = [:]
|
||||
fileprivate var boolCache: [StoreKey: Bool] = [:]
|
||||
fileprivate var dateCache: [StoreKey: Date] = [:]
|
||||
fileprivate var stringCache: [StoreKey: String] = [:]
|
||||
fileprivate var urlCache: [StoreKey: URL] = [:]
|
||||
fileprivate var endpointArrayCache: [StoreKey: [Endpoint]] = [:]
|
||||
fileprivate var stringDictCache: [StoreKey: [String: String]] = [:]
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) -> T? {
|
||||
T.get(self, key: key.rawValue)
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T?) {
|
||||
T.set(self, key: key.rawValue, value: value)
|
||||
}
|
||||
}
|
||||
|
||||
extension Int: StoreConvertible {
|
||||
static let cacheKeyPath = \StoreCache.intCache
|
||||
static func fromValue(_ value: Int) -> Int { value }
|
||||
static func toValue(_ value: Int) -> Int { value }
|
||||
}
|
||||
|
||||
extension Bool: StoreConvertible {
|
||||
static let cacheKeyPath = \StoreCache.boolCache
|
||||
static func fromValue(_ value: Int) -> Bool { value == 1 }
|
||||
static func toValue(_ value: Bool) -> Int { value ? 1 : 0 }
|
||||
}
|
||||
|
||||
extension Date: StoreConvertible {
|
||||
static let cacheKeyPath = \StoreCache.dateCache
|
||||
static func fromValue(_ value: Int) -> Date { Date(timeIntervalSince1970: TimeInterval(value) / 1000) }
|
||||
static func toValue(_ value: Date) -> Int { Int(value.timeIntervalSince1970 * 1000) }
|
||||
}
|
||||
|
||||
extension String: StoreConvertible {
|
||||
static let cacheKeyPath = \StoreCache.stringCache
|
||||
static func fromValue(_ value: String) -> String { value }
|
||||
static func toValue(_ value: String) -> String { value }
|
||||
}
|
||||
|
||||
extension URL: StoreConvertible {
|
||||
static let cacheKeyPath = \StoreCache.urlCache
|
||||
static func fromValue(_ value: String) throws(StoreError) -> URL {
|
||||
guard let url = URL(string: value) else {
|
||||
throw StoreError.invalidURL(value)
|
||||
@@ -69,78 +109,52 @@ extension StoreConvertible where Self: Codable, StorageType == String {
|
||||
}
|
||||
}
|
||||
|
||||
extension Array: StoreConvertible where Element: Codable {
|
||||
extension Array: StoreConvertible where Element == Endpoint {
|
||||
static let cacheKeyPath = \StoreCache.endpointArrayCache
|
||||
typealias StorageType = String
|
||||
}
|
||||
|
||||
extension Dictionary: StoreConvertible where Key == String, Value: Codable {
|
||||
extension Dictionary: StoreConvertible where Key == String, Value == String {
|
||||
static let cacheKeyPath = \StoreCache.stringDictCache
|
||||
typealias StorageType = String
|
||||
}
|
||||
|
||||
class StoreRepository {
|
||||
private let db: DatabasePool
|
||||
|
||||
init(db: DatabasePool) {
|
||||
self.db = db
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == Int {
|
||||
extension Store {
|
||||
static let cache = StoreCache()
|
||||
|
||||
static func get<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>) throws -> T?
|
||||
where T.StorageType == Int {
|
||||
if let cached = cache.get(key) { return cached }
|
||||
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
if let value = try query.fetchOne(conn) ?? nil {
|
||||
let converted = try T.fromValue(value)
|
||||
cache.set(key, value: converted)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) throws -> T? where T.StorageType == String {
|
||||
static func get<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>) throws -> T?
|
||||
where T.StorageType == String {
|
||||
if let cached = cache.get(key) { return cached }
|
||||
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
if let value = try query.fetchOne(conn) ?? nil {
|
||||
let converted = try T.fromValue(value)
|
||||
cache.set(key, value: converted)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == Int {
|
||||
let query = Store.select(\.intValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
static func set<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>, value: T) throws
|
||||
where T.StorageType == Int {
|
||||
let converted = try T.toValue(value)
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: converted) }.execute(conn)
|
||||
cache.set(key, value: value)
|
||||
}
|
||||
|
||||
func get<T: StoreConvertible>(_ key: StoreKey.Typed<T>) async throws -> T? where T.StorageType == String {
|
||||
let query = Store.select(\.stringValue).where { $0.id.eq(key.rawValue) }
|
||||
if let value = try await db.read({ conn in try query.fetchOne(conn) }) ?? nil {
|
||||
return try T.fromValue(value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == Int {
|
||||
let value = try T.toValue(value)
|
||||
try db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) throws where T.StorageType == String {
|
||||
let value = try T.toValue(value)
|
||||
try db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == Int {
|
||||
let value = try T.toValue(value)
|
||||
try await db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: nil, intValue: value) }.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
func set<T: StoreConvertible>(_ key: StoreKey.Typed<T>, value: T) async throws where T.StorageType == String {
|
||||
let value = try T.toValue(value)
|
||||
try await db.write { conn in
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: value, intValue: nil) }.execute(conn)
|
||||
}
|
||||
static func set<T: StoreConvertible>(_ conn: Database, _ key: StoreKey.Typed<T>, value: T) throws
|
||||
where T.StorageType == String {
|
||||
let converted = try T.toValue(value)
|
||||
try Store.upsert { Store(id: key.rawValue, stringValue: converted, intValue: nil) }.execute(conn)
|
||||
cache.set(key, value: value)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,70 +1,170 @@
|
||||
import GRDB
|
||||
import SQLiteData
|
||||
|
||||
extension QueryExpression where QueryValue: _OptionalProtocol {
|
||||
// asserts column result cannot be nil
|
||||
var unwrapped: SQLQueryExpression<QueryValue.Wrapped> {
|
||||
SQLQueryExpression(self.queryFragment, as: QueryValue.Wrapped.self)
|
||||
}
|
||||
}
|
||||
|
||||
extension Date {
|
||||
var unixTime: Date.UnixTimeRepresentation {
|
||||
return Date.UnixTimeRepresentation(queryOutput: self)
|
||||
}
|
||||
}
|
||||
|
||||
@Table("asset_face_entity")
|
||||
struct AssetFace {
|
||||
struct AssetFace: Identifiable {
|
||||
let id: String
|
||||
let assetId: String
|
||||
let personId: String?
|
||||
@Column("asset_id")
|
||||
let assetId: RemoteAsset.ID
|
||||
@Column("person_id")
|
||||
let personId: Person.ID?
|
||||
@Column("image_width")
|
||||
let imageWidth: Int
|
||||
@Column("image_height")
|
||||
let imageHeight: Int
|
||||
@Column("bounding_box_x1")
|
||||
let boundingBoxX1: Int
|
||||
@Column("bounding_box_y1")
|
||||
let boundingBoxY1: Int
|
||||
@Column("bounding_box_x2")
|
||||
let boundingBoxX2: Int
|
||||
@Column("bounding_box_y2")
|
||||
let boundingBoxY2: Int
|
||||
let sourceType: String
|
||||
@Column("source_type")
|
||||
let sourceType: SourceType
|
||||
}
|
||||
|
||||
@Table("auth_user_entity")
|
||||
struct AuthUser {
|
||||
struct AuthUser: Identifiable {
|
||||
let id: String
|
||||
let name: String
|
||||
let email: String
|
||||
@Column("is_admin")
|
||||
let isAdmin: Bool
|
||||
@Column("has_profile_image")
|
||||
let hasProfileImage: Bool
|
||||
@Column("profile_changed_at")
|
||||
let profileChangedAt: Date
|
||||
@Column("avatar_color")
|
||||
let avatarColor: AvatarColor
|
||||
@Column("quota_size_in_bytes")
|
||||
let quotaSizeInBytes: Int
|
||||
@Column("quota_usage_in_bytes")
|
||||
let quotaUsageInBytes: Int
|
||||
@Column("pin_code")
|
||||
let pinCode: String?
|
||||
}
|
||||
|
||||
@Table("local_album_entity")
|
||||
struct LocalAlbum {
|
||||
struct LocalAlbum: Identifiable {
|
||||
let id: String
|
||||
@Column("backup_selection")
|
||||
let backupSelection: BackupSelection
|
||||
let linkedRemoteAlbumId: String?
|
||||
@Column("linked_remote_album_id")
|
||||
let linkedRemoteAlbumId: RemoteAlbum.ID?
|
||||
@Column("marker")
|
||||
let marker_: Bool?
|
||||
let name: String
|
||||
@Column("is_ios_shared_album")
|
||||
let isIosSharedAlbum: Bool
|
||||
@Column("updated_at")
|
||||
let updatedAt: Date
|
||||
}
|
||||
|
||||
extension LocalAlbum {
|
||||
static let selected = Self.where { $0.backupSelection.eq(BackupSelection.selected) }
|
||||
static let excluded = Self.where { $0.backupSelection.eq(BackupSelection.excluded) }
|
||||
}
|
||||
|
||||
@Table("local_album_asset_entity")
|
||||
struct LocalAlbumAsset {
|
||||
let id: ID
|
||||
@Column("marker")
|
||||
let marker_: String?
|
||||
|
||||
@Selection
|
||||
struct ID {
|
||||
@Column("asset_id")
|
||||
let assetId: String
|
||||
@Column("album_id")
|
||||
let albumId: String
|
||||
}
|
||||
}
|
||||
|
||||
extension LocalAlbumAsset {
|
||||
static let selected = Self.where {
|
||||
$0.id.assetId.eq(LocalAsset.columns.id) && $0.id.albumId.in(LocalAlbum.selected.select(\.id))
|
||||
}
|
||||
static let excluded = Self.where {
|
||||
$0.id.assetId.eq(LocalAsset.columns.id) && $0.id.albumId.in(LocalAlbum.excluded.select(\.id))
|
||||
}
|
||||
|
||||
/// Get all asset ids that are only in this album and not in other albums.
|
||||
/// This is useful in cases where the album is a smart album or a user-created album, especially on iOS
|
||||
static func uniqueAssetIds(albumId: String) -> Select<String, Self, ()> {
|
||||
return Self.select(\.id.assetId)
|
||||
.where { laa in
|
||||
laa.id.albumId.eq(albumId)
|
||||
&& !LocalAlbumAsset.where { $0.id.assetId.eq(laa.id.assetId) && $0.id.albumId.neq(albumId) }.exists()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Table("local_asset_entity")
|
||||
struct LocalAsset {
|
||||
struct LocalAsset: Identifiable {
|
||||
let id: String
|
||||
let checksum: String?
|
||||
let createdAt: Date
|
||||
let durationInSeconds: Int?
|
||||
@Column("created_at")
|
||||
let createdAt: String
|
||||
@Column("duration_in_seconds")
|
||||
let durationInSeconds: Int64?
|
||||
let height: Int?
|
||||
@Column("is_favorite")
|
||||
let isFavorite: Bool
|
||||
let name: String
|
||||
let orientation: String
|
||||
let type: Int
|
||||
let updatedAt: Date
|
||||
let type: AssetType
|
||||
@Column("updated_at")
|
||||
let updatedAt: String
|
||||
let width: Int?
|
||||
|
||||
static func getCandidates() -> Where<LocalAsset> {
|
||||
return Self.where { local in
|
||||
LocalAlbumAsset.selected.exists()
|
||||
&& !LocalAlbumAsset.excluded.exists()
|
||||
&& !RemoteAsset.where {
|
||||
local.checksum.eq($0.checksum)
|
||||
&& $0.ownerId.eq(Store.select(\.stringValue).where { $0.id.eq(StoreKey.currentUser.rawValue) }.unwrapped)
|
||||
}.exists()
|
||||
&& !UploadTask.where { $0.localId.eq(local.id) }.exists()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Selection
|
||||
struct LocalAssetCandidate {
|
||||
let id: LocalAsset.ID
|
||||
let type: AssetType
|
||||
}
|
||||
|
||||
@Selection
|
||||
struct LocalAssetDownloadData {
|
||||
let checksum: String?
|
||||
let createdAt: String
|
||||
let livePhotoVideoId: RemoteAsset.ID?
|
||||
let localId: LocalAsset.ID
|
||||
let taskId: UploadTask.ID
|
||||
let updatedAt: String
|
||||
}
|
||||
|
||||
@Selection
|
||||
struct LocalAssetUploadData {
|
||||
let filePath: URL
|
||||
let priority: Float
|
||||
let taskId: UploadTask.ID
|
||||
let type: AssetType
|
||||
}
|
||||
|
||||
@Table("memory_asset_entity")
|
||||
@@ -73,63 +173,89 @@ struct MemoryAsset {

  @Selection
  struct ID {
    @Column("asset_id")
    let assetId: String
    @Column("album_id")
    let albumId: String
  }
}

@Table("memory_entity")
struct Memory {
struct Memory: Identifiable {
  let id: String
  @Column("created_at")
  let createdAt: Date
  @Column("updated_at")
  let updatedAt: Date
  @Column("deleted_at")
  let deletedAt: Date?
  let ownerId: String
  @Column("owner_id")
  let ownerId: User.ID
  let type: MemoryType
  let data: String
  @Column("is_saved")
  let isSaved: Bool
  @Column("memory_at")
  let memoryAt: Date
  @Column("seen_at")
  let seenAt: Date?
  @Column("show_at")
  let showAt: Date?
  @Column("hide_at")
  let hideAt: Date?
}

@Table("partner_entity")
struct Partner {
  let id: ID
  @Column("in_timeline")
  let inTimeline: Bool

  @Selection
  struct ID {
    @Column("shared_by_id")
    let sharedById: String
    @Column("shared_with_id")
    let sharedWithId: String
  }
}

@Table("person_entity")
struct Person {
struct Person: Identifiable {
  let id: String
  @Column("created_at")
  let createdAt: Date
  @Column("updated_at")
  let updatedAt: Date
  @Column("owner_id")
  let ownerId: String
  let name: String
  let faceAssetId: String?
  @Column("face_asset_id")
  let faceAssetId: AssetFace.ID?
  @Column("is_favorite")
  let isFavorite: Bool
  @Column("is_hidden")
  let isHidden: Bool
  let color: String?
  @Column("birth_date")
  let birthDate: Date?
}

@Table("remote_album_entity")
struct RemoteAlbum {
struct RemoteAlbum: Identifiable {
  let id: String
  @Column("created_at")
  let createdAt: Date
  let description: String?
  @Column("is_activity_enabled")
  let isActivityEnabled: Bool
  let name: String
  let order: Int
  @Column("owner_id")
  let ownerId: String
  let thumbnailAssetId: String?
  @Column("thumbnail_asset_id")
  let thumbnailAssetId: RemoteAsset.ID?
  @Column("updated_at")
  let updatedAt: Date
}

@@ -139,7 +265,9 @@ struct RemoteAlbumAsset {

  @Selection
  struct ID {
    @Column("asset_id")
    let assetId: String
    @Column("album_id")
    let albumId: String
  }
}
@@ -151,40 +279,55 @@ struct RemoteAlbumUser {

  @Selection
  struct ID {
    @Column("album_id")
    let albumId: String
    @Column("user_id")
    let userId: String
  }
}

@Table("remote_asset_entity")
struct RemoteAsset {
struct RemoteAsset: Identifiable {
  let id: String
  let checksum: String?
  let checksum: String
  @Column("is_favorite")
  let isFavorite: Bool
  @Column("deleted_at")
  let deletedAt: Date?
  let isFavorite: Int
  let libraryId: String?
  let livePhotoVideoId: String?
  @Column("owner_id")
  let ownerId: User.ID
  @Column("local_date_time")
  let localDateTime: Date?
  let orientation: String
  let ownerId: String
  let stackId: String?
  let visibility: Int
  @Column("thumb_hash")
  let thumbHash: String?
  @Column("library_id")
  let libraryId: String?
  @Column("live_photo_video_id")
  let livePhotoVideoId: String?
  @Column("stack_id")
  let stackId: Stack.ID?
  let visibility: AssetVisibility
}

@Table("remote_exif_entity")
struct RemoteExif {
  @Column(primaryKey: true)
  let assetId: String
  @Column("asset_id", primaryKey: true)
  let assetId: RemoteAsset.ID
  let city: String?
  let state: String?
  let country: String?
  @Column("date_time_original")
  let dateTimeOriginal: Date?
  let description: String?
  let height: Int?
  let width: Int?
  @Column("exposure_time")
  let exposureTime: String?
  @Column("f_number")
  let fNumber: Double?
  @Column("file_size")
  let fileSize: Int?
  @Column("focal_length")
  let focalLength: Double?
  let latitude: Double?
  let longitude: Double?
@@ -193,34 +336,110 @@ struct RemoteExif {
  let model: String?
  let lens: String?
  let orientation: String?
  @Column("time_zone")
  let timeZone: String?
  let rating: Int?
  @Column("projection_type")
  let projectionType: String?
}

@Table("stack_entity")
struct Stack {
struct Stack: Identifiable {
  let id: String
  @Column("created_at")
  let createdAt: Date
  @Column("updated_at")
  let updatedAt: Date
  let ownerId: String
  @Column("owner_id")
  let ownerId: User.ID
  @Column("primary_asset_id")
  let primaryAssetId: String
}

@Table("store_entity")
struct Store {
struct Store: Identifiable {
  let id: StoreKey
  @Column("string_value")
  let stringValue: String?
  @Column("int_value")
  let intValue: Int?
}

@Table("upload_tasks")
struct UploadTask: Identifiable {
  let id: Int64
  let attempts: Int
  @Column("created_at", as: Date.UnixTimeRepresentation.self)
  let createdAt: Date
  @Column("file_path")
  var filePath: URL?
  @Column("is_live_photo")
  let isLivePhoto: Bool?
  @Column("last_error")
  let lastError: UploadErrorCode?
  @Column("live_photo_video_id")
  let livePhotoVideoId: RemoteAsset.ID?
  @Column("local_id")
  var localId: LocalAsset.ID?
  let method: UploadMethod
  var priority: Float
  @Column("retry_after", as: Date?.UnixTimeRepresentation.self)
  let retryAfter: Date?
  let status: TaskStatus

  static func retryOrFail(code: UploadErrorCode, status: TaskStatus) -> Update<UploadTask, ()> {
    return Self.update { row in
      row.status = Case().when(row.attempts.lte(TaskConfig.maxRetries), then: TaskStatus.downloadPending).else(status)
      row.attempts += 1
      row.lastError = code
      row.retryAfter = #sql("unixepoch('now') + (\(4 << row.attempts))")
    }
  }
}
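
The retryAfter expression above gives an exponential backoff: each failure defers the next attempt by 4 << attempts seconds (4 s when attempts is 0, doubling with every attempt), until attempts exceeds TaskConfig.maxRetries and the row is set to the status passed in by the caller instead. A minimal sketch of that schedule in Swift, shown only to make the shift expression concrete:

// Illustrative only: the delay encoded by `unixepoch('now') + (4 << attempts)`.
func nextRetryDate(afterAttempts attempts: Int, from now: Date = Date()) -> Date {
  let delaySeconds = 4 << attempts  // 4, 8, 16, 32, ... seconds
  return now.addingTimeInterval(TimeInterval(delaySeconds))
}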

@Table("upload_task_stats")
struct UploadTaskStat {
  @Column("pending_downloads")
  let pendingDownloads: Int
  @Column("pending_uploads")
  let pendingUploads: Int
  @Column("queued_downloads")
  let queuedDownloads: Int
  @Column("queued_uploads")
  let queuedUploads: Int
  @Column("failed_downloads")
  let failedDownloads: Int
  @Column("failed_uploads")
  let failedUploads: Int
  @Column("completed_uploads")
  let completedUploads: Int
  @Column("skipped_uploads")
  let skippedUploads: Int

  static let availableDownloadSlots = Self.select {
    TaskConfig.maxPendingDownloads - ($0.pendingDownloads + $0.queuedDownloads)
  }

  static let availableUploadSlots = Self.select {
    TaskConfig.maxPendingUploads - ($0.pendingUploads + $0.queuedUploads)
  }

  static let availableSlots = Self.select {
    TaskConfig.maxPendingUploads + TaskConfig.maxPendingDownloads
      - ($0.pendingDownloads + $0.queuedDownloads + $0.pendingUploads + $0.queuedUploads)
  }
}

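The three selects above expose the remaining headroom against the TaskConfig limits, so a scheduler can ask the database how many new tasks it may enqueue. A minimal usage sketch, assuming the DatabasePool.read and fetchOne calls used elsewhere in this diff; the function name is illustrative and not part of the change:

// Illustrative only: how many additional downloads can be queued right now.
func remainingDownloadSlots(db: DatabasePool) async throws -> Int {
  let slots = try await db.read { conn in
    try UploadTaskStat.availableDownloadSlots.fetchOne(conn) ?? 0
  }
  return max(0, slots)
}
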
@Table("user_entity")
struct User {
struct User: Identifiable {
  let id: String
  let name: String
  let email: String
  @Column("has_profile_image")
  let hasProfileImage: Bool
  @Column("profile_changed_at")
  let profileChangedAt: Date
  @Column("avatar_color")
  let avatarColor: AvatarColor
}

@@ -231,6 +450,7 @@ struct UserMetadata {

  @Selection
  struct ID {
    @Column("user_id")
    let userId: String
    let key: Date
  }

@@ -1,552 +0,0 @@
|
||||
// Autogenerated from Pigeon (v26.0.2), do not edit directly.
|
||||
// See also: https://pub.dev/packages/pigeon
|
||||
|
||||
import Foundation
|
||||
|
||||
#if os(iOS)
|
||||
import Flutter
|
||||
#elseif os(macOS)
|
||||
import FlutterMacOS
|
||||
#else
|
||||
#error("Unsupported platform.")
|
||||
#endif
|
||||
|
||||
/// Error class for passing custom error details to Dart side.
|
||||
final class PigeonError: Error {
|
||||
let code: String
|
||||
let message: String?
|
||||
let details: Sendable?
|
||||
|
||||
init(code: String, message: String?, details: Sendable?) {
|
||||
self.code = code
|
||||
self.message = message
|
||||
self.details = details
|
||||
}
|
||||
|
||||
var localizedDescription: String {
|
||||
return
|
||||
"PigeonError(code: \(code), message: \(message ?? "<nil>"), details: \(details ?? "<nil>")"
|
||||
}
|
||||
}
|
||||
|
||||
private func wrapResult(_ result: Any?) -> [Any?] {
|
||||
return [result]
|
||||
}
|
||||
|
||||
private func wrapError(_ error: Any) -> [Any?] {
|
||||
if let pigeonError = error as? PigeonError {
|
||||
return [
|
||||
pigeonError.code,
|
||||
pigeonError.message,
|
||||
pigeonError.details,
|
||||
]
|
||||
}
|
||||
if let flutterError = error as? FlutterError {
|
||||
return [
|
||||
flutterError.code,
|
||||
flutterError.message,
|
||||
flutterError.details,
|
||||
]
|
||||
}
|
||||
return [
|
||||
"\(error)",
|
||||
"\(type(of: error))",
|
||||
"Stacktrace: \(Thread.callStackSymbols)",
|
||||
]
|
||||
}
|
||||
|
||||
private func isNullish(_ value: Any?) -> Bool {
|
||||
return value is NSNull || value == nil
|
||||
}
|
||||
|
||||
private func nilOrValue<T>(_ value: Any?) -> T? {
|
||||
if value is NSNull { return nil }
|
||||
return value as! T?
|
||||
}
|
||||
|
||||
func deepEqualsMessages(_ lhs: Any?, _ rhs: Any?) -> Bool {
|
||||
let cleanLhs = nilOrValue(lhs) as Any?
|
||||
let cleanRhs = nilOrValue(rhs) as Any?
|
||||
switch (cleanLhs, cleanRhs) {
|
||||
case (nil, nil):
|
||||
return true
|
||||
|
||||
case (nil, _), (_, nil):
|
||||
return false
|
||||
|
||||
case is (Void, Void):
|
||||
return true
|
||||
|
||||
case let (cleanLhsHashable, cleanRhsHashable) as (AnyHashable, AnyHashable):
|
||||
return cleanLhsHashable == cleanRhsHashable
|
||||
|
||||
case let (cleanLhsArray, cleanRhsArray) as ([Any?], [Any?]):
|
||||
guard cleanLhsArray.count == cleanRhsArray.count else { return false }
|
||||
for (index, element) in cleanLhsArray.enumerated() {
|
||||
if !deepEqualsMessages(element, cleanRhsArray[index]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
|
||||
case let (cleanLhsDictionary, cleanRhsDictionary) as ([AnyHashable: Any?], [AnyHashable: Any?]):
|
||||
guard cleanLhsDictionary.count == cleanRhsDictionary.count else { return false }
|
||||
for (key, cleanLhsValue) in cleanLhsDictionary {
|
||||
guard cleanRhsDictionary.index(forKey: key) != nil else { return false }
|
||||
if !deepEqualsMessages(cleanLhsValue, cleanRhsDictionary[key]!) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
|
||||
default:
|
||||
// Any other type shouldn't be able to be used with pigeon. File an issue if you find this to be untrue.
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func deepHashMessages(value: Any?, hasher: inout Hasher) {
|
||||
if let valueList = value as? [AnyHashable] {
|
||||
for item in valueList { deepHashMessages(value: item, hasher: &hasher) }
|
||||
return
|
||||
}
|
||||
|
||||
if let valueDict = value as? [AnyHashable: AnyHashable] {
|
||||
for key in valueDict.keys {
|
||||
hasher.combine(key)
|
||||
deepHashMessages(value: valueDict[key]!, hasher: &hasher)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if let hashableValue = value as? AnyHashable {
|
||||
hasher.combine(hashableValue.hashValue)
|
||||
}
|
||||
|
||||
return hasher.combine(String(describing: value))
|
||||
}
|
||||
|
||||
|
||||
|
||||
/// Generated class from Pigeon that represents data sent in messages.
|
||||
struct PlatformAsset: Hashable {
|
||||
var id: String
|
||||
var name: String
|
||||
var type: Int64
|
||||
var createdAt: Int64? = nil
|
||||
var updatedAt: Int64? = nil
|
||||
var width: Int64? = nil
|
||||
var height: Int64? = nil
|
||||
var durationInSeconds: Int64
|
||||
var orientation: Int64
|
||||
var isFavorite: Bool
|
||||
|
||||
|
||||
// swift-format-ignore: AlwaysUseLowerCamelCase
|
||||
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformAsset? {
|
||||
let id = pigeonVar_list[0] as! String
|
||||
let name = pigeonVar_list[1] as! String
|
||||
let type = pigeonVar_list[2] as! Int64
|
||||
let createdAt: Int64? = nilOrValue(pigeonVar_list[3])
|
||||
let updatedAt: Int64? = nilOrValue(pigeonVar_list[4])
|
||||
let width: Int64? = nilOrValue(pigeonVar_list[5])
|
||||
let height: Int64? = nilOrValue(pigeonVar_list[6])
|
||||
let durationInSeconds = pigeonVar_list[7] as! Int64
|
||||
let orientation = pigeonVar_list[8] as! Int64
|
||||
let isFavorite = pigeonVar_list[9] as! Bool
|
||||
|
||||
return PlatformAsset(
|
||||
id: id,
|
||||
name: name,
|
||||
type: type,
|
||||
createdAt: createdAt,
|
||||
updatedAt: updatedAt,
|
||||
width: width,
|
||||
height: height,
|
||||
durationInSeconds: durationInSeconds,
|
||||
orientation: orientation,
|
||||
isFavorite: isFavorite
|
||||
)
|
||||
}
|
||||
func toList() -> [Any?] {
|
||||
return [
|
||||
id,
|
||||
name,
|
||||
type,
|
||||
createdAt,
|
||||
updatedAt,
|
||||
width,
|
||||
height,
|
||||
durationInSeconds,
|
||||
orientation,
|
||||
isFavorite,
|
||||
]
|
||||
}
|
||||
static func == (lhs: PlatformAsset, rhs: PlatformAsset) -> Bool {
|
||||
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
|
||||
func hash(into hasher: inout Hasher) {
|
||||
deepHashMessages(value: toList(), hasher: &hasher)
|
||||
}
|
||||
}
|
||||
|
||||
/// Generated class from Pigeon that represents data sent in messages.
|
||||
struct PlatformAlbum: Hashable {
|
||||
var id: String
|
||||
var name: String
|
||||
var updatedAt: Int64? = nil
|
||||
var isCloud: Bool
|
||||
var assetCount: Int64
|
||||
|
||||
|
||||
// swift-format-ignore: AlwaysUseLowerCamelCase
|
||||
static func fromList(_ pigeonVar_list: [Any?]) -> PlatformAlbum? {
|
||||
let id = pigeonVar_list[0] as! String
|
||||
let name = pigeonVar_list[1] as! String
|
||||
let updatedAt: Int64? = nilOrValue(pigeonVar_list[2])
|
||||
let isCloud = pigeonVar_list[3] as! Bool
|
||||
let assetCount = pigeonVar_list[4] as! Int64
|
||||
|
||||
return PlatformAlbum(
|
||||
id: id,
|
||||
name: name,
|
||||
updatedAt: updatedAt,
|
||||
isCloud: isCloud,
|
||||
assetCount: assetCount
|
||||
)
|
||||
}
|
||||
func toList() -> [Any?] {
|
||||
return [
|
||||
id,
|
||||
name,
|
||||
updatedAt,
|
||||
isCloud,
|
||||
assetCount,
|
||||
]
|
||||
}
|
||||
static func == (lhs: PlatformAlbum, rhs: PlatformAlbum) -> Bool {
|
||||
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
|
||||
func hash(into hasher: inout Hasher) {
|
||||
deepHashMessages(value: toList(), hasher: &hasher)
|
||||
}
|
||||
}
|
||||
|
||||
/// Generated class from Pigeon that represents data sent in messages.
|
||||
struct SyncDelta: Hashable {
|
||||
var hasChanges: Bool
|
||||
var updates: [PlatformAsset]
|
||||
var deletes: [String]
|
||||
var assetAlbums: [String: [String]]
|
||||
|
||||
|
||||
// swift-format-ignore: AlwaysUseLowerCamelCase
|
||||
static func fromList(_ pigeonVar_list: [Any?]) -> SyncDelta? {
|
||||
let hasChanges = pigeonVar_list[0] as! Bool
|
||||
let updates = pigeonVar_list[1] as! [PlatformAsset]
|
||||
let deletes = pigeonVar_list[2] as! [String]
|
||||
let assetAlbums = pigeonVar_list[3] as! [String: [String]]
|
||||
|
||||
return SyncDelta(
|
||||
hasChanges: hasChanges,
|
||||
updates: updates,
|
||||
deletes: deletes,
|
||||
assetAlbums: assetAlbums
|
||||
)
|
||||
}
|
||||
func toList() -> [Any?] {
|
||||
return [
|
||||
hasChanges,
|
||||
updates,
|
||||
deletes,
|
||||
assetAlbums,
|
||||
]
|
||||
}
|
||||
static func == (lhs: SyncDelta, rhs: SyncDelta) -> Bool {
|
||||
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
|
||||
func hash(into hasher: inout Hasher) {
|
||||
deepHashMessages(value: toList(), hasher: &hasher)
|
||||
}
|
||||
}
|
||||
|
||||
/// Generated class from Pigeon that represents data sent in messages.
|
||||
struct HashResult: Hashable {
|
||||
var assetId: String
|
||||
var error: String? = nil
|
||||
var hash: String? = nil
|
||||
|
||||
|
||||
// swift-format-ignore: AlwaysUseLowerCamelCase
|
||||
static func fromList(_ pigeonVar_list: [Any?]) -> HashResult? {
|
||||
let assetId = pigeonVar_list[0] as! String
|
||||
let error: String? = nilOrValue(pigeonVar_list[1])
|
||||
let hash: String? = nilOrValue(pigeonVar_list[2])
|
||||
|
||||
return HashResult(
|
||||
assetId: assetId,
|
||||
error: error,
|
||||
hash: hash
|
||||
)
|
||||
}
|
||||
func toList() -> [Any?] {
|
||||
return [
|
||||
assetId,
|
||||
error,
|
||||
hash,
|
||||
]
|
||||
}
|
||||
static func == (lhs: HashResult, rhs: HashResult) -> Bool {
|
||||
return deepEqualsMessages(lhs.toList(), rhs.toList()) }
|
||||
func hash(into hasher: inout Hasher) {
|
||||
deepHashMessages(value: toList(), hasher: &hasher)
|
||||
}
|
||||
}
|
||||
|
||||
private class MessagesPigeonCodecReader: FlutterStandardReader {
|
||||
override func readValue(ofType type: UInt8) -> Any? {
|
||||
switch type {
|
||||
case 129:
|
||||
return PlatformAsset.fromList(self.readValue() as! [Any?])
|
||||
case 130:
|
||||
return PlatformAlbum.fromList(self.readValue() as! [Any?])
|
||||
case 131:
|
||||
return SyncDelta.fromList(self.readValue() as! [Any?])
|
||||
case 132:
|
||||
return HashResult.fromList(self.readValue() as! [Any?])
|
||||
default:
|
||||
return super.readValue(ofType: type)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class MessagesPigeonCodecWriter: FlutterStandardWriter {
|
||||
override func writeValue(_ value: Any) {
|
||||
if let value = value as? PlatformAsset {
|
||||
super.writeByte(129)
|
||||
super.writeValue(value.toList())
|
||||
} else if let value = value as? PlatformAlbum {
|
||||
super.writeByte(130)
|
||||
super.writeValue(value.toList())
|
||||
} else if let value = value as? SyncDelta {
|
||||
super.writeByte(131)
|
||||
super.writeValue(value.toList())
|
||||
} else if let value = value as? HashResult {
|
||||
super.writeByte(132)
|
||||
super.writeValue(value.toList())
|
||||
} else {
|
||||
super.writeValue(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class MessagesPigeonCodecReaderWriter: FlutterStandardReaderWriter {
|
||||
override func reader(with data: Data) -> FlutterStandardReader {
|
||||
return MessagesPigeonCodecReader(data: data)
|
||||
}
|
||||
|
||||
override func writer(with data: NSMutableData) -> FlutterStandardWriter {
|
||||
return MessagesPigeonCodecWriter(data: data)
|
||||
}
|
||||
}
|
||||
|
||||
class MessagesPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable {
|
||||
static let shared = MessagesPigeonCodec(readerWriter: MessagesPigeonCodecReaderWriter())
|
||||
}
|
||||
|
||||
|
||||
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
|
||||
protocol NativeSyncApi {
|
||||
func shouldFullSync() throws -> Bool
|
||||
func getMediaChanges() throws -> SyncDelta
|
||||
func checkpointSync() throws
|
||||
func clearSyncCheckpoint() throws
|
||||
func getAssetIdsForAlbum(albumId: String) throws -> [String]
|
||||
func getAlbums() throws -> [PlatformAlbum]
|
||||
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64
|
||||
func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset]
|
||||
func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void)
|
||||
func cancelHashing() throws
|
||||
func getTrashedAssets() throws -> [String: [PlatformAsset]]
|
||||
}
|
||||
|
||||
/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
|
||||
class NativeSyncApiSetup {
|
||||
static var codec: FlutterStandardMessageCodec { MessagesPigeonCodec.shared }
|
||||
/// Sets up an instance of `NativeSyncApi` to handle messages through the `binaryMessenger`.
|
||||
static func setUp(binaryMessenger: FlutterBinaryMessenger, api: NativeSyncApi?, messageChannelSuffix: String = "") {
|
||||
let channelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : ""
|
||||
#if os(iOS)
|
||||
let taskQueue = binaryMessenger.makeBackgroundTaskQueue?()
|
||||
#else
|
||||
let taskQueue: FlutterTaskQueue? = nil
|
||||
#endif
|
||||
let shouldFullSyncChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.shouldFullSync\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
shouldFullSyncChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
let result = try api.shouldFullSync()
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
shouldFullSyncChannel.setMessageHandler(nil)
|
||||
}
|
||||
let getMediaChangesChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getMediaChanges\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getMediaChanges\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
getMediaChangesChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
let result = try api.getMediaChanges()
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
getMediaChangesChannel.setMessageHandler(nil)
|
||||
}
|
||||
let checkpointSyncChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.checkpointSync\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
checkpointSyncChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
try api.checkpointSync()
|
||||
reply(wrapResult(nil))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
checkpointSyncChannel.setMessageHandler(nil)
|
||||
}
|
||||
let clearSyncCheckpointChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.clearSyncCheckpoint\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
clearSyncCheckpointChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
try api.clearSyncCheckpoint()
|
||||
reply(wrapResult(nil))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
clearSyncCheckpointChannel.setMessageHandler(nil)
|
||||
}
|
||||
let getAssetIdsForAlbumChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetIdsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetIdsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
getAssetIdsForAlbumChannel.setMessageHandler { message, reply in
|
||||
let args = message as! [Any?]
|
||||
let albumIdArg = args[0] as! String
|
||||
do {
|
||||
let result = try api.getAssetIdsForAlbum(albumId: albumIdArg)
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
getAssetIdsForAlbumChannel.setMessageHandler(nil)
|
||||
}
|
||||
let getAlbumsChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAlbums\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAlbums\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
getAlbumsChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
let result = try api.getAlbums()
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
getAlbumsChannel.setMessageHandler(nil)
|
||||
}
|
||||
let getAssetsCountSinceChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsCountSince\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsCountSince\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
getAssetsCountSinceChannel.setMessageHandler { message, reply in
|
||||
let args = message as! [Any?]
|
||||
let albumIdArg = args[0] as! String
|
||||
let timestampArg = args[1] as! Int64
|
||||
do {
|
||||
let result = try api.getAssetsCountSince(albumId: albumIdArg, timestamp: timestampArg)
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
getAssetsCountSinceChannel.setMessageHandler(nil)
|
||||
}
|
||||
let getAssetsForAlbumChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getAssetsForAlbum\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
getAssetsForAlbumChannel.setMessageHandler { message, reply in
|
||||
let args = message as! [Any?]
|
||||
let albumIdArg = args[0] as! String
|
||||
let updatedTimeCondArg: Int64? = nilOrValue(args[1])
|
||||
do {
|
||||
let result = try api.getAssetsForAlbum(albumId: albumIdArg, updatedTimeCond: updatedTimeCondArg)
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
getAssetsForAlbumChannel.setMessageHandler(nil)
|
||||
}
|
||||
let hashAssetsChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.hashAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.hashAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
hashAssetsChannel.setMessageHandler { message, reply in
|
||||
let args = message as! [Any?]
|
||||
let assetIdsArg = args[0] as! [String]
|
||||
let allowNetworkAccessArg = args[1] as! Bool
|
||||
api.hashAssets(assetIds: assetIdsArg, allowNetworkAccess: allowNetworkAccessArg) { result in
|
||||
switch result {
|
||||
case .success(let res):
|
||||
reply(wrapResult(res))
|
||||
case .failure(let error):
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
hashAssetsChannel.setMessageHandler(nil)
|
||||
}
|
||||
let cancelHashingChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.cancelHashing\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
cancelHashingChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
try api.cancelHashing()
|
||||
reply(wrapResult(nil))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
cancelHashingChannel.setMessageHandler(nil)
|
||||
}
|
||||
let getTrashedAssetsChannel = taskQueue == nil
|
||||
? FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
: FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.NativeSyncApi.getTrashedAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec, taskQueue: taskQueue)
|
||||
if let api = api {
|
||||
getTrashedAssetsChannel.setMessageHandler { _, reply in
|
||||
do {
|
||||
let result = try api.getTrashedAssets()
|
||||
reply(wrapResult(result))
|
||||
} catch {
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
getTrashedAssetsChannel.setMessageHandler(nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,393 +1,580 @@
import Photos
import Algorithms
import CryptoKit
import Foundation
import Photos
import SQLiteData
import os.log

struct AssetWrapper: Hashable, Equatable {
  let asset: PlatformAsset

  init(with asset: PlatformAsset) {
    self.asset = asset
  }

  func hash(into hasher: inout Hasher) {
    hasher.combine(self.asset.id)
  }

  static func == (lhs: AssetWrapper, rhs: AssetWrapper) -> Bool {
    return lhs.asset.id == rhs.asset.id
  }
extension Notification.Name {
  static let localSyncDidComplete = Notification.Name("localSyncDidComplete")
}

class NativeSyncApiImpl: ImmichPlugin, NativeSyncApi, FlutterPlugin {
  static let name = "NativeSyncApi"
  enum LocalSyncError: Error {
    case photoAccessDenied, assetUpsertFailed, noChangeToken, unsupportedOS
    case unsupportedAssetType(Int)
  }

  static func register(with registrar: any FlutterPluginRegistrar) {
    let instance = NativeSyncApiImpl()
    NativeSyncApiSetup.setUp(binaryMessenger: registrar.messenger(), api: instance)
    registrar.publish(instance)
  }
  enum SyncConfig {
    static let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
    static let batchSize: Int = 5000
    static let changeTokenKey = "immich:changeToken"
    static let recoveredAlbumSubType = 1_000_000_219
    static let sortDescriptors = [NSSortDescriptor(key: "localIdentifier", ascending: true)]
  }

  func detachFromEngine(for registrar: any FlutterPluginRegistrar) {
    super.detachFromEngine()
  }
class LocalSyncService {
  private static let dateFormatter = ISO8601DateFormatter()

  private let defaults: UserDefaults
  private let changeTokenKey = "immich:changeToken"
  private let albumTypes: [PHAssetCollectionType] = [.album, .smartAlbum]
  private let recoveredAlbumSubType = 1000000219
  private let db: DatabasePool
  private let photoLibrary: PhotoLibraryProvider
  private let logger = Logger(subsystem: "com.immich.mobile", category: "LocalSync")

  private var hashTask: Task<Void?, Error>?
  private static let hashCancelledCode = "HASH_CANCELLED"
  private static let hashCancelled = Result<[HashResult], Error>.failure(PigeonError(code: hashCancelledCode, message: "Hashing cancelled", details: nil))


  init(with defaults: UserDefaults = .standard) {
  init(db: DatabasePool, photoLibrary: PhotoLibraryProvider, with defaults: UserDefaults = .standard) {
    self.defaults = defaults
    self.db = db
    self.photoLibrary = photoLibrary
  }
|
||||
@available(iOS 16, *)
|
||||
private func getChangeToken() -> PHPersistentChangeToken? {
|
||||
guard let data = defaults.data(forKey: changeTokenKey) else {
|
||||
return nil
|
||||
}
|
||||
return try? NSKeyedUnarchiver.unarchivedObject(ofClass: PHPersistentChangeToken.self, from: data)
|
||||
defaults.data(forKey: SyncConfig.changeTokenKey)
|
||||
.flatMap { try? NSKeyedUnarchiver.unarchivedObject(ofClass: PHPersistentChangeToken.self, from: $0) }
|
||||
}
|
||||
|
||||
@available(iOS 16, *)
|
||||
private func saveChangeToken(token: PHPersistentChangeToken) -> Void {
|
||||
private func saveChangeToken(token: PHPersistentChangeToken) {
|
||||
guard let data = try? NSKeyedArchiver.archivedData(withRootObject: token, requiringSecureCoding: true) else {
|
||||
return
|
||||
}
|
||||
defaults.set(data, forKey: changeTokenKey)
|
||||
defaults.set(data, forKey: SyncConfig.changeTokenKey)
|
||||
}
|
||||
|
||||
func clearSyncCheckpoint() -> Void {
|
||||
defaults.removeObject(forKey: changeTokenKey)
|
||||
func clearSyncCheckpoint() {
|
||||
defaults.removeObject(forKey: SyncConfig.changeTokenKey)
|
||||
}
|
||||
|
||||
func checkpointSync() {
|
||||
guard #available(iOS 16, *) else {
|
||||
return
|
||||
}
|
||||
saveChangeToken(token: PHPhotoLibrary.shared().currentChangeToken)
|
||||
guard #available(iOS 16, *) else { return }
|
||||
saveChangeToken(token: photoLibrary.currentChangeToken)
|
||||
}
|
||||
|
||||
func shouldFullSync() -> Bool {
|
||||
guard #available(iOS 16, *),
|
||||
PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized,
|
||||
let storedToken = getChangeToken() else {
|
||||
      // Fall back to a full sync when we lack photo library access, run on an older iOS version, or have no stored token.
|
||||
return true
|
||||
func sync(full: Bool = false) async throws {
|
||||
let start = Date()
|
||||
defer { logger.info("Sync completed in \(Int(Date().timeIntervalSince(start) * 1000))ms") }
|
||||
|
||||
guard !full, !shouldFullSync(), let delta = try? getMediaChanges(), delta.hasChanges
|
||||
else {
|
||||
logger.debug("Full sync: \(full ? "user requested" : "required")")
|
||||
return try await fullSync()
|
||||
}
|
||||
|
||||
guard let _ = try? PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken) else {
|
||||
// Cannot fetch persistent changes
|
||||
return true
|
||||
logger.debug("Delta sync: +\(delta.updates.count) -\(delta.deletes.count)")
|
||||
|
||||
let albumFetchOptions = PHFetchOptions()
|
||||
albumFetchOptions.predicate = NSPredicate(format: "assetCollectionSubtype != %d", SyncConfig.recoveredAlbumSubType)
|
||||
|
||||
try await db.write { conn in
|
||||
try #sql("pragma temp_store = 2").execute(conn)
|
||||
try #sql("create temp table current_albums(id text primary key) without rowid").execute(conn)
|
||||
|
||||
var cloudAlbums = [PHAssetCollection]()
|
||||
for type in SyncConfig.albumTypes {
|
||||
photoLibrary.fetchAlbums(with: type, subtype: .any, options: albumFetchOptions)
|
||||
.enumerateObjects { album, _, _ in
|
||||
try? CurrentAlbum.insert { CurrentAlbum(id: album.localIdentifier) }.execute(conn)
|
||||
try? upsertAlbum(album, conn: conn)
|
||||
if album.isCloud {
|
||||
cloudAlbums.append(album)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try LocalAlbum.delete().where { localAlbum in
|
||||
localAlbum.backupSelection.eq(BackupSelection.none) && !CurrentAlbum.where { $0.id == localAlbum.id }.exists()
|
||||
}.execute(conn)
|
||||
|
||||
for asset in delta.updates {
|
||||
try upsertAsset(asset, conn: conn)
|
||||
}
|
||||
|
||||
if !delta.deletes.isEmpty {
|
||||
try LocalAsset.delete().where { $0.id.in(delta.deletes) }.execute(conn)
|
||||
}
|
||||
|
||||
try self.updateAssetAlbumLinks(delta.assetAlbums, conn: conn)
|
||||
}
|
||||
|
||||
    // On iOS, cloud-backed albums need a full sync because the delta sync does not
    // include changes for them. If ignoreIcloudAssets is enabled, the cloud albums from
    // the previous sync are removed from the local database.
|
||||
if !cloudAlbums.isEmpty {
|
||||
try await syncCloudAlbums(cloudAlbums)
|
||||
}
|
||||
|
||||
checkpointSync()
|
||||
}
|
||||
|
||||
private func fullSync() async throws {
|
||||
let start = Date()
|
||||
defer { logger.info("Full sync completed in \(Int(Date().timeIntervalSince(start) * 1000))ms") }
|
||||
|
||||
let dbAlbumIds = try await db.read { conn in
|
||||
try LocalAlbum.all.select(\.id).order { $0.id }.fetchAll(conn)
|
||||
}
|
||||
|
||||
let albumFetchOptions = PHFetchOptions()
|
||||
albumFetchOptions.predicate = NSPredicate(format: "assetCollectionSubtype != %d", SyncConfig.recoveredAlbumSubType)
|
||||
albumFetchOptions.sortDescriptors = SyncConfig.sortDescriptors
|
||||
|
||||
let albums = photoLibrary.fetchAlbums(with: .album, subtype: .any, options: albumFetchOptions)
|
||||
let smartAlbums = photoLibrary.fetchAlbums(with: .smartAlbum, subtype: .any, options: albumFetchOptions)
|
||||
|
||||
try await withThrowingTaskGroup(of: Void.self) { group in
|
||||
var dbIndex = 0
|
||||
var albumIndex = 0
|
||||
var smartAlbumIndex = 0
|
||||
|
||||
// Three-pointer merge: dbAlbumIds, albums, smartAlbums
|
||||
while albumIndex < albums.count || smartAlbumIndex < smartAlbums.count {
|
||||
let currentAlbum = albumIndex < albums.count ? albums.object(at: albumIndex) : nil
|
||||
let currentSmartAlbum = smartAlbumIndex < smartAlbums.count ? smartAlbums.object(at: smartAlbumIndex) : nil
|
||||
|
||||
let useRegular =
|
||||
currentSmartAlbum == nil
|
||||
|| (currentAlbum != nil && currentAlbum!.localIdentifier < currentSmartAlbum!.localIdentifier)
|
||||
|
||||
let nextAlbum = useRegular ? currentAlbum! : currentSmartAlbum!
|
||||
let deviceId = nextAlbum.localIdentifier
|
||||
|
||||
while dbIndex < dbAlbumIds.count && dbAlbumIds[dbIndex] < deviceId {
|
||||
let albumToRemove = dbAlbumIds[dbIndex]
|
||||
group.addTask { try await self.removeAlbum(albumId: albumToRemove) }
|
||||
dbIndex += 1
|
||||
}
|
||||
|
||||
if dbIndex < dbAlbumIds.count && dbAlbumIds[dbIndex] == deviceId {
|
||||
group.addTask { try await self.syncAlbum(albumId: deviceId, deviceAlbum: nextAlbum) }
|
||||
dbIndex += 1
|
||||
} else {
|
||||
group.addTask { try await self.addAlbum(nextAlbum) }
|
||||
}
|
||||
|
||||
if useRegular {
|
||||
albumIndex += 1
|
||||
} else {
|
||||
smartAlbumIndex += 1
|
||||
}
|
||||
}
|
||||
|
||||
// Remove any remaining DB albums
|
||||
while dbIndex < dbAlbumIds.count {
|
||||
let albumToRemove = dbAlbumIds[dbIndex]
|
||||
group.addTask { try await self.removeAlbum(albumId: albumToRemove) }
|
||||
dbIndex += 1
|
||||
}
|
||||
|
||||
try await group.waitForAll()
|
||||
}
|
||||
|
||||
checkpointSync()
|
||||
}
|
||||
|
||||
private func shouldFullSync() -> Bool {
|
||||
guard #available(iOS 16, *), photoLibrary.isAuthorized, let token = getChangeToken(),
|
||||
(try? photoLibrary.fetchPersistentChanges(since: token)) != nil
|
||||
else {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func getAlbums() throws -> [PlatformAlbum] {
|
||||
var albums: [PlatformAlbum] = []
|
||||
private func addAlbum(_ album: PHAssetCollection) async throws {
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
|
||||
albumTypes.forEach { type in
|
||||
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
|
||||
for i in 0..<collections.count {
|
||||
let album = collections.object(at: i)
|
||||
|
||||
// Ignore recovered album
|
||||
if(album.assetCollectionSubtype.rawValue == self.recoveredAlbumSubType) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.sortDescriptors = [NSSortDescriptor(key: "modificationDate", ascending: false)]
|
||||
options.includeHiddenAssets = false
|
||||
|
||||
let assets = getAssetsFromAlbum(in: album, options: options)
|
||||
|
||||
let isCloud = album.assetCollectionSubtype == .albumCloudShared || album.assetCollectionSubtype == .albumMyPhotoStream
|
||||
|
||||
var domainAlbum = PlatformAlbum(
|
||||
id: album.localIdentifier,
|
||||
name: album.localizedTitle!,
|
||||
updatedAt: nil,
|
||||
isCloud: isCloud,
|
||||
assetCount: Int64(assets.count)
|
||||
)
|
||||
|
||||
if let firstAsset = assets.firstObject {
|
||||
domainAlbum.updatedAt = firstAsset.modificationDate.map { Int64($0.timeIntervalSince1970) }
|
||||
}
|
||||
|
||||
albums.append(domainAlbum)
|
||||
}
|
||||
if let timestamp = album.updatedAt {
|
||||
let date = timestamp as NSDate
|
||||
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
|
||||
}
|
||||
|
||||
let result = photoLibrary.fetchAssets(in: album, options: options)
|
||||
try await self.db.write { conn in
|
||||
try upsertStreamedAssets(result: result, albumId: album.localIdentifier, conn: conn)
|
||||
}
|
||||
return albums.sorted { $0.id < $1.id }
|
||||
}
|
||||
|
||||
func getMediaChanges() throws -> SyncDelta {
|
||||
guard #available(iOS 16, *) else {
|
||||
throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature requires iOS 16 or later.", details: nil)
|
||||
private func upsertStreamedAssets(result: PHFetchResult<PHAsset>, albumId: String, conn: Database) throws {
|
||||
result.enumerateObjects { asset, _, stop in
|
||||
do {
|
||||
try self.upsertAsset(asset, conn: conn)
|
||||
try self.linkAsset(asset.localIdentifier, toAlbum: albumId, conn: conn)
|
||||
} catch {
|
||||
stop.pointee = true
|
||||
}
|
||||
}
|
||||
    if conn.lastErrorMessage != nil {
|
||||
throw LocalSyncError.assetUpsertFailed
|
||||
}
|
||||
}
|
||||
|
||||
/// Remove all assets that are only in this particular album.
|
||||
/// We cannot remove all assets in the album because they might be in other albums in iOS.
|
||||
private func removeAlbum(albumId: String) async throws {
|
||||
try await db.write { conn in
|
||||
try LocalAsset.delete().where { $0.id.in(LocalAlbumAsset.uniqueAssetIds(albumId: albumId)) }.execute(conn)
|
||||
try LocalAlbum.delete()
|
||||
.where { $0.id.eq(albumId) && $0.backupSelection.eq(BackupSelection.none) }
|
||||
.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
private func syncAlbum(albumId: String, deviceAlbum: PHAssetCollection) async throws {
|
||||
let dbAlbum = try await db.read { conn in
|
||||
try LocalAlbum.all.where { $0.id.eq(albumId) }.fetchOne(conn)
|
||||
}
|
||||
guard let dbAlbum else { return try await addAlbum(deviceAlbum) }
|
||||
|
||||
// Check if unchanged
|
||||
guard dbAlbum.name != deviceAlbum.localizedTitle || dbAlbum.updatedAt != deviceAlbum.updatedAt
|
||||
else { return }
|
||||
|
||||
try await fullDiffAlbum(dbAlbum: dbAlbum, deviceAlbum: deviceAlbum)
|
||||
}
|
||||
|
||||
private func fullDiffAlbum(dbAlbum: LocalAlbum, deviceAlbum: PHAssetCollection) async throws {
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
let date = dbAlbum.updatedAt as NSDate
|
||||
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
|
||||
options.sortDescriptors = SyncConfig.sortDescriptors
|
||||
|
||||
var deviceAssetIds: [String] = []
|
||||
let result = photoLibrary.fetchAssets(in: deviceAlbum, options: options)
|
||||
result.enumerateObjects { asset, _, _ in
|
||||
deviceAssetIds.append(asset.localIdentifier)
|
||||
}
|
||||
|
||||
guard PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized else {
|
||||
throw PigeonError(code: "NO_AUTH", message: "No photo library access", details: nil)
|
||||
let dbAssetIds = try await db.read { conn in
|
||||
try LocalAlbumAsset.all
|
||||
.where { $0.id.albumId.eq(dbAlbum.id) }
|
||||
.select(\.id.assetId)
|
||||
.order { $0.id.assetId }
|
||||
.fetchAll(conn)
|
||||
}
|
||||
|
||||
let (toFetch, toDelete) = diffSortedArrays(dbAssetIds, deviceAssetIds)
|
||||
guard !toFetch.isEmpty || !toDelete.isEmpty else { return }
|
||||
|
||||
logger.debug("Syncing \(deviceAlbum.localizedTitle ?? "album"): +\(toFetch.count) -\(toDelete.count)")
|
||||
|
||||
try await db.write { conn in
|
||||
try self.updateAlbum(deviceAlbum, conn: conn)
|
||||
}
|
||||
|
||||
for batch in toFetch.chunks(ofCount: SyncConfig.batchSize) {
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
let result = photoLibrary.fetchAssets(withLocalIdentifiers: Array(batch), options: options)
|
||||
|
||||
try await db.write { conn in
|
||||
try upsertStreamedAssets(result: result, albumId: deviceAlbum.localIdentifier, conn: conn)
|
||||
}
|
||||
}
|
||||
|
||||
guard !toDelete.isEmpty else { return }
|
||||
|
||||
let uniqueAssetIds = try await db.read { conn in
|
||||
return try LocalAlbumAsset.uniqueAssetIds(albumId: deviceAlbum.localIdentifier).fetchAll(conn)
|
||||
}
|
||||
|
||||
// Delete unique assets and unlink others
|
||||
var toDeleteSet = Set(toDelete)
|
||||
let uniqueIds = toDeleteSet.intersection(uniqueAssetIds)
|
||||
toDeleteSet.subtract(uniqueIds)
|
||||
let toUnlink = toDeleteSet
|
||||
guard !toDeleteSet.isEmpty || !toUnlink.isEmpty else { return }
|
||||
try await db.write { conn in
|
||||
if !uniqueIds.isEmpty {
|
||||
try LocalAsset.delete().where { $0.id.in(Array(uniqueIds)) }.execute(conn)
|
||||
}
|
||||
|
||||
if !toUnlink.isEmpty {
|
||||
try LocalAlbumAsset.delete()
|
||||
.where { $0.id.assetId.in(Array(toUnlink)) && $0.id.albumId.eq(deviceAlbum.localIdentifier) }
|
||||
.execute(conn)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func syncCloudAlbums(_ albums: [PHAssetCollection]) async throws {
|
||||
try await withThrowingTaskGroup(of: Void.self) { group in
|
||||
for album in albums {
|
||||
group.addTask {
|
||||
let dbAlbum = try await self.db.read { conn in
|
||||
try LocalAlbum.all.where { $0.id.eq(album.localIdentifier) }.fetchOne(conn)
|
||||
}
|
||||
|
||||
guard let dbAlbum else { return }
|
||||
|
||||
let deviceIds = try self.getAssetIdsForAlbum(albumId: album.localIdentifier)
|
||||
let dbIds = try await self.db.read { conn in
|
||||
try LocalAlbumAsset.all
|
||||
.where { $0.id.albumId.eq(album.localIdentifier) }
|
||||
.select(\.id.assetId)
|
||||
.order { $0.id.assetId }
|
||||
.fetchAll(conn)
|
||||
}
|
||||
|
||||
guard deviceIds != dbIds else { return }
|
||||
try await self.fullDiffAlbum(dbAlbum: dbAlbum, deviceAlbum: album)
|
||||
}
|
||||
}
|
||||
|
||||
try await group.waitForAll()
|
||||
}
|
||||
}
|
||||
|
||||
private func upsertAlbum(_ album: PHAssetCollection, conn: Database) throws {
|
||||
try LocalAlbum.insert {
|
||||
LocalAlbum(
|
||||
id: album.localIdentifier,
|
||||
backupSelection: .none,
|
||||
linkedRemoteAlbumId: nil,
|
||||
marker_: nil,
|
||||
name: album.localizedTitle ?? "",
|
||||
isIosSharedAlbum: album.isCloud,
|
||||
updatedAt: album.updatedAt ?? Date()
|
||||
)
|
||||
} onConflict: {
|
||||
$0.id
|
||||
} doUpdate: { old, new in
|
||||
old.name = new.name
|
||||
old.updatedAt = new.updatedAt
|
||||
old.isIosSharedAlbum = new.isIosSharedAlbum
|
||||
old.marker_ = new.marker_
|
||||
}.execute(conn)
|
||||
}
|
||||
|
||||
private func updateAlbum(_ album: PHAssetCollection, conn: Database) throws {
|
||||
try LocalAlbum.update { row in
|
||||
row.name = album.localizedTitle ?? ""
|
||||
row.updatedAt = album.updatedAt ?? Date()
|
||||
row.isIosSharedAlbum = album.isCloud
|
||||
}.where { $0.id.eq(album.localIdentifier) }.execute(conn)
|
||||
}
|
||||
|
||||
private func upsertAsset(_ asset: PHAsset, conn: Database) throws {
|
||||
guard let assetType = AssetType(rawValue: asset.mediaType.rawValue) else {
|
||||
throw LocalSyncError.unsupportedAssetType(asset.mediaType.rawValue)
|
||||
}
|
||||
let dateStr = Self.dateFormatter.string(from: asset.creationDate ?? Date())
|
||||
|
||||
try LocalAsset.insert {
|
||||
LocalAsset(
|
||||
id: asset.localIdentifier,
|
||||
checksum: nil,
|
||||
createdAt: dateStr,
|
||||
durationInSeconds: Int64(asset.duration),
|
||||
height: asset.pixelHeight,
|
||||
isFavorite: asset.isFavorite,
|
||||
name: asset.title,
|
||||
orientation: "0",
|
||||
type: assetType,
|
||||
updatedAt: dateStr,
|
||||
width: asset.pixelWidth
|
||||
)
|
||||
} onConflict: {
|
||||
$0.id
|
||||
} doUpdate: { old, new in
|
||||
old.name = new.name
|
||||
old.type = new.type
|
||||
old.updatedAt = new.updatedAt
|
||||
old.width = new.width
|
||||
old.height = new.height
|
||||
old.durationInSeconds = new.durationInSeconds
|
||||
old.isFavorite = new.isFavorite
|
||||
old.orientation = new.orientation
|
||||
}.execute(conn)
|
||||
}
|
||||
|
||||
private func linkAsset(_ assetId: String, toAlbum albumId: String, conn: Database) throws {
|
||||
try LocalAlbumAsset.insert {
|
||||
LocalAlbumAsset(id: LocalAlbumAsset.ID(assetId: assetId, albumId: albumId), marker_: nil)
|
||||
} onConflict: {
|
||||
($0.id.assetId, $0.id.albumId)
|
||||
}.execute(conn)
|
||||
}
|
||||
|
||||
private func updateAssetAlbumLinks(_ assetAlbums: [String: [String]], conn: Database) throws {
|
||||
for (assetId, albumIds) in assetAlbums {
|
||||
// Delete old links not in the new set
|
||||
try LocalAlbumAsset.delete()
|
||||
.where { $0.id.assetId.eq(assetId) && !$0.id.albumId.in(albumIds) }
|
||||
.execute(conn)
|
||||
|
||||
// Insert new links
|
||||
for albumId in albumIds {
|
||||
try LocalAlbumAsset.insert {
|
||||
LocalAlbumAsset(id: LocalAlbumAsset.ID(assetId: assetId, albumId: albumId), marker_: nil)
|
||||
} onConflict: {
|
||||
($0.id.assetId, $0.id.albumId)
|
||||
}.execute(conn)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func fetchAssetsByIds(_ ids: [String]) throws -> [PHAsset] {
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
let result = photoLibrary.fetchAssets(withLocalIdentifiers: ids, options: options)
|
||||
|
||||
var assets: [PHAsset] = []
|
||||
assets.reserveCapacity(ids.count)
|
||||
result.enumerateObjects { asset, _, _ in assets.append(asset) }
|
||||
|
||||
return assets
|
||||
}
|
||||
|
||||
private func getMediaChanges() throws -> NativeSyncDelta {
|
||||
guard #available(iOS 16, *) else {
|
||||
throw LocalSyncError.unsupportedOS
|
||||
}
|
||||
|
||||
guard photoLibrary.isAuthorized else {
|
||||
throw LocalSyncError.photoAccessDenied
|
||||
}
|
||||
|
||||
guard let storedToken = getChangeToken() else {
|
||||
// No token exists, definitely need a full sync
|
||||
print("MediaManager::getMediaChanges: No token found")
|
||||
throw PigeonError(code: "NO_TOKEN", message: "No stored change token", details: nil)
|
||||
throw LocalSyncError.noChangeToken
|
||||
}
|
||||
|
||||
let currentToken = PHPhotoLibrary.shared().currentChangeToken
|
||||
if storedToken == currentToken {
|
||||
return SyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
|
||||
let currentToken = photoLibrary.currentChangeToken
|
||||
guard storedToken != currentToken else {
|
||||
return NativeSyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
|
||||
}
|
||||
|
||||
do {
|
||||
let changes = try PHPhotoLibrary.shared().fetchPersistentChanges(since: storedToken)
|
||||
let changes = try photoLibrary.fetchPersistentChanges(since: storedToken)
|
||||
var updatedIds = Set<String>()
|
||||
var deletedIds = Set<String>()
|
||||
|
||||
var updatedAssets: Set<AssetWrapper> = []
|
||||
var deletedAssets: Set<String> = []
|
||||
|
||||
for change in changes {
|
||||
guard let details = try? change.changeDetails(for: PHObjectType.asset) else { continue }
|
||||
|
||||
let updated = details.updatedLocalIdentifiers.union(details.insertedLocalIdentifiers)
|
||||
deletedAssets.formUnion(details.deletedLocalIdentifiers)
|
||||
|
||||
if (updated.isEmpty) { continue }
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
let result = PHAsset.fetchAssets(withLocalIdentifiers: Array(updated), options: options)
|
||||
for i in 0..<result.count {
|
||||
let asset = result.object(at: i)
|
||||
|
||||
        // AssetWrapper compares by id only. Multiple changes can contain the same asset, so skip duplicate changes.
|
||||
let predicate = PlatformAsset(
|
||||
id: asset.localIdentifier,
|
||||
name: "",
|
||||
type: 0,
|
||||
durationInSeconds: 0,
|
||||
orientation: 0,
|
||||
isFavorite: false
|
||||
)
|
||||
if (updatedAssets.contains(AssetWrapper(with: predicate))) {
|
||||
continue
|
||||
}
|
||||
|
||||
let domainAsset = AssetWrapper(with: asset.toPlatformAsset())
|
||||
updatedAssets.insert(domainAsset)
|
||||
}
|
||||
}
|
||||
|
||||
let updates = Array(updatedAssets.map { $0.asset })
|
||||
return SyncDelta(hasChanges: true, updates: updates, deletes: Array(deletedAssets), assetAlbums: buildAssetAlbumsMap(assets: updates))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private func buildAssetAlbumsMap(assets: Array<PlatformAsset>) -> [String: [String]] {
|
||||
guard !assets.isEmpty else {
|
||||
return [:]
|
||||
for change in changes {
|
||||
guard let details = try? change.changeDetails(for: PHObjectType.asset) else { continue }
|
||||
updatedIds.formUnion(details.updatedLocalIdentifiers.union(details.insertedLocalIdentifiers))
|
||||
deletedIds.formUnion(details.deletedLocalIdentifiers)
|
||||
}
|
||||
|
||||
var albumAssets: [String: [String]] = [:]
|
||||
|
||||
for type in albumTypes {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
|
||||
collections.enumerateObjects { (album, _, _) in
|
||||
let options = PHFetchOptions()
|
||||
options.predicate = NSPredicate(format: "localIdentifier IN %@", assets.map(\.id))
|
||||
options.includeHiddenAssets = false
|
||||
let result = self.getAssetsFromAlbum(in: album, options: options)
|
||||
result.enumerateObjects { (asset, _, _) in
|
||||
albumAssets[asset.localIdentifier, default: []].append(album.localIdentifier)
|
||||
}
|
||||
}
|
||||
}
|
||||
return albumAssets
|
||||
}
|
||||
|
||||
func getAssetIdsForAlbum(albumId: String) throws -> [String] {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return []
|
||||
guard !updatedIds.isEmpty || !deletedIds.isEmpty else {
|
||||
return NativeSyncDelta(hasChanges: false, updates: [], deletes: [], assetAlbums: [:])
|
||||
}
|
||||
|
||||
var ids: [String] = []
|
||||
let updatedIdArray = Array(updatedIds)
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
let assets = getAssetsFromAlbum(in: album, options: options)
|
||||
assets.enumerateObjects { (asset, _, _) in
|
||||
let result = photoLibrary.fetchAssets(withLocalIdentifiers: updatedIdArray, options: options)
|
||||
|
||||
var updates: [PHAsset] = []
|
||||
result.enumerateObjects { asset, _, _ in updates.append(asset) }
|
||||
|
||||
return NativeSyncDelta(
|
||||
hasChanges: true,
|
||||
updates: updates,
|
||||
deletes: Array(deletedIds),
|
||||
assetAlbums: buildAssetAlbumsMap(assetIds: updatedIdArray)
|
||||
)
|
||||
}
|
||||
|
||||
private func buildAssetAlbumsMap(assetIds: [String]) -> [String: [String]] {
|
||||
guard !assetIds.isEmpty else { return [:] }
|
||||
|
||||
var result: [String: [String]] = [:]
|
||||
let options = PHFetchOptions()
|
||||
options.predicate = NSPredicate(format: "localIdentifier IN %@", assetIds)
|
||||
options.includeHiddenAssets = false
|
||||
|
||||
for type in SyncConfig.albumTypes {
|
||||
photoLibrary.fetchAssetCollections(with: type, subtype: .any, options: nil)
|
||||
.enumerateObjects { album, _, _ in
|
||||
photoLibrary.fetchAssets(in: album, options: options)
|
||||
.enumerateObjects { asset, _, _ in
|
||||
result[asset.localIdentifier, default: []].append(album.localIdentifier)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
private func getAssetIdsForAlbum(albumId: String) throws -> [String] {
|
||||
guard let album = photoLibrary.fetchAssetCollection(albumId: albumId, options: nil) else { return [] }
|
||||
|
||||
let options = PHFetchOptions()
|
||||
options.includeHiddenAssets = false
|
||||
options.sortDescriptors = [NSSortDescriptor(key: "localIdentifier", ascending: true)]
|
||||
|
||||
var ids: [String] = []
|
||||
photoLibrary.fetchAssets(in: album, options: options).enumerateObjects { asset, _, _ in
|
||||
ids.append(asset.localIdentifier)
|
||||
}
|
||||
return ids
|
||||
}
|
||||
|
||||
func getAssetsCountSince(albumId: String, timestamp: Int64) throws -> Int64 {
|
||||
let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
|
||||
guard let album = collections.firstObject else {
|
||||
return 0
|
||||
}
|
||||
|
||||
let date = NSDate(timeIntervalSince1970: TimeInterval(timestamp))
|
||||
let options = PHFetchOptions()
|
||||
options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
|
||||
options.includeHiddenAssets = false
|
||||
let assets = getAssetsFromAlbum(in: album, options: options)
|
||||
return Int64(assets.count)
|
||||
}
|
||||
|
||||
  func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PlatformAsset] {
    let collections = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: nil)
    guard let album = collections.firstObject else {
      return []
    }
  private func getAssetsForAlbum(albumId: String, updatedTimeCond: Int64?) throws -> [PHAsset] {
    guard let album = photoLibrary.fetchAssetCollection(albumId: albumId, options: nil) else { return [] }

    let options = PHFetchOptions()
    options.includeHiddenAssets = false
    if(updatedTimeCond != nil) {
      let date = NSDate(timeIntervalSince1970: TimeInterval(updatedTimeCond!))
      options.predicate = NSPredicate(format: "creationDate > %@ OR modificationDate > %@", date, date)
    if let timestamp = updatedTimeCond {
      let date = Date(timeIntervalSince1970: TimeInterval(timestamp))
      options.predicate = NSPredicate(
        format: "creationDate > %@ OR modificationDate > %@",
        date as NSDate,
        date as NSDate
      )
    }

    let result = getAssetsFromAlbum(in: album, options: options)
    if(result.count == 0) {
      return []
    }
    let result = photoLibrary.fetchAssets(in: album, options: options)
    var assets: [PHAsset] = []
    result.enumerateObjects { asset, _, _ in assets.append(asset) }

    var assets: [PlatformAsset] = []
    result.enumerateObjects { (asset, _, _) in
      assets.append(asset.toPlatformAsset())
    }
    return assets
  }

  func hashAssets(assetIds: [String], allowNetworkAccess: Bool, completion: @escaping (Result<[HashResult], Error>) -> Void) {
    if let prevTask = hashTask {
      prevTask.cancel()
      hashTask = nil
    }
    hashTask = Task { [weak self] in
      var missingAssetIds = Set(assetIds)
      var assets = [PHAsset]()
      assets.reserveCapacity(assetIds.count)
      PHAsset.fetchAssets(withLocalIdentifiers: assetIds, options: nil).enumerateObjects { (asset, _, stop) in
        if Task.isCancelled {
          stop.pointee = true
          return
        }
        missingAssetIds.remove(asset.localIdentifier)
        assets.append(asset)
      }

      if Task.isCancelled {
        return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
      }

      await withTaskGroup(of: HashResult?.self) { taskGroup in
        var results = [HashResult]()
        results.reserveCapacity(assets.count)
        for asset in assets {
          if Task.isCancelled {
            return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
          }
          taskGroup.addTask {
            guard let self = self else { return nil }
            return await self.hashAsset(asset, allowNetworkAccess: allowNetworkAccess)
          }
        }

        for await result in taskGroup {
          guard let result = result else {
            return self?.completeWhenActive(for: completion, with: Self.hashCancelled)
          }
          results.append(result)
        }

        for missing in missingAssetIds {
          results.append(HashResult(assetId: missing, error: "Asset not found in library", hash: nil))
        }

        return self?.completeWhenActive(for: completion, with: .success(results))
      }
    }
  }

  func cancelHashing() {
    hashTask?.cancel()
    hashTask = nil
  }

  private func hashAsset(_ asset: PHAsset, allowNetworkAccess: Bool) async -> HashResult? {
    class RequestRef {
      var id: PHAssetResourceDataRequestID?
    }
    let requestRef = RequestRef()
    return await withTaskCancellationHandler(operation: {
      if Task.isCancelled {
        return nil
      }

      guard let resource = asset.getResource() else {
        return HashResult(assetId: asset.localIdentifier, error: "Cannot get asset resource", hash: nil)
      }

      if Task.isCancelled {
        return nil
      }

      let options = PHAssetResourceRequestOptions()
      options.isNetworkAccessAllowed = allowNetworkAccess

      return await withCheckedContinuation { continuation in
        var hasher = Insecure.SHA1()

        requestRef.id = PHAssetResourceManager.default().requestData(
          for: resource,
          options: options,
          dataReceivedHandler: { data in
            hasher.update(data: data)
          },
          completionHandler: { error in
            let result: HashResult? = switch (error) {
            case let e as PHPhotosError where e.code == .userCancelled: nil
            case let .some(e): HashResult(
              assetId: asset.localIdentifier,
              error: "Failed to hash asset: \(e.localizedDescription)",
              hash: nil
            )
            case .none:
              HashResult(
                assetId: asset.localIdentifier,
                error: nil,
                hash: Data(hasher.finalize()).base64EncodedString()
              )
            }
            continuation.resume(returning: result)
          }
        )
      }
    }, onCancel: {
      guard let requestId = requestRef.id else { return }
      PHAssetResourceManager.default().cancelDataRequest(requestId)
    })
  }

  func getTrashedAssets() throws -> [String: [PlatformAsset]] {
    throw PigeonError(code: "UNSUPPORTED_OS", message: "This feature not supported on iOS.", details: nil)
  }

  private func getAssetsFromAlbum(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> {
    // Ensure we actually get all assets for the Recents album
    if (album.assetCollectionSubtype == .smartAlbumUserLibrary) {
      return PHAsset.fetchAssets(with: options)
    } else {
      return PHAsset.fetchAssets(in: album, options: options)
    }
  }
}

func diffSortedArrays<T: Comparable & Hashable>(_ a: [T], _ b: [T]) -> (toAdd: [T], toRemove: [T]) {
  var toAdd: [T] = []
  var toRemove: [T] = []
  var i = 0
  var j = 0

  while i < a.count && j < b.count {
    if a[i] < b[j] {
      toRemove.append(a[i])
      i += 1
    } else if b[j] < a[i] {
      toAdd.append(b[j])
      j += 1
    } else {
      i += 1
      j += 1
    }
  }

  toRemove.append(contentsOf: a[i...])
  toAdd.append(contentsOf: b[j...])

  return (toAdd, toRemove)
}

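A quick usage sketch of diffSortedArrays (the identifier arrays below are made up for illustration; both inputs must already be sorted ascending):

// Illustrative only: compare previously-known ids against the current library ids.
let previousIds = ["A", "B", "D"]
let currentIds = ["B", "C", "D"]
let (toAdd, toRemove) = diffSortedArrays(previousIds, currentIds)
// toAdd == ["C"] (only in the new list), toRemove == ["A"] (only in the old list)
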
|
||||
private struct NativeSyncDelta: Hashable {
  var hasChanges: Bool
  var updates: [PHAsset]
  var deletes: [String]
  var assetAlbums: [String: [String]]
}

/// Temp table to avoid parameter limit for album changes.
@Table("current_albums")
private struct CurrentAlbum {
  let id: String
}

@@ -1,21 +1,6 @@
import Photos

extension PHAsset {
  func toPlatformAsset() -> PlatformAsset {
    return PlatformAsset(
      id: localIdentifier,
      name: title,
      type: Int64(mediaType.rawValue),
      createdAt: creationDate.map { Int64($0.timeIntervalSince1970) },
      updatedAt: modificationDate.map { Int64($0.timeIntervalSince1970) },
      width: Int64(pixelWidth),
      height: Int64(pixelHeight),
      durationInSeconds: Int64(duration),
      orientation: 0,
      isFavorite: isFavorite
    )
  }

  var title: String {
    return filename ?? originalFilename ?? "<unknown>"
  }
@@ -52,6 +37,23 @@ extension PHAsset {

    return nil
  }

  func getLivePhotoResource() -> PHAssetResource? {
    let resources = PHAssetResource.assetResources(for: self)

    var livePhotoResource: PHAssetResource?
    for resource in resources {
      if resource.type == .fullSizePairedVideo {
        return resource
      }

      if resource.type == .pairedVideo {
        livePhotoResource = resource
      }
    }

    return livePhotoResource
  }

  private func isValidResourceType(_ type: PHAssetResourceType) -> Bool {
    switch mediaType {
@@ -75,3 +77,37 @@ extension PHAsset {
    }
  }
}

extension PHAssetCollection {
  private static let latestAssetOptions: PHFetchOptions = {
    let options = PHFetchOptions()
    options.includeHiddenAssets = false
    options.sortDescriptors = [NSSortDescriptor(key: "modificationDate", ascending: false)]
    options.fetchLimit = 1
    return options
  }()

  var isCloud: Bool { assetCollectionSubtype == .albumCloudShared || assetCollectionSubtype == .albumMyPhotoStream }

  var updatedAt: Date? {
    let result: PHFetchResult<PHAsset>
    if assetCollectionSubtype == .smartAlbumUserLibrary {
      result = PHAsset.fetchAssets(with: Self.latestAssetOptions)
    } else {
      result = PHAsset.fetchAssets(in: self, options: Self.latestAssetOptions)
    }

    return result.firstObject?.modificationDate
  }

  static func fetchAssetCollection(albumId: String, options: PHFetchOptions? = nil) -> PHAssetCollection? {
    let albums = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: options)
    return albums.firstObject
  }

  static func fetchAssets(in album: PHAssetCollection, options: PHFetchOptions) -> PHFetchResult<PHAsset> {
    album.assetCollectionSubtype == .smartAlbumUserLibrary
      ? PHAsset.fetchAssets(with: options)
      : PHAsset.fetchAssets(in: album, options: options)
  }
}

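As a small usage sketch (illustrative only; `ids` is a hypothetical list of local identifiers), converting a fetch result into the Pigeon PlatformAsset values sent to Dart looks like:

var platformAssets: [PlatformAsset] = []
PHAsset.fetchAssets(withLocalIdentifiers: ids, options: nil).enumerateObjects { asset, _, _ in
  platformAssets.append(asset.toPlatformAsset())
}
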
mobile/ios/Runner/Sync/PhotoLibrary.swift (new file, 53 lines)
@@ -0,0 +1,53 @@
import Photos

protocol PhotoLibraryProvider {
  var isAuthorized: Bool { get }
  @available(iOS 16, *)
  var currentChangeToken: PHPersistentChangeToken { get }

  func fetchAlbums(sorted: Bool) -> [PHAssetCollection]
  func fetchAlbums(with type: PHAssetCollectionType, subtype: PHAssetCollectionSubtype, options: PHFetchOptions?) -> PHFetchResult<PHAssetCollection>
  func fetchAssets(in album: PHAssetCollection, options: PHFetchOptions?) -> PHFetchResult<PHAsset>
  func fetchAssets(withIdentifiers ids: [String], options: PHFetchOptions?) -> PHFetchResult<PHAsset>
  @available(iOS 16, *)
  func fetchPersistentChanges(since token: PHPersistentChangeToken) throws -> PHPersistentChangeFetchResult
}

struct PhotoLibrary: PhotoLibraryProvider {
  static let shared: PhotoLibrary = .init()

  private init() {}

  func fetchAlbums(with type: PHAssetCollectionType, subtype: PHAssetCollectionSubtype, options: PHFetchOptions?) -> PHFetchResult<PHAssetCollection> {
    PHAssetCollection.fetchAssetCollections(with: type, subtype: subtype, options: options)
  }

  func fetchAssetCollection(albumId: String, options: PHFetchOptions? = nil) -> PHAssetCollection? {
    let albums = PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [albumId], options: options)
    return albums.firstObject
  }

  func fetchAssets(in album: PHAssetCollection, options: PHFetchOptions?) -> PHFetchResult<PHAsset> {
    album.assetCollectionSubtype == .smartAlbumUserLibrary
      ? PHAsset.fetchAssets(with: options)
      : PHAsset.fetchAssets(in: album, options: options)
  }

  func fetchAssets(withIdentifiers ids: [String], options: PHFetchOptions?) -> PHFetchResult<PHAsset> {
    PHAsset.fetchAssets(withLocalIdentifiers: ids, options: options)
  }

  @available(iOS 16, *)
  func fetchPersistentChanges(since token: PHPersistentChangeToken) throws -> PHPersistentChangeFetchResult {
    try PHPhotoLibrary.shared().fetchPersistentChanges(since: token)
  }

  @available(iOS 16, *)
  var currentChangeToken: PHPersistentChangeToken {
    PHPhotoLibrary.shared().currentChangeToken
  }

  var isAuthorized: Bool {
    PHPhotoLibrary.authorizationStatus(for: .readWrite) == .authorized
  }
}

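Because PhotoLibraryProvider hides the Photos calls behind a protocol, sync code can be exercised against a stub in unit tests. A trimmed sketch of such a test double (hypothetical, not part of this change):

// Hypothetical stub: reports authorized and returns empty fetch results.
struct StubPhotoLibrary: PhotoLibraryProvider {
  var isAuthorized: Bool { true }

  @available(iOS 16, *)
  var currentChangeToken: PHPersistentChangeToken { PHPhotoLibrary.shared().currentChangeToken }

  func fetchAlbums(sorted: Bool) -> [PHAssetCollection] { [] }

  func fetchAlbums(with type: PHAssetCollectionType, subtype: PHAssetCollectionSubtype, options: PHFetchOptions?) -> PHFetchResult<PHAssetCollection> {
    PHAssetCollection.fetchAssetCollections(withLocalIdentifiers: [], options: nil) // empty result
  }

  func fetchAssets(in album: PHAssetCollection, options: PHFetchOptions?) -> PHFetchResult<PHAsset> {
    PHAsset.fetchAssets(withLocalIdentifiers: [], options: nil) // empty result
  }

  func fetchAssets(withIdentifiers ids: [String], options: PHFetchOptions?) -> PHFetchResult<PHAsset> {
    PHAsset.fetchAssets(withLocalIdentifiers: [], options: nil) // empty result
  }

  @available(iOS 16, *)
  func fetchPersistentChanges(since token: PHPersistentChangeToken) throws -> PHPersistentChangeFetchResult {
    try PHPhotoLibrary.shared().fetchPersistentChanges(since: token)
  }
}
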
mobile/ios/Runner/Upload/Delegate.swift (new file, 208 lines)
@@ -0,0 +1,208 @@
import SQLiteData
|
||||
|
||||
class UploadApiDelegate: NSObject, URLSessionDataDelegate, URLSessionTaskDelegate {
|
||||
private static let stateLock = NSLock()
|
||||
private static var transferStates: [Int64: NetworkTransferState] = [:]
|
||||
private static var responseData: [Int64: Data] = [:]
|
||||
private static let jsonDecoder = JSONDecoder()
|
||||
|
||||
private let db: DatabasePool
|
||||
private let statusListener: StatusEventListener
|
||||
private let progressListener: ProgressEventListener
|
||||
weak var downloadQueue: DownloadQueue?
|
||||
weak var uploadQueue: UploadQueue?
|
||||
|
||||
init(db: DatabasePool, statusListener: StatusEventListener, progressListener: ProgressEventListener) {
|
||||
self.db = db
|
||||
self.statusListener = statusListener
|
||||
self.progressListener = progressListener
|
||||
}
|
||||
|
||||
static func reset() {
|
||||
stateLock.withLock {
|
||||
transferStates.removeAll()
|
||||
responseData.removeAll()
|
||||
}
|
||||
}
|
||||
|
||||
  func urlSession(_ session: URLSession, dataTask: URLSessionDataTask, didReceive data: Data) {
    guard let taskIdStr = dataTask.taskDescription,
          let taskId = Int64(taskIdStr)
    else { return }

    Self.stateLock.withLock {
      // Append in place via the subscript; `if var copy = dict[key]` would mutate a local copy
      // of the Data value and the accumulated response bytes would be lost.
      Self.responseData[taskId, default: Data()].append(data)
    }
  }
|
||||
|
||||
func urlSession(_ session: URLSession, task: URLSessionTask, didCompleteWithError error: Error?) {
|
||||
Task {
|
||||
defer {
|
||||
downloadQueue?.startQueueProcessing()
|
||||
uploadQueue?.startQueueProcessing()
|
||||
}
|
||||
|
||||
guard let taskDescriptionId = task.taskDescription,
|
||||
let taskId = Int64(taskDescriptionId)
|
||||
else {
|
||||
return dPrint("Unexpected: task without session ID completed")
|
||||
}
|
||||
|
||||
defer {
|
||||
Self.stateLock.withLock { let _ = Self.transferStates.removeValue(forKey: taskId) }
|
||||
}
|
||||
|
||||
if let responseData = Self.stateLock.withLock({ Self.responseData.removeValue(forKey: taskId) }),
|
||||
let httpResponse = task.response as? HTTPURLResponse
|
||||
{
|
||||
switch httpResponse.statusCode {
|
||||
case 200, 201:
|
||||
do {
|
||||
let response = try Self.jsonDecoder.decode(UploadSuccessResponse.self, from: responseData)
|
||||
return await handleSuccess(taskId: taskId, response: response)
|
||||
} catch {
|
||||
return await handleFailure(taskId: taskId, code: .invalidResponse)
|
||||
}
|
||||
case 400..<500:
|
||||
dPrint(
|
||||
"Response \(httpResponse.statusCode): \(String(data: responseData, encoding: .utf8) ?? "No response body")"
|
||||
)
|
||||
return await handleFailure(taskId: taskId, code: .badRequest)
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
guard let urlError = error as? URLError else {
|
||||
return await handleFailure(taskId: taskId)
|
||||
}
|
||||
|
||||
if #available(iOS 17, *), let resumeData = urlError.uploadTaskResumeData {
|
||||
return await handleFailure(taskDescriptionId: taskDescriptionId, session: session, resumeData: resumeData)
|
||||
}
|
||||
|
||||
let code: UploadErrorCode =
|
||||
switch urlError.backgroundTaskCancelledReason {
|
||||
case .backgroundUpdatesDisabled: .backgroundUpdatesDisabled
|
||||
case .insufficientSystemResources: .outOfResources
|
||||
case .userForceQuitApplication: .forceQuit
|
||||
default:
|
||||
switch urlError.code {
|
||||
case .networkConnectionLost, .notConnectedToInternet: .networkError
|
||||
case .timedOut: .uploadTimeout
|
||||
case .resourceUnavailable, .fileDoesNotExist: .fileNotFound
|
||||
default: .unknown
|
||||
}
|
||||
}
|
||||
await handleFailure(taskId: taskId, code: code)
|
||||
}
|
||||
}
|
||||
|
||||
func urlSession(
|
||||
_ session: URLSession,
|
||||
task: URLSessionTask,
|
||||
didSendBodyData bytesSent: Int64,
|
||||
totalBytesSent: Int64,
|
||||
totalBytesExpectedToSend: Int64
|
||||
) {
|
||||
guard let sessionTaskId = task.taskDescription, let taskId = Int64(sessionTaskId) else { return }
|
||||
let currentTime = Date()
|
||||
let state = Self.stateLock.withLock {
|
||||
if let existing = Self.transferStates[taskId] {
|
||||
return existing
|
||||
}
|
||||
let new = NetworkTransferState(
|
||||
lastUpdateTime: currentTime,
|
||||
totalBytesTransferred: totalBytesSent,
|
||||
currentSpeed: nil
|
||||
)
|
||||
Self.transferStates[taskId] = new
|
||||
return new
|
||||
}
|
||||
|
||||
let timeDelta = currentTime.timeIntervalSince(state.lastUpdateTime)
|
||||
guard timeDelta > 0 else { return }
|
||||
|
||||
let bytesDelta = totalBytesSent - state.totalBytesTransferred
|
||||
let instantSpeed = Double(bytesDelta) / timeDelta
|
||||
let currentSpeed =
|
||||
if let previousSpeed = state.currentSpeed {
|
||||
TaskConfig.transferSpeedAlpha * instantSpeed + (1 - TaskConfig.transferSpeedAlpha) * previousSpeed
|
||||
} else {
|
||||
instantSpeed
|
||||
}
|
||||
state.currentSpeed = currentSpeed
|
||||
state.lastUpdateTime = currentTime
|
||||
state.totalBytesTransferred = totalBytesSent
|
||||
self.progressListener.onTaskProgress(
|
||||
UploadApiTaskProgress(
|
||||
id: sessionTaskId,
|
||||
progress: Double(totalBytesSent) / Double(totalBytesExpectedToSend),
|
||||
speed: currentSpeed
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
private func handleSuccess(taskId: Int64, response: UploadSuccessResponse) async {
|
||||
dPrint("Upload succeeded for task \(taskId), server ID: \(response.id)")
|
||||
do {
|
||||
try await db.write { conn in
|
||||
let task = try UploadTask.update { $0.status = .uploadComplete }.where({ $0.id.eq(taskId) })
|
||||
.returning(\.self).fetchOne(conn)
|
||||
guard let task, let isLivePhoto = task.isLivePhoto, isLivePhoto, task.livePhotoVideoId == nil else { return }
|
||||
try UploadTask.insert {
|
||||
UploadTask.Draft(
|
||||
attempts: 0,
|
||||
createdAt: Date(),
|
||||
filePath: nil,
|
||||
isLivePhoto: true,
|
||||
lastError: nil,
|
||||
livePhotoVideoId: response.id,
|
||||
localId: task.localId,
|
||||
method: .multipart,
|
||||
priority: 0.7,
|
||||
retryAfter: nil,
|
||||
status: .downloadPending,
|
||||
)
|
||||
}.execute(conn)
|
||||
}
|
||||
dPrint("Updated upload success status for session task \(taskId)")
|
||||
} catch {
|
||||
dPrint(
|
||||
"Failed to update upload success status for session task \(taskId): \(error.localizedDescription)"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private func handleFailure(taskId: Int64, code: UploadErrorCode = .unknown) async {
|
||||
dPrint("Upload failed for task \(taskId) with code \(code)")
|
||||
try? await db.write { conn in
|
||||
try UploadTask.retryOrFail(code: code, status: .uploadFailed).where { $0.id.eq(taskId) }
|
||||
.execute(conn)
|
||||
}
|
||||
}
|
||||
|
||||
@available(iOS 17, *)
|
||||
private func handleFailure(taskDescriptionId: String, session: URLSession, resumeData: Data) async {
|
||||
dPrint("Resuming upload for task \(taskDescriptionId)")
|
||||
let resumeTask = session.uploadTask(withResumeData: resumeData)
|
||||
resumeTask.taskDescription = taskDescriptionId
|
||||
resumeTask.resume()
|
||||
}
|
||||
|
||||
private class NetworkTransferState {
|
||||
var lastUpdateTime: Date
|
||||
var totalBytesTransferred: Int64
|
||||
var currentSpeed: Double?
|
||||
|
||||
init(lastUpdateTime: Date, totalBytesTransferred: Int64, currentSpeed: Double?) {
|
||||
self.lastUpdateTime = lastUpdateTime
|
||||
self.totalBytesTransferred = totalBytesTransferred
|
||||
self.currentSpeed = currentSpeed
|
||||
}
|
||||
}
|
||||
}
|
||||
mobile/ios/Runner/Upload/DownloadQueue.swift (new file, 351 lines)
@@ -0,0 +1,351 @@
import CryptoKit
|
||||
import Photos
|
||||
import SQLiteData
|
||||
|
||||
class DownloadQueue {
|
||||
private static let resourceManager = PHAssetResourceManager.default()
|
||||
private static var queueProcessingTask: Task<Void, Never>?
|
||||
private static var queueProcessingLock = NSLock()
|
||||
|
||||
private let db: DatabasePool
|
||||
private let uploadQueue: UploadQueue
|
||||
private let statusListener: StatusEventListener
|
||||
private let progressListener: ProgressEventListener
|
||||
|
||||
init(
|
||||
db: DatabasePool,
|
||||
uploadQueue: UploadQueue,
|
||||
statusListener: StatusEventListener,
|
||||
progressListener: ProgressEventListener
|
||||
) {
|
||||
self.db = db
|
||||
self.uploadQueue = uploadQueue
|
||||
self.statusListener = statusListener
|
||||
self.progressListener = progressListener
|
||||
NotificationCenter.default.addObserver(forName: .networkDidConnect, object: nil, queue: nil) { [weak self] _ in
|
||||
dPrint("Network connected")
|
||||
self?.startQueueProcessing()
|
||||
}
|
||||
}
|
||||
|
||||
func enqueueAssets(localIds: [String]) async throws {
|
||||
guard !localIds.isEmpty else { return dPrint("No assets to enqueue") }
|
||||
|
||||
defer { startQueueProcessing() }
|
||||
let candidates = try await db.read { conn in
|
||||
return try LocalAsset.all
|
||||
.where { asset in asset.id.in(localIds) }
|
||||
.select { LocalAssetCandidate.Columns(id: $0.id, type: $0.type) }
|
||||
.limit { _ in UploadTaskStat.availableSlots }
|
||||
.fetchAll(conn)
|
||||
}
|
||||
|
||||
guard !candidates.isEmpty else { return dPrint("No candidates to enqueue") }
|
||||
|
||||
try await db.write { conn in
|
||||
var draft = UploadTask.Draft(
|
||||
attempts: 0,
|
||||
createdAt: Date(),
|
||||
filePath: nil,
|
||||
isLivePhoto: nil,
|
||||
lastError: nil,
|
||||
livePhotoVideoId: nil,
|
||||
localId: "",
|
||||
method: .multipart,
|
||||
priority: 0.5,
|
||||
retryAfter: nil,
|
||||
status: .downloadPending,
|
||||
)
|
||||
for candidate in candidates {
|
||||
draft.localId = candidate.id
|
||||
draft.priority = candidate.type == .image ? 0.9 : 0.8
|
||||
try UploadTask.insert {
|
||||
draft
|
||||
} onConflict: {
|
||||
($0.localId, $0.livePhotoVideoId)
|
||||
}.execute(conn)
|
||||
}
|
||||
}
|
||||
dPrint("Enqueued \(candidates.count) assets for upload")
|
||||
}
|
||||
|
||||
func startQueueProcessing() {
|
||||
dPrint("Starting download queue processing")
|
||||
Self.queueProcessingLock.withLock {
|
||||
guard Self.queueProcessingTask == nil else { return }
|
||||
Self.queueProcessingTask = Task {
|
||||
await startDownloads()
|
||||
Self.queueProcessingLock.withLock { Self.queueProcessingTask = nil }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func startDownloads() async {
|
||||
dPrint("Processing download queue")
|
||||
guard NetworkMonitor.shared.isConnected else {
|
||||
return dPrint("Download queue paused: network disconnected")
|
||||
}
|
||||
|
||||
do {
|
||||
let tasks: [LocalAssetDownloadData] = try await db.read({ conn in
|
||||
guard let backupEnabled = try Store.get(conn, StoreKey.enableBackup), backupEnabled else { return [] }
|
||||
return try UploadTask.join(LocalAsset.all) { task, asset in task.localId.eq(asset.id) }
|
||||
.where { task, asset in
|
||||
asset.checksum.isNot(nil) && task.status.eq(TaskStatus.downloadPending)
|
||||
&& task.attempts < TaskConfig.maxRetries
|
||||
&& (task.retryAfter.is(nil) || task.retryAfter.unwrapped <= Date().unixTime)
|
||||
&& (task.lastError.is(nil)
|
||||
|| !task.lastError.unwrapped.in([
|
||||
UploadErrorCode.assetNotFound, UploadErrorCode.resourceNotFound, UploadErrorCode.invalidResource,
|
||||
]))
|
||||
}
|
||||
.select { task, asset in
|
||||
LocalAssetDownloadData.Columns(
|
||||
checksum: asset.checksum,
|
||||
createdAt: asset.createdAt,
|
||||
livePhotoVideoId: task.livePhotoVideoId,
|
||||
localId: asset.id,
|
||||
taskId: task.id,
|
||||
updatedAt: asset.updatedAt
|
||||
)
|
||||
}
|
||||
.order { task, asset in (task.priority.desc(), task.createdAt) }
|
||||
.limit { _, _ in UploadTaskStat.availableDownloadSlots }
|
||||
.fetchAll(conn)
|
||||
})
|
||||
if tasks.isEmpty { return dPrint("No download tasks to process") }
|
||||
|
||||
try await withThrowingTaskGroup(of: Void.self) { group in
|
||||
var iterator = tasks.makeIterator()
|
||||
for _ in 0..<min(TaskConfig.maxActiveDownloads, tasks.count) {
|
||||
if let task = iterator.next() {
|
||||
group.addTask { await self.downloadAndQueue(task) }
|
||||
}
|
||||
}
|
||||
|
||||
while try await group.next() != nil {
|
||||
if let task = iterator.next() {
|
||||
group.addTask { await self.downloadAndQueue(task) }
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
dPrint("Download queue error: \(error)")
|
||||
}
|
||||
}
|
||||
|
||||
private func downloadAndQueue(_ task: LocalAssetDownloadData) async {
|
||||
defer { startQueueProcessing() }
|
||||
dPrint("Starting download for task \(task.taskId)")
|
||||
|
||||
guard let asset = PHAsset.fetchAssets(withLocalIdentifiers: [task.localId], options: nil).firstObject
|
||||
else {
|
||||
dPrint("Asset not found")
|
||||
return handleFailure(task: task, code: .assetNotFound)
|
||||
}
|
||||
|
||||
let isLivePhoto = asset.mediaSubtypes.contains(.photoLive)
|
||||
let isMotion = isLivePhoto && task.livePhotoVideoId != nil
|
||||
guard let resource = isMotion ? asset.getLivePhotoResource() : asset.getResource() else {
|
||||
dPrint("Resource not found")
|
||||
return handleFailure(task: task, code: .resourceNotFound)
|
||||
}
|
||||
|
||||
guard let deviceId = (try? await db.read { conn in try Store.get(conn, StoreKey.deviceId) }) else {
|
||||
dPrint("Device ID not found")
|
||||
return handleFailure(task: task, code: .noDeviceId)
|
||||
}
|
||||
|
||||
let fileDir = TaskConfig.originalsDir
|
||||
let fileName = "\(resource.assetLocalIdentifier.replacingOccurrences(of: "/", with: "_"))_\(resource.type.rawValue)"
|
||||
let filePath = fileDir.appendingPathComponent(fileName)
|
||||
do {
|
||||
try FileManager.default.createDirectory(
|
||||
at: fileDir,
|
||||
withIntermediateDirectories: true,
|
||||
attributes: nil
|
||||
)
|
||||
} catch {
|
||||
dPrint("Failed to create directory for download task \(task.taskId): \(error)")
|
||||
return handleFailure(task: task, code: .writeFailed, filePath: filePath)
|
||||
}
|
||||
|
||||
do {
|
||||
try await db.write { conn in
|
||||
try UploadTask.update {
|
||||
$0.status = .downloadQueued
|
||||
$0.isLivePhoto = isLivePhoto
|
||||
$0.filePath = filePath
|
||||
}.where { $0.id.eq(task.taskId) }.execute(conn)
|
||||
}
|
||||
} catch {
|
||||
return dPrint("Failed to set file path for download task \(task.taskId): \(error)")
|
||||
}
|
||||
statusListener.onTaskStatus(
|
||||
UploadApiTaskStatus(id: String(task.taskId), filename: filePath.path, status: .downloadQueued)
|
||||
)
|
||||
|
||||
do {
|
||||
let hash = try await download(task: task, asset: asset, resource: resource, to: filePath, deviceId: deviceId)
|
||||
let status = try await db.write { conn in
|
||||
if let hash { try LocalAsset.update { $0.checksum = hash }.where { $0.id.eq(task.localId) }.execute(conn) }
|
||||
        let status =
          if let hash, try RemoteAsset.select(\.rowid).where({ $0.checksum.eq(hash) }).fetchOne(conn) != nil {
            TaskStatus.uploadSkipped
          } else {
            TaskStatus.uploadPending
          }
        // Persist the computed status; writing .uploadPending unconditionally would re-queue
        // assets whose checksum already exists on the server.
        try UploadTask.update { $0.status = status }.where { $0.id.eq(task.taskId) }.execute(conn)
        return status
|
||||
}
|
||||
statusListener.onTaskStatus(
|
||||
UploadApiTaskStatus(
|
||||
id: String(task.taskId),
|
||||
filename: filePath.path,
|
||||
status: UploadApiStatus(rawValue: status.rawValue)!
|
||||
)
|
||||
)
|
||||
uploadQueue.startQueueProcessing()
|
||||
} catch {
|
||||
dPrint("Download failed for task \(task.taskId): \(error)")
|
||||
handleFailure(task: task, code: .writeFailed, filePath: filePath)
|
||||
}
|
||||
}
|
||||
|
||||
func download(
|
||||
task: LocalAssetDownloadData,
|
||||
asset: PHAsset,
|
||||
resource: PHAssetResource,
|
||||
to filePath: URL,
|
||||
deviceId: String
|
||||
) async throws
|
||||
-> String?
|
||||
{
|
||||
dPrint("Downloading asset resource \(resource.assetLocalIdentifier) of type \(resource.type.rawValue)")
|
||||
let options = PHAssetResourceRequestOptions()
|
||||
options.isNetworkAccessAllowed = true
|
||||
let (header, footer) = AssetData(
|
||||
deviceAssetId: task.localId,
|
||||
deviceId: deviceId,
|
||||
fileCreatedAt: task.createdAt,
|
||||
fileModifiedAt: task.updatedAt,
|
||||
fileName: resource.originalFilename,
|
||||
isFavorite: asset.isFavorite,
|
||||
livePhotoVideoId: nil
|
||||
).multipart()
|
||||
|
||||
guard let fileHandle = try? FileHandle.createOrOverwrite(atPath: filePath.path) else {
|
||||
dPrint("Failed to open file handle for download task \(task.taskId), path: \(filePath.path)")
|
||||
throw UploadError.fileCreationFailed
|
||||
}
|
||||
try fileHandle.write(contentsOf: header)
|
||||
|
||||
class RequestRef {
|
||||
var id: PHAssetResourceDataRequestID?
|
||||
var lastProgressTime = Date()
|
||||
var didStall = false
|
||||
}
|
||||
|
||||
var lastProgressTime = Date()
|
||||
nonisolated(unsafe) let progressListener = self.progressListener
|
||||
let taskIdStr = String(task.taskId)
|
||||
options.progressHandler = { progress in
|
||||
lastProgressTime = Date()
|
||||
progressListener.onTaskProgress(UploadApiTaskProgress(id: taskIdStr, progress: progress))
|
||||
}
|
||||
|
||||
let request = RequestRef()
|
||||
let timeoutTask = Task {
|
||||
while !Task.isCancelled {
|
||||
try? await Task.sleep(nanoseconds: TaskConfig.downloadCheckIntervalNs)
|
||||
request.didStall = Date().timeIntervalSince(lastProgressTime) > TaskConfig.downloadTimeoutS
|
||||
if request.didStall {
|
||||
if let requestId = request.id {
|
||||
Self.resourceManager.cancelDataRequest(requestId)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return try await withTaskCancellationHandler {
|
||||
try await withCheckedThrowingContinuation { continuation in
|
||||
var hasher = task.checksum == nil && task.livePhotoVideoId == nil ? Insecure.SHA1() : nil
|
||||
request.id = Self.resourceManager.requestData(
|
||||
for: resource,
|
||||
options: options,
|
||||
dataReceivedHandler: { data in
|
||||
guard let requestId = request.id else { return }
|
||||
do {
|
||||
hasher?.update(data: data)
|
||||
try fileHandle.write(contentsOf: data)
|
||||
} catch {
|
||||
request.id = nil
|
||||
Self.resourceManager.cancelDataRequest(requestId)
|
||||
}
|
||||
},
|
||||
completionHandler: { error in
|
||||
timeoutTask.cancel()
|
||||
switch error {
|
||||
case let e as NSError where e.domain == "CloudPhotoLibraryErrorDomain":
|
||||
dPrint("iCloud error during download: \(e)")
|
||||
let code: UploadErrorCode =
|
||||
switch e.code {
|
||||
case 1005: .iCloudRateLimit
|
||||
case 81: .iCloudThrottled
|
||||
default: .photosUnknownError
|
||||
}
|
||||
self.handleFailure(task: task, code: code, filePath: filePath)
|
||||
case let e as PHPhotosError:
|
||||
dPrint("Photos error during download: \(e)")
|
||||
let code: UploadErrorCode =
|
||||
switch e.code {
|
||||
case .notEnoughSpace: .notEnoughSpace
|
||||
case .missingResource: .resourceNotFound
|
||||
case .networkError: .networkError
|
||||
case .internalError: .photosInternalError
|
||||
case .invalidResource: .invalidResource
|
||||
case .operationInterrupted: .interrupted
|
||||
case .userCancelled where request.didStall: .downloadStalled
|
||||
case .userCancelled: .cancelled
|
||||
default: .photosUnknownError
|
||||
}
|
||||
self.handleFailure(task: task, code: code, filePath: filePath)
|
||||
case .some:
|
||||
dPrint("Unknown error during download: \(String(describing: error))")
|
||||
self.handleFailure(task: task, code: .unknown, filePath: filePath)
|
||||
case .none:
|
||||
dPrint("Download completed for task \(task.taskId)")
|
||||
do {
|
||||
try fileHandle.write(contentsOf: footer)
|
||||
continuation.resume(returning: hasher.map { hasher in Data(hasher.finalize()).base64EncodedString() })
|
||||
} catch {
|
||||
try? FileManager.default.removeItem(at: filePath)
|
||||
continuation.resume(throwing: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
} onCancel: {
|
||||
if let requestId = request.id {
|
||||
Self.resourceManager.cancelDataRequest(requestId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func handleFailure(task: LocalAssetDownloadData, code: UploadErrorCode, filePath: URL? = nil) {
|
||||
dPrint("Handling failure for task \(task.taskId) with code \(code.rawValue)")
|
||||
do {
|
||||
if let filePath {
|
||||
try? FileManager.default.removeItem(at: filePath)
|
||||
}
|
||||
|
||||
try db.write { conn in
|
||||
try UploadTask.retryOrFail(code: code, status: .downloadFailed).where { $0.id.eq(task.taskId) }.execute(conn)
|
||||
}
|
||||
} catch {
|
||||
dPrint("Failed to update download failure status for task \(task.taskId): \(error)")
|
||||
}
|
||||
}
|
||||
}
|
||||
mobile/ios/Runner/Upload/Listeners.swift (new file, 39 lines)
@@ -0,0 +1,39 @@
class StatusEventListener: StreamStatusStreamHandler {
  var eventSink: PigeonEventSink<UploadApiTaskStatus>?

  override func onListen(withArguments arguments: Any?, sink: PigeonEventSink<UploadApiTaskStatus>) {
    eventSink = sink
  }

  func onTaskStatus(_ event: UploadApiTaskStatus) {
    if let eventSink = eventSink {
      eventSink.success(event)
    }
  }

  func onEventsDone() {
    eventSink?.endOfStream()
    eventSink = nil
  }
}

class ProgressEventListener: StreamProgressStreamHandler {
  var eventSink: PigeonEventSink<UploadApiTaskProgress>?

  override func onListen(withArguments arguments: Any?, sink: PigeonEventSink<UploadApiTaskProgress>) {
    eventSink = sink
  }

  func onTaskProgress(_ event: UploadApiTaskProgress) {
    if let eventSink = eventSink {
      DispatchQueue.main.async { eventSink.success(event) }
    }
  }

  func onEventsDone() {
    DispatchQueue.main.async {
      self.eventSink?.endOfStream()
      self.eventSink = nil
    }
  }
}
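These handlers are attached to the Pigeon event channels during plugin setup; a minimal registration sketch (`messenger` is assumed to be the host engine's FlutterBinaryMessenger):

// Sketch only: wire the listeners to their Flutter event channels once at startup.
let statusListener = StatusEventListener()
let progressListener = ProgressEventListener()
StreamStatusStreamHandler.register(with: messenger, streamHandler: statusListener)
StreamProgressStreamHandler.register(with: messenger, streamHandler: progressListener)
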
mobile/ios/Runner/Upload/NetworkMonitor.swift (new file, 22 lines)
@@ -0,0 +1,22 @@
import Network

class NetworkMonitor {
  static let shared = NetworkMonitor()
  private let monitor = NWPathMonitor()
  private(set) var isConnected = false
  private(set) var isExpensive = false

  private init() {
    monitor.pathUpdateHandler = { [weak self] path in
      guard let self else { return }
      let wasConnected = self.isConnected
      self.isConnected = path.status == .satisfied
      self.isExpensive = path.isExpensive

      if !wasConnected && self.isConnected {
        NotificationCenter.default.post(name: .networkDidConnect, object: nil)
      }
    }
    monitor.start(queue: .global(qos: .utility))
  }
}
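The `.networkDidConnect` name posted above is assumed to be declared elsewhere in the target; if it is not, a one-line extension along these lines would be needed (sketch, not part of this diff):

extension Notification.Name {
  // Hypothetical declaration backing NetworkMonitor's connectivity notification.
  static let networkDidConnect = Notification.Name("app.alextran.immich.networkDidConnect")
}
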
mobile/ios/Runner/Upload/UploadQueue.swift (new file, 221 lines)
@@ -0,0 +1,221 @@
import SQLiteData
|
||||
import StructuredFieldValues
|
||||
|
||||
class UploadQueue {
|
||||
private static let structuredEncoder = StructuredFieldValueEncoder()
|
||||
private static var queueProcessingTask: Task<Void, Never>?
|
||||
private static var queueProcessingLock = NSLock()
|
||||
|
||||
private let db: DatabasePool
|
||||
private let cellularSession: URLSession
|
||||
private let wifiOnlySession: URLSession
|
||||
private let statusListener: StatusEventListener
|
||||
|
||||
init(db: DatabasePool, cellularSession: URLSession, wifiOnlySession: URLSession, statusListener: StatusEventListener)
|
||||
{
|
||||
self.db = db
|
||||
self.cellularSession = cellularSession
|
||||
self.wifiOnlySession = wifiOnlySession
|
||||
self.statusListener = statusListener
|
||||
}
|
||||
|
||||
func enqueueFiles(paths: [String]) async throws {
|
||||
guard !paths.isEmpty else { return dPrint("No paths to enqueue") }
|
||||
|
||||
guard let deviceId = (try? await db.read { conn in try Store.get(conn, StoreKey.deviceId) }) else {
|
||||
throw StoreError.notFound
|
||||
}
|
||||
|
||||
defer { startQueueProcessing() }
|
||||
|
||||
try await withThrowingTaskGroup(of: Void.self, returning: Void.self) { group in
|
||||
let date = Date()
|
||||
try FileManager.default.createDirectory(
|
||||
at: TaskConfig.originalsDir,
|
||||
withIntermediateDirectories: true,
|
||||
attributes: nil
|
||||
)
|
||||
|
||||
for path in paths {
|
||||
group.addTask {
|
||||
let inputURL = URL(fileURLWithPath: path, isDirectory: false)
|
||||
let outputURL = TaskConfig.originalsDir.appendingPathComponent(UUID().uuidString)
|
||||
let resources = try inputURL.resourceValues(forKeys: [.creationDateKey, .contentModificationDateKey])
|
||||
|
||||
let formatter = ISO8601DateFormatter()
|
||||
let (header, footer) = AssetData(
|
||||
deviceAssetId: "",
|
||||
deviceId: deviceId,
|
||||
fileCreatedAt: formatter.string(from: resources.creationDate ?? date),
|
||||
fileModifiedAt: formatter.string(from: resources.contentModificationDate ?? date),
|
||||
fileName: resources.name ?? inputURL.lastPathComponent,
|
||||
isFavorite: false,
|
||||
livePhotoVideoId: nil
|
||||
).multipart()
|
||||
|
||||
do {
|
||||
let writeHandle = try FileHandle.createOrOverwrite(atPath: outputURL.path)
|
||||
try writeHandle.write(contentsOf: header)
|
||||
let readHandle = try FileHandle(forReadingFrom: inputURL)
|
||||
|
||||
let bufferSize = 1024 * 1024
|
||||
while true {
|
||||
let data = try readHandle.read(upToCount: bufferSize)
|
||||
guard let data = data, !data.isEmpty else { break }
|
||||
try writeHandle.write(contentsOf: data)
|
||||
}
|
||||
try writeHandle.write(contentsOf: footer)
|
||||
} catch {
|
||||
try? FileManager.default.removeItem(at: outputURL)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try await group.waitForAll()
|
||||
}
|
||||
|
||||
try await db.write { conn in
|
||||
var draft = UploadTask.Draft(
|
||||
attempts: 0,
|
||||
createdAt: Date(),
|
||||
filePath: nil,
|
||||
isLivePhoto: nil,
|
||||
lastError: nil,
|
||||
livePhotoVideoId: nil,
|
||||
localId: "",
|
||||
method: .multipart,
|
||||
priority: 0.5,
|
||||
retryAfter: nil,
|
||||
status: .downloadPending,
|
||||
)
|
||||
for path in paths {
|
||||
draft.filePath = URL(fileURLWithPath: path, isDirectory: false)
|
||||
try UploadTask.insert { draft }.execute(conn)
|
||||
}
|
||||
}
|
||||
dPrint("Enqueued \(paths.count) assets for upload")
|
||||
}
|
||||
|
||||
func startQueueProcessing() {
|
||||
dPrint("Starting upload queue processing")
|
||||
Self.queueProcessingLock.withLock {
|
||||
guard Self.queueProcessingTask == nil else { return }
|
||||
Self.queueProcessingTask = Task {
|
||||
await startUploads()
|
||||
Self.queueProcessingLock.withLock { Self.queueProcessingTask = nil }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func startUploads() async {
|
||||
dPrint("Processing download queue")
|
||||
guard NetworkMonitor.shared.isConnected,
|
||||
let backupEnabled = try? await db.read({ conn in try Store.get(conn, StoreKey.enableBackup) }),
|
||||
backupEnabled
|
||||
else { return dPrint("Download queue paused: network disconnected or backup disabled") }
|
||||
|
||||
do {
|
||||
let tasks: [LocalAssetUploadData] = try await db.read({ conn in
|
||||
guard let backupEnabled = try Store.get(conn, StoreKey.enableBackup), backupEnabled else { return [] }
|
||||
return try UploadTask.join(LocalAsset.all) { task, asset in task.localId.eq(asset.id) }
|
||||
.where { task, asset in
|
||||
asset.checksum.isNot(nil) && task.status.eq(TaskStatus.uploadPending)
|
||||
&& task.attempts < TaskConfig.maxRetries
|
||||
&& task.filePath.isNot(nil)
|
||||
}
|
||||
.select { task, asset in
|
||||
LocalAssetUploadData.Columns(
|
||||
filePath: task.filePath.unwrapped,
|
||||
priority: task.priority,
|
||||
taskId: task.id,
|
||||
type: asset.type
|
||||
)
|
||||
}
|
||||
.limit { task, _ in UploadTaskStat.availableUploadSlots }
|
||||
.order { task, asset in (task.priority.desc(), task.createdAt) }
|
||||
.fetchAll(conn)
|
||||
})
|
||||
if tasks.isEmpty { return dPrint("No upload tasks to process") }
|
||||
|
||||
await withTaskGroup(of: Void.self) { group in
|
||||
for task in tasks {
|
||||
group.addTask { await self.startUpload(multipart: task) }
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
dPrint("Upload queue error: \(error)")
|
||||
}
|
||||
}
|
||||
|
||||
private func startUpload(multipart task: LocalAssetUploadData) async {
|
||||
dPrint("Uploading asset resource at \(task.filePath) of task \(task.taskId)")
|
||||
defer { startQueueProcessing() }
|
||||
|
||||
let (url, accessToken, session): (URL, String, URLSession)
|
||||
do {
|
||||
(url, accessToken, session) = try await db.read { conn in
|
||||
guard let url = try Store.get(conn, StoreKey.serverEndpoint),
|
||||
let accessToken = try Store.get(conn, StoreKey.accessToken)
|
||||
else {
|
||||
throw StoreError.notFound
|
||||
}
|
||||
|
||||
let session =
|
||||
switch task.type {
|
||||
case .image:
|
||||
(try? Store.get(conn, StoreKey.useWifiForUploadPhotos)) ?? false ? cellularSession : wifiOnlySession
|
||||
case .video:
|
||||
(try? Store.get(conn, StoreKey.useWifiForUploadVideos)) ?? false ? cellularSession : wifiOnlySession
|
||||
default: wifiOnlySession
|
||||
}
|
||||
return (url, accessToken, session)
|
||||
}
|
||||
} catch {
|
||||
dPrint("Upload failed for \(task.taskId), could not retrieve server URL or access token: \(error)")
|
||||
return handleFailure(task: task, code: .noServerUrl)
|
||||
}
|
||||
|
||||
var request = URLRequest(url: url.appendingPathComponent("/assets"))
|
||||
request.httpMethod = "POST"
|
||||
request.setValue(accessToken, forHTTPHeaderField: UploadHeaders.userToken.rawValue)
|
||||
request.setValue(AssetData.contentType, forHTTPHeaderField: "Content-Type")
|
||||
|
||||
let sessionTask = session.uploadTask(with: request, fromFile: task.filePath)
|
||||
sessionTask.taskDescription = String(task.taskId)
|
||||
sessionTask.priority = task.priority
|
||||
do {
|
||||
try? FileManager.default.removeItem(at: task.filePath) // upload task already copied the file
|
||||
try await db.write { conn in
|
||||
try UploadTask.update { row in
|
||||
row.status = .uploadQueued
|
||||
row.filePath = nil
|
||||
}
|
||||
.where { $0.id.eq(task.taskId) }
|
||||
.execute(conn)
|
||||
}
|
||||
statusListener.onTaskStatus(
|
||||
UploadApiTaskStatus(
|
||||
id: String(task.taskId),
|
||||
filename: task.filePath.lastPathComponent,
|
||||
status: .uploadQueued,
|
||||
)
|
||||
)
|
||||
|
||||
sessionTask.resume()
|
||||
dPrint("Upload started for task \(task.taskId) using \(session == wifiOnlySession ? "WiFi" : "Cellular") session")
|
||||
} catch {
|
||||
dPrint("Upload failed for \(task.taskId), could not update queue status: \(error.localizedDescription)")
|
||||
}
|
||||
}
|
||||
|
||||
private func handleFailure(task: LocalAssetUploadData, code: UploadErrorCode) {
|
||||
do {
|
||||
try db.write { conn in
|
||||
try UploadTask.retryOrFail(code: code, status: .uploadFailed).where { $0.id.eq(task.taskId) }.execute(conn)
|
||||
}
|
||||
} catch {
|
||||
dPrint("Failed to update upload failure status for task \(task.taskId): \(error)")
|
||||
}
|
||||
}
|
||||
}
|
||||
mobile/ios/Runner/Upload/UploadTask.g.swift (new file, 463 lines)
@@ -0,0 +1,463 @@
// Autogenerated from Pigeon (v26.0.2), do not edit directly.
|
||||
// See also: https://pub.dev/packages/pigeon
|
||||
|
||||
import Foundation
|
||||
|
||||
#if os(iOS)
|
||||
import Flutter
|
||||
#elseif os(macOS)
|
||||
import FlutterMacOS
|
||||
#else
|
||||
#error("Unsupported platform.")
|
||||
#endif
|
||||
|
||||
private func wrapResult(_ result: Any?) -> [Any?] {
|
||||
return [result]
|
||||
}
|
||||
|
||||
private func wrapError(_ error: Any) -> [Any?] {
|
||||
if let pigeonError = error as? PigeonError {
|
||||
return [
|
||||
pigeonError.code,
|
||||
pigeonError.message,
|
||||
pigeonError.details,
|
||||
]
|
||||
}
|
||||
if let flutterError = error as? FlutterError {
|
||||
return [
|
||||
flutterError.code,
|
||||
flutterError.message,
|
||||
flutterError.details,
|
||||
]
|
||||
}
|
||||
return [
|
||||
"\(error)",
|
||||
"\(type(of: error))",
|
||||
"Stacktrace: \(Thread.callStackSymbols)",
|
||||
]
|
||||
}
|
||||
|
||||
private func isNullish(_ value: Any?) -> Bool {
|
||||
return value is NSNull || value == nil
|
||||
}
|
||||
|
||||
private func nilOrValue<T>(_ value: Any?) -> T? {
|
||||
if value is NSNull { return nil }
|
||||
return value as! T?
|
||||
}
|
||||
|
||||
func deepEqualsUploadTask(_ lhs: Any?, _ rhs: Any?) -> Bool {
|
||||
let cleanLhs = nilOrValue(lhs) as Any?
|
||||
let cleanRhs = nilOrValue(rhs) as Any?
|
||||
switch (cleanLhs, cleanRhs) {
|
||||
case (nil, nil):
|
||||
return true
|
||||
|
||||
case (nil, _), (_, nil):
|
||||
return false
|
||||
|
||||
case is (Void, Void):
|
||||
return true
|
||||
|
||||
case let (cleanLhsHashable, cleanRhsHashable) as (AnyHashable, AnyHashable):
|
||||
return cleanLhsHashable == cleanRhsHashable
|
||||
|
||||
case let (cleanLhsArray, cleanRhsArray) as ([Any?], [Any?]):
|
||||
guard cleanLhsArray.count == cleanRhsArray.count else { return false }
|
||||
for (index, element) in cleanLhsArray.enumerated() {
|
||||
if !deepEqualsUploadTask(element, cleanRhsArray[index]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
|
||||
case let (cleanLhsDictionary, cleanRhsDictionary) as ([AnyHashable: Any?], [AnyHashable: Any?]):
|
||||
guard cleanLhsDictionary.count == cleanRhsDictionary.count else { return false }
|
||||
for (key, cleanLhsValue) in cleanLhsDictionary {
|
||||
guard cleanRhsDictionary.index(forKey: key) != nil else { return false }
|
||||
if !deepEqualsUploadTask(cleanLhsValue, cleanRhsDictionary[key]!) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
|
||||
default:
|
||||
// Any other type shouldn't be able to be used with pigeon. File an issue if you find this to be untrue.
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func deepHashUploadTask(value: Any?, hasher: inout Hasher) {
|
||||
if let valueList = value as? [AnyHashable] {
|
||||
for item in valueList { deepHashUploadTask(value: item, hasher: &hasher) }
|
||||
return
|
||||
}
|
||||
|
||||
if let valueDict = value as? [AnyHashable: AnyHashable] {
|
||||
for key in valueDict.keys {
|
||||
hasher.combine(key)
|
||||
deepHashUploadTask(value: valueDict[key]!, hasher: &hasher)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if let hashableValue = value as? AnyHashable {
|
||||
hasher.combine(hashableValue.hashValue)
|
||||
}
|
||||
|
||||
return hasher.combine(String(describing: value))
|
||||
}
|
||||
|
||||
|
||||
|
||||
enum UploadApiErrorCode: Int {
|
||||
case unknown = 0
|
||||
case assetNotFound = 1
|
||||
case fileNotFound = 2
|
||||
case resourceNotFound = 3
|
||||
case invalidResource = 4
|
||||
case encodingFailed = 5
|
||||
case writeFailed = 6
|
||||
case notEnoughSpace = 7
|
||||
case networkError = 8
|
||||
case photosInternalError = 9
|
||||
case photosUnknownError = 10
|
||||
case noServerUrl = 11
|
||||
case noDeviceId = 12
|
||||
case noAccessToken = 13
|
||||
case interrupted = 14
|
||||
case cancelled = 15
|
||||
case downloadStalled = 16
|
||||
case forceQuit = 17
|
||||
case outOfResources = 18
|
||||
case backgroundUpdatesDisabled = 19
|
||||
case uploadTimeout = 20
|
||||
case iCloudRateLimit = 21
|
||||
case iCloudThrottled = 22
|
||||
}
|
||||
|
||||
enum UploadApiStatus: Int {
|
||||
case downloadPending = 0
|
||||
case downloadQueued = 1
|
||||
case downloadFailed = 2
|
||||
case uploadPending = 3
|
||||
case uploadQueued = 4
|
||||
case uploadFailed = 5
|
||||
case uploadComplete = 6
|
||||
case uploadSkipped = 7
|
||||
}
|
||||
|
||||
/// Generated class from Pigeon that represents data sent in messages.
|
||||
struct UploadApiTaskStatus: Hashable {
|
||||
var id: String
|
||||
var filename: String
|
||||
var status: UploadApiStatus
|
||||
var errorCode: UploadApiErrorCode? = nil
|
||||
var httpStatusCode: Int64? = nil
|
||||
|
||||
|
||||
// swift-format-ignore: AlwaysUseLowerCamelCase
|
||||
static func fromList(_ pigeonVar_list: [Any?]) -> UploadApiTaskStatus? {
|
||||
let id = pigeonVar_list[0] as! String
|
||||
let filename = pigeonVar_list[1] as! String
|
||||
let status = pigeonVar_list[2] as! UploadApiStatus
|
||||
let errorCode: UploadApiErrorCode? = nilOrValue(pigeonVar_list[3])
|
||||
let httpStatusCode: Int64? = nilOrValue(pigeonVar_list[4])
|
||||
|
||||
return UploadApiTaskStatus(
|
||||
id: id,
|
||||
filename: filename,
|
||||
status: status,
|
||||
errorCode: errorCode,
|
||||
httpStatusCode: httpStatusCode
|
||||
)
|
||||
}
|
||||
func toList() -> [Any?] {
|
||||
return [
|
||||
id,
|
||||
filename,
|
||||
status,
|
||||
errorCode,
|
||||
httpStatusCode,
|
||||
]
|
||||
}
|
||||
static func == (lhs: UploadApiTaskStatus, rhs: UploadApiTaskStatus) -> Bool {
|
||||
return deepEqualsUploadTask(lhs.toList(), rhs.toList()) }
|
||||
func hash(into hasher: inout Hasher) {
|
||||
deepHashUploadTask(value: toList(), hasher: &hasher)
|
||||
}
|
||||
}
|
||||
|
||||
/// Generated class from Pigeon that represents data sent in messages.
|
||||
struct UploadApiTaskProgress: Hashable {
|
||||
var id: String
|
||||
var progress: Double
|
||||
var speed: Double? = nil
|
||||
var totalBytes: Int64? = nil
|
||||
|
||||
|
||||
// swift-format-ignore: AlwaysUseLowerCamelCase
|
||||
static func fromList(_ pigeonVar_list: [Any?]) -> UploadApiTaskProgress? {
|
||||
let id = pigeonVar_list[0] as! String
|
||||
let progress = pigeonVar_list[1] as! Double
|
||||
let speed: Double? = nilOrValue(pigeonVar_list[2])
|
||||
let totalBytes: Int64? = nilOrValue(pigeonVar_list[3])
|
||||
|
||||
return UploadApiTaskProgress(
|
||||
id: id,
|
||||
progress: progress,
|
||||
speed: speed,
|
||||
totalBytes: totalBytes
|
||||
)
|
||||
}
|
||||
func toList() -> [Any?] {
|
||||
return [
|
||||
id,
|
||||
progress,
|
||||
speed,
|
||||
totalBytes,
|
||||
]
|
||||
}
|
||||
static func == (lhs: UploadApiTaskProgress, rhs: UploadApiTaskProgress) -> Bool {
|
||||
return deepEqualsUploadTask(lhs.toList(), rhs.toList()) }
|
||||
func hash(into hasher: inout Hasher) {
|
||||
deepHashUploadTask(value: toList(), hasher: &hasher)
|
||||
}
|
||||
}
|
||||
|
||||
private class UploadTaskPigeonCodecReader: FlutterStandardReader {
|
||||
override func readValue(ofType type: UInt8) -> Any? {
|
||||
switch type {
|
||||
case 129:
|
||||
let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?)
|
||||
if let enumResultAsInt = enumResultAsInt {
|
||||
return UploadApiErrorCode(rawValue: enumResultAsInt)
|
||||
}
|
||||
return nil
|
||||
case 130:
|
||||
let enumResultAsInt: Int? = nilOrValue(self.readValue() as! Int?)
|
||||
if let enumResultAsInt = enumResultAsInt {
|
||||
return UploadApiStatus(rawValue: enumResultAsInt)
|
||||
}
|
||||
return nil
|
||||
case 131:
|
||||
return UploadApiTaskStatus.fromList(self.readValue() as! [Any?])
|
||||
case 132:
|
||||
return UploadApiTaskProgress.fromList(self.readValue() as! [Any?])
|
||||
default:
|
||||
return super.readValue(ofType: type)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class UploadTaskPigeonCodecWriter: FlutterStandardWriter {
|
||||
override func writeValue(_ value: Any) {
|
||||
if let value = value as? UploadApiErrorCode {
|
||||
super.writeByte(129)
|
||||
super.writeValue(value.rawValue)
|
||||
} else if let value = value as? UploadApiStatus {
|
||||
super.writeByte(130)
|
||||
super.writeValue(value.rawValue)
|
||||
} else if let value = value as? UploadApiTaskStatus {
|
||||
super.writeByte(131)
|
||||
super.writeValue(value.toList())
|
||||
} else if let value = value as? UploadApiTaskProgress {
|
||||
super.writeByte(132)
|
||||
super.writeValue(value.toList())
|
||||
} else {
|
||||
super.writeValue(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class UploadTaskPigeonCodecReaderWriter: FlutterStandardReaderWriter {
|
||||
override func reader(with data: Data) -> FlutterStandardReader {
|
||||
return UploadTaskPigeonCodecReader(data: data)
|
||||
}
|
||||
|
||||
override func writer(with data: NSMutableData) -> FlutterStandardWriter {
|
||||
return UploadTaskPigeonCodecWriter(data: data)
|
||||
}
|
||||
}
|
||||
|
||||
class UploadTaskPigeonCodec: FlutterStandardMessageCodec, @unchecked Sendable {
|
||||
static let shared = UploadTaskPigeonCodec(readerWriter: UploadTaskPigeonCodecReaderWriter())
|
||||
}
|
||||
|
||||
var uploadTaskPigeonMethodCodec = FlutterStandardMethodCodec(readerWriter: UploadTaskPigeonCodecReaderWriter());
|
||||
|
||||
|
||||
/// Generated protocol from Pigeon that represents a handler of messages from Flutter.
|
||||
protocol UploadApi {
|
||||
func initialize(completion: @escaping (Result<Void, Error>) -> Void)
|
||||
func refresh(completion: @escaping (Result<Void, Error>) -> Void)
|
||||
func cancelAll(completion: @escaping (Result<Void, Error>) -> Void)
|
||||
func enqueueAssets(localIds: [String], completion: @escaping (Result<Void, Error>) -> Void)
|
||||
func enqueueFiles(paths: [String], completion: @escaping (Result<Void, Error>) -> Void)
|
||||
}
|
||||
|
||||
/// Generated setup class from Pigeon to handle messages through the `binaryMessenger`.
|
||||
class UploadApiSetup {
|
||||
static var codec: FlutterStandardMessageCodec { UploadTaskPigeonCodec.shared }
|
||||
/// Sets up an instance of `UploadApi` to handle messages through the `binaryMessenger`.
|
||||
static func setUp(binaryMessenger: FlutterBinaryMessenger, api: UploadApi?, messageChannelSuffix: String = "") {
|
||||
let channelSuffix = messageChannelSuffix.count > 0 ? ".\(messageChannelSuffix)" : ""
|
||||
let initializeChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.UploadApi.initialize\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
initializeChannel.setMessageHandler { _, reply in
|
||||
api.initialize { result in
|
||||
switch result {
|
||||
case .success:
|
||||
reply(wrapResult(nil))
|
||||
case .failure(let error):
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
initializeChannel.setMessageHandler(nil)
|
||||
}
|
||||
let refreshChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.UploadApi.refresh\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
refreshChannel.setMessageHandler { _, reply in
|
||||
api.refresh { result in
|
||||
switch result {
|
||||
case .success:
|
||||
reply(wrapResult(nil))
|
||||
case .failure(let error):
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
refreshChannel.setMessageHandler(nil)
|
||||
}
|
||||
let cancelAllChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.UploadApi.cancelAll\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
cancelAllChannel.setMessageHandler { _, reply in
|
||||
api.cancelAll { result in
|
||||
switch result {
|
||||
case .success:
|
||||
reply(wrapResult(nil))
|
||||
case .failure(let error):
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
cancelAllChannel.setMessageHandler(nil)
|
||||
}
|
||||
let enqueueAssetsChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.UploadApi.enqueueAssets\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
enqueueAssetsChannel.setMessageHandler { message, reply in
|
||||
let args = message as! [Any?]
|
||||
let localIdsArg = args[0] as! [String]
|
||||
api.enqueueAssets(localIds: localIdsArg) { result in
|
||||
switch result {
|
||||
case .success:
|
||||
reply(wrapResult(nil))
|
||||
case .failure(let error):
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
enqueueAssetsChannel.setMessageHandler(nil)
|
||||
}
|
||||
let enqueueFilesChannel = FlutterBasicMessageChannel(name: "dev.flutter.pigeon.immich_mobile.UploadApi.enqueueFiles\(channelSuffix)", binaryMessenger: binaryMessenger, codec: codec)
|
||||
if let api = api {
|
||||
enqueueFilesChannel.setMessageHandler { message, reply in
|
||||
let args = message as! [Any?]
|
||||
let pathsArg = args[0] as! [String]
|
||||
api.enqueueFiles(paths: pathsArg) { result in
|
||||
switch result {
|
||||
case .success:
|
||||
reply(wrapResult(nil))
|
||||
case .failure(let error):
|
||||
reply(wrapError(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
enqueueFilesChannel.setMessageHandler(nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class PigeonStreamHandler<ReturnType>: NSObject, FlutterStreamHandler {
|
||||
private let wrapper: PigeonEventChannelWrapper<ReturnType>
|
||||
private var pigeonSink: PigeonEventSink<ReturnType>? = nil
|
||||
|
||||
init(wrapper: PigeonEventChannelWrapper<ReturnType>) {
|
||||
self.wrapper = wrapper
|
||||
}
|
||||
|
||||
func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink)
|
||||
-> FlutterError?
|
||||
{
|
||||
pigeonSink = PigeonEventSink<ReturnType>(events)
|
||||
wrapper.onListen(withArguments: arguments, sink: pigeonSink!)
|
||||
return nil
|
||||
}
|
||||
|
||||
func onCancel(withArguments arguments: Any?) -> FlutterError? {
|
||||
pigeonSink = nil
|
||||
wrapper.onCancel(withArguments: arguments)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
class PigeonEventChannelWrapper<ReturnType> {
|
||||
func onListen(withArguments arguments: Any?, sink: PigeonEventSink<ReturnType>) {}
|
||||
func onCancel(withArguments arguments: Any?) {}
|
||||
}
|
||||
|
||||
class PigeonEventSink<ReturnType> {
|
||||
private let sink: FlutterEventSink
|
||||
|
||||
init(_ sink: @escaping FlutterEventSink) {
|
||||
self.sink = sink
|
||||
}
|
||||
|
||||
func success(_ value: ReturnType) {
|
||||
sink(value)
|
||||
}
|
||||
|
||||
func error(code: String, message: String?, details: Any?) {
|
||||
sink(FlutterError(code: code, message: message, details: details))
|
||||
}
|
||||
|
||||
func endOfStream() {
|
||||
sink(FlutterEndOfEventStream)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class StreamStatusStreamHandler: PigeonEventChannelWrapper<UploadApiTaskStatus> {
|
||||
static func register(with messenger: FlutterBinaryMessenger,
|
||||
instanceName: String = "",
|
||||
streamHandler: StreamStatusStreamHandler) {
|
||||
var channelName = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamStatus"
|
||||
if !instanceName.isEmpty {
|
||||
channelName += ".\(instanceName)"
|
||||
}
|
||||
let internalStreamHandler = PigeonStreamHandler<UploadApiTaskStatus>(wrapper: streamHandler)
|
||||
let channel = FlutterEventChannel(name: channelName, binaryMessenger: messenger, codec: uploadTaskPigeonMethodCodec)
|
||||
channel.setStreamHandler(internalStreamHandler)
|
||||
}
|
||||
}
|
||||
|
||||
class StreamProgressStreamHandler: PigeonEventChannelWrapper<UploadApiTaskProgress> {
|
||||
static func register(with messenger: FlutterBinaryMessenger,
|
||||
instanceName: String = "",
|
||||
streamHandler: StreamProgressStreamHandler) {
|
||||
var channelName = "dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamProgress"
|
||||
if !instanceName.isEmpty {
|
||||
channelName += ".\(instanceName)"
|
||||
}
|
||||
let internalStreamHandler = PigeonStreamHandler<UploadApiTaskProgress>(wrapper: streamHandler)
|
||||
let channel = FlutterEventChannel(name: channelName, binaryMessenger: messenger, codec: uploadTaskPigeonMethodCodec)
|
||||
channel.setStreamHandler(internalStreamHandler)
|
||||
}
|
||||
}
|
||||
|
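For orientation, here is a minimal Dart-side sketch of driving the UploadApi host API whose Pigeon channels (initialize, refresh, cancelAll, enqueueAssets, enqueueFiles) are registered in the Swift code above. The UploadApi class and method names follow the channel names in this diff; the import path and the calling context are assumptions for illustration, not code from this change.

// Hedged sketch: exercises the Pigeon-generated UploadApi host bindings.
// The import path below is an assumption; only the method names are taken
// from the channel registrations above.
import 'package:immich_mobile/platform/upload_api.g.dart';

Future<void> startNativeBackup(UploadApi uploadApi) async {
  // One-time setup: lets the native side reconcile its queue with URLSession.
  await uploadApi.initialize();
  // Kick (or re-kick) the native backup loop; safe to call repeatedly.
  await uploadApi.refresh();
}

Future<void> enqueuePicked(UploadApi uploadApi, List<String> assetIds) {
  // Hand asset identifiers to the native upload queue.
  return uploadApi.enqueueAssets(assetIds);
}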
||||
271
mobile/ios/Runner/Upload/UploadTask.swift
Normal file
@@ -0,0 +1,271 @@
|
||||
import SQLiteData
|
||||
import StructuredFieldValues
|
||||
|
||||
extension FileHandle {
|
||||
static func createOrOverwrite(atPath path: String) throws -> FileHandle {
|
||||
let fd = open(path, O_WRONLY | O_CREAT | O_TRUNC, 0o644)
|
||||
guard fd >= 0 else {
|
||||
throw NSError(domain: NSPOSIXErrorDomain, code: Int(errno))
|
||||
}
|
||||
return FileHandle(fileDescriptor: fd, closeOnDealloc: true)
|
||||
}
|
||||
}
|
||||
|
||||
class UploadApiImpl: ImmichPlugin, UploadApi {
|
||||
private let db: DatabasePool
|
||||
private let downloadQueue: DownloadQueue
|
||||
private let uploadQueue: UploadQueue
|
||||
|
||||
private var isInitialized = false
|
||||
private let initLock = NSLock()
|
||||
|
||||
private var backupTask: Task<Void, Never>?
|
||||
private let backupLock = NSLock()
|
||||
|
||||
private let cellularSession: URLSession
|
||||
private let wifiOnlySession: URLSession
|
||||
|
||||
init(statusListener: StatusEventListener, progressListener: ProgressEventListener) {
|
||||
let dbUrl = try! FileManager.default.url(
|
||||
for: .documentDirectory,
|
||||
in: .userDomainMask,
|
||||
appropriateFor: nil,
|
||||
create: true
|
||||
).appendingPathComponent("immich.sqlite")
|
||||
|
||||
self.db = try! DatabasePool(path: dbUrl.path)
|
||||
let cellularConfig = URLSessionConfiguration.background(withIdentifier: "\(TaskConfig.sessionId).cellular")
|
||||
cellularConfig.allowsCellularAccess = true
|
||||
cellularConfig.waitsForConnectivity = true
|
||||
let delegate = UploadApiDelegate(db: db, statusListener: statusListener, progressListener: progressListener)
|
||||
self.cellularSession = URLSession(configuration: cellularConfig, delegate: delegate, delegateQueue: nil)
|
||||
|
||||
let wifiOnlyConfig = URLSessionConfiguration.background(withIdentifier: "\(TaskConfig.sessionId).wifi")
|
||||
wifiOnlyConfig.allowsCellularAccess = false
|
||||
wifiOnlyConfig.waitsForConnectivity = true
|
||||
self.wifiOnlySession = URLSession(configuration: wifiOnlyConfig, delegate: delegate, delegateQueue: nil)
|
||||
|
||||
self.uploadQueue = UploadQueue(
|
||||
db: db,
|
||||
cellularSession: cellularSession,
|
||||
wifiOnlySession: wifiOnlySession,
|
||||
statusListener: statusListener
|
||||
)
|
||||
self.downloadQueue = DownloadQueue(
|
||||
db: db,
|
||||
uploadQueue: uploadQueue,
|
||||
statusListener: statusListener,
|
||||
progressListener: progressListener
|
||||
)
|
||||
delegate.downloadQueue = downloadQueue
|
||||
delegate.uploadQueue = uploadQueue
|
||||
}
|
||||
|
||||
func initialize(completion: @escaping (Result<Void, any Error>) -> Void) {
|
||||
Task(priority: .high) {
|
||||
do {
|
||||
async let dbIds = db.read { conn in
|
||||
try UploadTask.select(\.id).where { $0.status.eq(TaskStatus.uploadQueued) }.fetchAll(conn)
|
||||
}
|
||||
async let cellularTasks = cellularSession.allTasks
|
||||
async let wifiTasks = wifiOnlySession.allTasks
|
||||
|
||||
var dbTaskIds = Set(try await dbIds)
|
||||
func validateTasks(_ tasks: [URLSessionTask]) {
|
||||
for task in tasks {
|
||||
if let taskIdStr = task.taskDescription, let taskId = Int64(taskIdStr), task.state != .canceling {
|
||||
dbTaskIds.remove(taskId)
|
||||
} else {
|
||||
task.cancel()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
validateTasks(await cellularTasks)
|
||||
validateTasks(await wifiTasks)
|
||||
|
||||
let orphanIds = Array(dbTaskIds)
|
||||
try await db.write { conn in
|
||||
try UploadTask.update {
|
||||
$0.filePath = nil
|
||||
$0.status = .downloadPending
|
||||
}
|
||||
.where { row in row.status.in([TaskStatus.downloadQueued, TaskStatus.uploadPending]) || row.id.in(orphanIds) }
|
||||
.execute(conn)
|
||||
}
|
||||
|
||||
try? FileManager.default.removeItem(at: TaskConfig.originalsDir)
|
||||
initLock.withLock { isInitialized = true }
|
||||
startBackup()
|
||||
self.completeWhenActive(for: completion, with: .success(()))
|
||||
} catch {
|
||||
self.completeWhenActive(for: completion, with: .failure(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func refresh(completion: @escaping (Result<Void, any Error>) -> Void) {
|
||||
Task {
|
||||
startBackup()
|
||||
self.completeWhenActive(for: completion, with: .success(()))
|
||||
}
|
||||
}
|
||||
|
||||
func startBackup() {
|
||||
dPrint("Starting backup task")
|
||||
guard (initLock.withLock { isInitialized }) else { return dPrint("Not initialized, skipping backup") }
|
||||
|
||||
backupLock.withLock {
|
||||
guard backupTask == nil else { return dPrint("Backup task already running") }
|
||||
backupTask = Task {
|
||||
await _startBackup()
|
||||
backupLock.withLock { backupTask = nil }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func cancelAll(completion: @escaping (Result<Void, any Error>) -> Void) {
|
||||
Task {
|
||||
async let cellularTasks = cellularSession.allTasks
|
||||
async let wifiTasks = wifiOnlySession.allTasks
|
||||
|
||||
cancelSessionTasks(await cellularTasks)
|
||||
cancelSessionTasks(await wifiTasks)
|
||||
self.completeWhenActive(for: completion, with: .success(()))
|
||||
}
|
||||
}
|
||||
|
||||
func enqueueAssets(localIds: [String], completion: @escaping (Result<Void, any Error>) -> Void) {
|
||||
Task {
|
||||
do {
|
||||
try await downloadQueue.enqueueAssets(localIds: localIds)
|
||||
completion(.success(()))
|
||||
} catch {
|
||||
completion(.failure(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func enqueueFiles(paths: [String], completion: @escaping (Result<Void, any Error>) -> Void) {
|
||||
Task {
|
||||
do {
|
||||
try await uploadQueue.enqueueFiles(paths: paths)
|
||||
completion(.success(()))
|
||||
} catch {
|
||||
completion(.failure(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func cancelSessionTasks(_ tasks: [URLSessionTask]) {
|
||||
dPrint("Canceling \(tasks.count) tasks")
|
||||
for task in tasks {
|
||||
task.cancel()
|
||||
}
|
||||
}
|
||||
|
||||
private func _startBackup() async {
|
||||
defer { downloadQueue.startQueueProcessing() }
|
||||
do {
|
||||
let candidates = try await db.read { conn in
|
||||
return try LocalAsset.getCandidates()
|
||||
.where { asset in !UploadTask.where { task in task.localId.eq(asset.id) }.exists() }
|
||||
.select { LocalAssetCandidate.Columns(id: $0.id, type: $0.type) }
|
||||
.limit { _ in UploadTaskStat.availableSlots }
|
||||
.fetchAll(conn)
|
||||
}
|
||||
|
||||
guard !candidates.isEmpty else { return dPrint("No candidates for backup") }
|
||||
|
||||
try await db.write { conn in
|
||||
var draft = UploadTask.Draft(
|
||||
attempts: 0,
|
||||
createdAt: Date(),
|
||||
filePath: nil,
|
||||
isLivePhoto: nil,
|
||||
lastError: nil,
|
||||
livePhotoVideoId: nil,
|
||||
localId: "",
|
||||
method: .multipart,
|
||||
priority: 0.5,
|
||||
retryAfter: nil,
|
||||
status: .downloadPending
|
||||
)
|
||||
for candidate in candidates {
|
||||
draft.localId = candidate.id
|
||||
draft.priority = candidate.type == .image ? 0.5 : 0.3
|
||||
try UploadTask.insert {
|
||||
draft
|
||||
} onConflict: {
|
||||
($0.localId, $0.livePhotoVideoId)
|
||||
}
|
||||
.execute(conn)
|
||||
}
|
||||
}
|
||||
dPrint("Backup enqueued \(candidates.count) assets for upload")
|
||||
} catch {
|
||||
print("Backup queue error: \(error)")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct AssetData: StructuredFieldValue {
|
||||
static let structuredFieldType: StructuredFieldType = .dictionary
|
||||
|
||||
let deviceAssetId: String
|
||||
let deviceId: String
|
||||
let fileCreatedAt: String
|
||||
let fileModifiedAt: String
|
||||
let fileName: String
|
||||
let isFavorite: Bool
|
||||
let livePhotoVideoId: String?
|
||||
|
||||
static let boundary = "Boundary-\(UUID().uuidString)"
|
||||
static let deviceAssetIdField = "--\(boundary)\r\nContent-Disposition: form-data; name=\"deviceAssetId\"\r\n\r\n"
|
||||
.data(using: .utf8)!
|
||||
static let deviceIdField = "\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"deviceId\"\r\n\r\n"
|
||||
.data(using: .utf8)!
|
||||
static let fileCreatedAtField =
|
||||
"\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"fileCreatedAt\"\r\n\r\n"
|
||||
.data(using: .utf8)!
|
||||
static let fileModifiedAtField =
|
||||
"\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"fileModifiedAt\"\r\n\r\n"
|
||||
.data(using: .utf8)!
|
||||
static let isFavoriteField = "\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"isFavorite\"\r\n\r\n"
|
||||
.data(using: .utf8)!
|
||||
static let livePhotoVideoIdField =
|
||||
"\r\n--\(boundary)\r\nContent-Disposition: form-data; name=\"livePhotoVideoId\"\r\n\r\n"
|
||||
.data(using: .utf8)!
|
||||
static let trueData = "true".data(using: .utf8)!
|
||||
static let falseData = "false".data(using: .utf8)!
|
||||
static let footer = "\r\n--\(boundary)--\r\n".data(using: .utf8)!
|
||||
static let contentType = "multipart/form-data; boundary=\(boundary)"
|
||||
|
||||
func multipart() -> (Data, Data) {
|
||||
var header = Data()
|
||||
header.append(Self.deviceAssetIdField)
|
||||
header.append(deviceAssetId.data(using: .utf8)!)
|
||||
|
||||
header.append(Self.deviceIdField)
|
||||
header.append(deviceId.data(using: .utf8)!)
|
||||
|
||||
header.append(Self.fileCreatedAtField)
|
||||
header.append(fileCreatedAt.data(using: .utf8)!)
|
||||
|
||||
header.append(Self.fileModifiedAtField)
|
||||
header.append(fileModifiedAt.data(using: .utf8)!)
|
||||
|
||||
header.append(Self.isFavoriteField)
|
||||
header.append(isFavorite ? Self.trueData : Self.falseData)
|
||||
|
||||
if let livePhotoVideoId {
|
||||
header.append(Self.livePhotoVideoIdField)
|
||||
header.append(livePhotoVideoId.data(using: .utf8)!)
|
||||
}
|
||||
header.append(
|
||||
"\r\n--\(Self.boundary)\r\nContent-Disposition: form-data; name=\"assetData\"; filename=\"\(fileName)\"\r\nContent-Type: application/octet-stream\r\n\r\n"
|
||||
.data(using: .utf8)!
|
||||
)
|
||||
return (header, Self.footer)
|
||||
}
|
||||
}
|
||||
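AssetData.multipart() above pre-builds the constant multipart header and footer chunks so the file bytes can be streamed between them. As a hedged reference for the same wire format, here is a Dart sketch that produces an equivalent request with package:http; the endpoint path and auth header are placeholders, not taken from this diff, while the field names mirror the Swift struct.

import 'package:http/http.dart' as http;

// Hedged sketch: same form fields as AssetData.multipart(), assembled by
// package:http instead of hand-built boundary chunks. Endpoint and auth
// header are illustrative placeholders.
Future<int> uploadAsset(
  Uri endpoint,
  String apiKey,
  String filePath, {
  required String deviceAssetId,
  required String deviceId,
  required DateTime createdAt,
  required DateTime modifiedAt,
  bool isFavorite = false,
  String? livePhotoVideoId,
}) async {
  final request = http.MultipartRequest('POST', endpoint)
    ..headers['x-api-key'] = apiKey
    ..fields['deviceAssetId'] = deviceAssetId
    ..fields['deviceId'] = deviceId
    ..fields['fileCreatedAt'] = createdAt.toUtc().toIso8601String()
    ..fields['fileModifiedAt'] = modifiedAt.toUtc().toIso8601String()
    ..fields['isFavorite'] = isFavorite ? 'true' : 'false';
  if (livePhotoVideoId != null) {
    request.fields['livePhotoVideoId'] = livePhotoVideoId;
  }
  // The binary payload travels in the 'assetData' part, between the header
  // and footer chunks the Swift code precomputes.
  request.files.add(await http.MultipartFile.fromPath('assetData', filePath));
  final response = await request.send();
  return response.statusCode;
}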
@@ -1,35 +1,35 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
<dict>
|
||||
<key>AppGroupId</key>
|
||||
<string>$(CUSTOM_GROUP_ID)</string>
|
||||
<key>NSExtension</key>
|
||||
<dict>
|
||||
<key>NSExtensionAttributes</key>
|
||||
<dict>
|
||||
<key>IntentsSupported</key>
|
||||
<array>
|
||||
<string>INSendMessageIntent</string>
|
||||
</array>
|
||||
<key>NSExtensionActivationRule</key>
|
||||
<string>SUBQUERY ( extensionItems, $extensionItem, SUBQUERY ( $extensionItem.attachments,
|
||||
$attachment, ( ANY $attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.file-url"
|
||||
|| ANY $attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.image" || ANY
|
||||
$attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.text" || ANY
|
||||
$attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.movie" || ANY
|
||||
$attachment.registeredTypeIdentifiers UTI-CONFORMS-TO "public.url" ) ).@count > 0
|
||||
).@count > 0 </string>
|
||||
<key>PHSupportedMediaTypes</key>
|
||||
<array>
|
||||
<string>Video</string>
|
||||
<string>Image</string>
|
||||
</array>
|
||||
</dict>
|
||||
<key>NSExtensionMainStoryboard</key>
|
||||
<string>MainInterface</string>
|
||||
<key>NSExtensionPointIdentifier</key>
|
||||
<string>com.apple.share-services</string>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
<key>PHSupportedMediaTypes</key>
|
||||
<array>
|
||||
<string>Video</string>
|
||||
<string>Image</string>
|
||||
</array>
|
||||
</dict>
|
||||
<key>NSExtensionMainStoryboard</key>
|
||||
<string>MainInterface</string>
|
||||
<key>NSExtensionPointIdentifier</key>
|
||||
<string>com.apple.share-services</string>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.application-groups</key>
|
||||
<array>
|
||||
<string>group.app.immich.share</string>
|
||||
</array>
|
||||
<array/>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.application-groups</key>
|
||||
<array>
|
||||
<string>group.app.immich.share</string>
|
||||
</array>
|
||||
<array/>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
@@ -1,15 +1,11 @@
|
||||
import 'dart:async';
|
||||
import 'dart:io';
|
||||
import 'dart:ui';
|
||||
|
||||
import 'package:background_downloader/background_downloader.dart';
|
||||
import 'package:cancellation_token_http/http.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/domain/services/log.service.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/extensions/network_capability_extensions.dart';
|
||||
import 'package:immich_mobile/extensions/platform_extensions.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/logger_db.repository.dart';
|
||||
@@ -17,16 +13,13 @@ import 'package:immich_mobile/platform/background_worker_api.g.dart';
|
||||
import 'package:immich_mobile/platform/background_worker_lock_api.g.dart';
|
||||
import 'package:immich_mobile/providers/app_settings.provider.dart';
|
||||
import 'package:immich_mobile/providers/background_sync.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
|
||||
import 'package:immich_mobile/providers/db.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
|
||||
import 'package:immich_mobile/providers/user.provider.dart';
|
||||
import 'package:immich_mobile/repositories/file_media.repository.dart';
|
||||
import 'package:immich_mobile/services/app_settings.service.dart';
|
||||
import 'package:immich_mobile/services/auth.service.dart';
|
||||
import 'package:immich_mobile/services/localization.service.dart';
|
||||
import 'package:immich_mobile/services/upload.service.dart';
|
||||
import 'package:immich_mobile/utils/bootstrap.dart';
|
||||
import 'package:immich_mobile/utils/debug_print.dart';
|
||||
import 'package:immich_mobile/utils/http_ssl_options.dart';
|
||||
@@ -96,23 +89,10 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
loadTranslations(),
|
||||
workerManagerPatch.init(dynamicSpawning: true),
|
||||
_ref?.read(authServiceProvider).setOpenApiServiceEndpoint(),
|
||||
// Initialize the file downloader
|
||||
FileDownloader().configure(
|
||||
globalConfig: [
|
||||
// maxConcurrent: 6, maxConcurrentByHost(server):6, maxConcurrentByGroup: 3
|
||||
(Config.holdingQueue, (6, 6, 3)),
|
||||
// On Android, if files are larger than 256MB, run in foreground service
|
||||
(Config.runInForegroundIfFileLargerThan, 256),
|
||||
],
|
||||
),
|
||||
FileDownloader().trackTasksInGroup(kDownloadGroupLivePhoto, markDownloadedComplete: false),
|
||||
FileDownloader().trackTasks(),
|
||||
_ref?.read(fileMediaRepositoryProvider).enableBackgroundAccess(),
|
||||
].nonNulls,
|
||||
);
|
||||
|
||||
configureFileDownloaderNotifications();
|
||||
|
||||
// Notify the host that the background worker service has been initialized and is ready to use
|
||||
unawaited(_backgroundHostApi.onInitialized());
|
||||
} catch (error, stack) {
|
||||
@@ -130,7 +110,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
_logger.warning("Remote sync did not complete successfully, skipping backup");
|
||||
return;
|
||||
}
|
||||
await _handleBackup();
|
||||
await uploadApi.refresh();
|
||||
} catch (error, stack) {
|
||||
_logger.severe("Failed to complete Android background processing", error, stack);
|
||||
} finally {
|
||||
@@ -150,13 +130,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
_logger.warning("Remote sync did not complete successfully, skipping backup");
|
||||
return;
|
||||
}
|
||||
|
||||
final backupFuture = _handleBackup();
|
||||
if (maxSeconds != null) {
|
||||
await backupFuture.timeout(Duration(seconds: maxSeconds - 1), onTimeout: () {});
|
||||
} else {
|
||||
await backupFuture;
|
||||
}
|
||||
await uploadApi.refresh();
|
||||
} catch (error, stack) {
|
||||
_logger.severe("Failed to complete iOS background upload", error, stack);
|
||||
} finally {
|
||||
@@ -214,39 +188,6 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> _handleBackup() async {
|
||||
await runZonedGuarded(
|
||||
() async {
|
||||
if (_isCleanedUp) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!_isBackupEnabled) {
|
||||
_logger.info("Backup is disabled. Skipping backup routine");
|
||||
return;
|
||||
}
|
||||
|
||||
final currentUser = _ref?.read(currentUserProvider);
|
||||
if (currentUser == null) {
|
||||
_logger.warning("No current user found. Skipping backup from background");
|
||||
return;
|
||||
}
|
||||
|
||||
if (Platform.isIOS) {
|
||||
return _ref?.read(driftBackupProvider.notifier).handleBackupResume(currentUser.id);
|
||||
}
|
||||
|
||||
final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
|
||||
return _ref
|
||||
?.read(uploadServiceProvider)
|
||||
.startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
|
||||
},
|
||||
(error, stack) {
|
||||
dPrint(() => "Error in backup zone $error, $stack");
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
Future<bool> _syncAssets({Duration? hashTimeout}) async {
|
||||
await _ref?.read(backgroundSyncProvider).syncLocal();
|
||||
if (_isCleanedUp) {
|
||||
|
||||
23
mobile/lib/infrastructure/entities/upload_task.entity.dart
Normal file
@@ -0,0 +1,23 @@
import 'package:drift/drift.dart' hide Index;

@TableIndex.sql('CREATE INDEX IF NOT EXISTS idx_upload_tasks_local_id ON upload_task_entity(local_id);')
@TableIndex.sql('CREATE INDEX idx_upload_tasks_asset_data ON upload_task_entity(status, priority DESC, created_at);')
class UploadTaskEntity extends Table {
  const UploadTaskEntity();

  IntColumn get id => integer().autoIncrement()();
  IntColumn get attempts => integer()();
  DateTimeColumn get createdAt => dateTime()();
  TextColumn get filePath => text()();
  BoolColumn get isLivePhoto => boolean().nullable()();
  IntColumn get lastError => integer().nullable()();
  TextColumn get livePhotoVideoId => text().nullable()();
  TextColumn get localId => text()();
  IntColumn get method => integer()();
  RealColumn get priority => real()();
  DateTimeColumn get retryAfter => dateTime().nullable()();
  IntColumn get status => integer()();

  @override
  bool get isStrict => true;
}
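The status column stores a bare integer; the running-totals triggers in upload_tasks.drift further down count rows by comparing it against the values 0 through 7. A hedged Dart sketch of that mapping, where the bucket-to-value pairing is taken from the trigger arithmetic but the enum and member names are illustrative assumptions:

// Hedged sketch: integer codes implied by the stats triggers below.
// Names are assumptions; the 0..7 ordering comes from the trigger SQL.
enum UploadTaskStatus {
  downloadPending, // 0 -> pending_downloads
  downloadQueued, // 1 -> queued_downloads
  downloadFailed, // 2 -> failed_downloads
  uploadPending, // 3 -> pending_uploads
  uploadQueued, // 4 -> queued_uploads
  uploadFailed, // 5 -> failed_uploads
  uploadCompleted, // 6 -> completed_uploads
  uploadSkipped, // 7 -> skipped_uploads
}

// Dart enum indices are stable and 0-based, so `status.index` round-trips
// with the integer column under the declaration order assumed above.
int encodeStatus(UploadTaskStatus s) => s.index;
UploadTaskStatus decodeStatus(int raw) => UploadTaskStatus.values[raw];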
||||
1063
mobile/lib/infrastructure/entities/upload_task.entity.drift.dart
generated
Normal file
File diff suppressed because it is too large
78
mobile/lib/infrastructure/entities/upload_tasks.drift
Normal file
@@ -0,0 +1,78 @@
CREATE TABLE upload_tasks
(
    id integer primary key autoincrement,
    attempts integer not null,
    created_at integer not null,
    file_path text,
    is_live_photo integer,
    last_error integer,
    live_photo_video_id text,
    local_id text not null,
    method integer not null,
    priority real not null,
    retry_after integer,
    status integer not null
);

CREATE TABLE upload_task_stats
(
    pending_downloads integer not null,
    pending_uploads integer not null,
    queued_downloads integer not null,
    queued_uploads integer not null,
    failed_downloads integer not null,
    failed_uploads integer not null,
    completed_uploads integer not null,
    skipped_uploads integer not null
);

CREATE TRIGGER update_stats_insert
    BEFORE INSERT
    ON upload_tasks
BEGIN
    UPDATE upload_task_stats
    SET pending_downloads = pending_downloads + (NEW.status = 0),
        queued_downloads = queued_downloads + (NEW.status = 1),
        failed_downloads = failed_downloads + (NEW.status = 2),
        pending_uploads = pending_uploads + (NEW.status = 3),
        queued_uploads = queued_uploads + (NEW.status = 4),
        failed_uploads = failed_uploads + (NEW.status = 5),
        completed_uploads = completed_uploads + (NEW.status = 6),
        skipped_uploads = skipped_uploads + (NEW.status = 7);
END;

CREATE TRIGGER update_stats_update
    BEFORE UPDATE OF status
    ON upload_tasks
    WHEN OLD.status != NEW.status
BEGIN
    UPDATE upload_task_stats
    SET pending_downloads = pending_downloads - (OLD.status = 0) + (NEW.status = 0),
        queued_downloads = queued_downloads - (OLD.status = 1) + (NEW.status = 1),
        failed_downloads = failed_downloads - (OLD.status = 2) + (NEW.status = 2),
        pending_uploads = pending_uploads - (OLD.status = 3) + (NEW.status = 3),
        queued_uploads = queued_uploads - (OLD.status = 4) + (NEW.status = 4),
        failed_uploads = failed_uploads - (OLD.status = 5) + (NEW.status = 5),
        completed_uploads = completed_uploads - (OLD.status = 6) + (NEW.status = 6),
        skipped_uploads = skipped_uploads - (OLD.status = 7) + (NEW.status = 7);
END;

CREATE TRIGGER update_stats_delete
    BEFORE DELETE
    ON upload_tasks
BEGIN
    UPDATE upload_task_stats
    SET pending_downloads = pending_downloads - (OLD.status = 0),
        queued_downloads = queued_downloads - (OLD.status = 1),
        failed_downloads = failed_downloads - (OLD.status = 2),
        pending_uploads = pending_uploads - (OLD.status = 3),
        queued_uploads = queued_uploads - (OLD.status = 4),
        failed_uploads = failed_uploads - (OLD.status = 5),
        completed_uploads = completed_uploads - (OLD.status = 6),
        skipped_uploads = skipped_uploads - (OLD.status = 7);
END;

CREATE UNIQUE INDEX idx_upload_tasks_local_id ON upload_tasks (local_id, live_photo_video_id);
CREATE INDEX idx_upload_tasks_asset_data ON upload_tasks (status, priority DESC, created_at);

@create: INSERT INTO upload_task_stats VALUES (0, 0, 0, 0, 0, 0, 0, 0);
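Because the three triggers keep upload_task_stats as a single row of running totals, progress counters can be read without scanning upload_tasks. A hedged drift sketch of a one-shot read follows; it only assumes a database that includes this .drift file and the single row seeded by the @create statement.

import 'package:drift/drift.dart';

// Hedged sketch: read the trigger-maintained counters in one query.
Future<Map<String, int>> readUploadStats(GeneratedDatabase db) async {
  // @create above guarantees exactly one row, seeded with zeros.
  final row = await db.customSelect('SELECT * FROM upload_task_stats').getSingle();
  return {for (final column in row.data.keys) column: row.read<int>(column)};
}

For a live counter, the same query can be passed readsFrom: {uploadTaskStats} (the table accessor generated for this file) and watched, so drift re-emits whenever the triggers update the row.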
||||
1888
mobile/lib/infrastructure/entities/upload_tasks.drift.dart
generated
Normal file
File diff suppressed because it is too large
@@ -21,6 +21,7 @@ import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.d
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/stack.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/upload_tasks.drift.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/user.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/db.repository.steps.dart';
|
||||
@@ -65,7 +66,10 @@ class IsarDatabaseRepository implements IDatabaseRepository {
|
||||
StoreEntity,
|
||||
TrashedLocalAssetEntity,
|
||||
],
|
||||
include: {'package:immich_mobile/infrastructure/entities/merged_asset.drift'},
|
||||
include: {
|
||||
'package:immich_mobile/infrastructure/entities/merged_asset.drift',
|
||||
'package:immich_mobile/infrastructure/entities/upload_tasks.drift',
|
||||
},
|
||||
)
|
||||
class Drift extends $Drift implements IDatabaseRepository {
|
||||
Drift([QueryExecutor? executor])
|
||||
@@ -95,7 +99,7 @@ class Drift extends $Drift implements IDatabaseRepository {
|
||||
}
|
||||
|
||||
@override
|
||||
int get schemaVersion => 13;
|
||||
int get schemaVersion => 14;
|
||||
|
||||
@override
|
||||
MigrationStrategy get migration => MigrationStrategy(
|
||||
@@ -185,6 +189,16 @@ class Drift extends $Drift implements IDatabaseRepository {
|
||||
await m.createIndex(v13.idxTrashedLocalAssetChecksum);
|
||||
await m.createIndex(v13.idxTrashedLocalAssetAlbum);
|
||||
},
|
||||
from13To14: (m, v14) async {
|
||||
await m.createTable(UploadTasks(m.database));
|
||||
await m.createTable(UploadTaskStats(m.database));
|
||||
await m.create($drift0);
|
||||
await m.createTrigger(updateStatsInsert);
|
||||
await m.createTrigger(updateStatsUpdate);
|
||||
await m.createTrigger(updateStatsDelete);
|
||||
await m.createIndex(idxUploadTasksLocalId);
|
||||
await m.createIndex(idxUploadTasksAssetData);
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
|
||||
@@ -1,94 +1,109 @@
|
||||
// dart format width=80
|
||||
// ignore_for_file: type=lint
|
||||
import 'package:drift/drift.dart' as i0;
|
||||
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/upload_tasks.drift.dart'
|
||||
as i1;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/user.entity.drift.dart'
|
||||
as i2;
|
||||
import 'package:immich_mobile/infrastructure/entities/stack.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_asset.entity.drift.dart'
|
||||
as i3;
|
||||
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/stack.entity.drift.dart'
|
||||
as i4;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart'
|
||||
as i5;
|
||||
import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album.entity.drift.dart'
|
||||
as i6;
|
||||
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/local_album.entity.drift.dart'
|
||||
as i7;
|
||||
import 'package:immich_mobile/infrastructure/entities/auth_user.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.drift.dart'
|
||||
as i8;
|
||||
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/auth_user.entity.drift.dart'
|
||||
as i9;
|
||||
import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.drift.dart'
|
||||
as i10;
|
||||
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart'
|
||||
as i11;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
|
||||
as i12;
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
|
||||
as i13;
|
||||
import 'package:immich_mobile/infrastructure/entities/memory.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
|
||||
as i14;
|
||||
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/memory.entity.drift.dart'
|
||||
as i15;
|
||||
import 'package:immich_mobile/infrastructure/entities/person.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.drift.dart'
|
||||
as i16;
|
||||
import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/person.entity.drift.dart'
|
||||
as i17;
|
||||
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.dart'
|
||||
as i18;
|
||||
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
|
||||
as i19;
|
||||
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
|
||||
import 'package:immich_mobile/infrastructure/entities/trashed_local_asset.entity.drift.dart'
|
||||
as i20;
|
||||
import 'package:drift/internal/modular.dart' as i21;
|
||||
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
|
||||
as i21;
|
||||
import 'package:drift/internal/modular.dart' as i22;
|
||||
|
||||
abstract class $Drift extends i0.GeneratedDatabase {
|
||||
$Drift(i0.QueryExecutor e) : super(e);
|
||||
$DriftManager get managers => $DriftManager(this);
|
||||
late final i1.$UserEntityTable userEntity = i1.$UserEntityTable(this);
|
||||
late final i2.$RemoteAssetEntityTable remoteAssetEntity = i2
|
||||
late final i1.UploadTasks uploadTasks = i1.UploadTasks(this);
|
||||
late final i1.UploadTaskStats uploadTaskStats = i1.UploadTaskStats(this);
|
||||
late final i2.$UserEntityTable userEntity = i2.$UserEntityTable(this);
|
||||
late final i3.$RemoteAssetEntityTable remoteAssetEntity = i3
|
||||
.$RemoteAssetEntityTable(this);
|
||||
late final i3.$StackEntityTable stackEntity = i3.$StackEntityTable(this);
|
||||
late final i4.$LocalAssetEntityTable localAssetEntity = i4
|
||||
late final i4.$StackEntityTable stackEntity = i4.$StackEntityTable(this);
|
||||
late final i5.$LocalAssetEntityTable localAssetEntity = i5
|
||||
.$LocalAssetEntityTable(this);
|
||||
late final i5.$RemoteAlbumEntityTable remoteAlbumEntity = i5
|
||||
late final i6.$RemoteAlbumEntityTable remoteAlbumEntity = i6
|
||||
.$RemoteAlbumEntityTable(this);
|
||||
late final i6.$LocalAlbumEntityTable localAlbumEntity = i6
|
||||
late final i7.$LocalAlbumEntityTable localAlbumEntity = i7
|
||||
.$LocalAlbumEntityTable(this);
|
||||
late final i7.$LocalAlbumAssetEntityTable localAlbumAssetEntity = i7
|
||||
late final i8.$LocalAlbumAssetEntityTable localAlbumAssetEntity = i8
|
||||
.$LocalAlbumAssetEntityTable(this);
|
||||
late final i8.$AuthUserEntityTable authUserEntity = i8.$AuthUserEntityTable(
|
||||
late final i9.$AuthUserEntityTable authUserEntity = i9.$AuthUserEntityTable(
|
||||
this,
|
||||
);
|
||||
late final i9.$UserMetadataEntityTable userMetadataEntity = i9
|
||||
late final i10.$UserMetadataEntityTable userMetadataEntity = i10
|
||||
.$UserMetadataEntityTable(this);
|
||||
late final i10.$PartnerEntityTable partnerEntity = i10.$PartnerEntityTable(
|
||||
late final i11.$PartnerEntityTable partnerEntity = i11.$PartnerEntityTable(
|
||||
this,
|
||||
);
|
||||
late final i11.$RemoteExifEntityTable remoteExifEntity = i11
|
||||
late final i12.$RemoteExifEntityTable remoteExifEntity = i12
|
||||
.$RemoteExifEntityTable(this);
|
||||
late final i12.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity = i12
|
||||
late final i13.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity = i13
|
||||
.$RemoteAlbumAssetEntityTable(this);
|
||||
late final i13.$RemoteAlbumUserEntityTable remoteAlbumUserEntity = i13
|
||||
late final i14.$RemoteAlbumUserEntityTable remoteAlbumUserEntity = i14
|
||||
.$RemoteAlbumUserEntityTable(this);
|
||||
late final i14.$MemoryEntityTable memoryEntity = i14.$MemoryEntityTable(this);
|
||||
late final i15.$MemoryAssetEntityTable memoryAssetEntity = i15
|
||||
late final i15.$MemoryEntityTable memoryEntity = i15.$MemoryEntityTable(this);
|
||||
late final i16.$MemoryAssetEntityTable memoryAssetEntity = i16
|
||||
.$MemoryAssetEntityTable(this);
|
||||
late final i16.$PersonEntityTable personEntity = i16.$PersonEntityTable(this);
|
||||
late final i17.$AssetFaceEntityTable assetFaceEntity = i17
|
||||
late final i17.$PersonEntityTable personEntity = i17.$PersonEntityTable(this);
|
||||
late final i18.$AssetFaceEntityTable assetFaceEntity = i18
|
||||
.$AssetFaceEntityTable(this);
|
||||
late final i18.$StoreEntityTable storeEntity = i18.$StoreEntityTable(this);
|
||||
late final i19.$TrashedLocalAssetEntityTable trashedLocalAssetEntity = i19
|
||||
late final i19.$StoreEntityTable storeEntity = i19.$StoreEntityTable(this);
|
||||
late final i20.$TrashedLocalAssetEntityTable trashedLocalAssetEntity = i20
|
||||
.$TrashedLocalAssetEntityTable(this);
|
||||
i20.MergedAssetDrift get mergedAssetDrift => i21.ReadDatabaseContainer(
|
||||
i21.MergedAssetDrift get mergedAssetDrift => i22.ReadDatabaseContainer(
|
||||
this,
|
||||
).accessor<i20.MergedAssetDrift>(i20.MergedAssetDrift.new);
|
||||
).accessor<i21.MergedAssetDrift>(i21.MergedAssetDrift.new);
|
||||
i1.UploadTasksDrift get uploadTasksDrift => i22.ReadDatabaseContainer(
|
||||
this,
|
||||
).accessor<i1.UploadTasksDrift>(i1.UploadTasksDrift.new);
|
||||
@override
|
||||
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
|
||||
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
|
||||
@override
|
||||
List<i0.DatabaseSchemaEntity> get allSchemaEntities => [
|
||||
uploadTasks,
|
||||
uploadTaskStats,
|
||||
i1.updateStatsInsert,
|
||||
i1.updateStatsUpdate,
|
||||
i1.updateStatsDelete,
|
||||
i1.idxUploadTasksLocalId,
|
||||
i1.idxUploadTasksAssetData,
|
||||
i1.$drift0,
|
||||
userEntity,
|
||||
remoteAssetEntity,
|
||||
stackEntity,
|
||||
@@ -96,11 +111,11 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
remoteAlbumEntity,
|
||||
localAlbumEntity,
|
||||
localAlbumAssetEntity,
|
||||
i4.idxLocalAssetChecksum,
|
||||
i2.idxRemoteAssetOwnerChecksum,
|
||||
i2.uQRemoteAssetsOwnerChecksum,
|
||||
i2.uQRemoteAssetsOwnerLibraryChecksum,
|
||||
i2.idxRemoteAssetChecksum,
|
||||
i5.idxLocalAssetChecksum,
|
||||
i3.idxRemoteAssetOwnerChecksum,
|
||||
i3.uQRemoteAssetsOwnerChecksum,
|
||||
i3.uQRemoteAssetsOwnerLibraryChecksum,
|
||||
i3.idxRemoteAssetChecksum,
|
||||
authUserEntity,
|
||||
userMetadataEntity,
|
||||
partnerEntity,
|
||||
@@ -113,13 +128,34 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
assetFaceEntity,
|
||||
storeEntity,
|
||||
trashedLocalAssetEntity,
|
||||
i11.idxLatLng,
|
||||
i19.idxTrashedLocalAssetChecksum,
|
||||
i19.idxTrashedLocalAssetAlbum,
|
||||
i12.idxLatLng,
|
||||
i20.idxTrashedLocalAssetChecksum,
|
||||
i20.idxTrashedLocalAssetAlbum,
|
||||
];
|
||||
@override
|
||||
i0.StreamQueryUpdateRules
|
||||
get streamUpdateRules => const i0.StreamQueryUpdateRules([
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName(
|
||||
'upload_tasks',
|
||||
limitUpdateKind: i0.UpdateKind.insert,
|
||||
),
|
||||
result: [i0.TableUpdate('upload_task_stats', kind: i0.UpdateKind.update)],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName(
|
||||
'upload_tasks',
|
||||
limitUpdateKind: i0.UpdateKind.update,
|
||||
),
|
||||
result: [i0.TableUpdate('upload_task_stats', kind: i0.UpdateKind.update)],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName(
|
||||
'upload_tasks',
|
||||
limitUpdateKind: i0.UpdateKind.delete,
|
||||
),
|
||||
result: [i0.TableUpdate('upload_task_stats', kind: i0.UpdateKind.update)],
|
||||
),
|
||||
i0.WritePropagation(
|
||||
on: i0.TableUpdateQuery.onTableName(
|
||||
'user_entity',
|
||||
@@ -304,47 +340,51 @@ abstract class $Drift extends i0.GeneratedDatabase {
|
||||
class $DriftManager {
|
||||
final $Drift _db;
|
||||
$DriftManager(this._db);
|
||||
i1.$$UserEntityTableTableManager get userEntity =>
|
||||
i1.$$UserEntityTableTableManager(_db, _db.userEntity);
|
||||
i2.$$RemoteAssetEntityTableTableManager get remoteAssetEntity =>
|
||||
i2.$$RemoteAssetEntityTableTableManager(_db, _db.remoteAssetEntity);
|
||||
i3.$$StackEntityTableTableManager get stackEntity =>
|
||||
i3.$$StackEntityTableTableManager(_db, _db.stackEntity);
|
||||
i4.$$LocalAssetEntityTableTableManager get localAssetEntity =>
|
||||
i4.$$LocalAssetEntityTableTableManager(_db, _db.localAssetEntity);
|
||||
i5.$$RemoteAlbumEntityTableTableManager get remoteAlbumEntity =>
|
||||
i5.$$RemoteAlbumEntityTableTableManager(_db, _db.remoteAlbumEntity);
|
||||
i6.$$LocalAlbumEntityTableTableManager get localAlbumEntity =>
|
||||
i6.$$LocalAlbumEntityTableTableManager(_db, _db.localAlbumEntity);
|
||||
i7.$$LocalAlbumAssetEntityTableTableManager get localAlbumAssetEntity => i7
|
||||
i1.$UploadTasksTableManager get uploadTasks =>
|
||||
i1.$UploadTasksTableManager(_db, _db.uploadTasks);
|
||||
i1.$UploadTaskStatsTableManager get uploadTaskStats =>
|
||||
i1.$UploadTaskStatsTableManager(_db, _db.uploadTaskStats);
|
||||
i2.$$UserEntityTableTableManager get userEntity =>
|
||||
i2.$$UserEntityTableTableManager(_db, _db.userEntity);
|
||||
i3.$$RemoteAssetEntityTableTableManager get remoteAssetEntity =>
|
||||
i3.$$RemoteAssetEntityTableTableManager(_db, _db.remoteAssetEntity);
|
||||
i4.$$StackEntityTableTableManager get stackEntity =>
|
||||
i4.$$StackEntityTableTableManager(_db, _db.stackEntity);
|
||||
i5.$$LocalAssetEntityTableTableManager get localAssetEntity =>
|
||||
i5.$$LocalAssetEntityTableTableManager(_db, _db.localAssetEntity);
|
||||
i6.$$RemoteAlbumEntityTableTableManager get remoteAlbumEntity =>
|
||||
i6.$$RemoteAlbumEntityTableTableManager(_db, _db.remoteAlbumEntity);
|
||||
i7.$$LocalAlbumEntityTableTableManager get localAlbumEntity =>
|
||||
i7.$$LocalAlbumEntityTableTableManager(_db, _db.localAlbumEntity);
|
||||
i8.$$LocalAlbumAssetEntityTableTableManager get localAlbumAssetEntity => i8
|
||||
.$$LocalAlbumAssetEntityTableTableManager(_db, _db.localAlbumAssetEntity);
|
||||
i8.$$AuthUserEntityTableTableManager get authUserEntity =>
|
||||
i8.$$AuthUserEntityTableTableManager(_db, _db.authUserEntity);
|
||||
i9.$$UserMetadataEntityTableTableManager get userMetadataEntity =>
|
||||
i9.$$UserMetadataEntityTableTableManager(_db, _db.userMetadataEntity);
|
||||
i10.$$PartnerEntityTableTableManager get partnerEntity =>
|
||||
i10.$$PartnerEntityTableTableManager(_db, _db.partnerEntity);
|
||||
i11.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
|
||||
i11.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
|
||||
i12.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
|
||||
i12.$$RemoteAlbumAssetEntityTableTableManager(
|
||||
i9.$$AuthUserEntityTableTableManager get authUserEntity =>
|
||||
i9.$$AuthUserEntityTableTableManager(_db, _db.authUserEntity);
|
||||
i10.$$UserMetadataEntityTableTableManager get userMetadataEntity =>
|
||||
i10.$$UserMetadataEntityTableTableManager(_db, _db.userMetadataEntity);
|
||||
i11.$$PartnerEntityTableTableManager get partnerEntity =>
|
||||
i11.$$PartnerEntityTableTableManager(_db, _db.partnerEntity);
|
||||
i12.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
|
||||
i12.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
|
||||
i13.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
|
||||
i13.$$RemoteAlbumAssetEntityTableTableManager(
|
||||
_db,
|
||||
_db.remoteAlbumAssetEntity,
|
||||
);
|
||||
i13.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i13
|
||||
i14.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i14
|
||||
.$$RemoteAlbumUserEntityTableTableManager(_db, _db.remoteAlbumUserEntity);
|
||||
i14.$$MemoryEntityTableTableManager get memoryEntity =>
|
||||
i14.$$MemoryEntityTableTableManager(_db, _db.memoryEntity);
|
||||
i15.$$MemoryAssetEntityTableTableManager get memoryAssetEntity =>
|
||||
i15.$$MemoryAssetEntityTableTableManager(_db, _db.memoryAssetEntity);
|
||||
i16.$$PersonEntityTableTableManager get personEntity =>
|
||||
i16.$$PersonEntityTableTableManager(_db, _db.personEntity);
|
||||
i17.$$AssetFaceEntityTableTableManager get assetFaceEntity =>
|
||||
i17.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
|
||||
i18.$$StoreEntityTableTableManager get storeEntity =>
|
||||
i18.$$StoreEntityTableTableManager(_db, _db.storeEntity);
|
||||
i19.$$TrashedLocalAssetEntityTableTableManager get trashedLocalAssetEntity =>
|
||||
i19.$$TrashedLocalAssetEntityTableTableManager(
|
||||
i15.$$MemoryEntityTableTableManager get memoryEntity =>
|
||||
i15.$$MemoryEntityTableTableManager(_db, _db.memoryEntity);
|
||||
i16.$$MemoryAssetEntityTableTableManager get memoryAssetEntity =>
|
||||
i16.$$MemoryAssetEntityTableTableManager(_db, _db.memoryAssetEntity);
|
||||
i17.$$PersonEntityTableTableManager get personEntity =>
|
||||
i17.$$PersonEntityTableTableManager(_db, _db.personEntity);
|
||||
i18.$$AssetFaceEntityTableTableManager get assetFaceEntity =>
|
||||
i18.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
|
||||
i19.$$StoreEntityTableTableManager get storeEntity =>
|
||||
i19.$$StoreEntityTableTableManager(_db, _db.storeEntity);
|
||||
i20.$$TrashedLocalAssetEntityTableTableManager get trashedLocalAssetEntity =>
|
||||
i20.$$TrashedLocalAssetEntityTableTableManager(
|
||||
_db,
|
||||
_db.trashedLocalAssetEntity,
|
||||
);
|
||||
|
||||
@@ -5485,6 +5485,701 @@ i1.GeneratedColumn<String> _column_95(String aliasedName) =>
|
||||
false,
|
||||
type: i1.DriftSqlType.string,
|
||||
);
|
||||
|
||||
final class Schema14 extends i0.VersionedSchema {
|
||||
Schema14({required super.database}) : super(version: 14);
|
||||
@override
|
||||
late final List<i1.DatabaseSchemaEntity> entities = [
|
||||
uploadTasks,
|
||||
uploadTaskStats,
|
||||
updateStatsInsert,
|
||||
updateStatsUpdate,
|
||||
updateStatsDelete,
|
||||
idxUploadTasksLocalId,
|
||||
idxUploadTasksAssetData,
|
||||
userEntity,
|
||||
remoteAssetEntity,
|
||||
stackEntity,
|
||||
localAssetEntity,
|
||||
remoteAlbumEntity,
|
||||
localAlbumEntity,
|
||||
localAlbumAssetEntity,
|
||||
idxLocalAssetChecksum,
|
||||
idxRemoteAssetOwnerChecksum,
|
||||
uQRemoteAssetsOwnerChecksum,
|
||||
uQRemoteAssetsOwnerLibraryChecksum,
|
||||
idxRemoteAssetChecksum,
|
||||
authUserEntity,
|
||||
userMetadataEntity,
|
||||
partnerEntity,
|
||||
remoteExifEntity,
|
||||
remoteAlbumAssetEntity,
|
||||
remoteAlbumUserEntity,
|
||||
memoryEntity,
|
||||
memoryAssetEntity,
|
||||
personEntity,
|
||||
assetFaceEntity,
|
||||
storeEntity,
|
||||
trashedLocalAssetEntity,
|
||||
idxLatLng,
|
||||
idxTrashedLocalAssetChecksum,
|
||||
idxTrashedLocalAssetAlbum,
|
||||
];
|
||||
late final Shape24 uploadTasks = Shape24(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'upload_tasks',
|
||||
withoutRowId: false,
|
||||
isStrict: false,
|
||||
tableConstraints: [],
|
||||
columns: [
|
||||
_column_96,
|
||||
_column_97,
|
||||
_column_98,
|
||||
_column_99,
|
||||
_column_100,
|
||||
_column_101,
|
||||
_column_102,
|
||||
_column_103,
|
||||
_column_104,
|
||||
_column_105,
|
||||
_column_106,
|
||||
_column_107,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape25 uploadTaskStats = Shape25(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'upload_task_stats',
|
||||
withoutRowId: false,
|
||||
isStrict: false,
|
||||
tableConstraints: [],
|
||||
columns: [
|
||||
_column_108,
|
||||
_column_109,
|
||||
_column_110,
|
||||
_column_111,
|
||||
_column_112,
|
||||
_column_113,
|
||||
_column_114,
|
||||
_column_115,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
final i1.Trigger updateStatsInsert = i1.Trigger(
|
||||
'CREATE TRIGGER update_stats_insert BEFORE INSERT ON upload_tasks BEGIN UPDATE upload_task_stats SET pending_downloads = pending_downloads +(NEW.status = 0), queued_downloads = queued_downloads +(NEW.status = 1), failed_downloads = failed_downloads +(NEW.status = 2), pending_uploads = pending_uploads +(NEW.status = 3), queued_uploads = queued_uploads +(NEW.status = 4), failed_uploads = failed_uploads +(NEW.status = 5), completed_uploads = completed_uploads +(NEW.status = 6), skipped_uploads = skipped_uploads +(NEW.status = 7);END',
|
||||
'update_stats_insert',
|
||||
);
|
||||
final i1.Trigger updateStatsUpdate = i1.Trigger(
|
||||
'CREATE TRIGGER update_stats_update BEFORE UPDATE OF status ON upload_tasks WHEN OLD.status != NEW.status BEGIN UPDATE upload_task_stats SET pending_downloads = pending_downloads -(OLD.status = 0)+(NEW.status = 0), queued_downloads = queued_downloads -(OLD.status = 1)+(NEW.status = 1), failed_downloads = failed_downloads -(OLD.status = 2)+(NEW.status = 2), pending_uploads = pending_uploads -(OLD.status = 3)+(NEW.status = 3), queued_uploads = queued_uploads -(OLD.status = 4)+(NEW.status = 4), failed_uploads = failed_uploads -(OLD.status = 5)+(NEW.status = 5), completed_uploads = completed_uploads -(OLD.status = 6)+(NEW.status = 6), skipped_uploads = skipped_uploads -(OLD.status = 7)+(NEW.status = 7);END',
|
||||
'update_stats_update',
|
||||
);
|
||||
final i1.Trigger updateStatsDelete = i1.Trigger(
|
||||
'CREATE TRIGGER update_stats_delete BEFORE DELETE ON upload_tasks BEGIN UPDATE upload_task_stats SET pending_downloads = pending_downloads -(OLD.status = 0), queued_downloads = queued_downloads -(OLD.status = 1), failed_downloads = failed_downloads -(OLD.status = 2), pending_uploads = pending_uploads -(OLD.status = 3), queued_uploads = queued_uploads -(OLD.status = 4), failed_uploads = failed_uploads -(OLD.status = 5), completed_uploads = completed_uploads -(OLD.status = 6), skipped_uploads = skipped_uploads -(OLD.status = 7);END',
|
||||
'update_stats_delete',
|
||||
);
|
||||
final i1.Index idxUploadTasksLocalId = i1.Index(
|
||||
'idx_upload_tasks_local_id',
|
||||
'CREATE UNIQUE INDEX idx_upload_tasks_local_id ON upload_tasks (local_id, live_photo_video_id)',
|
||||
);
|
||||
final i1.Index idxUploadTasksAssetData = i1.Index(
|
||||
'idx_upload_tasks_asset_data',
|
||||
'CREATE INDEX idx_upload_tasks_asset_data ON upload_tasks (status, priority DESC, created_at)',
|
||||
);
|
||||
late final Shape20 userEntity = Shape20(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_3,
|
||||
_column_84,
|
||||
_column_85,
|
||||
_column_91,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape17 remoteAssetEntity = Shape17(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_13,
|
||||
_column_14,
|
||||
_column_15,
|
||||
_column_16,
|
||||
_column_17,
|
||||
_column_18,
|
||||
_column_19,
|
||||
_column_20,
|
||||
_column_21,
|
||||
_column_86,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape3 stackEntity = Shape3(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'stack_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [_column_0, _column_9, _column_5, _column_15, _column_75],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape2 localAssetEntity = Shape2(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_22,
|
||||
_column_14,
|
||||
_column_23,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape9 remoteAlbumEntity = Shape9(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_56,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_15,
|
||||
_column_57,
|
||||
_column_58,
|
||||
_column_59,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape19 localAlbumEntity = Shape19(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_album_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_5,
|
||||
_column_31,
|
||||
_column_32,
|
||||
_column_90,
|
||||
_column_33,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape22 localAlbumAssetEntity = Shape22(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'local_album_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
|
||||
columns: [_column_34, _column_35, _column_33],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
final i1.Index idxLocalAssetChecksum = i1.Index(
|
||||
'idx_local_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
|
||||
);
|
||||
final i1.Index idxRemoteAssetOwnerChecksum = i1.Index(
|
||||
'idx_remote_asset_owner_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
|
||||
);
|
||||
final i1.Index uQRemoteAssetsOwnerChecksum = i1.Index(
|
||||
'UQ_remote_assets_owner_checksum',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum ON remote_asset_entity (owner_id, checksum) WHERE(library_id IS NULL)',
|
||||
);
|
||||
final i1.Index uQRemoteAssetsOwnerLibraryChecksum = i1.Index(
|
||||
'UQ_remote_assets_owner_library_checksum',
|
||||
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum ON remote_asset_entity (owner_id, library_id, checksum) WHERE(library_id IS NOT NULL)',
|
||||
);
|
||||
final i1.Index idxRemoteAssetChecksum = i1.Index(
|
||||
'idx_remote_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
|
||||
);
|
||||
late final Shape21 authUserEntity = Shape21(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'auth_user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_1,
|
||||
_column_3,
|
||||
_column_2,
|
||||
_column_84,
|
||||
_column_85,
|
||||
_column_92,
|
||||
_column_93,
|
||||
_column_7,
|
||||
_column_94,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape4 userMetadataEntity = Shape4(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'user_metadata_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(user_id, "key")'],
|
||||
columns: [_column_25, _column_26, _column_27],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape5 partnerEntity = Shape5(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'partner_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(shared_by_id, shared_with_id)'],
|
||||
columns: [_column_28, _column_29, _column_30],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape8 remoteExifEntity = Shape8(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_exif_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id)'],
|
||||
columns: [
|
||||
_column_36,
|
||||
_column_37,
|
||||
_column_38,
|
||||
_column_39,
|
||||
_column_40,
|
||||
_column_41,
|
||||
_column_11,
|
||||
_column_10,
|
||||
_column_42,
|
||||
_column_43,
|
||||
_column_44,
|
||||
_column_45,
|
||||
_column_46,
|
||||
_column_47,
|
||||
_column_48,
|
||||
_column_49,
|
||||
_column_50,
|
||||
_column_51,
|
||||
_column_52,
|
||||
_column_53,
|
||||
_column_54,
|
||||
_column_55,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape7 remoteAlbumAssetEntity = Shape7(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
|
||||
columns: [_column_36, _column_60],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape10 remoteAlbumUserEntity = Shape10(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'remote_album_user_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(album_id, user_id)'],
|
||||
columns: [_column_60, _column_25, _column_61],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape11 memoryEntity = Shape11(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'memory_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_18,
|
||||
_column_15,
|
||||
_column_8,
|
||||
_column_62,
|
||||
_column_63,
|
||||
_column_64,
|
||||
_column_65,
|
||||
_column_66,
|
||||
_column_67,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape12 memoryAssetEntity = Shape12(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'memory_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(asset_id, memory_id)'],
|
||||
columns: [_column_36, _column_68],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape14 personEntity = Shape14(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'person_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_15,
|
||||
_column_1,
|
||||
_column_69,
|
||||
_column_71,
|
||||
_column_72,
|
||||
_column_73,
|
||||
_column_74,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape15 assetFaceEntity = Shape15(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'asset_face_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [
|
||||
_column_0,
|
||||
_column_36,
|
||||
_column_76,
|
||||
_column_77,
|
||||
_column_78,
|
||||
_column_79,
|
||||
_column_80,
|
||||
_column_81,
|
||||
_column_82,
|
||||
_column_83,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape18 storeEntity = Shape18(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'store_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id)'],
|
||||
columns: [_column_87, _column_88, _column_89],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
late final Shape23 trashedLocalAssetEntity = Shape23(
|
||||
source: i0.VersionedTable(
|
||||
entityName: 'trashed_local_asset_entity',
|
||||
withoutRowId: true,
|
||||
isStrict: true,
|
||||
tableConstraints: ['PRIMARY KEY(id, album_id)'],
|
||||
columns: [
|
||||
_column_1,
|
||||
_column_8,
|
||||
_column_9,
|
||||
_column_5,
|
||||
_column_10,
|
||||
_column_11,
|
||||
_column_12,
|
||||
_column_0,
|
||||
_column_95,
|
||||
_column_22,
|
||||
_column_14,
|
||||
_column_23,
|
||||
],
|
||||
attachedDatabase: database,
|
||||
),
|
||||
alias: null,
|
||||
);
|
||||
final i1.Index idxLatLng = i1.Index(
|
||||
'idx_lat_lng',
|
||||
'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
|
||||
);
|
||||
final i1.Index idxTrashedLocalAssetChecksum = i1.Index(
|
||||
'idx_trashed_local_asset_checksum',
|
||||
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_checksum ON trashed_local_asset_entity (checksum)',
|
||||
);
|
||||
final i1.Index idxTrashedLocalAssetAlbum = i1.Index(
|
||||
'idx_trashed_local_asset_album',
|
||||
'CREATE INDEX IF NOT EXISTS idx_trashed_local_asset_album ON trashed_local_asset_entity (album_id)',
|
||||
);
|
||||
}
|
||||
|
||||
class Shape24 extends i0.VersionedTable {
|
||||
Shape24({required super.source, required super.alias}) : super.aliased();
|
||||
i1.GeneratedColumn<int> get id =>
|
||||
columnsByName['id']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get attempts =>
|
||||
columnsByName['attempts']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get createdAt =>
|
||||
columnsByName['created_at']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<String> get filePath =>
|
||||
columnsByName['file_path']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<int> get isLivePhoto =>
|
||||
columnsByName['is_live_photo']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get lastError =>
|
||||
columnsByName['last_error']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<String> get livePhotoVideoId =>
|
||||
columnsByName['live_photo_video_id']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<String> get localId =>
|
||||
columnsByName['local_id']! as i1.GeneratedColumn<String>;
|
||||
i1.GeneratedColumn<int> get method =>
|
||||
columnsByName['method']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<double> get priority =>
|
||||
columnsByName['priority']! as i1.GeneratedColumn<double>;
|
||||
i1.GeneratedColumn<int> get retryAfter =>
|
||||
columnsByName['retry_after']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get status =>
|
||||
columnsByName['status']! as i1.GeneratedColumn<int>;
|
||||
}
|
||||
|
||||
i1.GeneratedColumn<int> _column_96(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'id',
|
||||
aliasedName,
|
||||
true,
|
||||
hasAutoIncrement: true,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'PRIMARY KEY AUTOINCREMENT',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_97(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'attempts',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_98(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'created_at',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<String> _column_99(String aliasedName) =>
|
||||
i1.GeneratedColumn<String>(
|
||||
'file_path',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i1.DriftSqlType.string,
|
||||
$customConstraints: '',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_100(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'is_live_photo',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: '',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_101(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'last_error',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: '',
|
||||
);
|
||||
i1.GeneratedColumn<String> _column_102(String aliasedName) =>
|
||||
i1.GeneratedColumn<String>(
|
||||
'live_photo_video_id',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i1.DriftSqlType.string,
|
||||
$customConstraints: '',
|
||||
);
|
||||
i1.GeneratedColumn<String> _column_103(String aliasedName) =>
|
||||
i1.GeneratedColumn<String>(
|
||||
'local_id',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.string,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_104(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'method',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<double> _column_105(String aliasedName) =>
|
||||
i1.GeneratedColumn<double>(
|
||||
'priority',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.double,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_106(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'retry_after',
|
||||
aliasedName,
|
||||
true,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: '',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_107(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'status',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
|
||||
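For orientation, the column generators _column_96 through _column_107 above describe the new native-upload queue table introduced in schema v14. The following reconstruction is a hedged sketch and not part of the diff: the column names, SQLite types, and NOT NULL / PRIMARY KEY constraints are taken directly from the generators, while the table name upload_task_entity is only an assumption for illustration.

// Hedged reconstruction of the table implied by Shape24 / _column_96.._column_107.
// Assumption: the table name; everything else mirrors the generators above.
const String impliedUploadQueueTable = '''
CREATE TABLE IF NOT EXISTS upload_task_entity (
  id INTEGER PRIMARY KEY AUTOINCREMENT,   -- _column_96, nullable on insert
  attempts INTEGER NOT NULL,              -- _column_97
  created_at INTEGER NOT NULL,            -- _column_98
  file_path TEXT,                         -- _column_99
  is_live_photo INTEGER,                  -- _column_100
  last_error INTEGER,                     -- _column_101
  live_photo_video_id TEXT,               -- _column_102
  local_id TEXT NOT NULL,                 -- _column_103
  method INTEGER NOT NULL,                -- _column_104
  priority REAL NOT NULL,                 -- _column_105
  retry_after INTEGER,                    -- _column_106
  status INTEGER NOT NULL                 -- _column_107
);
''';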
class Shape25 extends i0.VersionedTable {
|
||||
Shape25({required super.source, required super.alias}) : super.aliased();
|
||||
i1.GeneratedColumn<int> get pendingDownloads =>
|
||||
columnsByName['pending_downloads']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get pendingUploads =>
|
||||
columnsByName['pending_uploads']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get queuedDownloads =>
|
||||
columnsByName['queued_downloads']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get queuedUploads =>
|
||||
columnsByName['queued_uploads']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get failedDownloads =>
|
||||
columnsByName['failed_downloads']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get failedUploads =>
|
||||
columnsByName['failed_uploads']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get completedUploads =>
|
||||
columnsByName['completed_uploads']! as i1.GeneratedColumn<int>;
|
||||
i1.GeneratedColumn<int> get skippedUploads =>
|
||||
columnsByName['skipped_uploads']! as i1.GeneratedColumn<int>;
|
||||
}
|
||||
|
||||
i1.GeneratedColumn<int> _column_108(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'pending_downloads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_109(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'pending_uploads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_110(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'queued_downloads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_111(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'queued_uploads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_112(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'failed_downloads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_113(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'failed_uploads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_114(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'completed_uploads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i1.GeneratedColumn<int> _column_115(String aliasedName) =>
|
||||
i1.GeneratedColumn<int>(
|
||||
'skipped_uploads',
|
||||
aliasedName,
|
||||
false,
|
||||
type: i1.DriftSqlType.int,
|
||||
$customConstraints: 'NOT NULL',
|
||||
);
|
||||
i0.MigrationStepWithVersion migrationSteps({
|
||||
required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2,
|
||||
required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3,
|
||||
@@ -5498,6 +6193,7 @@ i0.MigrationStepWithVersion migrationSteps({
|
||||
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
|
||||
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
|
||||
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
|
||||
required Future<void> Function(i1.Migrator m, Schema14 schema) from13To14,
|
||||
}) {
|
||||
return (currentVersion, database) async {
|
||||
switch (currentVersion) {
|
||||
@@ -5561,6 +6257,11 @@ i0.MigrationStepWithVersion migrationSteps({
|
||||
final migrator = i1.Migrator(database, schema);
|
||||
await from12To13(migrator, schema);
|
||||
return 13;
|
||||
case 13:
|
||||
final schema = Schema14(database: database);
|
||||
final migrator = i1.Migrator(database, schema);
|
||||
await from13To14(migrator, schema);
|
||||
return 14;
|
||||
default:
|
||||
throw ArgumentError.value('Unknown migration from $currentVersion');
|
||||
}
|
||||
@@ -5580,6 +6281,7 @@ i1.OnUpgrade stepByStep({
|
||||
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
|
||||
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
|
||||
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
|
||||
required Future<void> Function(i1.Migrator m, Schema14 schema) from13To14,
|
||||
}) => i0.VersionedSchema.stepByStepHelper(
|
||||
step: migrationSteps(
|
||||
from1To2: from1To2,
|
||||
@@ -5594,5 +6296,6 @@ i1.OnUpgrade stepByStep({
|
||||
from10To11: from10To11,
|
||||
from11To12: from11To12,
|
||||
from12To13: from12To13,
|
||||
from13To14: from13To14,
|
||||
),
|
||||
);
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/extensions/platform_extensions.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:photo_manager/photo_manager.dart';
|
||||
|
||||
@@ -90,17 +89,5 @@ class StorageRepository {
|
||||
} catch (error, stackTrace) {
|
||||
log.warning("Error clearing cache", error, stackTrace);
|
||||
}
|
||||
|
||||
if (!CurrentPlatform.isIOS) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
if (await Directory.systemTemp.exists()) {
|
||||
await Directory.systemTemp.delete(recursive: true);
|
||||
}
|
||||
} catch (error, stackTrace) {
|
||||
log.warning("Error deleting temporary directory", error, stackTrace);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ import 'dart:io';
|
||||
import 'dart:math';
|
||||
|
||||
import 'package:auto_route/auto_route.dart';
|
||||
import 'package:background_downloader/background_downloader.dart';
|
||||
import 'package:device_info_plus/device_info_plus.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
@@ -11,7 +10,6 @@ import 'package:flutter/material.dart';
|
||||
import 'package:flutter/services.dart';
|
||||
import 'package:flutter_displaymode/flutter_displaymode.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/constants/locales.dart';
|
||||
import 'package:immich_mobile/domain/services/background_worker.service.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
@@ -55,6 +53,9 @@ void main() async {
|
||||
// Warm-up isolate pool for worker manager
|
||||
await workerManagerPatch.init(dynamicSpawning: true, isolatesCount: max(Platform.numberOfProcessors - 1, 5));
|
||||
await migrateDatabaseIfNeeded(isar, drift);
|
||||
if (Store.isBetaTimelineEnabled) {
|
||||
await uploadApi.initialize();
|
||||
}
|
||||
HttpSSLOptions.apply();
|
||||
|
||||
runApp(
|
||||
@@ -102,18 +103,6 @@ Future<void> initApp() async {
|
||||
|
||||
initializeTimeZones();
|
||||
|
||||
// Initialize the file downloader
|
||||
await FileDownloader().configure(
|
||||
// maxConcurrent: 6, maxConcurrentByHost(server):6, maxConcurrentByGroup: 3
|
||||
|
||||
// On Android, if files are larger than 256MB, run in foreground service
|
||||
globalConfig: [(Config.holdingQueue, (6, 6, 3)), (Config.runInForegroundIfFileLargerThan, 256)],
|
||||
);
|
||||
|
||||
await FileDownloader().trackTasksInGroup(kDownloadGroupLivePhoto, markDownloadedComplete: false);
|
||||
|
||||
await FileDownloader().trackTasks();
|
||||
|
||||
LicenseRegistry.addLicense(() async* {
|
||||
for (final license in nonPubLicenses.entries) {
|
||||
yield LicenseEntryWithLineBreaks([license.key], license.value);
|
||||
@@ -199,9 +188,6 @@ class ImmichAppState extends ConsumerState<ImmichApp> with WidgetsBindingObserve
|
||||
void didChangeDependencies() {
|
||||
super.didChangeDependencies();
|
||||
Intl.defaultLocale = context.locale.toLanguageTag();
|
||||
WidgetsBinding.instance.addPostFrameCallback((_) {
|
||||
configureFileDownloaderNotifications();
|
||||
});
|
||||
}
|
||||
|
||||
@override
|
||||
|
||||
@@ -7,7 +7,7 @@ import 'package:path/path.dart';
|
||||
|
||||
enum ShareIntentAttachmentType { image, video }
|
||||
|
||||
enum UploadStatus { enqueued, running, complete, notFound, failed, canceled, waitingToRetry, paused }
|
||||
enum UploadStatus { enqueued, running, complete, notFound, failed, canceled, waitingToRetry, paused, preparing }
|
||||
|
||||
class ShareIntentAttachment {
|
||||
final String path;
|
||||
|
||||
@@ -93,7 +93,7 @@ class _DriftBackupPageState extends ConsumerState<DriftBackupPage> {
|
||||
Logger("DriftBackupPage").warning("Remote sync did not complete successfully, skipping backup");
|
||||
return;
|
||||
}
|
||||
await backupNotifier.startBackup(currentUser.id);
|
||||
await backupNotifier.startBackup();
|
||||
}
|
||||
|
||||
Future<void> stopBackup() async {
|
||||
|
||||
@@ -116,11 +116,10 @@ class _DriftBackupAlbumSelectionPageState extends ConsumerState<DriftBackupAlbum
|
||||
unawaited(
|
||||
backupNotifier.cancel().whenComplete(
|
||||
() => backgroundSync.syncRemote().then((success) {
|
||||
if (success) {
|
||||
return backupNotifier.startBackup(user.id);
|
||||
} else {
|
||||
Logger('DriftBackupAlbumSelectionPage').warning('Background sync failed, not starting backup');
|
||||
if (!success) {
|
||||
Logger('DriftBackupAlbumSelectionPage').warning('Remote sync failed');
|
||||
}
|
||||
return backupNotifier.startBackup();
|
||||
}),
|
||||
),
|
||||
);
|
||||
|
||||
@@ -63,7 +63,7 @@ class DriftBackupOptionsPage extends ConsumerWidget {
|
||||
backupNotifier.cancel().whenComplete(
|
||||
() => backgroundSync.syncRemote().then((success) {
|
||||
if (success) {
|
||||
return backupNotifier.startBackup(currentUser.id);
|
||||
return backupNotifier.startBackup();
|
||||
} else {
|
||||
Logger('DriftBackupOptionsPage').warning('Background sync failed, not starting backup');
|
||||
}
|
||||
|
||||
@@ -8,8 +8,8 @@ import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/providers/auth.provider.dart';
|
||||
import 'package:immich_mobile/providers/background_sync.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/backup.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
|
||||
import 'package:immich_mobile/providers/gallery_permission.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
|
||||
import 'package:immich_mobile/providers/server_info.provider.dart';
|
||||
import 'package:immich_mobile/providers/websocket.provider.dart';
|
||||
import 'package:immich_mobile/routing/router.dart';
|
||||
@@ -53,7 +53,6 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
|
||||
final infoProvider = ref.read(serverInfoProvider.notifier);
|
||||
final wsProvider = ref.read(websocketProvider.notifier);
|
||||
final backgroundManager = ref.read(backgroundSyncProvider);
|
||||
final backupProvider = ref.read(driftBackupProvider.notifier);
|
||||
|
||||
unawaited(
|
||||
ref.read(authProvider.notifier).saveAuthInfo(accessToken: accessToken).then(
|
||||
@@ -63,22 +62,13 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
|
||||
unawaited(infoProvider.getServerInfo());
|
||||
|
||||
if (Store.isBetaTimelineEnabled) {
|
||||
bool syncSuccess = false;
|
||||
await Future.wait([
|
||||
backgroundManager.syncLocal(full: true),
|
||||
backgroundManager.syncRemote().then((success) => syncSuccess = success),
|
||||
backgroundManager.syncRemote(),
|
||||
]);
|
||||
|
||||
if (syncSuccess) {
|
||||
await Future.wait([
|
||||
backgroundManager.hashAssets().then((_) {
|
||||
_resumeBackup(backupProvider);
|
||||
}),
|
||||
_resumeBackup(backupProvider),
|
||||
]);
|
||||
} else {
|
||||
await backgroundManager.hashAssets();
|
||||
}
|
||||
await backgroundManager.hashAssets();
|
||||
await uploadApi.refresh();
|
||||
|
||||
if (Store.get(StoreKey.syncAlbums, false)) {
|
||||
await backgroundManager.syncLinkedAlbum();
|
||||
@@ -126,17 +116,6 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> _resumeBackup(DriftBackupNotifier notifier) async {
|
||||
final isEnableBackup = Store.get(StoreKey.enableBackup, false);
|
||||
|
||||
if (isEnableBackup) {
|
||||
final currentUser = Store.tryGet(StoreKey.currentUser);
|
||||
if (currentUser != null) {
|
||||
unawaited(notifier.handleBackupResume(currentUser.id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return const Scaffold(
|
||||
|
||||
@@ -37,10 +37,7 @@ class ShareIntentPage extends HookConsumerWidget {
|
||||
}
|
||||
|
||||
void upload() async {
|
||||
for (final attachment in candidates) {
|
||||
await ref.read(shareIntentUploadProvider.notifier).upload(attachment.file);
|
||||
}
|
||||
|
||||
await ref.read(shareIntentUploadProvider.notifier).upload(candidates);
|
||||
isUploaded.value = true;
|
||||
}
|
||||
|
||||
@@ -212,6 +209,11 @@ class UploadStatusIcon extends StatelessWidget {
|
||||
color: context.primaryColor,
|
||||
semanticLabel: 'paused'.tr(),
|
||||
),
|
||||
UploadStatus.preparing => Icon(
|
||||
Icons.hourglass_top_rounded,
|
||||
color: context.primaryColor,
|
||||
semanticLabel: 'preparing'.tr(),
|
||||
),
|
||||
};
|
||||
|
||||
return statusIcon;
|
||||
|
||||
mobile/lib/platform/upload_api.g.dart (generated, new file, 368 lines)
@@ -0,0 +1,368 @@
|
||||
// Autogenerated from Pigeon (v26.0.2), do not edit directly.
// See also: https://pub.dev/packages/pigeon
// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, unused_shown_name, unnecessary_import, no_leading_underscores_for_local_identifiers

import 'dart:async';
import 'dart:typed_data' show Float64List, Int32List, Int64List, Uint8List;

import 'package:flutter/foundation.dart' show ReadBuffer, WriteBuffer;
import 'package:flutter/services.dart';
|
||||
|
||||
PlatformException _createConnectionError(String channelName) {
|
||||
return PlatformException(
|
||||
code: 'channel-error',
|
||||
message: 'Unable to establish connection on channel: "$channelName".',
|
||||
);
|
||||
}
|
||||
|
||||
bool _deepEquals(Object? a, Object? b) {
|
||||
if (a is List && b is List) {
|
||||
return a.length == b.length && a.indexed.every(((int, dynamic) item) => _deepEquals(item.$2, b[item.$1]));
|
||||
}
|
||||
if (a is Map && b is Map) {
|
||||
return a.length == b.length &&
|
||||
a.entries.every(
|
||||
(MapEntry<Object?, Object?> entry) =>
|
||||
(b as Map<Object?, Object?>).containsKey(entry.key) && _deepEquals(entry.value, b[entry.key]),
|
||||
);
|
||||
}
|
||||
return a == b;
|
||||
}
|
||||
|
||||
enum UploadApiErrorCode {
  unknown,
  assetNotFound,
  fileNotFound,
  resourceNotFound,
  invalidResource,
  encodingFailed,
  writeFailed,
  notEnoughSpace,
  networkError,
  photosInternalError,
  photosUnknownError,
  noServerUrl,
  noDeviceId,
  noAccessToken,
  interrupted,
  cancelled,
  downloadStalled,
  forceQuit,
  outOfResources,
  backgroundUpdatesDisabled,
  uploadTimeout,
  iCloudRateLimit,
  iCloudThrottled,
}

enum UploadApiStatus {
  downloadPending,
  downloadQueued,
  downloadFailed,
  uploadPending,
  uploadQueued,
  uploadFailed,
  uploadComplete,
  uploadSkipped,
}
|
||||
|
||||
class UploadApiTaskStatus {
|
||||
UploadApiTaskStatus({
|
||||
required this.id,
|
||||
required this.filename,
|
||||
required this.status,
|
||||
this.errorCode,
|
||||
this.httpStatusCode,
|
||||
});
|
||||
|
||||
String id;
|
||||
|
||||
String filename;
|
||||
|
||||
UploadApiStatus status;
|
||||
|
||||
UploadApiErrorCode? errorCode;
|
||||
|
||||
int? httpStatusCode;
|
||||
|
||||
List<Object?> _toList() {
|
||||
return <Object?>[id, filename, status, errorCode, httpStatusCode];
|
||||
}
|
||||
|
||||
Object encode() {
|
||||
return _toList();
|
||||
}
|
||||
|
||||
static UploadApiTaskStatus decode(Object result) {
|
||||
result as List<Object?>;
|
||||
return UploadApiTaskStatus(
|
||||
id: result[0]! as String,
|
||||
filename: result[1]! as String,
|
||||
status: result[2]! as UploadApiStatus,
|
||||
errorCode: result[3] as UploadApiErrorCode?,
|
||||
httpStatusCode: result[4] as int?,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
// ignore: avoid_equals_and_hash_code_on_mutable_classes
|
||||
bool operator ==(Object other) {
|
||||
if (other is! UploadApiTaskStatus || other.runtimeType != runtimeType) {
|
||||
return false;
|
||||
}
|
||||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
return _deepEquals(encode(), other.encode());
|
||||
}
|
||||
|
||||
@override
|
||||
// ignore: avoid_equals_and_hash_code_on_mutable_classes
|
||||
int get hashCode => Object.hashAll(_toList());
|
||||
}
|
||||
|
||||
class UploadApiTaskProgress {
|
||||
UploadApiTaskProgress({required this.id, required this.progress, this.speed, this.totalBytes});
|
||||
|
||||
String id;
|
||||
|
||||
double progress;
|
||||
|
||||
double? speed;
|
||||
|
||||
int? totalBytes;
|
||||
|
||||
List<Object?> _toList() {
|
||||
return <Object?>[id, progress, speed, totalBytes];
|
||||
}
|
||||
|
||||
Object encode() {
|
||||
return _toList();
|
||||
}
|
||||
|
||||
static UploadApiTaskProgress decode(Object result) {
|
||||
result as List<Object?>;
|
||||
return UploadApiTaskProgress(
|
||||
id: result[0]! as String,
|
||||
progress: result[1]! as double,
|
||||
speed: result[2] as double?,
|
||||
totalBytes: result[3] as int?,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
// ignore: avoid_equals_and_hash_code_on_mutable_classes
|
||||
bool operator ==(Object other) {
|
||||
if (other is! UploadApiTaskProgress || other.runtimeType != runtimeType) {
|
||||
return false;
|
||||
}
|
||||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
return _deepEquals(encode(), other.encode());
|
||||
}
|
||||
|
||||
@override
|
||||
// ignore: avoid_equals_and_hash_code_on_mutable_classes
|
||||
int get hashCode => Object.hashAll(_toList());
|
||||
}
|
||||
|
||||
class _PigeonCodec extends StandardMessageCodec {
|
||||
const _PigeonCodec();
|
||||
@override
|
||||
void writeValue(WriteBuffer buffer, Object? value) {
|
||||
if (value is int) {
|
||||
buffer.putUint8(4);
|
||||
buffer.putInt64(value);
|
||||
} else if (value is UploadApiErrorCode) {
|
||||
buffer.putUint8(129);
|
||||
writeValue(buffer, value.index);
|
||||
} else if (value is UploadApiStatus) {
|
||||
buffer.putUint8(130);
|
||||
writeValue(buffer, value.index);
|
||||
} else if (value is UploadApiTaskStatus) {
|
||||
buffer.putUint8(131);
|
||||
writeValue(buffer, value.encode());
|
||||
} else if (value is UploadApiTaskProgress) {
|
||||
buffer.putUint8(132);
|
||||
writeValue(buffer, value.encode());
|
||||
} else {
|
||||
super.writeValue(buffer, value);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Object? readValueOfType(int type, ReadBuffer buffer) {
|
||||
switch (type) {
|
||||
case 129:
|
||||
final int? value = readValue(buffer) as int?;
|
||||
return value == null ? null : UploadApiErrorCode.values[value];
|
||||
case 130:
|
||||
final int? value = readValue(buffer) as int?;
|
||||
return value == null ? null : UploadApiStatus.values[value];
|
||||
case 131:
|
||||
return UploadApiTaskStatus.decode(readValue(buffer)!);
|
||||
case 132:
|
||||
return UploadApiTaskProgress.decode(readValue(buffer)!);
|
||||
default:
|
||||
return super.readValueOfType(type, buffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const StandardMethodCodec pigeonMethodCodec = StandardMethodCodec(_PigeonCodec());
|
||||
|
||||
class UploadApi {
|
||||
/// Constructor for [UploadApi]. The [binaryMessenger] named argument is
|
||||
/// available for dependency injection. If it is left null, the default
|
||||
/// BinaryMessenger will be used which routes to the host platform.
|
||||
UploadApi({BinaryMessenger? binaryMessenger, String messageChannelSuffix = ''})
|
||||
: pigeonVar_binaryMessenger = binaryMessenger,
|
||||
pigeonVar_messageChannelSuffix = messageChannelSuffix.isNotEmpty ? '.$messageChannelSuffix' : '';
|
||||
final BinaryMessenger? pigeonVar_binaryMessenger;
|
||||
|
||||
static const MessageCodec<Object?> pigeonChannelCodec = _PigeonCodec();
|
||||
|
||||
final String pigeonVar_messageChannelSuffix;
|
||||
|
||||
Future<void> initialize() async {
|
||||
final String pigeonVar_channelName =
|
||||
'dev.flutter.pigeon.immich_mobile.UploadApi.initialize$pigeonVar_messageChannelSuffix';
|
||||
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
|
||||
pigeonVar_channelName,
|
||||
pigeonChannelCodec,
|
||||
binaryMessenger: pigeonVar_binaryMessenger,
|
||||
);
|
||||
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
|
||||
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
|
||||
if (pigeonVar_replyList == null) {
|
||||
throw _createConnectionError(pigeonVar_channelName);
|
||||
} else if (pigeonVar_replyList.length > 1) {
|
||||
throw PlatformException(
|
||||
code: pigeonVar_replyList[0]! as String,
|
||||
message: pigeonVar_replyList[1] as String?,
|
||||
details: pigeonVar_replyList[2],
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> refresh() async {
|
||||
final String pigeonVar_channelName =
|
||||
'dev.flutter.pigeon.immich_mobile.UploadApi.refresh$pigeonVar_messageChannelSuffix';
|
||||
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
|
||||
pigeonVar_channelName,
|
||||
pigeonChannelCodec,
|
||||
binaryMessenger: pigeonVar_binaryMessenger,
|
||||
);
|
||||
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
|
||||
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
|
||||
if (pigeonVar_replyList == null) {
|
||||
throw _createConnectionError(pigeonVar_channelName);
|
||||
} else if (pigeonVar_replyList.length > 1) {
|
||||
throw PlatformException(
|
||||
code: pigeonVar_replyList[0]! as String,
|
||||
message: pigeonVar_replyList[1] as String?,
|
||||
details: pigeonVar_replyList[2],
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> cancelAll() async {
|
||||
final String pigeonVar_channelName =
|
||||
'dev.flutter.pigeon.immich_mobile.UploadApi.cancelAll$pigeonVar_messageChannelSuffix';
|
||||
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
|
||||
pigeonVar_channelName,
|
||||
pigeonChannelCodec,
|
||||
binaryMessenger: pigeonVar_binaryMessenger,
|
||||
);
|
||||
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(null);
|
||||
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
|
||||
if (pigeonVar_replyList == null) {
|
||||
throw _createConnectionError(pigeonVar_channelName);
|
||||
} else if (pigeonVar_replyList.length > 1) {
|
||||
throw PlatformException(
|
||||
code: pigeonVar_replyList[0]! as String,
|
||||
message: pigeonVar_replyList[1] as String?,
|
||||
details: pigeonVar_replyList[2],
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> enqueueAssets(List<String> localIds) async {
|
||||
final String pigeonVar_channelName =
|
||||
'dev.flutter.pigeon.immich_mobile.UploadApi.enqueueAssets$pigeonVar_messageChannelSuffix';
|
||||
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
|
||||
pigeonVar_channelName,
|
||||
pigeonChannelCodec,
|
||||
binaryMessenger: pigeonVar_binaryMessenger,
|
||||
);
|
||||
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(<Object?>[localIds]);
|
||||
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
|
||||
if (pigeonVar_replyList == null) {
|
||||
throw _createConnectionError(pigeonVar_channelName);
|
||||
} else if (pigeonVar_replyList.length > 1) {
|
||||
throw PlatformException(
|
||||
code: pigeonVar_replyList[0]! as String,
|
||||
message: pigeonVar_replyList[1] as String?,
|
||||
details: pigeonVar_replyList[2],
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> enqueueFiles(List<String> paths) async {
|
||||
final String pigeonVar_channelName =
|
||||
'dev.flutter.pigeon.immich_mobile.UploadApi.enqueueFiles$pigeonVar_messageChannelSuffix';
|
||||
final BasicMessageChannel<Object?> pigeonVar_channel = BasicMessageChannel<Object?>(
|
||||
pigeonVar_channelName,
|
||||
pigeonChannelCodec,
|
||||
binaryMessenger: pigeonVar_binaryMessenger,
|
||||
);
|
||||
final Future<Object?> pigeonVar_sendFuture = pigeonVar_channel.send(<Object?>[paths]);
|
||||
final List<Object?>? pigeonVar_replyList = await pigeonVar_sendFuture as List<Object?>?;
|
||||
if (pigeonVar_replyList == null) {
|
||||
throw _createConnectionError(pigeonVar_channelName);
|
||||
} else if (pigeonVar_replyList.length > 1) {
|
||||
throw PlatformException(
|
||||
code: pigeonVar_replyList[0]! as String,
|
||||
message: pigeonVar_replyList[1] as String?,
|
||||
details: pigeonVar_replyList[2],
|
||||
);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Stream<UploadApiTaskStatus> streamStatus({String instanceName = ''}) {
  if (instanceName.isNotEmpty) {
    instanceName = '.$instanceName';
  }
  final EventChannel streamStatusChannel = EventChannel(
    'dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamStatus$instanceName',
    pigeonMethodCodec,
  );
  return streamStatusChannel.receiveBroadcastStream().map((dynamic event) {
    return event as UploadApiTaskStatus;
  });
}

Stream<UploadApiTaskProgress> streamProgress({String instanceName = ''}) {
  if (instanceName.isNotEmpty) {
    instanceName = '.$instanceName';
  }
  final EventChannel streamProgressChannel = EventChannel(
    'dev.flutter.pigeon.immich_mobile.UploadFlutterApi.streamProgress$instanceName',
    pigeonMethodCodec,
  );
  return streamProgressChannel.receiveBroadcastStream().map((dynamic event) {
    return event as UploadApiTaskProgress;
  });
}
|
||||
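Taken together, the generated UploadApi methods and the two top-level stream helpers above make up the Dart surface of the new native upload pipeline. The sketch below shows one way it could be consumed; it is not part of the change. It assumes only what the generated file declares (initialize, enqueueAssets, streamStatus, streamProgress and the task status/progress types); the function name and the print-based handling are illustrative.

import 'dart:async';

import 'package:immich_mobile/platform/upload_api.g.dart';

// Sketch only: drives the Pigeon-generated host API and listens to its
// broadcast event channels. Error handling and lifecycle are simplified.
Future<(StreamSubscription, StreamSubscription)> startNativeUpload(
  UploadApi uploadApi,
  List<String> localAssetIds,
) async {
  await uploadApi.initialize();                  // host-side queue setup
  await uploadApi.enqueueAssets(localAssetIds);  // native side performs the upload

  final statusSub = streamStatus().listen((UploadApiTaskStatus s) {
    if (s.status == UploadApiStatus.uploadFailed) {
      // errorCode and httpStatusCode are optional and may be null.
      print('upload ${s.id} (${s.filename}) failed: ${s.errorCode} ${s.httpStatusCode}');
    }
  });

  final progressSub = streamProgress().listen((UploadApiTaskProgress p) {
    print('task ${p.id}: progress=${p.progress} speed=${p.speed} bytes=${p.totalBytes}');
  });

  // The caller is responsible for cancelling both subscriptions when done.
  return (statusSub, progressSub);
}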
@@ -1,7 +1,6 @@
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/domain/services/log.service.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/models/backup/backup_state.model.dart';
|
||||
@@ -11,7 +10,6 @@ import 'package:immich_mobile/providers/asset.provider.dart';
|
||||
import 'package:immich_mobile/providers/auth.provider.dart';
|
||||
import 'package:immich_mobile/providers/background_sync.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/backup.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/ios_background_settings.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/manual_upload.provider.dart';
|
||||
import 'package:immich_mobile/providers/gallery_permission.provider.dart';
|
||||
@@ -148,21 +146,13 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
|
||||
final isAlbumLinkedSyncEnable = _ref.read(appSettingsServiceProvider).getSetting(AppSettingsEnum.syncAlbums);
|
||||
|
||||
try {
|
||||
bool syncSuccess = false;
|
||||
await Future.wait([
|
||||
_safeRun(backgroundManager.syncLocal(), "syncLocal"),
|
||||
_safeRun(backgroundManager.syncRemote().then((success) => syncSuccess = success), "syncRemote"),
|
||||
_safeRun(backgroundManager.syncRemote(), "syncRemote"),
|
||||
]);
|
||||
if (syncSuccess) {
|
||||
await Future.wait([
|
||||
_safeRun(backgroundManager.hashAssets(), "hashAssets").then((_) {
|
||||
_resumeBackup();
|
||||
}),
|
||||
_resumeBackup(),
|
||||
]);
|
||||
} else {
|
||||
await _safeRun(backgroundManager.hashAssets(), "hashAssets");
|
||||
}
|
||||
|
||||
await _safeRun(backgroundManager.hashAssets(), "hashAssets");
|
||||
await _safeRun(uploadApi.refresh(), "refresh");
|
||||
|
||||
if (isAlbumLinkedSyncEnable) {
|
||||
await _safeRun(backgroundManager.syncLinkedAlbum(), "syncLinkedAlbum");
|
||||
@@ -172,20 +162,6 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> _resumeBackup() async {
|
||||
final isEnableBackup = _ref.read(appSettingsServiceProvider).getSetting(AppSettingsEnum.enableBackup);
|
||||
|
||||
if (isEnableBackup) {
|
||||
final currentUser = Store.tryGet(StoreKey.currentUser);
|
||||
if (currentUser != null) {
|
||||
await _safeRun(
|
||||
_ref.read(driftBackupProvider.notifier).handleBackupResume(currentUser.id),
|
||||
"handleBackupResume",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper method to check if operations should continue
|
||||
bool _shouldContinueOperation() {
|
||||
return [AppLifeCycleEnum.resumed, AppLifeCycleEnum.active].contains(state) &&
|
||||
|
||||
@@ -1,17 +1,14 @@
|
||||
import 'dart:io';
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:background_downloader/background_downloader.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/extensions/string_extensions.dart';
|
||||
import 'package:immich_mobile/models/upload/share_intent_attachment.model.dart';
|
||||
import 'package:immich_mobile/platform/upload_api.g.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
|
||||
import 'package:immich_mobile/routing/router.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:immich_mobile/services/share_intent_service.dart';
|
||||
import 'package:immich_mobile/services/upload.service.dart';
|
||||
import 'package:path/path.dart';
|
||||
|
||||
final shareIntentUploadProvider = StateNotifierProvider<ShareIntentUploadStateNotifier, List<ShareIntentAttachment>>(
|
||||
((ref) => ShareIntentUploadStateNotifier(
|
||||
@@ -25,10 +22,12 @@ class ShareIntentUploadStateNotifier extends StateNotifier<List<ShareIntentAttac
|
||||
final AppRouter router;
|
||||
final UploadService _uploadService;
|
||||
final ShareIntentService _shareIntentService;
|
||||
late final StreamSubscription<UploadApiTaskStatus> _taskStatusStream;
|
||||
late final StreamSubscription<UploadApiTaskProgress> _taskProgressStream;
|
||||
|
||||
ShareIntentUploadStateNotifier(this.router, this._uploadService, this._shareIntentService) : super([]) {
|
||||
_uploadService.taskStatusStream.listen(_updateUploadStatus);
|
||||
_uploadService.taskProgressStream.listen(_taskProgressCallback);
|
||||
_taskStatusStream = _uploadService.taskStatusStream.listen(_updateUploadStatus);
|
||||
_taskProgressStream = _uploadService.taskProgressStream.listen(_taskProgressCallback);
|
||||
}
|
||||
|
||||
void init() {
|
||||
@@ -36,6 +35,13 @@ class ShareIntentUploadStateNotifier extends StateNotifier<List<ShareIntentAttac
|
||||
_shareIntentService.init();
|
||||
}
|
||||
|
||||
@override
|
||||
void dispose() {
|
||||
unawaited(_taskStatusStream.cancel());
|
||||
unawaited(_taskProgressStream.cancel());
|
||||
super.dispose();
|
||||
}
|
||||
|
||||
void onSharedMedia(List<ShareIntentAttachment> attachments) {
|
||||
router.removeWhere((route) => route.name == "ShareIntentRoute");
|
||||
clearAttachments();
|
||||
@@ -65,82 +71,35 @@ class ShareIntentUploadStateNotifier extends StateNotifier<List<ShareIntentAttac
|
||||
state = [];
|
||||
}
|
||||
|
||||
void _updateUploadStatus(TaskStatusUpdate task) async {
|
||||
if (task.status == TaskStatus.canceled) {
|
||||
return;
|
||||
}
|
||||
|
||||
final taskId = task.task.taskId;
|
||||
void _updateUploadStatus(UploadApiTaskStatus task) {
|
||||
final uploadStatus = switch (task.status) {
|
||||
TaskStatus.complete => UploadStatus.complete,
|
||||
TaskStatus.failed => UploadStatus.failed,
|
||||
TaskStatus.canceled => UploadStatus.canceled,
|
||||
TaskStatus.enqueued => UploadStatus.enqueued,
|
||||
TaskStatus.running => UploadStatus.running,
|
||||
TaskStatus.paused => UploadStatus.paused,
|
||||
TaskStatus.notFound => UploadStatus.notFound,
|
||||
TaskStatus.waitingToRetry => UploadStatus.waitingToRetry,
|
||||
UploadApiStatus.uploadComplete => UploadStatus.complete,
|
||||
UploadApiStatus.uploadFailed || UploadApiStatus.downloadFailed => UploadStatus.failed,
|
||||
UploadApiStatus.uploadQueued => UploadStatus.enqueued,
|
||||
_ => UploadStatus.preparing,
|
||||
};
|
||||
|
||||
final taskId = task.id.toInt();
|
||||
state = [
|
||||
for (final attachment in state)
|
||||
if (attachment.id == taskId.toInt()) attachment.copyWith(status: uploadStatus) else attachment,
|
||||
if (attachment.id == taskId) attachment.copyWith(status: uploadStatus) else attachment,
|
||||
];
|
||||
}
|
||||
|
||||
void _taskProgressCallback(TaskProgressUpdate update) {
|
||||
void _taskProgressCallback(UploadApiTaskProgress update) {
|
||||
// Ignore if the task is canceled or completed
|
||||
if (update.progress == downloadFailed || update.progress == downloadCompleted) {
|
||||
return;
|
||||
}
|
||||
|
||||
final taskId = update.task.taskId;
|
||||
final taskId = update.id.toInt();
|
||||
state = [
|
||||
for (final attachment in state)
|
||||
if (attachment.id == taskId.toInt()) attachment.copyWith(uploadProgress: update.progress) else attachment,
|
||||
if (attachment.id == taskId) attachment.copyWith(uploadProgress: update.progress) else attachment,
|
||||
];
|
||||
}
|
||||
|
||||
Future<void> upload(File file) async {
|
||||
final task = await _buildUploadTask(hash(file.path).toString(), file);
|
||||
|
||||
await _uploadService.enqueueTasks([task]);
|
||||
}
|
||||
|
||||
Future<UploadTask> _buildUploadTask(String id, File file, {Map<String, String>? fields}) async {
|
||||
final serverEndpoint = Store.get(StoreKey.serverEndpoint);
|
||||
final url = Uri.parse('$serverEndpoint/assets').toString();
|
||||
final headers = ApiService.getRequestHeaders();
|
||||
final deviceId = Store.get(StoreKey.deviceId);
|
||||
|
||||
final (baseDirectory, directory, filename) = await Task.split(filePath: file.path);
|
||||
final stats = await file.stat();
|
||||
final fileCreatedAt = stats.changed;
|
||||
final fileModifiedAt = stats.modified;
|
||||
|
||||
final fieldsMap = {
|
||||
'filename': filename,
|
||||
'deviceAssetId': id,
|
||||
'deviceId': deviceId,
|
||||
'fileCreatedAt': fileCreatedAt.toUtc().toIso8601String(),
|
||||
'fileModifiedAt': fileModifiedAt.toUtc().toIso8601String(),
|
||||
'isFavorite': 'false',
|
||||
'duration': '0',
|
||||
if (fields != null) ...fields,
|
||||
};
|
||||
|
||||
return UploadTask(
|
||||
taskId: id,
|
||||
httpRequestMethod: 'POST',
|
||||
url: url,
|
||||
headers: headers,
|
||||
filename: filename,
|
||||
fields: fieldsMap,
|
||||
baseDirectory: baseDirectory,
|
||||
directory: directory,
|
||||
fileField: 'assetData',
|
||||
group: kManualUploadGroup,
|
||||
updates: Updates.statusAndProgress,
|
||||
);
|
||||
Future<void> upload(List<ShareIntentAttachment> files) {
|
||||
return uploadApi.enqueueFiles(files.map((e) => e.path).toList(growable: false));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
// ignore_for_file: public_member_api_docs, sort_constructors_first
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:background_downloader/background_downloader.dart';
|
||||
import 'package:collection/collection.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/extensions/string_extensions.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/backup.repository.dart';
|
||||
import 'package:immich_mobile/platform/upload_api.g.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
|
||||
import 'package:immich_mobile/providers/user.provider.dart';
|
||||
import 'package:immich_mobile/services/upload.service.dart';
|
||||
@@ -196,6 +194,9 @@ final driftBackupProvider = StateNotifierProvider<DriftBackupNotifier, DriftBack
|
||||
});
|
||||
|
||||
class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
|
||||
late final StreamSubscription<UploadApiTaskStatus> _taskStatusStream;
|
||||
late final StreamSubscription<UploadApiTaskProgress> _taskProgressStream;
|
||||
|
||||
DriftBackupNotifier(this._uploadService)
|
||||
: super(
|
||||
const DriftBackupState(
|
||||
@@ -211,17 +212,20 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
|
||||
error: BackupError.none,
|
||||
),
|
||||
) {
|
||||
{
|
||||
_uploadService.taskStatusStream.listen(_handleTaskStatusUpdate);
|
||||
_uploadService.taskProgressStream.listen(_handleTaskProgressUpdate);
|
||||
}
|
||||
_taskStatusStream = _uploadService.taskStatusStream.listen(_handleTaskStatusUpdate);
|
||||
_taskProgressStream = _uploadService.taskProgressStream.listen(_handleTaskProgressUpdate);
|
||||
}
|
||||
|
||||
final UploadService _uploadService;
|
||||
StreamSubscription<TaskStatusUpdate>? _statusSubscription;
|
||||
StreamSubscription<TaskProgressUpdate>? _progressSubscription;
|
||||
final _logger = Logger("DriftBackupNotifier");
|
||||
|
||||
@override
|
||||
void dispose() {
|
||||
unawaited(_taskStatusStream.cancel());
|
||||
unawaited(_taskProgressStream.cancel());
|
||||
super.dispose();
|
||||
}
|
||||
|
||||
/// Remove upload item from state
|
||||
void _removeUploadItem(String taskId) {
|
||||
if (state.uploadItems.containsKey(taskId)) {
|
||||
@@ -231,16 +235,12 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
|
||||
}
|
||||
}
|
||||
|
||||
void _handleTaskStatusUpdate(TaskStatusUpdate update) {
|
||||
final taskId = update.task.taskId;
|
||||
void _handleTaskStatusUpdate(UploadApiTaskStatus update) {
|
||||
final taskId = update.id;
|
||||
|
||||
switch (update.status) {
|
||||
case TaskStatus.complete:
|
||||
if (update.task.group == kBackupGroup) {
|
||||
if (update.responseStatusCode == 201) {
|
||||
state = state.copyWith(backupCount: state.backupCount + 1, remainderCount: state.remainderCount - 1);
|
||||
}
|
||||
}
|
||||
case UploadApiStatus.uploadComplete:
|
||||
state = state.copyWith(backupCount: state.backupCount + 1, remainderCount: state.remainderCount - 1);
|
||||
|
||||
// Remove the completed task from the upload items
|
||||
if (state.uploadItems.containsKey(taskId)) {
|
||||
@@ -249,40 +249,45 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
|
||||
});
|
||||
}
|
||||
|
||||
case TaskStatus.failed:
|
||||
// Ignore retry errors to avoid confusing users
|
||||
if (update.exception?.description == 'Delayed or retried enqueue failed') {
|
||||
_removeUploadItem(taskId);
|
||||
return;
|
||||
}
|
||||
|
||||
case UploadApiStatus.uploadFailed:
|
||||
final currentItem = state.uploadItems[taskId];
|
||||
if (currentItem == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String? error;
|
||||
final exception = update.exception;
|
||||
if (exception != null && exception is TaskHttpException) {
|
||||
final message = tryJsonDecode(exception.description)?['message'] as String?;
|
||||
if (message != null) {
|
||||
final responseCode = exception.httpResponseCode;
|
||||
error = "${exception.exceptionType}, response code $responseCode: $message";
|
||||
}
|
||||
}
|
||||
error ??= update.exception?.toString();
|
||||
// String? error;
|
||||
// final exception = update.exception;
|
||||
// if (exception != null && exception is TaskHttpException) {
|
||||
// final message = tryJsonDecode(exception.description)?['message'] as String?;
|
||||
// if (message != null) {
|
||||
// final responseCode = exception.httpResponseCode;
|
||||
// error = "${exception.exceptionType}, response code $responseCode: $message";
|
||||
// }
|
||||
// }
|
||||
// error ??= update.exception?.toString();
|
||||
|
||||
// state = state.copyWith(
|
||||
// uploadItems: {
|
||||
// ...state.uploadItems,
|
||||
// taskId: currentItem.copyWith(isFailed: true, error: error),
|
||||
// },
|
||||
// );
|
||||
// _logger.fine("Upload failed for taskId: $taskId, exception: ${update.exception}");
|
||||
break;
|
||||
|
||||
case UploadApiStatus.uploadPending:
|
||||
state = state.copyWith(
|
||||
uploadItems: {
|
||||
...state.uploadItems,
|
||||
taskId: currentItem.copyWith(isFailed: true, error: error),
|
||||
taskId: DriftUploadStatus(
|
||||
taskId: update.id.toString(),
|
||||
filename: update.filename,
|
||||
fileSize: 0,
|
||||
networkSpeedAsString: "",
|
||||
progress: 0.0,
|
||||
),
|
||||
},
|
||||
);
|
||||
_logger.fine("Upload failed for taskId: $taskId, exception: ${update.exception}");
|
||||
break;
|
||||
|
||||
case TaskStatus.canceled:
|
||||
_removeUploadItem(update.task.taskId);
|
||||
break;
|
||||
|
||||
default:
|
||||
@@ -290,42 +295,21 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
|
||||
}
|
||||
}
|
||||
|
||||
void _handleTaskProgressUpdate(TaskProgressUpdate update) {
|
||||
final taskId = update.task.taskId;
|
||||
final filename = update.task.displayName;
|
||||
void _handleTaskProgressUpdate(UploadApiTaskProgress update) {
|
||||
final taskId = update.id;
|
||||
final progress = update.progress;
|
||||
final currentItem = state.uploadItems[taskId];
|
||||
if (currentItem != null) {
|
||||
if (progress == kUploadStatusCanceled) {
|
||||
_removeUploadItem(update.task.taskId);
|
||||
return;
|
||||
}
|
||||
|
||||
state = state.copyWith(
|
||||
uploadItems: {
|
||||
...state.uploadItems,
|
||||
taskId: update.hasExpectedFileSize
|
||||
? currentItem.copyWith(
|
||||
progress: progress,
|
||||
fileSize: update.expectedFileSize,
|
||||
networkSpeedAsString: update.networkSpeedAsString,
|
||||
)
|
||||
: currentItem.copyWith(progress: progress),
|
||||
},
|
||||
);
|
||||
|
||||
if (currentItem == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
state = state.copyWith(
|
||||
uploadItems: {
|
||||
...state.uploadItems,
|
||||
taskId: DriftUploadStatus(
|
||||
taskId: taskId,
|
||||
filename: filename,
|
||||
taskId: currentItem.copyWith(
|
||||
progress: progress,
|
||||
fileSize: update.expectedFileSize,
|
||||
networkSpeedAsString: update.networkSpeedAsString,
|
||||
fileSize: update.totalBytes,
|
||||
networkSpeedAsString: update.speed?.toStringAsFixed(2) ?? "",
|
||||
),
|
||||
},
|
||||
);
|
||||
@@ -350,52 +334,18 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
|
||||
state = state.copyWith(isSyncing: isSyncing);
|
||||
}
|
||||
|
||||
Future<void> startBackup(String userId) {
|
||||
Future<void> startBackup() {
|
||||
state = state.copyWith(error: BackupError.none);
|
||||
return _uploadService.startBackup(userId, _updateEnqueueCount);
|
||||
}
|
||||
|
||||
void _updateEnqueueCount(EnqueueStatus status) {
|
||||
state = state.copyWith(enqueueCount: status.enqueueCount, enqueueTotalCount: status.totalCount);
|
||||
return _uploadService.startBackup();
|
||||
}
|
||||
|
||||
Future<void> cancel() async {
|
||||
dPrint(() => "Canceling backup tasks...");
|
||||
state = state.copyWith(enqueueCount: 0, enqueueTotalCount: 0, isCanceling: true, error: BackupError.none);
|
||||
|
||||
final activeTaskCount = await _uploadService.cancelBackup();
|
||||
|
||||
if (activeTaskCount > 0) {
|
||||
dPrint(() => "$activeTaskCount tasks left, continuing to cancel...");
|
||||
await cancel();
|
||||
} else {
|
||||
dPrint(() => "All tasks canceled successfully.");
|
||||
// Clear all upload items when cancellation is complete
|
||||
state = state.copyWith(isCanceling: false, uploadItems: {});
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> handleBackupResume(String userId) async {
|
||||
_logger.info("Resuming backup tasks...");
|
||||
state = state.copyWith(error: BackupError.none);
|
||||
final tasks = await _uploadService.getActiveTasks(kBackupGroup);
|
||||
_logger.info("Found ${tasks.length} tasks");
|
||||
|
||||
if (tasks.isEmpty) {
|
||||
// Start a new backup queue
|
||||
_logger.info("Start a new backup queue");
|
||||
return startBackup(userId);
|
||||
}
|
||||
|
||||
_logger.info("Tasks to resume: ${tasks.length}");
|
||||
return _uploadService.resumeBackup();
|
||||
}
|
||||
|
||||
@override
|
||||
void dispose() {
|
||||
_statusSubscription?.cancel();
|
||||
_progressSubscription?.cancel();
|
||||
super.dispose();
|
||||
await _uploadService.cancelBackup();
|
||||
dPrint(() => "All tasks canceled successfully.");
|
||||
state = state.copyWith(isCanceling: false, uploadItems: {});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import 'package:immich_mobile/platform/background_worker_lock_api.g.dart';
|
||||
import 'package:immich_mobile/platform/connectivity_api.g.dart';
|
||||
import 'package:immich_mobile/platform/native_sync_api.g.dart';
|
||||
import 'package:immich_mobile/platform/thumbnail_api.g.dart';
|
||||
import 'package:immich_mobile/platform/upload_api.g.dart';
|
||||
|
||||
final backgroundWorkerFgServiceProvider = Provider((_) => BackgroundWorkerFgService(BackgroundWorkerFgHostApi()));
|
||||
|
||||
@@ -17,3 +18,5 @@ final nativeSyncApiProvider = Provider<NativeSyncApi>((_) => NativeSyncApi());
|
||||
final connectivityApiProvider = Provider<ConnectivityApi>((_) => ConnectivityApi());
|
||||
|
||||
final thumbnailApi = ThumbnailApi();
|
||||
|
||||
final uploadApi = UploadApi();
|
||||
|
||||
@@ -1,143 +0,0 @@
|
||||
import 'dart:convert';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:background_downloader/background_downloader.dart';
|
||||
import 'package:cancellation_token_http/http.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:immich_mobile/utils/debug_print.dart';
|
||||
|
||||
class UploadTaskWithFile {
|
||||
final File file;
|
||||
final UploadTask task;
|
||||
|
||||
const UploadTaskWithFile({required this.file, required this.task});
|
||||
}
|
||||
|
||||
final uploadRepositoryProvider = Provider((ref) => UploadRepository());
|
||||
|
||||
class UploadRepository {
|
||||
void Function(TaskStatusUpdate)? onUploadStatus;
|
||||
void Function(TaskProgressUpdate)? onTaskProgress;
|
||||
|
||||
UploadRepository() {
|
||||
FileDownloader().registerCallbacks(
|
||||
group: kBackupGroup,
|
||||
taskStatusCallback: (update) => onUploadStatus?.call(update),
|
||||
taskProgressCallback: (update) => onTaskProgress?.call(update),
|
||||
);
|
||||
FileDownloader().registerCallbacks(
|
||||
group: kBackupLivePhotoGroup,
|
||||
taskStatusCallback: (update) => onUploadStatus?.call(update),
|
||||
taskProgressCallback: (update) => onTaskProgress?.call(update),
|
||||
);
|
||||
FileDownloader().registerCallbacks(
|
||||
group: kManualUploadGroup,
|
||||
taskStatusCallback: (update) => onUploadStatus?.call(update),
|
||||
taskProgressCallback: (update) => onTaskProgress?.call(update),
|
||||
);
|
||||
}
|
||||
|
||||
Future<void> enqueueBackground(UploadTask task) {
|
||||
return FileDownloader().enqueue(task);
|
||||
}
|
||||
|
||||
Future<List<bool>> enqueueBackgroundAll(List<UploadTask> tasks) {
|
||||
return FileDownloader().enqueueAll(tasks);
|
||||
}
|
||||
|
||||
Future<void> deleteDatabaseRecords(String group) {
|
||||
return FileDownloader().database.deleteAllRecords(group: group);
|
||||
}
|
||||
|
||||
Future<bool> cancelAll(String group) {
|
||||
return FileDownloader().cancelAll(group: group);
|
||||
}
|
||||
|
||||
Future<int> reset(String group) {
|
||||
return FileDownloader().reset(group: group);
|
||||
}
|
||||
|
||||
/// Get a list of tasks that are ENQUEUED or RUNNING
|
||||
Future<List<Task>> getActiveTasks(String group) {
|
||||
return FileDownloader().allTasks(group: group);
|
||||
}
|
||||
|
||||
Future<void> start() {
|
||||
return FileDownloader().start();
|
||||
}
|
||||
|
||||
Future<void> getUploadInfo() async {
|
||||
final [enqueuedTasks, runningTasks, canceledTasks, waitingTasks, pausedTasks] = await Future.wait([
|
||||
FileDownloader().database.allRecordsWithStatus(TaskStatus.enqueued, group: kBackupGroup),
|
||||
FileDownloader().database.allRecordsWithStatus(TaskStatus.running, group: kBackupGroup),
|
||||
FileDownloader().database.allRecordsWithStatus(TaskStatus.canceled, group: kBackupGroup),
|
||||
FileDownloader().database.allRecordsWithStatus(TaskStatus.waitingToRetry, group: kBackupGroup),
|
||||
FileDownloader().database.allRecordsWithStatus(TaskStatus.paused, group: kBackupGroup),
|
||||
]);
|
||||
|
||||
dPrint(
|
||||
() =>
|
||||
"""
|
||||
Upload Info:
|
||||
Enqueued: ${enqueuedTasks.length}
|
||||
Running: ${runningTasks.length}
|
||||
Canceled: ${canceledTasks.length}
|
||||
Waiting: ${waitingTasks.length}
|
||||
Paused: ${pausedTasks.length}
|
||||
""",
|
||||
);
|
||||
}
|
||||
|
||||
Future<void> backupWithDartClient(Iterable<UploadTaskWithFile> tasks, CancellationToken cancelToken) async {
|
||||
final httpClient = Client();
|
||||
final String savedEndpoint = Store.get(StoreKey.serverEndpoint);
|
||||
|
||||
Logger logger = Logger('UploadRepository');
|
||||
for (final candidate in tasks) {
|
||||
if (cancelToken.isCancelled) {
|
||||
logger.warning("Backup was cancelled by the user");
|
||||
break;
|
||||
}
|
||||
|
||||
try {
|
||||
final fileStream = candidate.file.openRead();
|
||||
final assetRawUploadData = MultipartFile(
|
||||
"assetData",
|
||||
fileStream,
|
||||
candidate.file.lengthSync(),
|
||||
filename: candidate.task.filename,
|
||||
);
|
||||
|
||||
final baseRequest = MultipartRequest('POST', Uri.parse('$savedEndpoint/assets'));
|
||||
|
||||
baseRequest.headers.addAll(candidate.task.headers);
|
||||
baseRequest.fields.addAll(candidate.task.fields);
|
||||
baseRequest.files.add(assetRawUploadData);
|
||||
|
||||
final response = await httpClient.send(baseRequest, cancellationToken: cancelToken);
|
||||
|
||||
final responseBody = jsonDecode(await response.stream.bytesToString());
|
||||
|
||||
if (![200, 201].contains(response.statusCode)) {
|
||||
final error = responseBody;
|
||||
|
||||
logger.warning(
|
||||
"Error(${error['statusCode']}) uploading ${candidate.task.filename} | Created on ${candidate.task.fields["fileCreatedAt"]} | ${error['error']}",
|
||||
);
|
||||
|
||||
continue;
|
||||
}
|
||||
} on CancelledException {
|
||||
logger.warning("Backup was cancelled by the user");
|
||||
break;
|
||||
} catch (error, stackTrace) {
|
||||
logger.warning("Error backup asset: ${error.toString()}: $stackTrace");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,502 +1,43 @@
|
||||
import 'dart:async';
|
||||
import 'dart:convert';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:background_downloader/background_downloader.dart';
|
||||
import 'package:cancellation_token_http/http.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:hooks_riverpod/hooks_riverpod.dart';
|
||||
import 'package:immich_mobile/constants/constants.dart';
|
||||
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
|
||||
import 'package:immich_mobile/domain/models/store.model.dart';
|
||||
import 'package:immich_mobile/entities/store.entity.dart';
|
||||
import 'package:immich_mobile/extensions/platform_extensions.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/backup.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
|
||||
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
|
||||
import 'package:immich_mobile/providers/app_settings.provider.dart';
|
||||
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/storage.provider.dart';
|
||||
import 'package:immich_mobile/repositories/asset_media.repository.dart';
|
||||
import 'package:immich_mobile/repositories/upload.repository.dart';
|
||||
import 'package:immich_mobile/services/api.service.dart';
|
||||
import 'package:immich_mobile/services/app_settings.service.dart';
|
||||
import 'package:immich_mobile/utils/debug_print.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:path/path.dart' as p;
|
||||
import 'package:immich_mobile/platform/upload_api.g.dart';
|
||||
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
|
||||
|
||||
final uploadServiceProvider = Provider((ref) {
|
||||
final service = UploadService(
|
||||
ref.watch(uploadRepositoryProvider),
|
||||
ref.watch(backupRepositoryProvider),
|
||||
ref.watch(storageRepositoryProvider),
|
||||
ref.watch(localAssetRepository),
|
||||
ref.watch(appSettingsServiceProvider),
|
||||
ref.watch(assetMediaRepositoryProvider),
|
||||
);
|
||||
|
||||
ref.onDispose(service.dispose);
|
||||
final service = UploadService(ref.watch(backupRepositoryProvider));
|
||||
return service;
|
||||
});
|
||||
|
||||
class UploadService {
|
||||
UploadService(
|
||||
this._uploadRepository,
|
||||
this._backupRepository,
|
||||
this._storageRepository,
|
||||
this._localAssetRepository,
|
||||
this._appSettingsService,
|
||||
this._assetMediaRepository,
|
||||
) {
|
||||
_uploadRepository.onUploadStatus = _onUploadCallback;
|
||||
_uploadRepository.onTaskProgress = _onTaskProgressCallback;
|
||||
}
|
||||
final Stream<UploadApiTaskStatus> taskStatusStream;
|
||||
final Stream<UploadApiTaskProgress> taskProgressStream;
|
||||
UploadService(this._backupRepository) : taskStatusStream = streamStatus(), taskProgressStream = streamProgress();
|
||||
|
||||
final UploadRepository _uploadRepository;
|
||||
final DriftBackupRepository _backupRepository;
|
||||
final StorageRepository _storageRepository;
|
||||
final DriftLocalAssetRepository _localAssetRepository;
|
||||
final AppSettingsService _appSettingsService;
|
||||
final AssetMediaRepository _assetMediaRepository;
|
||||
final Logger _logger = Logger('UploadService');
|
||||
|
||||
final StreamController<TaskStatusUpdate> _taskStatusController = StreamController<TaskStatusUpdate>.broadcast();
|
||||
final StreamController<TaskProgressUpdate> _taskProgressController = StreamController<TaskProgressUpdate>.broadcast();
|
||||
|
||||
Stream<TaskStatusUpdate> get taskStatusStream => _taskStatusController.stream;
|
||||
Stream<TaskProgressUpdate> get taskProgressStream => _taskProgressController.stream;
|
||||
|
||||
bool shouldAbortQueuingTasks = false;
|
||||
|
||||
void _onTaskProgressCallback(TaskProgressUpdate update) {
|
||||
if (!_taskProgressController.isClosed) {
|
||||
_taskProgressController.add(update);
|
||||
}
|
||||
}
|
||||
|
||||
void _onUploadCallback(TaskStatusUpdate update) {
|
||||
if (!_taskStatusController.isClosed) {
|
||||
_taskStatusController.add(update);
|
||||
}
|
||||
_handleTaskStatusUpdate(update);
|
||||
}
|
||||
|
||||
void dispose() {
|
||||
_taskStatusController.close();
|
||||
_taskProgressController.close();
|
||||
}
|
||||
|
||||
Future<List<bool>> enqueueTasks(List<UploadTask> tasks) {
|
||||
return _uploadRepository.enqueueBackgroundAll(tasks);
|
||||
}
|
||||
|
||||
Future<List<Task>> getActiveTasks(String group) {
|
||||
return _uploadRepository.getActiveTasks(group);
|
||||
}
|
||||
|
||||
Future<({int total, int remainder, int processing})> getBackupCounts(String userId) {
|
||||
return _backupRepository.getAllCounts(userId);
|
||||
}
|
||||
|
||||
Future<void> manualBackup(List<LocalAsset> localAssets) async {
|
||||
await _storageRepository.clearCache();
|
||||
List<UploadTask> tasks = [];
|
||||
for (final asset in localAssets) {
|
||||
final task = await getUploadTask(
|
||||
asset,
|
||||
group: kManualUploadGroup,
|
||||
priority: 1, // High priority after upload motion photo part
|
||||
);
|
||||
if (task != null) {
|
||||
tasks.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
if (tasks.isNotEmpty) {
|
||||
await enqueueTasks(tasks);
|
||||
}
|
||||
Future<void> manualBackup(List<LocalAsset> localAssets) {
|
||||
return uploadApi.enqueueAssets(localAssets.map((e) => e.id).toList(growable: false));
|
||||
}
|
||||
|
||||
/// Find backup candidates
|
||||
/// Build the upload tasks
|
||||
/// Enqueue the tasks
|
||||
Future<void> startBackup(String userId, void Function(EnqueueStatus status) onEnqueueTasks) async {
|
||||
await _storageRepository.clearCache();
|
||||
|
||||
shouldAbortQueuingTasks = false;
|
||||
|
||||
final candidates = await _backupRepository.getCandidates(userId);
|
||||
if (candidates.isEmpty) {
|
||||
return;
|
||||
}
|
||||
|
||||
const batchSize = 100;
|
||||
int count = 0;
|
||||
for (int i = 0; i < candidates.length; i += batchSize) {
|
||||
if (shouldAbortQueuingTasks) {
|
||||
break;
|
||||
}
|
||||
|
||||
final batch = candidates.skip(i).take(batchSize).toList();
|
||||
List<UploadTask> tasks = [];
|
||||
for (final asset in batch) {
|
||||
final task = await getUploadTask(asset);
|
||||
if (task != null) {
|
||||
tasks.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
if (tasks.isNotEmpty && !shouldAbortQueuingTasks) {
|
||||
count += tasks.length;
|
||||
await enqueueTasks(tasks);
|
||||
|
||||
onEnqueueTasks(EnqueueStatus(enqueueCount: count, totalCount: candidates.length));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> startBackupWithHttpClient(String userId, bool hasWifi, CancellationToken token) async {
|
||||
await _storageRepository.clearCache();
|
||||
|
||||
shouldAbortQueuingTasks = false;
|
||||
|
||||
final candidates = await _backupRepository.getCandidates(userId);
|
||||
if (candidates.isEmpty) {
|
||||
return;
|
||||
}
|
||||
|
||||
const batchSize = 100;
|
||||
for (int i = 0; i < candidates.length; i += batchSize) {
|
||||
if (shouldAbortQueuingTasks || token.isCancelled) {
|
||||
break;
|
||||
}
|
||||
|
||||
final batch = candidates.skip(i).take(batchSize).toList();
|
||||
List<UploadTaskWithFile> tasks = [];
|
||||
for (final asset in batch) {
|
||||
final requireWifi = _shouldRequireWiFi(asset);
|
||||
if (requireWifi && !hasWifi) {
|
||||
_logger.warning('Skipping upload for ${asset.id} because it requires WiFi');
|
||||
continue;
|
||||
}
|
||||
|
||||
final task = await _getUploadTaskWithFile(asset);
|
||||
if (task != null) {
|
||||
tasks.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
if (tasks.isNotEmpty && !shouldAbortQueuingTasks) {
|
||||
await _uploadRepository.backupWithDartClient(tasks, token);
|
||||
}
|
||||
}
|
||||
Future<void> startBackup() async {
|
||||
return uploadApi.refresh();
|
||||
}
|
||||
|
||||
/// Cancel all ongoing uploads and reset the upload queue
|
||||
///
|
||||
/// Return the number of left over tasks in the queue
|
||||
Future<int> cancelBackup() async {
|
||||
shouldAbortQueuingTasks = true;
|
||||
|
||||
await _storageRepository.clearCache();
|
||||
await _uploadRepository.reset(kBackupGroup);
|
||||
await _uploadRepository.deleteDatabaseRecords(kBackupGroup);
|
||||
|
||||
final activeTasks = await _uploadRepository.getActiveTasks(kBackupGroup);
|
||||
return activeTasks.length;
|
||||
}
|
||||
|
||||
Future<void> resumeBackup() {
|
||||
return _uploadRepository.start();
|
||||
}
|
||||
|
||||
void _handleTaskStatusUpdate(TaskStatusUpdate update) async {
|
||||
switch (update.status) {
|
||||
case TaskStatus.complete:
|
||||
unawaited(_handleLivePhoto(update));
|
||||
|
||||
if (CurrentPlatform.isIOS) {
|
||||
try {
|
||||
final path = await update.task.filePath();
|
||||
await File(path).delete();
|
||||
} catch (e) {
|
||||
_logger.severe('Error deleting file path for iOS: $e');
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Future<void> _handleLivePhoto(TaskStatusUpdate update) async {
|
||||
try {
|
||||
if (update.task.metaData.isEmpty || update.task.metaData == '') {
|
||||
return;
|
||||
}
|
||||
|
||||
final metadata = UploadTaskMetadata.fromJson(update.task.metaData);
|
||||
if (!metadata.isLivePhotos) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (update.responseBody == null || update.responseBody!.isEmpty) {
|
||||
return;
|
||||
}
|
||||
final response = jsonDecode(update.responseBody!);
|
||||
|
||||
final localAsset = await _localAssetRepository.getById(metadata.localAssetId);
|
||||
if (localAsset == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
final uploadTask = await getLivePhotoUploadTask(localAsset, response['id'] as String);
|
||||
|
||||
if (uploadTask == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
await enqueueTasks([uploadTask]);
|
||||
} catch (error, stackTrace) {
|
||||
dPrint(() => "Error handling live photo upload task: $error $stackTrace");
|
||||
}
|
||||
}
|
||||
|
||||
Future<UploadTaskWithFile?> _getUploadTaskWithFile(LocalAsset asset) async {
|
||||
final entity = await _storageRepository.getAssetEntityForAsset(asset);
|
||||
if (entity == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final file = await _storageRepository.getFileForAsset(asset.id);
|
||||
if (file == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final originalFileName = entity.isLivePhoto ? p.setExtension(asset.name, p.extension(file.path)) : asset.name;
|
||||
|
||||
String metadata = UploadTaskMetadata(
|
||||
localAssetId: asset.id,
|
||||
isLivePhotos: entity.isLivePhoto,
|
||||
livePhotoVideoId: '',
|
||||
).toJson();
|
||||
|
||||
return UploadTaskWithFile(
|
||||
file: file,
|
||||
task: await buildUploadTask(
|
||||
file,
|
||||
createdAt: asset.createdAt,
|
||||
modifiedAt: asset.updatedAt,
|
||||
originalFileName: originalFileName,
|
||||
deviceAssetId: asset.id,
|
||||
metadata: metadata,
|
||||
group: "group",
|
||||
priority: 0,
|
||||
isFavorite: asset.isFavorite,
|
||||
requiresWiFi: false,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@visibleForTesting
|
||||
Future<UploadTask?> getUploadTask(LocalAsset asset, {String group = kBackupGroup, int? priority}) async {
|
||||
final entity = await _storageRepository.getAssetEntityForAsset(asset);
|
||||
if (entity == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
File? file;
|
||||
|
||||
/// iOS LivePhoto has two files: a photo and a video.
|
||||
/// They are uploaded separately, with video file being upload first, then returned with the assetId
|
||||
/// The assetId is then used as a metadata for the photo file upload task.
|
||||
///
|
||||
/// We implement two separate upload groups for this, the normal one for the video file
|
||||
/// and the higher priority group for the photo file because the video file is already uploaded.
|
||||
///
|
||||
/// The cancel operation will only cancel the video group (normal group), the photo group will not
|
||||
/// be touched, as the video file is already uploaded.
|
||||
|
||||
if (entity.isLivePhoto) {
|
||||
file = await _storageRepository.getMotionFileForAsset(asset);
|
||||
} else {
|
||||
file = await _storageRepository.getFileForAsset(asset.id);
|
||||
}
|
||||
|
||||
if (file == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final fileName = await _assetMediaRepository.getOriginalFilename(asset.id) ?? asset.name;
|
||||
final originalFileName = entity.isLivePhoto ? p.setExtension(fileName, p.extension(file.path)) : fileName;
|
||||
|
||||
String metadata = UploadTaskMetadata(
|
||||
localAssetId: asset.id,
|
||||
isLivePhotos: entity.isLivePhoto,
|
||||
livePhotoVideoId: '',
|
||||
).toJson();
|
||||
|
||||
final requiresWiFi = _shouldRequireWiFi(asset);
|
||||
|
||||
return buildUploadTask(
|
||||
file,
|
||||
createdAt: asset.createdAt,
|
||||
modifiedAt: asset.updatedAt,
|
||||
originalFileName: originalFileName,
|
||||
deviceAssetId: asset.id,
|
||||
metadata: metadata,
|
||||
group: group,
|
||||
priority: priority,
|
||||
isFavorite: asset.isFavorite,
|
||||
requiresWiFi: requiresWiFi,
|
||||
);
|
||||
}
|
||||
|
||||
@visibleForTesting
|
||||
Future<UploadTask?> getLivePhotoUploadTask(LocalAsset asset, String livePhotoVideoId) async {
|
||||
final entity = await _storageRepository.getAssetEntityForAsset(asset);
|
||||
if (entity == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final file = await _storageRepository.getFileForAsset(asset.id);
|
||||
if (file == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final fields = {'livePhotoVideoId': livePhotoVideoId};
|
||||
|
||||
final requiresWiFi = _shouldRequireWiFi(asset);
|
||||
final originalFileName = await _assetMediaRepository.getOriginalFilename(asset.id) ?? asset.name;
|
||||
|
||||
return buildUploadTask(
|
||||
file,
|
||||
createdAt: asset.createdAt,
|
||||
modifiedAt: asset.updatedAt,
|
||||
originalFileName: originalFileName,
|
||||
deviceAssetId: asset.id,
|
||||
fields: fields,
|
||||
group: kBackupLivePhotoGroup,
|
||||
priority: 0, // Highest priority to get upload immediately
|
||||
isFavorite: asset.isFavorite,
|
||||
requiresWiFi: requiresWiFi,
|
||||
);
|
||||
}
|
||||
|
||||
bool _shouldRequireWiFi(LocalAsset asset) {
|
||||
bool requiresWiFi = true;
|
||||
|
||||
if (asset.isVideo && _appSettingsService.getSetting(AppSettingsEnum.useCellularForUploadVideos)) {
|
||||
requiresWiFi = false;
|
||||
} else if (!asset.isVideo && _appSettingsService.getSetting(AppSettingsEnum.useCellularForUploadPhotos)) {
|
||||
requiresWiFi = false;
|
||||
}
|
||||
|
||||
return requiresWiFi;
|
||||
}
|
||||
|
||||
Future<UploadTask> buildUploadTask(
|
||||
File file, {
|
||||
required String group,
|
||||
required DateTime createdAt,
|
||||
required DateTime modifiedAt,
|
||||
Map<String, String>? fields,
|
||||
String? originalFileName,
|
||||
String? deviceAssetId,
|
||||
String? metadata,
|
||||
int? priority,
|
||||
bool? isFavorite,
|
||||
bool requiresWiFi = true,
|
||||
}) async {
|
||||
final serverEndpoint = Store.get(StoreKey.serverEndpoint);
|
||||
final url = Uri.parse('$serverEndpoint/assets').toString();
|
||||
final headers = ApiService.getRequestHeaders();
|
||||
final deviceId = Store.get(StoreKey.deviceId);
|
||||
final (baseDirectory, directory, filename) = await Task.split(filePath: file.path);
|
||||
final fieldsMap = {
|
||||
'filename': originalFileName ?? filename,
|
||||
'deviceAssetId': deviceAssetId ?? '',
|
||||
'deviceId': deviceId,
|
||||
'fileCreatedAt': createdAt.toUtc().toIso8601String(),
|
||||
'fileModifiedAt': modifiedAt.toUtc().toIso8601String(),
|
||||
'isFavorite': isFavorite?.toString() ?? 'false',
|
||||
'duration': '0',
|
||||
if (fields != null) ...fields,
|
||||
};
|
||||
|
||||
return UploadTask(
|
||||
taskId: deviceAssetId,
|
||||
displayName: originalFileName ?? filename,
|
||||
httpRequestMethod: 'POST',
|
||||
url: url,
|
||||
headers: headers,
|
||||
filename: filename,
|
||||
fields: fieldsMap,
|
||||
baseDirectory: baseDirectory,
|
||||
directory: directory,
|
||||
fileField: 'assetData',
|
||||
metaData: metadata ?? '',
|
||||
group: group,
|
||||
requiresWiFi: requiresWiFi,
|
||||
priority: priority ?? 5,
|
||||
updates: Updates.statusAndProgress,
|
||||
retries: 3,
|
||||
);
|
||||
Future<void> cancelBackup() {
|
||||
return uploadApi.cancelAll();
|
||||
}
|
||||
}
|
||||
|
||||
class UploadTaskMetadata {
|
||||
final String localAssetId;
|
||||
final bool isLivePhotos;
|
||||
final String livePhotoVideoId;
|
||||
|
||||
const UploadTaskMetadata({required this.localAssetId, required this.isLivePhotos, required this.livePhotoVideoId});
|
||||
|
||||
UploadTaskMetadata copyWith({String? localAssetId, bool? isLivePhotos, String? livePhotoVideoId}) {
|
||||
return UploadTaskMetadata(
|
||||
localAssetId: localAssetId ?? this.localAssetId,
|
||||
isLivePhotos: isLivePhotos ?? this.isLivePhotos,
|
||||
livePhotoVideoId: livePhotoVideoId ?? this.livePhotoVideoId,
|
||||
);
|
||||
}
|
||||
|
||||
Map<String, dynamic> toMap() {
|
||||
return <String, dynamic>{
|
||||
'localAssetId': localAssetId,
|
||||
'isLivePhotos': isLivePhotos,
|
||||
'livePhotoVideoId': livePhotoVideoId,
|
||||
};
|
||||
}
|
||||
|
||||
factory UploadTaskMetadata.fromMap(Map<String, dynamic> map) {
|
||||
return UploadTaskMetadata(
|
||||
localAssetId: map['localAssetId'] as String,
|
||||
isLivePhotos: map['isLivePhotos'] as bool,
|
||||
livePhotoVideoId: map['livePhotoVideoId'] as String,
|
||||
);
|
||||
}
|
||||
|
||||
String toJson() => json.encode(toMap());
|
||||
|
||||
factory UploadTaskMetadata.fromJson(String source) =>
|
||||
UploadTaskMetadata.fromMap(json.decode(source) as Map<String, dynamic>);
|
||||
|
||||
@override
|
||||
String toString() =>
|
||||
'UploadTaskMetadata(localAssetId: $localAssetId, isLivePhotos: $isLivePhotos, livePhotoVideoId: $livePhotoVideoId)';
|
||||
|
||||
@override
|
||||
bool operator ==(covariant UploadTaskMetadata other) {
|
||||
if (identical(this, other)) return true;
|
||||
|
||||
return other.localAssetId == localAssetId &&
|
||||
other.isLivePhotos == isLivePhotos &&
|
||||
other.livePhotoVideoId == livePhotoVideoId;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => localAssetId.hashCode ^ isLivePhotos.hashCode ^ livePhotoVideoId.hashCode;
|
||||
}
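The LivePhoto note in the removed upload.service.dart above describes a two-stage upload, which is what this metadata class existed for. The sketch below condenses that flow, assuming background_downloader's TaskStatusUpdate and the getLivePhotoUploadTask/kBackupLivePhotoGroup helpers shown in this diff; the handleLivePhotoCompletion wrapper itself is hypothetical and not part of this change.

import 'dart:convert';

import 'package:background_downloader/background_downloader.dart';

// Hypothetical condensation of _handleLivePhoto above: once the motion-video
// task of a LivePhoto finishes, enqueue the still photo linked to that video.
Future<void> handleLivePhotoCompletion(
  TaskStatusUpdate update,
  Future<UploadTask?> Function(String localAssetId, String livePhotoVideoId) buildPhotoTask,
) async {
  if (update.task.metaData.isEmpty || update.responseBody == null) {
    return;
  }

  final metadata = UploadTaskMetadata.fromJson(update.task.metaData);
  if (!metadata.isLivePhotos) {
    return;
  }

  // The video upload response carries the remote asset id created by the server.
  final videoAssetId = jsonDecode(update.responseBody!)['id'] as String;

  // The photo part is built with fields: {'livePhotoVideoId': videoAssetId} and
  // enqueued in kBackupLivePhotoGroup, so cancelling the normal backup group
  // never touches it once the video is already uploaded.
  final photoTask = await buildPhotoTask(metadata.localAssetId, videoAssetId);
  if (photoTask != null) {
    await FileDownloader().enqueue(photoTask);
  }
}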
|
||||
|
||||
@@ -39,20 +39,6 @@ void configureFileDownloaderNotifications() {
    complete: TaskNotification('download_finished'.t(), '${'file_name'.t()}: {filename}'),
    progressBar: true,
  );

  FileDownloader().configureNotificationForGroup(
    kManualUploadGroup,
    running: TaskNotification('uploading_media'.t(), 'backup_background_service_in_progress_notification'.t()),
    complete: TaskNotification('upload_finished'.t(), 'backup_background_service_complete_notification'.t()),
    groupNotificationId: kManualUploadGroup,
  );

  FileDownloader().configureNotificationForGroup(
    kBackupGroup,
    running: TaskNotification('uploading_media'.t(), 'backup_background_service_in_progress_notification'.t()),
    complete: TaskNotification('upload_finished'.t(), 'backup_background_service_complete_notification'.t()),
    groupNotificationId: kBackupGroup,
  );
}

abstract final class Bootstrap {

@@ -11,11 +11,13 @@ pigeon:
	dart run pigeon --input pigeon/background_worker_api.dart
	dart run pigeon --input pigeon/background_worker_lock_api.dart
	dart run pigeon --input pigeon/connectivity_api.dart
	dart run pigeon --input pigeon/upload_api.dart
	dart format lib/platform/native_sync_api.g.dart
	dart format lib/platform/thumbnail_api.g.dart
	dart format lib/platform/background_worker_api.g.dart
	dart format lib/platform/background_worker_lock_api.g.dart
	dart format lib/platform/connectivity_api.g.dart
	dart format lib/platform/upload_api.g.dart

watch:
	dart run build_runner watch --delete-conflicting-outputs

97 mobile/pigeon/upload_api.dart Normal file
@@ -0,0 +1,97 @@
import 'package:pigeon/pigeon.dart';

enum UploadApiErrorCode {
  unknown("An unknown error occurred"),
  assetNotFound("Asset not found"),
  fileNotFound("File not found"),
  resourceNotFound("Resource not found"),
  invalidResource("Invalid resource"),
  encodingFailed("Encoding failed"),
  writeFailed("Write failed"),
  notEnoughSpace("Not enough space"),
  networkError("Network error"),
  photosInternalError("Apple Photos internal error"),
  photosUnknownError("Apple Photos unknown error"),
  noServerUrl("Server URL is not set"),
  noDeviceId("Device ID is not set"),
  noAccessToken("Access token is not set"),
  interrupted("Upload interrupted"),
  cancelled("Upload cancelled"),
  downloadStalled("Download stalled"),
  forceQuit("App was force quit"),
  outOfResources("Out of resources"),
  backgroundUpdatesDisabled("Background updates are disabled"),
  uploadTimeout("Upload timed out"),
  iCloudRateLimit("iCloud rate limit reached"),
  iCloudThrottled("iCloud requests are being throttled");

  final String message;

  const UploadApiErrorCode(this.message);
}

enum UploadApiStatus {
  downloadPending,
  downloadQueued,
  downloadFailed,
  uploadPending,
  uploadQueued,
  uploadFailed,
  uploadComplete,
  uploadSkipped,
}

class UploadApiTaskStatus {
  final String id;
  final String filename;
  final UploadApiStatus status;
  final UploadApiErrorCode? errorCode;
  final int? httpStatusCode;

  const UploadApiTaskStatus(this.id, this.filename, this.status, this.errorCode, this.httpStatusCode);
}

class UploadApiTaskProgress {
  final String id;
  final double progress;
  final double? speed;
  final int? totalBytes;

  const UploadApiTaskProgress(this.id, this.progress, this.speed, this.totalBytes);
}

@ConfigurePigeon(
  PigeonOptions(
    dartOut: 'lib/platform/upload_api.g.dart',
    swiftOut: 'ios/Runner/Upload/UploadTask.g.swift',
    swiftOptions: SwiftOptions(includeErrorClass: false),
    kotlinOut: 'android/app/src/main/kotlin/app/alextran/immich/upload/UploadTask.g.kt',
    kotlinOptions: KotlinOptions(package: 'app.alextran.immich.upload'),
    dartOptions: DartOptions(),
    dartPackageName: 'immich_mobile',
  ),
)
@HostApi()
abstract class UploadApi {
  @async
  void initialize();

  @async
  void refresh();

  @async
  void cancelAll();

  @async
  void enqueueAssets(List<String> localIds);

  @async
  void enqueueFiles(List<String> paths);
}

@EventChannelApi()
abstract class UploadFlutterApi {
  UploadApiTaskStatus streamStatus();

  UploadApiTaskProgress streamProgress();
}
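For orientation, here is a minimal sketch of how the Dart side might drive this generated API once `dart run pigeon --input pigeon/upload_api.dart` has produced upload_api.g.dart. The UploadApi host class and the streamStatus()/streamProgress() event-channel functions are taken from the definitions above; the startNativeBackup helper and its listeners are illustrative only and not part of this change.

// Hypothetical usage sketch; only UploadApi, UploadApiStatus, streamStatus and
// streamProgress come from the generated upload_api.g.dart described above.
import 'package:immich_mobile/platform/upload_api.g.dart';

Future<void> startNativeBackup(List<String> localAssetIds) async {
  final uploadApi = UploadApi();
  await uploadApi.initialize();

  // Event channels surface per-task updates from the native side.
  streamStatus().listen((update) {
    if (update.status == UploadApiStatus.uploadFailed) {
      // update.errorCode / update.httpStatusCode explain the failure
    }
  });
  streamProgress().listen((update) {
    // update.progress is the fraction uploaded for task update.id
  });

  // Hand the local asset ids to the native uploader.
  await uploadApi.enqueueAssets(localAssetIds);
}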
5 mobile/test/drift/main/generated/schema.dart generated
@@ -16,6 +16,7 @@ import 'schema_v10.dart' as v10;
import 'schema_v11.dart' as v11;
import 'schema_v12.dart' as v12;
import 'schema_v13.dart' as v13;
import 'schema_v14.dart' as v14;

class GeneratedHelper implements SchemaInstantiationHelper {
  @override
@@ -47,10 +48,12 @@ class GeneratedHelper implements SchemaInstantiationHelper {
        return v12.DatabaseAtV12(db);
      case 13:
        return v13.DatabaseAtV13(db);
      case 14:
        return v14.DatabaseAtV14(db);
      default:
        throw MissingSchemaException(version, versions);
    }
  }

  static const versions = const [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13];
  static const versions = const [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14];
}

8800 mobile/test/drift/main/generated/schema_v14.dart generated Normal file
File diff suppressed because it is too large