Use remotedb for trash
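The change below drops the sqflite-backed TrashDB and stores trash in the shared RemoteDB (sqlite_async) through a new TrashTable extension. A minimal usage sketch of that new API follows; the exampleTrashFlow function is hypothetical, and it assumes the remoteDB instance exposed by service_locator.dart and DiffFileItem values produced by getTrashFilesDiff, as in the diff itself.

// Sketch only (not part of the commit): exercises the TrashTable extension
// added in this change, assuming `remoteDB` is resolved via service_locator.dart.
import "package:photos/db/remote/table/trash.dart";
import "package:photos/models/api/diff/diff.dart";
import "package:photos/models/file/file.dart";
import "package:photos/service_locator.dart";

Future<void> exampleTrashFlow(List<DiffFileItem> trashedFromDiff) async {
  // Persist trashed items fetched from /trash/v2/diff.
  await remoteDB.insertTrashDiffItems(trashedFromDiff);

  // Read back everything currently in trash as EnteFile objects.
  final List<EnteFile> files = await remoteDB.getTrashFiles();

  // Remove entries that were permanently deleted or restored on the server.
  final int removed = await remoteDB
      .removeTrashItems(files.map((f) => f.uploadedFileID!).toList());
  print("removed $removed trash rows");

  // Wipe the whole table, e.g. on logout or after /trash/empty succeeds.
  await remoteDB.clearTrash();
}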
@@ -15,7 +15,7 @@ import 'package:photos/db/collections_db.dart';
import 'package:photos/db/files_db.dart';
import "package:photos/db/memories_db.dart";
import "package:photos/db/ml/db.dart";
import 'package:photos/db/trash_db.dart';
import "package:photos/db/remote/table/trash.dart";
import 'package:photos/db/upload_locks_db.dart';
import "package:photos/events/endpoint_updated_event.dart";
import 'package:photos/events/signed_in_event.dart';
@@ -23,6 +23,7 @@ import 'package:photos/events/user_logged_out_event.dart';
import 'package:photos/models/api/user/key_attributes.dart';
import 'package:photos/models/api/user/key_gen_result.dart';
import 'package:photos/models/api/user/private_key_attributes.dart';
import "package:photos/service_locator.dart";
import 'package:photos/services/collections_service.dart';
import 'package:photos/services/favorites_service.dart';
import "package:photos/services/home_widget_service.dart";
@@ -195,7 +196,7 @@ class Configuration {

await UploadLocksDB.instance.clearTable();
await IgnoredFilesService.instance.reset();
await TrashDB.instance.clearTable();
await remoteDB.clearTrash();
unawaited(HomeWidgetService.instance.clearWidget(autoLogout));
if (!autoLogout) {
// Following services won't be initialized if it's the case of autoLogout

@@ -14,10 +14,10 @@ import "package:photos/models/file/remote/asset.dart";
import "package:sqlite_async/sqlite_async.dart";

// ignore: constant_identifier_names
enum RemoteTable { collections, collection_files, files, entities }
enum RemoteTable { collections, collection_files, files, entities, trash }

class RemoteDB with SqlDbBase {
static const _databaseName = "remotex2.db";
static const _databaseName = "remotex4.db";
static const _batchInsertMaxCount = 1000;
late final SqliteDatabase _sqliteDB;

@@ -55,7 +55,7 @@ class RemoteDB with SqlDbBase {
return result;
}

Future<List<RemoteAsset>> getAllFiles() async {
Future<List<RemoteAsset>> getRemoteAssets() async {
final result = <RemoteAsset>[];
final cursor = await _sqliteDB.getAll("SELECT * FROM files");
for (final row in cursor) {

@@ -1,5 +1,10 @@
import "dart:typed_data";

import "package:photos/models/api/diff/diff.dart";
import "package:photos/models/api/diff/trash_time.dart";
import "package:photos/models/file/file.dart";
import "package:photos/models/file/remote/asset.dart";
import "package:photos/models/file/remote/collection_file.dart";
import "package:photos/models/file/remote/rl_mapping.dart";

RemoteAsset fromRow(Map<String, dynamic> row) {
@@ -28,3 +33,23 @@ RLMapping rowToUploadLocalMapping(Map<String, Object?> row) {
MappingTypeExtension.fromName(row['local_mapping_src'] as String),
);
}

EnteFile trashRowToEnteFile(Map<String, Object?> row) {
final RemoteAsset asset = fromRow(row);
final TrashTime time = TrashTime(
createdAt: row['created_at'] as int,
updatedAt: row['updated_at'] as int,
deleteBy: row['delete_by'] as int,
);
final cf = CollectionFile(
fileID: asset.id,
collectionID: row['collection_id'] as int,
encFileKey: row['enc_key'] as Uint8List,
encFileKeyNonce: row['enc_key_nonce'] as Uint8List,
updatedAt: time.updatedAt,
createdAt: time.createdAt,
);
final file = EnteFile.fromRemoteAsset(asset, cf);
file.trashTime = time;
return file;
}

@@ -34,7 +34,13 @@ final String filesUpdateColumns = filesColumns
.join(', ');

const trashedFilesColumns =
'id, owner_id, file_header, thumb_header, metadata, priv_metadata, pub_metadata, info, trash_data';
'id, owner_id, collection_id, enc_key, enc_key_nonce, file_header, thumb_header, metadata, priv_metadata, pub_metadata, info, created_at, updated_at, delete_by';

final String trashedFilesUpdateColumns = trashedFilesColumns
.split(', ')
.where((column) => (column != 'id'))
.map((column) => '$column = excluded.$column') // Use excluded virtual table
.join(', ');

const uploadLocalMappingColumns =
'file_id, local_id, local_clould_id, local_mapping_src';
@@ -94,6 +100,24 @@ class RemoteDBMigration {
info TEXT
)
''',
'''
CREATE TABLE trash (
id INTEGER PRIMARY KEY,
owner_id INTEGER NOT NULL,
collection_id INTEGER NOT NULL,
enc_key BLOB NOT NULL,
enc_key_nonce BLOB NOT NULL,
metadata TEXT NOT NULL,
priv_metadata TEXT,
pub_metadata TEXT,
info TEXT,
file_header BLOB NOT NULL,
thumb_header BLOB NOT NULL,
created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL,
delete_by INTEGER NOT NULL
)
''',
'''
CREATE TRIGGER delete_orphaned_files
AFTER DELETE ON collection_files

49
mobile/lib/db/remote/table/trash.dart
Normal file
@@ -0,0 +1,49 @@
import "package:collection/collection.dart";
|
||||
import "package:flutter/foundation.dart";
|
||||
import "package:photos/db/remote/db.dart";
|
||||
import "package:photos/db/remote/mappers.dart";
|
||||
import "package:photos/db/remote/schema.dart";
|
||||
import "package:photos/models/api/diff/diff.dart";
|
||||
import "package:photos/models/file/file.dart";
|
||||
|
||||
extension TrashTable on RemoteDB {
|
||||
Future<void> insertTrashDiffItems(List<DiffFileItem> items) async {
|
||||
if (items.isEmpty) return;
|
||||
final stopwatch = Stopwatch()..start();
|
||||
await Future.forEach(items.slices(1000), (slice) async {
|
||||
final List<List<Object?>> trashRowValues = [];
|
||||
for (final item in slice) {
|
||||
trashRowValues.add(item.trashRowValues());
|
||||
}
|
||||
await Future.wait([
|
||||
sqliteDB.executeBatch(
|
||||
'INSERT INTO trash ($trashedFilesColumns) values(${getParams(14)})',
|
||||
trashRowValues,
|
||||
),
|
||||
]);
|
||||
});
|
||||
debugPrint(
|
||||
'$runtimeType insertTrashDiffItems complete in ${stopwatch.elapsed.inMilliseconds}ms for ${items.length}',
);
}

// removes the items and returns the number of items removed
Future<int> removeTrashItems(List<int> ids) async {
if (ids.isEmpty) return 0;
final result = await sqliteDB.execute(
'DELETE FROM trash WHERE id IN (${ids.join(",")})',
);
return result.isNotEmpty ? result.first['changes'] as int : 0;
}

Future<List<EnteFile>> getTrashFiles() async {
final result = await sqliteDB.execute(
'SELECT * FROM trash',
);
return result.map((e) => trashRowToEnteFile(e)).toList();
}

Future<void> clearTrash() async {
await sqliteDB.execute('DELETE FROM trash');
}
}

@@ -1,250 +0,0 @@
import 'dart:convert';
import 'dart:io';

import 'package:logging/logging.dart';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
import 'package:photos/models/file/trash_file.dart';
import 'package:photos/models/file_load_result.dart';
import 'package:sqflite/sqflite.dart';

// The TrashDB doesn't need to flatten and store all attributes of a file.
// Before adding any other column, we should evaluate if we need to query on that
// column or not while showing trashed items. Even if we miss storing any new attributes,
// during restore, all file attributes will be fetched & stored as required.
class TrashDB {
static const _databaseName = "ente.trash.db";
static const _databaseVersion = 1;
static final Logger _logger = Logger("TrashDB");
static const tableName = 'trash';

static const columnUploadedFileID = 'uploaded_file_id';
static const columnCollectionID = 'collection_id';
static const columnOwnerID = 'owner_id';
static const columnTrashUpdatedAt = 't_updated_at';
static const columnTrashDeleteBy = 't_delete_by';
static const columnEncryptedKey = 'encrypted_key';
static const columnKeyDecryptionNonce = 'key_decryption_nonce';
static const columnFileDecryptionHeader = 'file_decryption_header';
static const columnThumbnailDecryptionHeader = 'thumbnail_decryption_header';
static const columnUpdationTime = 'updation_time';

static const columnCreationTime = 'creation_time';
static const columnLocalID = 'local_id';

// standard file metadata, which isn't editable
static const columnFileMetadata = 'file_metadata';

static const columnMMdEncodedJson = 'mmd_encoded_json';
static const columnMMdVersion = 'mmd_ver';

static const columnPubMMdEncodedJson = 'pub_mmd_encoded_json';
static const columnPubMMdVersion = 'pub_mmd_ver';

Future _onCreate(Database db, int version) async {
await db.execute(
'''
CREATE TABLE $tableName (
$columnUploadedFileID INTEGER PRIMARY KEY NOT NULL,
$columnCollectionID INTEGER NOT NULL,
$columnOwnerID INTEGER,
$columnTrashUpdatedAt INTEGER NOT NULL,
$columnTrashDeleteBy INTEGER NOT NULL,
$columnEncryptedKey TEXT,
$columnKeyDecryptionNonce TEXT,
$columnFileDecryptionHeader TEXT,
$columnThumbnailDecryptionHeader TEXT,
$columnUpdationTime INTEGER,
$columnLocalID TEXT,
$columnCreationTime INTEGER NOT NULL,
$columnFileMetadata TEXT DEFAULT '{}',
$columnMMdEncodedJson TEXT DEFAULT '{}',
$columnMMdVersion INTEGER DEFAULT 0,
$columnPubMMdEncodedJson TEXT DEFAULT '{}',
$columnPubMMdVersion INTEGER DEFAULT 0
);
CREATE INDEX IF NOT EXISTS creation_time_index ON $tableName($columnCreationTime);
CREATE INDEX IF NOT EXISTS delete_by_time_index ON $tableName($columnTrashDeleteBy);
CREATE INDEX IF NOT EXISTS updated_at_time_index ON $tableName($columnTrashUpdatedAt);
''',
);
}

TrashDB._privateConstructor();

static final TrashDB instance = TrashDB._privateConstructor();

// only have a single app-wide reference to the database
static Future<Database>? _dbFuture;

Future<Database> get database async {
// lazily instantiate the db the first time it is accessed
_dbFuture ??= _initDatabase();
return _dbFuture!;
}

// this opens the database (and creates it if it doesn't exist)
Future<Database> _initDatabase() async {
final Directory documentsDirectory =
await getApplicationDocumentsDirectory();
final String path = join(documentsDirectory.path, _databaseName);
_logger.info("DB path " + path);
return await openDatabase(
path,
version: _databaseVersion,
onCreate: _onCreate,
);
}

Future<void> clearTable() async {
final db = await instance.database;
await db.delete(tableName);
}

Future<int> count() async {
final db = await instance.database;
final count = Sqflite.firstIntValue(
await db.rawQuery('SELECT COUNT(*) FROM $tableName'),
);
return count ?? 0;
}

Future<void> insertMultiple(List<TrashFile> trashFiles) async {
final startTime = DateTime.now();
final db = await instance.database;
var batch = db.batch();
int batchCounter = 0;
for (TrashFile trash in trashFiles) {
if (batchCounter == 400) {
await batch.commit(noResult: true);
batch = db.batch();
batchCounter = 0;
}
batch.insert(
tableName,
_getRowForTrash(trash),
conflictAlgorithm: ConflictAlgorithm.replace,
);
batchCounter++;
}
await batch.commit(noResult: true);
final endTime = DateTime.now();
final duration = Duration(
microseconds:
endTime.microsecondsSinceEpoch - startTime.microsecondsSinceEpoch,
);
_logger.info(
"Batch insert of " +
trashFiles.length.toString() +
" took " +
duration.inMilliseconds.toString() +
"ms.",
);
}

Future<int> delete(List<int> uploadedFileIDs) async {
final db = await instance.database;
return db.delete(
tableName,
where: '$columnUploadedFileID IN (${uploadedFileIDs.join(', ')})',
);
}

Future<int> update(TrashFile file) async {
final db = await instance.database;
return await db.update(
tableName,
_getRowForTrash(file),
where: '$columnUploadedFileID = ?',
whereArgs: [file.uploadedFileID],
);
}

Future<FileLoadResult> getTrashedFiles(
int startTime,
int endTime, {
int? limit,
bool? asc,
}) async {
final db = await instance.database;
final order = (asc ?? false ? 'ASC' : 'DESC');
final results = await db.query(
tableName,
where: '$columnCreationTime >= ? AND $columnCreationTime <= ?',
whereArgs: [startTime, endTime],
orderBy: '$columnCreationTime ' + order,
limit: limit,
);
final files = _convertToFiles(results);
return FileLoadResult(files, files.length == limit);
}

List<TrashFile> _convertToFiles(List<Map<String, dynamic>> results) {
final List<TrashFile> trashedFiles = [];
for (final result in results) {
trashedFiles.add(_getTrashFromRow(result));
}
return trashedFiles;
}

TrashFile _getTrashFromRow(Map<String, dynamic> row) {
final trashFile = TrashFile();
trashFile.updateAt = row[columnTrashUpdatedAt];
trashFile.deleteBy = row[columnTrashDeleteBy];
trashFile.uploadedFileID = row[columnUploadedFileID];
// dirty hack to ensure that the file_downloads & cache mechanism works
trashFile.generatedID = -1 * trashFile.uploadedFileID!;
trashFile.ownerID = row[columnOwnerID];
trashFile.collectionID =
row[columnCollectionID] == -1 ? null : row[columnCollectionID];
trashFile.encryptedKey = row[columnEncryptedKey];
trashFile.keyDecryptionNonce = row[columnKeyDecryptionNonce];
trashFile.fileDecryptionHeader = row[columnFileDecryptionHeader];
trashFile.thumbnailDecryptionHeader = row[columnThumbnailDecryptionHeader];
trashFile.updationTime = row[columnUpdationTime] ?? 0;
trashFile.creationTime = row[columnCreationTime];
final fileMetadata = row[columnFileMetadata] ?? '{}';
trashFile.applyMetadata(jsonDecode(fileMetadata));
trashFile.localID = row[columnLocalID];

trashFile.mMdVersion = row[columnMMdVersion] ?? 0;
trashFile.mMdEncodedJson = row[columnMMdEncodedJson] ?? '{}';

trashFile.pubMmdVersion = row[columnPubMMdVersion] ?? 0;
trashFile.pubMmdEncodedJson = row[columnPubMMdEncodedJson] ?? '{}';

if (trashFile.pubMagicMetadata != null &&
trashFile.pubMagicMetadata!.editedTime != null) {
// override existing creationTime to avoid re-writing all queries related
// to loading the gallery
row[columnCreationTime] = trashFile.pubMagicMetadata!.editedTime!;
}

return trashFile;
}

Map<String, dynamic> _getRowForTrash(TrashFile trash) {
final row = <String, dynamic>{};
row[columnTrashUpdatedAt] = trash.updateAt;
row[columnTrashDeleteBy] = trash.deleteBy;
row[columnUploadedFileID] = trash.uploadedFileID;
row[columnCollectionID] = trash.collectionID;
row[columnOwnerID] = trash.ownerID;
row[columnEncryptedKey] = trash.encryptedKey;
row[columnKeyDecryptionNonce] = trash.keyDecryptionNonce;
row[columnFileDecryptionHeader] = trash.fileDecryptionHeader;
row[columnThumbnailDecryptionHeader] = trash.thumbnailDecryptionHeader;
row[columnUpdationTime] = trash.updationTime;

row[columnLocalID] = trash.localID;
row[columnCreationTime] = trash.creationTime;
row[columnFileMetadata] = jsonEncode(trash.metadata);

row[columnMMdVersion] = trash.mMdVersion;
row[columnMMdEncodedJson] = trash.mMdEncodedJson ?? '{}';

row[columnPubMMdVersion] = trash.pubMmdVersion;
row[columnPubMMdEncodedJson] = trash.pubMmdEncodedJson ?? '{}';
return row;
}
}

@@ -1,7 +1,6 @@
import "package:photos/core/configuration.dart";
import "package:photos/models/file/file.dart";
import "package:photos/models/file/file_type.dart";
import "package:photos/models/file/trash_file.dart";
import "package:photos/services/collections_service.dart";

extension FilePropsExtn on EnteFile {
@@ -42,7 +41,7 @@ extension FilePropsExtn on EnteFile {

bool get canEditMetaInfo => isUploaded && isOwner;

bool get isTrash => this is TrashFile;
bool get isTrash => trashTime != null;

// Return true if the file was uploaded via collect photos workflow
bool get isCollect => uploaderName != null;

@@ -6,6 +6,7 @@ import 'package:logging/logging.dart';
import 'package:path/path.dart';
import 'package:photo_manager/photo_manager.dart';
import 'package:photos/core/constants.dart';
import "package:photos/models/api/diff/trash_time.dart";
import 'package:photos/models/file/file_type.dart';
import "package:photos/models/file/remote/asset.dart";
import "package:photos/models/file/remote/collection_file.dart";
@@ -22,6 +23,7 @@ class EnteFile {
AssetEntity? lAsset;
RemoteAsset? rAsset;
CollectionFile? cf;
TrashTime? trashTime;
int? generatedID;
int? uploadedFileID;
int? ownerID;

@@ -1,14 +0,0 @@
import 'package:photos/models/file/file.dart';

class TrashFile extends EnteFile {
// time when file was put in the trash for first time
late int createdAt;

// for non-deleted trash items, updateAt is usually equal to the latest time
// when the file was moved to trash
late int updateAt;

// time after which the file will be deleted from trash & user's storage usage
// will go down
late int deleteBy;
}

@@ -1,4 +1,4 @@
import 'package:photos/models/file/trash_file.dart';
import "package:photos/models/api/diff/diff.dart";

const kIgnoreReasonTrash = "trash";

@@ -10,21 +10,21 @@ class IgnoredFile {

IgnoredFile(this.localID, this.title, this.deviceFolder, this.reason);

static fromTrashItem(TrashFile? trashFile) {
if (trashFile == null) return null;
if (trashFile.localID == null ||
trashFile.localID!.isEmpty ||
trashFile.title == null ||
trashFile.title!.isEmpty ||
trashFile.deviceFolder == null ||
trashFile.deviceFolder!.isEmpty) {
static fromTrashItem(DiffFileItem? item) {
if (item == null) return null;
final fileItem = item.fileItem;
if (fileItem.localID == null ||
fileItem.localID!.isEmpty ||
fileItem.title.isEmpty ||
fileItem.deviceFolder == null ||
fileItem.deviceFolder!.isEmpty) {
return null;
}

return IgnoredFile(
trashFile.localID,
trashFile.title,
trashFile.deviceFolder,
fileItem.localID,
fileItem.title,
fileItem.deviceFolder,
kIgnoreReasonTrash,
);
}

@@ -16,10 +16,10 @@ import "package:photos/services/machine_learning/face_ml/face_recognition_servic
import "package:photos/services/magic_cache_service.dart";
import "package:photos/services/memories_cache_service.dart";
import "package:photos/services/permission/service.dart";
import "package:photos/services/remote/assets/remote_cache.dart";
import "package:photos/services/remote/cache/remote_cache.dart";
import "package:photos/services/smart_memories_service.dart";
import "package:photos/services/storage_bonus_service.dart";
import "package:photos/services/sync/trash_sync_service.dart";
import "package:photos/services/remote/trash_service.dart";
import "package:photos/services/update_service.dart";
import "package:photos/utils/local_settings.dart";
import "package:shared_preferences/shared_preferences.dart";

@@ -19,7 +19,7 @@ import 'package:photos/db/files_db.dart';
import "package:photos/db/local/table/path_config_table.dart";
import "package:photos/db/remote/db.dart";
import "package:photos/db/remote/table/collection_files.dart";
import 'package:photos/db/trash_db.dart';
import "package:photos/db/remote/table/trash.dart";
import 'package:photos/events/collection_updated_event.dart';
import 'package:photos/events/files_updated_event.dart';
import 'package:photos/events/force_reload_home_gallery_event.dart';
@@ -1806,8 +1806,8 @@ class CollectionsService {
data: params,
);
await _filesDB.insertMultiple(batch);
await TrashDB.instance
.delete(batch.map((e) => e.uploadedFileID!).toList());
await remoteDB
.removeTrashItems(batch.map((e) => e.uploadedFileID!).toList());
Bus.instance.fire(
CollectionUpdatedEvent(toCollectionID, batch, "restore"),
);

@@ -40,9 +40,9 @@ class RemoteCache {

Future<void> _load() async {
if (isLoaded == null) {
final assets = await remoteDB.getAllFiles();
for (final asset in assets) {
remoteAssets[asset.id] = asset;
final rAssets = await remoteDB.getRemoteAssets();
for (final item in rAssets) {
remoteAssets[item.id] = item;
}
isLoaded = true;
}

@@ -10,11 +10,11 @@ import "package:photos/core/constants.dart";
import "package:photos/log/devlog.dart";
import "package:photos/models/api/diff/diff.dart";

class CollectionFilesService {
final Logger _logger = Logger("CollectionFilesService");
class RemoteFileDiffService {
final Logger _logger = Logger("RemoteFileDiffService");
final Dio _enteDio;

CollectionFilesService(this._enteDio);
RemoteFileDiffService(this._enteDio);
Future<DiffResult> getCollectionItemsDiff(
int collectionID,
int sinceTime,
@@ -116,59 +116,15 @@ class CollectionFilesService {
deletedFiles.add(deletedItem);
continue;
}

final Uint8List encFileKey = CryptoUtil.base642bin(item["encryptedKey"]);
final Uint8List encFileKeyNonce =
CryptoUtil.base642bin(item["keyDecryptionNonce"]);
final fileKey =
CryptoUtil.decryptSync(encFileKey, collectionKey, encFileKeyNonce);

final encodedMetadata = CryptoUtil.decryptChaChaSync(
CryptoUtil.base642bin(item["metadata"]["encryptedData"]),
fileKey,
CryptoUtil.base642bin(item["metadata"]["decryptionHeader"]),
final FileItem fileItem = constructFileItem(
item,
collectionKey,
encFileKey,
encFileKeyNonce,
);
final Map<String, dynamic> defaultMeta =
jsonDecode(utf8.decode(encodedMetadata));
if (!defaultMeta.containsKey('version')) {
defaultMeta['version'] = 0;
}
if (defaultMeta['hash'] == null &&
defaultMeta.containsKey('imageHash') &&
defaultMeta.containsKey('videoHash')) {
// old web version was putting live photo hash in different fields
defaultMeta['hash'] =
'${defaultMeta['imageHash']}$kLivePhotoHashSeparator${defaultMeta['videoHash']}';
}
Metadata? pubMagicMetadata;
Metadata? privateMagicMetadata;

if (item['magicMetadata'] != null) {
final utfEncodedMmd = CryptoUtil.decryptChaChaSync(
CryptoUtil.base642bin(item['magicMetadata']['data']),
fileKey,
CryptoUtil.base642bin(item['magicMetadata']['header']),
);
privateMagicMetadata = Metadata(
data: jsonDecode(utf8.decode(utfEncodedMmd)),
version: item['magicMetadata']['version'],
);
}
if (item['pubMagicMetadata'] != null) {
final utfEncodedMmd = CryptoUtil.decryptChaChaSync(
CryptoUtil.base642bin(item['pubMagicMetadata']['data']),
fileKey,
CryptoUtil.base642bin(item['pubMagicMetadata']['header']),
);
pubMagicMetadata = Metadata(
data: jsonDecode(utf8.decode(utfEncodedMmd)),
version: item['pubMagicMetadata']['version'],
);
}
final String fileDecryptionHeader = item["file"]["decryptionHeader"];
final String thumbnailDecryptionHeader =
item["thumbnail"]["decryptionHeader"];
final Info? info = Info.fromJson(item["info"]);
final DiffFileItem file = DiffFileItem(
collectionID: collectionID,
updatedAt: collectionUpdationTime,
@@ -176,17 +132,7 @@ class CollectionFilesService {
encFileKeyNonce: encFileKeyNonce,
isDeleted: false,
createdAt: item["createdAt"] ?? defaultCreatedAt,
fileItem: FileItem(
fileID: fileID,
ownerID: ownerID,
thumnailDecryptionHeader:
CryptoUtil.base642bin(thumbnailDecryptionHeader),
fileDecryotionHeader: CryptoUtil.base642bin(fileDecryptionHeader),
metadata: Metadata(data: defaultMeta, version: 0),
magicMetadata: privateMagicMetadata,
pubMagicMetadata: pubMagicMetadata,
info: info,
),
fileItem: fileItem,
);
updatedFiles.add(file);
}
@@ -197,6 +143,87 @@ class CollectionFilesService {
latestUpdatedAtTime,
);
}

static FileItem constructFileItem(
Map<String, dynamic> item,
Uint8List collectionKey,
Uint8List encFileKey,
Uint8List encFileKeyNonce,
) {
final int fileID = item["id"] as int;
final int ownerID = item["ownerID"];

// Decrypt file key
final fileKey =
CryptoUtil.decryptSync(encFileKey, collectionKey, encFileKeyNonce);

// Decrypt and parse metadata
final encodedMetadata = CryptoUtil.decryptChaChaSync(
CryptoUtil.base642bin(item["metadata"]["encryptedData"]),
fileKey,
CryptoUtil.base642bin(item["metadata"]["decryptionHeader"]),
);
final Map<String, dynamic> defaultMeta =
jsonDecode(utf8.decode(encodedMetadata));

// Apply metadata defaults and fixes
if (!defaultMeta.containsKey('version')) {
defaultMeta['version'] = 0;
}
if (defaultMeta['hash'] == null &&
defaultMeta.containsKey('imageHash') &&
defaultMeta.containsKey('videoHash')) {
// old web version was putting live photo hash in different fields
defaultMeta['hash'] =
'${defaultMeta['imageHash']}$kLivePhotoHashSeparator${defaultMeta['videoHash']}';
}

// Decrypt magic metadata if present
Metadata? privateMagicMetadata;
if (item['magicMetadata'] != null) {
final utfEncodedMmd = CryptoUtil.decryptChaChaSync(
CryptoUtil.base642bin(item['magicMetadata']['data']),
fileKey,
CryptoUtil.base642bin(item['magicMetadata']['header']),
);
privateMagicMetadata = Metadata(
data: jsonDecode(utf8.decode(utfEncodedMmd)),
version: item['magicMetadata']['version'],
);
}

// Decrypt public magic metadata if present
Metadata? pubMagicMetadata;
if (item['pubMagicMetadata'] != null) {
final utfEncodedMmd = CryptoUtil.decryptChaChaSync(
CryptoUtil.base642bin(item['pubMagicMetadata']['data']),
fileKey,
CryptoUtil.base642bin(item['pubMagicMetadata']['header']),
);
pubMagicMetadata = Metadata(
data: jsonDecode(utf8.decode(utfEncodedMmd)),
version: item['pubMagicMetadata']['version'],
);
}

// Extract decryption headers and info
final String fileDecryptionHeader = item["file"]["decryptionHeader"];
final String thumbnailDecryptionHeader =
item["thumbnail"]["decryptionHeader"];
final Info? info = Info.fromJson(item["info"]);

return FileItem(
fileID: fileID,
ownerID: ownerID,
thumnailDecryptionHeader:
CryptoUtil.base642bin(thumbnailDecryptionHeader),
fileDecryotionHeader: CryptoUtil.base642bin(fileDecryptionHeader),
metadata: Metadata(data: defaultMeta, version: 0),
magicMetadata: privateMagicMetadata,
pubMagicMetadata: pubMagicMetadata,
info: info,
);
}
}

class DiffResult {

@@ -5,16 +5,16 @@ import "package:photos/events/diff_sync_complete_event.dart";
import "package:photos/events/sync_status_update_event.dart";
import "package:photos/service_locator.dart";
import "package:photos/services/collections_service.dart";
import "package:photos/services/remote/fetch/collection_files.dart";
import "package:photos/services/remote/fetch/files_diff.dart";

class RemoteDiffService {
final Logger _logger = Logger('RemoteDiffService');
final CollectionsService _collectionsService;
final CollectionFilesService collectionFiles;
final RemoteFileDiffService filesDiffService;

RemoteDiffService(
this._collectionsService,
this.collectionFiles,
this.filesDiffService,
);

bool _isExistingSyncSilent = false;
@@ -64,7 +64,7 @@ class RemoteDiffService {
int currentSinceTime = sinceTime;
bool hasMore = true;
while (hasMore) {
final diff = await collectionFiles.getCollectionItemsDiff(
final diff = await filesDiffService.getCollectionItemsDiff(
collectionID,
currentSinceTime,
collectionKey,

@@ -6,7 +6,7 @@ import 'package:photos/models/file/file.dart';
import "package:photos/models/file/remote/asset.dart";
import "package:photos/models/file/remote/collection_file.dart";
import "package:photos/services/collections_service.dart";
import "package:photos/services/remote/fetch/collection_files.dart";
import "package:photos/services/remote/fetch/files_diff.dart";
import "package:photos/utils/dialog_util.dart";

Future<List<EnteFile>> getPublicFiles(
@@ -16,7 +16,7 @@ Future<List<EnteFile>> getPublicFiles(
) async {
try {
final collectionFilService =
CollectionFilesService(NetworkClient.instance.enteDio);
RemoteFileDiffService(NetworkClient.instance.enteDio);
bool hasMore = false;
final sharedFiles = <EnteFile>[];
final headers =

271
mobile/lib/services/remote/trash_service.dart
Normal file
@@ -0,0 +1,271 @@
import 'dart:async';
import "dart:math";
import "dart:typed_data";

import 'package:dio/dio.dart';
import "package:ente_crypto/ente_crypto.dart";
import 'package:logging/logging.dart';
import 'package:photos/core/constants.dart';
import 'package:photos/core/event_bus.dart';
import "package:photos/db/remote/table/collection_files.dart";
import "package:photos/db/remote/table/trash.dart";
import 'package:photos/events/force_reload_trash_page_event.dart';
import 'package:photos/events/trash_updated_event.dart';
import 'package:photos/extensions/list.dart';
import 'package:photos/models/api/collection/trash_item_request.dart';
import "package:photos/models/api/diff/diff.dart";
import "package:photos/models/api/diff/trash_time.dart";
import 'package:photos/models/file/file.dart';
import 'package:photos/models/ignored_file.dart';
import "package:photos/service_locator.dart";
import "package:photos/services/collections_service.dart";
import "package:photos/services/ignored_files_service.dart";
import "package:photos/services/remote/fetch/files_diff.dart";
import "package:shared_preferences/shared_preferences.dart";

class TrashSyncService {
final _logger = Logger("TrashSyncService");

static const kLastTrashSyncTime = "last_trash_sync_time_v2";
final SharedPreferences _prefs;
final Dio _enteDio;

TrashSyncService(this._prefs, this._enteDio) {
_logger.fine("TrashSyncService constructor");
}

Future<void> syncTrash() async {
bool hasMore = true;
do {
final diff = await getTrashFilesDiff(_getSyncTime());
bool isLocalTrashUpdated = false;
if (diff.trashedFiles.isNotEmpty) {
isLocalTrashUpdated = true;
await remoteDB.insertTrashDiffItems(diff.trashedFiles);
}
if (diff.deletedIDs.isNotEmpty || diff.restoredIDs.isNotEmpty) {
_logger.fine(
"deleting ${diff.deletedIDs.length} deleted items and restoring ${diff.restoredIDs.length} restored items",
);
final ids = diff.deletedIDs + diff.restoredIDs;
final itemsDeleted = await remoteDB.removeTrashItems(ids);
isLocalTrashUpdated = isLocalTrashUpdated || itemsDeleted > 0;
}

await _updateIgnoredFiles(diff);
if (diff.lastSyncedTimeStamp != 0) {
await _setSyncTime(diff.lastSyncedTimeStamp);
}
if (isLocalTrashUpdated) {
Bus.instance.fire(TrashUpdatedEvent());
}
hasMore = diff.hasMore;
} while (hasMore);
}

Future<void> _updateIgnoredFiles(TrashDiff diff) async {
final ignoredFiles = <IgnoredFile>[];
for (DiffFileItem t in diff.trashedFiles) {
final file = IgnoredFile.fromTrashItem(t);
if (file != null) {
ignoredFiles.add(file);
}
}
if (ignoredFiles.isNotEmpty) {
_logger.fine('updating ${ignoredFiles.length} ignored files ');
await IgnoredFilesService.instance.cacheAndInsert(ignoredFiles);
}
}

Future<bool> _setSyncTime(int time) async {
return _prefs.setInt(kLastTrashSyncTime, time);
}

int _getSyncTime() {
return _prefs.getInt(kLastTrashSyncTime) ?? 0;
}

Future<void> trashFilesOnServer(List<TrashRequest> trashRequestItems) async {
final includedFileIDs = <int>{};
final uniqueItems = <TrashRequest>[];
final ownedCollectionIDs =
CollectionsService.instance.getAllOwnedCollectionIDs();
for (final item in trashRequestItems) {
if (!includedFileIDs.contains(item.fileID)) {
// Check if the collectionID in the request is owned by the user
if (ownedCollectionIDs.contains(item.collectionID)) {
uniqueItems.add(item);
includedFileIDs.add(item.fileID);
} else {
// If not owned, use a different owned collectionID
bool foundAnotherOwnedCollection = false;
final fileCollectionIDs =
await remoteDB.getAllCollectionIDsOfFile(item.fileID);

for (final collectionID in fileCollectionIDs) {
if (ownedCollectionIDs.contains(collectionID)) {
final newItem = TrashRequest(item.fileID, collectionID);
uniqueItems.add(newItem);
includedFileIDs.add(item.fileID);
foundAnotherOwnedCollection = true;
break;
}
}
if (!foundAnotherOwnedCollection) {
_logger.severe(
"File ${item.fileID} is not owned by the user and has no other owned collection",
);
}
}
}
}
final requestData = <String, dynamic>{};
final batchedItems = uniqueItems.chunks(batchSize);
for (final batch in batchedItems) {
requestData["items"] = [];
for (final item in batch) {
requestData["items"].add(item.toJson());
}
await _trashFiles(requestData);
}
await remoteDB.deleteFiles(includedFileIDs.toList());
}

Future<TrashDiff> getTrashFilesDiff(int sinceTime) async {
try {
final response = await _enteDio.get(
"/trash/v2/diff",
queryParameters: {
"sinceTime": sinceTime,
},
);
int latestUpdatedAtTime = 0;
final trashedFiles = <DiffFileItem>[];
final deletedUploadIDs = <int>[];
final restoredFiles = <int>[];

final diff = response.data["diff"] as List;
final bool hasMore = response.data["hasMore"] as bool;
for (final trashItem in diff) {
final TrashTime trashTime = TrashTime.fromMap(trashItem);
final int id = trashItem["file"]["id"] as int;
latestUpdatedAtTime = max(latestUpdatedAtTime, trashTime.updatedAt);
if (trashItem["isDeleted"]) {
deletedUploadIDs.add(id);
continue;
}
if (trashItem['isRestored']) {
restoredFiles.add(id);
continue;
}

final item = trashItem["file"];
final int collectionID = item["collectionID"];
final int cfUpdatedAt = item["updationTime"];

final Uint8List encFileKey =
CryptoUtil.base642bin(item["encryptedKey"]);
final Uint8List encFileKeyNonce =
CryptoUtil.base642bin(item["keyDecryptionNonce"]);

final collectionKey =
CollectionsService.instance.getCollectionKey(collectionID);

final fileItem = RemoteFileDiffService.constructFileItem(
item,
collectionKey,
CryptoUtil.base642bin(item["encryptedKey"]),
CryptoUtil.base642bin(item["keyDecryptionNonce"]),
);
final diffItem = DiffFileItem(
collectionID: collectionID,
updatedAt: cfUpdatedAt,
encFileKey: encFileKey,
encFileKeyNonce: encFileKeyNonce,
isDeleted: false,
createdAt: item["createdAt"] ?? DateTime.now().millisecondsSinceEpoch,
fileItem: fileItem,
trashTime: trashTime,
);
trashedFiles.add(diffItem);
}
return TrashDiff(
trashedFiles,
restoredFiles,
deletedUploadIDs,
hasMore,
latestUpdatedAtTime,
);
} catch (e, s) {
_logger.severe(e, s);
rethrow;
}
}

Future<Response<dynamic>> _trashFiles(
Map<String, dynamic> requestData,
) async {
return _enteDio.post(
"/files/trash",
data: requestData,
);
}

Future<void> deleteFromTrash(List<EnteFile> files) async {
final params = <String, dynamic>{};
final uniqueFileIds = files.map((e) => e.uploadedFileID!).toSet().toList();
final batchedFileIDs = uniqueFileIds.chunks(batchSize);
for (final batch in batchedFileIDs) {
params["fileIDs"] = [];
for (final fileID in batch) {
params["fileIDs"].add(fileID);
}
try {
await _enteDio.post(
"/trash/delete",
data: params,
);
await remoteDB.removeTrashItems(batch);
Bus.instance.fire(TrashUpdatedEvent());
} catch (e, s) {
_logger.severe("failed to delete from trash", e, s);
rethrow;
}
}
// no need to await on syncing trash from remote
unawaited(syncTrash());
}

Future<void> emptyTrash() async {
final params = <String, dynamic>{};
params["lastUpdatedAt"] = _getSyncTime();
try {
await _enteDio.post(
"/trash/empty",
data: params,
);
await remoteDB.clearTrash();
unawaited(syncTrash());
Bus.instance.fire(TrashUpdatedEvent());
Bus.instance.fire(ForceReloadTrashPageEvent());
} catch (e, s) {
_logger.severe("failed to empty trash", e, s);
rethrow;
}
}
}

class TrashDiff {
final List<DiffFileItem> trashedFiles;
final List<int> restoredIDs;
final List<int> deletedIDs;
final bool hasMore;
final int lastSyncedTimeStamp;
TrashDiff(
this.trashedFiles,
this.restoredIDs,
this.deletedIDs,
this.hasMore,
this.lastSyncedTimeStamp,
);
}

@@ -30,8 +30,8 @@ import 'package:photos/services/ignored_files_service.dart';
import "package:photos/services/language_service.dart";
import 'package:photos/services/local_file_update_service.dart';
import "package:photos/services/notification_service.dart";
import "package:photos/services/remote/fetch/collection_files.dart";
import "package:photos/services/remote/fetch/diff.dart";
import "package:photos/services/remote/fetch/files_diff.dart";
import "package:photos/services/remote/fetch/remote_diff.dart";
import 'package:photos/services/sync/sync_service.dart';
import "package:photos/services/video_preview_service.dart";
import 'package:photos/utils/file_uploader.dart';

@@ -83,7 +83,7 @@ class RemoteSyncService {
_prefs = preferences;
newService = RemoteDiffService(
_collectionsService,
CollectionFilesService(NetworkClient.instance.enteDio),
RemoteFileDiffService(NetworkClient.instance.enteDio),
);

Bus.instance.on<LocalPhotosUpdatedEvent>().listen((event) async {
@@ -127,7 +127,7 @@ class RemoteSyncService {
await queueLocalAssetForUpload();
}
await _pullDiff();
// await trashSyncService.syncTrash();
await trashSyncService.syncTrash();
if (!hasSyncedBefore) {
await _prefs.setBool(_isFirstRemoteSyncDone, true);
await queueLocalAssetForUpload();

@@ -1,320 +0,0 @@
import 'dart:async';
import "dart:convert";
import "dart:math";

import 'package:dio/dio.dart';
import "package:ente_crypto/ente_crypto.dart";
import 'package:logging/logging.dart';
import 'package:photos/core/constants.dart';
import 'package:photos/core/event_bus.dart';
import "package:photos/db/remote/table/collection_files.dart";
import 'package:photos/db/trash_db.dart';
import 'package:photos/events/collection_updated_event.dart';
import 'package:photos/events/force_reload_trash_page_event.dart';
import 'package:photos/events/trash_updated_event.dart';
import 'package:photos/extensions/list.dart';
import 'package:photos/models/api/collection/trash_item_request.dart';
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/trash_file.dart';
import 'package:photos/models/ignored_file.dart';
import "package:photos/models/metadata/file_magic.dart";
import "package:photos/service_locator.dart";
import "package:photos/services/collections_service.dart";
import "package:photos/services/ignored_files_service.dart";
import "package:photos/utils/file_key.dart";
import "package:shared_preferences/shared_preferences.dart";

class TrashSyncService {
final _logger = Logger("TrashSyncService");

final _trashDB = TrashDB.instance;
static const kLastTrashSyncTime = "last_trash_sync_time";
late SharedPreferences _prefs;
final Dio _enteDio;

TrashSyncService(this._prefs, this._enteDio) {
_logger.fine("TrashSyncService constructor");
}

void init(SharedPreferences preferences) {
_prefs = preferences;
}

Future<void> syncTrash() async {
final lastSyncTime = _getSyncTime();
bool isLocalTrashUpdated = false;
final diff = await getTrashFilesDiff(lastSyncTime);
if (diff.trashedFiles.isNotEmpty) {
isLocalTrashUpdated = true;
_logger.fine("inserting ${diff.trashedFiles.length} items in trash");
await _trashDB.insertMultiple(diff.trashedFiles);
}
if (diff.deletedUploadIDs.isNotEmpty) {
_logger.fine("discard ${diff.deletedUploadIDs.length} deleted items");
final itemsDeleted = await _trashDB.delete(diff.deletedUploadIDs);
isLocalTrashUpdated = isLocalTrashUpdated || itemsDeleted > 0;
}
if (diff.restoredFiles.isNotEmpty) {
_logger.fine("discard ${diff.restoredFiles.length} restored items");
final itemsDeleted = await _trashDB
.delete(diff.restoredFiles.map((e) => e.uploadedFileID!).toList());
isLocalTrashUpdated = isLocalTrashUpdated || itemsDeleted > 0;
}

await _updateIgnoredFiles(diff);

if (diff.lastSyncedTimeStamp != 0) {
await _setSyncTime(diff.lastSyncedTimeStamp);
}
if (isLocalTrashUpdated) {
_logger
.fine('local trash updated, fire ${(TrashUpdatedEvent).toString()}');
Bus.instance.fire(TrashUpdatedEvent());
}
if (diff.hasMore) {
return await syncTrash();
} else if (diff.trashedFiles.isNotEmpty ||
diff.deletedUploadIDs.isNotEmpty) {
Bus.instance.fire(
CollectionUpdatedEvent(
0,
<EnteFile>[],
"trash_change",
),
);
}
}

Future<void> _updateIgnoredFiles(TrashDiff diff) async {
final ignoredFiles = <IgnoredFile>[];
for (TrashFile t in diff.trashedFiles) {
final file = IgnoredFile.fromTrashItem(t);
if (file != null) {
ignoredFiles.add(file);
}
}
if (ignoredFiles.isNotEmpty) {
_logger.fine('updating ${ignoredFiles.length} ignored files ');
await IgnoredFilesService.instance.cacheAndInsert(ignoredFiles);
}
}

Future<bool> _setSyncTime(int time) async {
return _prefs.setInt(kLastTrashSyncTime, time);
}

int _getSyncTime() {
return _prefs.getInt(kLastTrashSyncTime) ?? 0;
}

Future<void> trashFilesOnServer(List<TrashRequest> trashRequestItems) async {
final includedFileIDs = <int>{};
final uniqueItems = <TrashRequest>[];
final ownedCollectionIDs =
CollectionsService.instance.getAllOwnedCollectionIDs();
for (final item in trashRequestItems) {
if (!includedFileIDs.contains(item.fileID)) {
// Check if the collectionID in the request is owned by the user
if (ownedCollectionIDs.contains(item.collectionID)) {
uniqueItems.add(item);
includedFileIDs.add(item.fileID);
} else {
// If not owned, use a different owned collectionID
final bool foundAnotherOwnedCollection = false;
// todo: rewrite neeraj
// final fileCollectionIDs =
// await FilesDB.instance.getAllCollectionIDsOfFile(item.fileID);

// for (final collectionID in fileCollectionIDs) {
// if (ownedCollectionIDs.contains(collectionID)) {
// final newItem = TrashRequest(item.fileID, collectionID);
// uniqueItems.add(newItem);
// includedFileIDs.add(item.fileID);
// foundAnotherOwnedCollection = true;
// break;
// }
// }
if (!foundAnotherOwnedCollection) {
_logger.severe(
"File ${item.fileID} is not owned by the user and has no other owned collection",
);
}
}
}
}
final requestData = <String, dynamic>{};
final batchedItems = uniqueItems.chunks(batchSize);
for (final batch in batchedItems) {
requestData["items"] = [];
for (final item in batch) {
requestData["items"].add(item.toJson());
}
await _trashFiles(requestData);
}
await remoteDB.deleteFiles(includedFileIDs.toList());
}

Future<TrashDiff> getTrashFilesDiff(int sinceTime) async {
try {
final response = await _enteDio.get(
"/trash/v2/diff",
queryParameters: {
"sinceTime": sinceTime,
},
);
int latestUpdatedAtTime = 0;
final trashedFiles = <TrashFile>[];
final deletedUploadIDs = <int>[];
final restoredFiles = <TrashFile>[];

final diff = response.data["diff"] as List;
final bool hasMore = response.data["hasMore"] as bool;
final startTime = DateTime.now();
for (final item in diff) {
final trash = TrashFile();
trash.createdAt = item['createdAt'];
trash.updateAt = item['updatedAt'];
latestUpdatedAtTime = max(latestUpdatedAtTime, trash.updateAt);
if (item["isDeleted"]) {
deletedUploadIDs.add(item["file"]["id"]);
continue;
}

trash.deleteBy = item['deleteBy'];
trash.uploadedFileID = item["file"]["id"];
trash.collectionID = item["file"]["collectionID"];
trash.updationTime = item["file"]["updationTime"];
trash.ownerID = item["file"]["ownerID"];
trash.encryptedKey = item["file"]["encryptedKey"];
trash.keyDecryptionNonce = item["file"]["keyDecryptionNonce"];
trash.fileDecryptionHeader = item["file"]["file"]["decryptionHeader"];
trash.thumbnailDecryptionHeader =
item["file"]["thumbnail"]["decryptionHeader"];
trash.metadataDecryptionHeader =
item["file"]["metadata"]["decryptionHeader"];
final fileDecryptionKey = getFileKey(trash);
final encodedMetadata = await CryptoUtil.decryptChaCha(
CryptoUtil.base642bin(item["file"]["metadata"]["encryptedData"]),
fileDecryptionKey,
CryptoUtil.base642bin(trash.metadataDecryptionHeader!),
);
final Map<String, dynamic> metadata =
jsonDecode(utf8.decode(encodedMetadata));
trash.applyMetadata(metadata);
if (item["file"]['magicMetadata'] != null) {
final utfEncodedMmd = await CryptoUtil.decryptChaCha(
CryptoUtil.base642bin(item["file"]['magicMetadata']['data']),
fileDecryptionKey,
CryptoUtil.base642bin(item["file"]['magicMetadata']['header']),
);
trash.mMdEncodedJson = utf8.decode(utfEncodedMmd);
trash.mMdVersion = item["file"]['magicMetadata']['version'];
}
if (item["file"]['pubMagicMetadata'] != null) {
final utfEncodedMmd = await CryptoUtil.decryptChaCha(
CryptoUtil.base642bin(item["file"]['pubMagicMetadata']['data']),
fileDecryptionKey,
CryptoUtil.base642bin(item["file"]['pubMagicMetadata']['header']),
);
trash.pubMmdEncodedJson = utf8.decode(utfEncodedMmd);
trash.pubMmdVersion = item["file"]['pubMagicMetadata']['version'];
trash.pubMagicMetadata =
PubMagicMetadata.fromEncodedJson(trash.pubMmdEncodedJson!);
}
if (item['isRestored']) {
restoredFiles.add(trash);
continue;
}
trashedFiles.add(trash);
}

final endTime = DateTime.now();
_logger.info(
"time for parsing " +
diff.length.toString() +
": " +
Duration(
microseconds: (endTime.microsecondsSinceEpoch -
startTime.microsecondsSinceEpoch),
).inMilliseconds.toString(),
);
return TrashDiff(
trashedFiles,
restoredFiles,
deletedUploadIDs,
hasMore,
latestUpdatedAtTime,
);
} catch (e, s) {
_logger.severe(e, s);
rethrow;
}
}

Future<Response<dynamic>> _trashFiles(
Map<String, dynamic> requestData,
) async {
return _enteDio.post(
"/files/trash",
data: requestData,
);
}

Future<void> deleteFromTrash(List<EnteFile> files) async {
final params = <String, dynamic>{};
final uniqueFileIds = files.map((e) => e.uploadedFileID!).toSet().toList();
final batchedFileIDs = uniqueFileIds.chunks(batchSize);
for (final batch in batchedFileIDs) {
params["fileIDs"] = [];
for (final fileID in batch) {
params["fileIDs"].add(fileID);
}
try {
await _enteDio.post(
"/trash/delete",
data: params,
);
await _trashDB.delete(batch);
Bus.instance.fire(TrashUpdatedEvent());
} catch (e, s) {
_logger.severe("failed to delete from trash", e, s);
rethrow;
}
}
// no need to await on syncing trash from remote
unawaited(syncTrash());
}

Future<void> emptyTrash() async {
final params = <String, dynamic>{};
params["lastUpdatedAt"] = _getSyncTime();
try {
await _enteDio.post(
"/trash/empty",
data: params,
);
await _trashDB.clearTable();
unawaited(syncTrash());
Bus.instance.fire(TrashUpdatedEvent());
Bus.instance.fire(ForceReloadTrashPageEvent());
} catch (e, s) {
_logger.severe("failed to empty trash", e, s);
rethrow;
}
}
}

class TrashDiff {
final List<TrashFile> trashedFiles;
final List<TrashFile> restoredFiles;
final List<int> deletedUploadIDs;
final bool hasMore;
final int lastSyncedTimeStamp;
TrashDiff(
this.trashedFiles,
this.restoredFiles,
this.deletedUploadIDs,
this.hasMore,
this.lastSyncedTimeStamp,
);
}

@@ -2,9 +2,10 @@ import 'dart:async';

import 'package:flutter/material.dart';
import 'package:photos/core/event_bus.dart';
import 'package:photos/db/trash_db.dart';
import "package:photos/db/remote/db.dart";
import 'package:photos/events/trash_updated_event.dart';
import "package:photos/generated/l10n.dart";
import "package:photos/service_locator.dart";
import "package:photos/services/local_authentication_service.dart";
import 'package:photos/ui/viewer/gallery/trash_page.dart';
import 'package:photos/utils/navigation_util.dart';
@@ -71,7 +72,7 @@ class _TrashSectionButtonState extends State<TrashSectionButton> {
),
const Padding(padding: EdgeInsets.all(6)),
FutureBuilder<int>(
future: TrashDB.instance.count(),
future: remoteDB.rowCount(RemoteTable.trash),
builder: (context, snapshot) {
if (snapshot.hasData && snapshot.data! > 0) {
return RichText(

@@ -13,7 +13,6 @@ import "package:photos/l10n/l10n.dart";
import "package:photos/models/file/extensions/file_props.dart";
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/file_type.dart';
import 'package:photos/models/file/trash_file.dart';
import "package:photos/models/metadata/common_keys.dart";
import 'package:photos/models/selected_files.dart';
import "package:photos/service_locator.dart";
@@ -93,7 +92,7 @@ class FileAppBarState extends State<FileAppBar> {
_reloadActions = false;
}

final isTrashedFile = widget.file is TrashFile;
final isTrashedFile = widget.file.isTrash;
final shouldShowActions = widget.shouldShowActions && !isTrashedFile;
return PreferredSize(
preferredSize: const Size.fromHeight(kToolbarHeight),

@@ -10,7 +10,6 @@ import "package:photos/generated/l10n.dart";
import "package:photos/models/file/extensions/file_props.dart";
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/file_type.dart';
import 'package:photos/models/file/trash_file.dart';
import 'package:photos/models/selected_files.dart';

import "package:photos/ui/actions/file/file_actions.dart";
@@ -105,11 +104,11 @@ class FileBottomBarState extends State<FileBottomBar> {
),
),
);
if (widget.file is TrashFile) {
if (widget.file.isTrash) {
_addTrashOptions(children);
}

if (!widget.showOnlyInfoButton && widget.file is! TrashFile) {
if (!widget.showOnlyInfoButton && !widget.file.isTrash) {
if (widget.file.fileType == FileType.image ||
widget.file.fileType == FileType.livePhoto ||
(widget.file.fileType == FileType.video)) {
@@ -259,9 +258,7 @@ class FileBottomBarState extends State<FileBottomBar> {
color: Colors.white,
),
onPressed: () async {
final trashedFile = <TrashFile>[];
trashedFile.add(widget.file as TrashFile);
if (await deleteFromTrash(context, trashedFile) == true) {
if (await deleteFromTrash(context, [widget.file]) == true) {
Navigator.pop(context);
}
},

@@ -6,7 +6,6 @@ import 'package:photos/ente_theme_data.dart';
import "package:photos/generated/l10n.dart";
import "package:photos/models/api/collection/user.dart";
import "package:photos/models/file/file.dart";
import 'package:photos/models/file/trash_file.dart';
import 'package:photos/theme/colors.dart';
import "package:photos/theme/ente_theme.dart";
import 'package:photos/ui/sharing/user_avator_widget.dart';
@@ -218,12 +217,12 @@ class OwnerAvatarOverlayIcon extends StatelessWidget {
}

class TrashedFileOverlayText extends StatelessWidget {
final TrashFile file;
final EnteFile file;
const TrashedFileOverlayText(this.file, {super.key});
@override
Widget build(BuildContext context) {
final int daysLeft =
((file.deleteBy - DateTime.now().microsecondsSinceEpoch) /
((file.trashTime!.deleteBy - DateTime.now().microsecondsSinceEpoch) /
Duration.microsecondsPerDay)
.ceil();
final text = S.of(context).trashDaysLeft(daysLeft);

@@ -10,7 +10,6 @@ import "package:photos/models/api/collection/user.dart";
import "package:photos/models/file/extensions/file_props.dart";
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/file_type.dart';
import 'package:photos/models/file/trash_file.dart';
import 'package:photos/services/collections_service.dart';
import 'package:photos/services/favorites_service.dart';
import 'package:photos/ui/viewer/file/file_icons_widget.dart';
@@ -226,7 +225,7 @@ class _ThumbnailWidgetState extends State<ThumbnailWidget> {
}

if (widget.file.isTrash) {
viewChildren.add(TrashedFileOverlayText(widget.file as TrashFile));
viewChildren.add(TrashedFileOverlayText(widget.file));
} else if (GalleryContextState.of(context)?.type == GroupType.size) {
viewChildren.add(FileSizeOverlayText(widget.file));
}

@@ -3,12 +3,15 @@ import 'dart:ui';
import 'package:collection/collection.dart' show IterableExtension;
import 'package:flutter/material.dart';
import 'package:photos/core/event_bus.dart';
import 'package:photos/db/trash_db.dart';
import "package:photos/db/remote/db.dart";
import "package:photos/db/remote/table/trash.dart";
import 'package:photos/events/files_updated_event.dart';
import 'package:photos/events/force_reload_trash_page_event.dart';
import "package:photos/generated/l10n.dart";
import "package:photos/models/file_load_result.dart";
import 'package:photos/models/gallery_type.dart';
import 'package:photos/models/selected_files.dart';
import "package:photos/service_locator.dart";
import 'package:photos/ui/common/bottom_shadow.dart';
import 'package:photos/ui/viewer/actions/file_selection_overlay_bar.dart';
import 'package:photos/ui/viewer/gallery/gallery.dart';
@@ -34,13 +37,9 @@ class TrashPage extends StatelessWidget {
final bool filesAreSelected = _selectedFiles.files.isNotEmpty;

final gallery = Gallery(
asyncLoader: (creationStartTime, creationEndTime, {limit, asc}) {
return TrashDB.instance.getTrashedFiles(
creationStartTime,
creationEndTime,
limit: limit,
asc: asc,
);
asyncLoader: (creationStartTime, creationEndTime, {limit, asc}) async {
final result = await remoteDB.getTrashFiles();
return FileLoadResult(result, false);
},
reloadEvent: Bus.instance.on<FilesUpdatedEvent>().where(
(event) =>
@@ -104,7 +103,7 @@ class TrashPage extends StatelessWidget {

Widget _headerWidget() {
return FutureBuilder<int>(
future: TrashDB.instance.count(),
future: remoteDB.rowCount(RemoteTable.trash),
builder: (context, snapshot) {
if (snapshot.hasData && snapshot.data! > 0) {
return Padding(