Compare commits

11 Commits

Author SHA1 Message Date
Prateek Sunal
40d4b21be9 chore: add heavy logging 2025-08-19 14:10:12 +05:30
Prateek Sunal
7027edb2d1 Merge remote-tracking branch 'origin/main' into bg-ml 2025-08-19 12:48:27 +05:30
Prateek Sunal
831720a49d Merge remote-tracking branch 'origin/decoded_image_refactor' into bg-ml 2025-08-19 12:48:20 +05:30
Vishnu Mohandas
621713d0b4 [mob] Remove unused import (#6892) 2025-08-19 12:33:15 +05:30
vishnukvmd
34813d2fae [mob] Remove unused import 2025-08-19 12:32:58 +05:30
Vishnu Mohandas
d8e4418d78 Remove ignore (#6890) 2025-08-19 11:43:36 +05:30
vishnukvmd
9771a5bc5d Remove ignore 2025-08-19 11:41:44 +05:30
Neeraj
aa4207f878 [auth] New translations (#6877)
New translations from
[Crowdin](https://crowdin.com/project/ente-authenticator-app)
2025-08-19 10:13:12 +05:30
Crowdin Bot
f2049ac7fa New Crowdin translations by GitHub Action 2025-08-18 01:18:04 +00:00
Prateek Sunal
ecf58c175b fix: add more things for testing 2025-08-13 19:51:05 +05:30
Prateek Sunal
cc0198f879 fix: add more logs for testing 2025-08-13 14:37:23 +05:30
15 changed files with 193 additions and 38 deletions

View File

@@ -1 +0,0 @@
surprise/

View File

@@ -88,6 +88,8 @@
"useRecoveryKey": "Χρήση κλειδιού ανάκτησης",
"incorrectPasswordTitle": "Λάθος κωδικός πρόσβασης",
"welcomeBack": "Καλωσορίσατε και πάλι!",
"emailAlreadyRegistered": "Το email είναι ήδη καταχωρημένο.",
"emailNotRegistered": "Το email δεν έχει καταχωρηθεί.",
"madeWithLoveAtPrefix": "φτιαγμένη με ❤️ στο ",
"supportDevs": "Εγγραφείτε στο <bold-green>ente</bold-green> για να μας υποστηρίξετε",
"supportDiscount": "Χρησιμοποιήστε τον κωδικό κουπονιού \"AUTH\" για να λάβετε 10% έκπτωση για τον πρώτο χρόνο",
@@ -171,6 +173,7 @@
"invalidQRCode": "Μη έγκυρος κωδικός QR",
"noRecoveryKeyTitle": "Χωρίς κλειδί ανάκτησης;",
"enterEmailHint": "Εισάγετε τη διεύθυνση email σας",
"enterNewEmailHint": "Εισάγετε την διεύθυνση ηλ. ταχυδρομείου σας",
"invalidEmailTitle": "Μη έγκυρη διεύθυνση email",
"invalidEmailMessage": "Παρακαλούμε εισάγετε μια έγκυρη διεύθυνση email.",
"deleteAccount": "Διαγραφή λογαριασμού",
@@ -258,6 +261,10 @@
"areYouSureYouWantToLogout": "Είστε σίγουροι ότι θέλετε να αποσυνδεθείτε;",
"yesLogout": "Ναι, αποσύνδεση",
"exit": "Εξοδος",
"theme": "Θέμα",
"lightTheme": "Φωτεινό",
"darkTheme": "Σκοτεινό",
"systemTheme": "Σύστημα",
"verifyingRecoveryKey": "Επαλήθευση κλειδιού ανάκτησης...",
"recoveryKeyVerified": "Το κλειδί ανάκτησης επαληθεύτηκε",
"recoveryKeySuccessBody": "Τέλεια! Το κλειδί ανάκτησης σας είναι έγκυρο. Σας ευχαριστούμε για την επαλήθευση.\n\nΠαρακαλώ θυμηθείτε να κρατήσετε το κλειδί ανάκτησης σας και σε αντίγραφο ασφαλείας.",
@@ -490,5 +497,24 @@
"appLockNotEnabled": "Το κλείδωμα εφαρμογής δεν είναι ενεργοποιημένο",
"appLockNotEnabledDescription": "Παρακαλώ ενεργοποιήστε το κλείδωμα εφαρμογής μέσω της επιλογής Ασφάλεια > Κλείδωμα εφαρμογής",
"authToViewPasskey": "Παρακαλώ πιστοποιηθείτε για να δείτε το κλειδί πρόσβασης",
"appLockOfflineModeWarning": "Έχετε επιλέξει να προχωρήσετε χωρίς αντίγραφα ασφαλείας. Αν ξεχάσετε τον κωδικό της εφαρμογής, θα κλειδωθείτε από την πρόσβαση στα δεδομένα σας."
"appLockOfflineModeWarning": "Έχετε επιλέξει να προχωρήσετε χωρίς αντίγραφα ασφαλείας. Αν ξεχάσετε τον κωδικό της εφαρμογής, θα κλειδωθείτε από την πρόσβαση στα δεδομένα σας.",
"duplicateCodes": "Διπλότυποι κωδικοί",
"noDuplicates": "✨ Δεν υπάρχουν διπλότυπα",
"youveNoDuplicateCodesThatCanBeCleared": "Δεν υπάρχουν διπλότυπα αρχεία που μπορούν να εκκαθαριστούν",
"deduplicateCodes": "Διπλότυποι κωδικοί",
"deselectAll": "Αποεπιλογή όλων",
"selectAll": "Επιλογή όλων",
"deleteDuplicates": "Διαγραφή διπλότυπων",
"plainHTML": "Απλό HTML",
"dropReviewiOS": "Αφήστε μια κριτική στο App Store",
"dropReviewAndroid": "Αφήστε μια κριτική στο Play Store",
"giveUsAStarOnGithub": "Δώστε μας ένα αστέρι στο Github",
"free5GB": "5GB δωρεάν στο <bold-green>ente</bold-green> Photos",
"freeStorageOffer": "10% έκπτωση στο <bold-green>ente</bold-green> photos",
"freeStorageOfferDescription": "Χρησιμοποιήστε τον κωδικό \"AUTH\" για να λάβετε 10% έκπτωση για τον πρώτο χρόνο",
"advanced": "Για προχωρημένους",
"algorithm": "Αλγόριθμος",
"type": "Τύπος",
"period": "Περίοδος",
"digits": "Ψηφία"
}

View File

@@ -45,7 +45,7 @@
"timeBasedKeyType": "Oparte na czasie (TOTP)",
"counterBasedKeyType": "Oparte na liczniku (HOTP)",
"saveAction": "Zapisz",
"nextTotpTitle": "następny",
"nextTotpTitle": "dalej",
"deleteCodeTitle": "Usunąć kod?",
"deleteCodeMessage": "Czy na pewno chcesz usunąć ten kod? Ta akcja jest nieodwracalna.",
"trashCode": "Przenieść kod do kosza?",

View File

@@ -185,10 +185,13 @@ Future<void> _runMinimally(String taskId, TimeLogger tlog) async {
// only runs for android
await _homeWidgetSync(true);
// await MLService.instance.init();
// await PersonService.init(entityService, MLDataDB.instance, prefs);
// await MLService.instance.runAllML(force: true);
await smartAlbumsService.syncSmartAlbums();
final isDeviceHealthy = await computeController.isDeviceHealthyFuture();
if (isDeviceHealthy) {
await MLService.instance.init();
await PersonService.init(entityService, MLDataDB.instance, prefs);
await MLService.instance.runAllML(force: true);
await smartAlbumsService.syncSmartAlbums();
}
}
Future<void> _init(bool isBackground, {String via = ''}) async {
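Editor's note: the hunk above replaces the commented-out ML bootstrap with a device-health gate — ML init, person service, the indexing run, and smart-album sync now execute only when `computeController.isDeviceHealthyFuture()` reports a healthy device. A minimal sketch of the pattern (`runHeavyWork` is a hypothetical stand-in for those calls, not a name from the diff):

// Hedged sketch: run heavy background work only on a healthy device.
// `isDeviceHealthyFuture()` is the one-shot check added in this change;
// `runHeavyWork` is illustrative only.
Future<void> runIfHealthy(Future<void> Function() runHeavyWork) async {
  final isDeviceHealthy = await computeController.isDeviceHealthyFuture();
  if (!isDeviceHealthy) return; // skip ML on thermally or battery constrained devices
  await runHeavyWork();
}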

View File

@@ -8,6 +8,7 @@ import "package:flutter/foundation.dart";
import "package:logging/logging.dart";
import "package:photos/core/event_bus.dart";
import "package:photos/events/compute_control_event.dart";
import "package:photos/main.dart";
import "package:thermal/thermal.dart";
enum _ComputeRunState {
@@ -42,6 +43,13 @@ class ComputeController {
ComputeController() {
_logger.info('ComputeController constructor');
// we don't need listeners to be initialized in background
if (isProcessBg) {
_logger.info('init done ');
return;
}
_startInteractionTimer(kDefaultInteractionTimeout);
if (Platform.isIOS) {
if (kDebugMode) {
@@ -71,6 +79,8 @@ class ComputeController {
}
bool requestCompute({bool ml = false, bool stream = false}) {
// TODO: Remove after testing
return false;
_logger.info("Requesting compute: ml: $ml, stream: $stream");
if (!_isDeviceHealthy || !_canRunGivenUserInteraction()) {
_logger.info("Device not healthy or user interacting, denying request.");
@@ -153,6 +163,7 @@ class ComputeController {
}
void _fireControlEvent() {
return;
final shouldRunCompute = _isDeviceHealthy && _canRunGivenUserInteraction();
if (shouldRunCompute != _canRunCompute) {
_canRunCompute = shouldRunCompute;
@@ -175,6 +186,25 @@ class ComputeController {
_startInteractionTimer(kDefaultInteractionTimeout);
}
Future<bool> isDeviceHealthyFuture() async {
if (!isProcessBg) return isDeviceHealthy;
// Update Thermal status
_lastThermalStatus = await _thermal.thermalStatus;
// Update Battery info and device health
if (Platform.isIOS) {
_iosLastBatteryInfo = await BatteryInfoPlugin().iosBatteryInfo;
_isDeviceHealthy = _computeIsiOSDeviceHealthy();
} else {
_androidLastBatteryInfo = await BatteryInfoPlugin().androidBatteryInfo;
_isDeviceHealthy = _computeIsAndroidDeviceHealthy();
}
_logger.info("Device health status: $_isDeviceHealthy");
return _isDeviceHealthy;
}
void _onAndroidBatteryStateUpdate(AndroidBatteryInfo? batteryInfo) {
_androidLastBatteryInfo = batteryInfo;
_logger.info("Battery info: ${batteryInfo!.toJson()}");

View File

@@ -54,12 +54,19 @@ class FaceDetectionService extends MlModel {
'sessionAddress should be valid',
);
_logger.info(
"Running face detection for image with size ${dimensions.width}x${dimensions.height}",
);
final startTime = DateTime.now();
final (inputImageList, scaledSize) = await preprocessImageYoloFace(
dimensions,
rawRgbaBytes,
);
_logger.info(
"Preprocessed image to input list of size ${inputImageList.length} with scaled size $scaledSize",
);
final preprocessingTime = DateTime.now();
final preprocessingMs =
preprocessingTime.difference(startTime).inMilliseconds;
@@ -68,8 +75,14 @@ class FaceDetectionService extends MlModel {
List<List<List<double>>>? nestedResults = [];
try {
if (MlModel.usePlatformPlugin) {
_logger.info(
"Running inference using platform plugin",
);
nestedResults = await _runPlatformPluginPredict(inputImageList);
} else {
_logger.info(
"Running inference using ONNX runtime",
);
nestedResults = _runFFIBasedPredict(
sessionAddress,
inputImageList,
@@ -116,9 +129,15 @@ class FaceDetectionService extends MlModel {
final inputs = {'input': inputOrt};
final runOptions = OrtRunOptions();
final session = OrtSession.fromAddress(sessionAddress);
_logger.info(
"Running face detection using ONNX runtime with input size ${inputImageList.length}",
);
final List<OrtValue?> outputs = session.run(runOptions, inputs);
final result =
outputs[0]?.value as List<List<List<double>>>; // [1, 25200, 16]
_logger.info(
"Finished running face detection using ONNX runtime",
);
inputOrt.release();
runOptions.release();
for (var element in outputs) {
@@ -132,11 +151,18 @@ class FaceDetectionService extends MlModel {
Float32List inputImageList,
) async {
final OnnxDart plugin = OnnxDart();
_logger.info(
"Running face detection using OnnxDart plugin with input size ${inputImageList.length}",
);
final result = await plugin.predict(
inputImageList,
_modelName,
);
_logger.info(
"Finished running face detection using OnnxDart plugin",
);
final int resultLength = result!.length;
assert(resultLength % 25200 * 16 == 0);
const int outerLength = 1;
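Editor's note on a pre-existing context line above: in Dart, `%` and `*` share precedence and associate left, so `assert(resultLength % 25200 * 16 == 0)` parses as `assert((resultLength % 25200) * 16 == 0)`, which only verifies divisibility by 25200. If the intent is a whole number of 25200 × 16 output blocks (matching the `[1, 25200, 16]` shape noted in the comment above), the grouping needs to be explicit:

// Likely intended form of the assert (explicit grouping):
assert(resultLength % (25200 * 16) == 0);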

View File

@@ -81,6 +81,9 @@ class FaceRecognitionService {
final faceResults = <FaceResult>[];
final startTime = DateTime.now();
_logger.info(
"Starting runFacesPipeline with fileID $enteFileID",
);
// Get the faces
final List<FaceDetectionRelative> faceDetectionResult =
await _detectFacesSync(
@@ -90,6 +93,9 @@ class FaceRecognitionService {
faceDetectionAddress,
faceResults,
);
_logger.info(
"Detected ${faceDetectionResult.length} faces in image with fileID $enteFileID",
);
final detectFacesTime = DateTime.now();
final detectFacesMs = detectFacesTime.difference(startTime).inMilliseconds;
@@ -101,6 +107,9 @@ class FaceRecognitionService {
return [];
}
_logger.info(
"Detected ${faceDetectionResult.length} faces, proceeding to alignment and embedding",
);
// Align the faces
final Float32List faceAlignmentResult = await _alignFacesSync(
dim,
@@ -112,6 +121,9 @@ class FaceRecognitionService {
final alignFacesMs =
alignFacesTime.difference(detectFacesTime).inMilliseconds;
_logger.info(
"Aligned ${faceDetectionResult.length} faces in image with fileID $enteFileID",
);
// Get the embeddings of the faces
await _embedFacesSync(
faceAlignmentResult,
@@ -139,6 +151,9 @@ class FaceRecognitionService {
List<FaceResult> faceResults,
) async {
try {
_logger.info(
"Running face detection for fileID $fileID with interpreter at $interpreterAddress",
);
// Get the bounding boxes of the faces
final List<FaceDetectionRelative> faces =
await FaceDetectionService.predict(
@@ -147,6 +162,10 @@ class FaceRecognitionService {
interpreterAddress,
);
_logger.info(
"Detected ${faces.length} faces in image with fileID $fileID",
);
// Add detected faces to the faceResults
for (var i = 0; i < faces.length; i++) {
faceResults.add(

View File

@@ -10,6 +10,7 @@ import "package:photos/db/files_db.dart";
import "package:photos/db/ml/db.dart";
import "package:photos/events/compute_control_event.dart";
import "package:photos/events/people_changed_event.dart";
import "package:photos/main.dart";
import "package:photos/models/ml/face/face.dart";
import "package:photos/models/ml/ml_versions.dart";
import "package:photos/service_locator.dart";
@@ -69,31 +70,36 @@ class MLService {
_logger.info("client: $client");
// Listen on ComputeController
Bus.instance.on<ComputeControlEvent>().listen((event) {
if (!flagService.hasGrantedMLConsent) {
return;
}
/// Only listen for events when in foreground,
/// so we don't waste resources when the app is in background
/// and we just do things sequentially
if (!isProcessBg) {
Bus.instance.on<ComputeControlEvent>().listen((event) {
if (!flagService.hasGrantedMLConsent) {
return;
}
_mlControllerStatus = event.shouldRun;
if (_mlControllerStatus) {
if (_shouldPauseIndexingAndClustering) {
_cancelPauseIndexingAndClustering();
_logger.info(
"MLController allowed running ML, faces indexing undoing previous pause",
);
_mlControllerStatus = event.shouldRun;
if (_mlControllerStatus) {
if (_shouldPauseIndexingAndClustering) {
_cancelPauseIndexingAndClustering();
_logger.info(
"MLController allowed running ML, faces indexing undoing previous pause",
);
} else {
_logger.info(
"MLController allowed running ML, faces indexing starting",
);
}
unawaited(runAllML());
} else {
_logger.info(
"MLController allowed running ML, faces indexing starting",
"MLController stopped running ML, faces indexing will be paused (unless it's fetching embeddings)",
);
pauseIndexingAndClustering();
}
unawaited(runAllML());
} else {
_logger.info(
"MLController stopped running ML, faces indexing will be paused (unless it's fetching embeddings)",
);
pauseIndexingAndClustering();
}
});
});
}
_isInitialized = true;
_logger.info('init done');
@@ -136,7 +142,7 @@ class MLService {
);
await clusterAllImages();
}
if (_mlControllerStatus == true) {
if (!isProcessBg && _mlControllerStatus == true) {
// refresh discover section
magicCacheService.updateCache(forced: force).ignore();
// refresh memories section
@@ -148,7 +154,7 @@ class MLService {
if ((await mlDataDB.getUnclusteredFaceCount()) > 0) {
await clusterAllImages();
}
if (_mlControllerStatus == true) {
if (!isProcessBg && _mlControllerStatus == true) {
// refresh discover section
magicCacheService.updateCache().ignore();
// refresh memories section (only runs if forced is true)
@@ -160,8 +166,10 @@ class MLService {
} finally {
_logger.severe("ML finished running");
_isRunningML = false;
computeController.releaseCompute(ml: true);
VideoPreviewService.instance.queueFiles();
if (!isProcessBg) {
computeController.releaseCompute(ml: true);
VideoPreviewService.instance.queueFiles();
}
}
}
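Editor's note: the net effect of this hunk is that the `ComputeControlEvent` subscription is registered only in the foreground process, and the post-run compute release and video-preview queueing are likewise skipped in the background, where work runs sequentially without the event bus. Condensed, the shape is as follows (`_onComputeControl` is an illustrative handler name, not from the diff):

// Condensed sketch of the foreground/background split above.
if (!isProcessBg) {
  Bus.instance.on<ComputeControlEvent>().listen(_onComputeControl);
}
// ...and in the `finally` block after an ML run:
if (!isProcessBg) {
  computeController.releaseCompute(ml: true);
  VideoPreviewService.instance.queueFiles();
}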

View File

@@ -86,6 +86,9 @@ class ClipImageEncoder extends MlModel {
Float32List inputList,
) async {
final OnnxDart plugin = OnnxDart();
_logger.info(
"Running Clip image predict using OnnxDart plugin with input size ${inputList.length}",
);
final result = await plugin.predict(
inputList,
_modelName,

View File

@@ -307,6 +307,9 @@ class SemanticSearchService {
Uint8List rawRgbaBytes,
int clipImageAddress,
) async {
_logger.info(
"Running Clip image encoding for file ID: $enteFileID",
);
final embedding = await ClipImageEncoder.predict(
dimensions,
rawRgbaBytes,
@@ -314,6 +317,10 @@ class SemanticSearchService {
enteFileID,
);
_logger.info(
"Clip image encoding completed for file ID: $enteFileID",
);
final clipResult = ClipResult(fileID: enteFileID, embedding: embedding);
return clipResult;

View File

@@ -95,6 +95,10 @@ class RemoteSyncService {
}
Future<void> sync({bool silently = false}) async {
// TODO: remove
// if (!isProcessBg) {
// return;
// }
if (!_config.hasConfiguredAccount()) {
_logger.info("Skipping remote sync since account is not configured");
return;
@@ -377,10 +381,9 @@ class RemoteSyncService {
localIDsToSync.removeAll(alreadyClaimedLocalIDs);
if (alreadyClaimedLocalIDs.isNotEmpty && !_hasCleanupStaleEntry) {
try {
await _db.removeQueuedLocalFiles(alreadyClaimedLocalIDs);
} catch(e, s) {
_logger.severe("removeQueuedLocalFiles failed",e,s);
await _db.removeQueuedLocalFiles(alreadyClaimedLocalIDs);
} catch (e, s) {
_logger.severe("removeQueuedLocalFiles failed", e, s);
}
}
}

View File

@@ -23,7 +23,8 @@ void callbackDispatcher() {
try {
BgTaskUtils.$.info('Task started $tlog');
await runBackgroundTask(taskName, tlog).timeout(
Platform.isIOS ? kBGTaskTimeout : const Duration(hours: 1),
// TODO: For testing don't do seppuku
Platform.isIOS && false ? kBGTaskTimeout : const Duration(hours: 1),
onTimeout: () async {
BgTaskUtils.$.warning(
"TLE, committing seppuku for taskID: $taskName",

View File

@@ -83,11 +83,17 @@ Future<DecodedImage> decodeImageFromPath(
"Failed to decode image from file: $imagePath using image package",
);
}
final bytes = imageData.getBytes(order: img_pkg.ChannelOrder.rgba);
Uint8List? bytes;
for (final order in img_pkg.ChannelOrder.values) {
bytes = imageData.getBytes(order: order);
_logger.info("Bytes length is: ${bytes.length}, for order: : $order");
}
final dimensions = Dimensions(
width: image!.width,
height: image.height,
);
_logger.info("Dimensions are: $dimensions");
return DecodedImage(
dimensions: dimensions,
rawRgbaBytes: bytes,
@@ -111,6 +117,9 @@ Future<DecodedImage> decodeImageFromPath(
}
late Image image;
_logger.info(
'Decoding image at path: $imagePath, format: $format, includeRgbaBytes: $includeRgbaBytes',
);
try {
image = await decodeImageFromData(imageData);
} catch (e, s) {
@@ -144,12 +153,18 @@ Future<DecodedImage> decodeImageFromPath(
);
}
}
_logger.info(
"Decoded image at path: $imagePath [i]",
);
if (!includeRgbaBytes) {
return DecodedImage(
dimensions: Dimensions(width: image.width, height: image.height),
image: includeDartUiImage ? image : null,
);
}
_logger.info(
"Getting Raw RGBA",
);
final rawRgbaBytes = await _getRawRgbaBytes(image);
return DecodedImage(
dimensions: Dimensions(width: image.width, height: image.height),
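Editor's caution on the first hunk in this file: the loop reassigns `bytes` on every iteration over `ChannelOrder.values`, so the value handed to `rawRgbaBytes` ends up in whatever order the enum lists last — not necessarily RGBA — and the field name no longer matches the data. If the loop exists only for the extra logging, a sketch that keeps the original behavior (a suggested fix, not part of the diff):

// Hedged sketch: log each order's byte length, but return RGBA as before.
for (final order in img_pkg.ChannelOrder.values) {
  _logger.info(
    "Bytes length is: ${imageData.getBytes(order: order).length}, for order: $order",
  );
}
final bytes = imageData.getBytes(order: img_pkg.ChannelOrder.rgba);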

View File

@@ -128,6 +128,8 @@ abstract class SuperIsolate {
final taskID = newIsolateTaskID(operation.name);
_mainSendPort.send([taskID, operation.index, args, answerPort.sendPort]);
logger.info("Activity ${operation.name} started");
answerPort.listen((receivedMessage) {
if (receivedMessage['taskID'] != taskID) {
logger.severe("Received isolate message with wrong taskID");
@@ -136,6 +138,7 @@ abstract class SuperIsolate {
final logs = receivedMessage['logs'] as List<String>;
IsolateLogger.handLogStringsToMainLogger(logs);
final data = receivedMessage['data'];
if (data is Map && data.containsKey('error')) {
// Handle the error
final errorMessage = data['error'];
@@ -143,11 +146,13 @@ abstract class SuperIsolate {
final exception = Exception(errorMessage);
final stackTrace = StackTrace.fromString(errorStackTrace);
completer.completeError(exception, stackTrace);
logger.severe("Activity ${operation.name} failed");
} else {
completer.complete(data);
logger.info("Activity ${operation.name} completed");
}
_activeTasks--;
});
_activeTasks--;
return completer.future;
});
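Editor's note: this hunk appears to move `_activeTasks--;` from immediately after the `answerPort.listen` registration to inside the listener, alongside the new start/complete/fail log lines, so the active-task count now drops only once the isolate actually reports a result. Condensed (handler body elided):

// Condensed sketch: decrement after completion, not after registration.
answerPort.listen((receivedMessage) {
  // ... complete or completeError based on receivedMessage ...
  _activeTasks--; // the task is only done once a result arrives
});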

View File

@@ -7,6 +7,7 @@ import "package:photos/db/files_db.dart";
import "package:photos/db/ml/db.dart";
import "package:photos/db/ml/filedata.dart";
import "package:photos/extensions/list.dart";
import "package:photos/main.dart";
import "package:photos/models/file/extensions/file_props.dart";
import "package:photos/models/file/file.dart";
import "package:photos/models/file/file_type.dart";
@@ -411,19 +412,28 @@ Future<MLResult> analyzeImageStatic(Map args) async {
);
final startTime = DateTime.now();
_logger.info("Decoding image at path: $imagePath");
// Decode the image once to use for both face detection and alignment
final decodedImage = await decodeImageFromPath(
imagePath,
includeRgbaBytes: true,
includeDartUiImage: false,
inBackground: isProcessBg,
);
final rawRgbaBytes = decodedImage.rawRgbaBytes!;
final imageDimensions = decodedImage.dimensions;
_logger.info(
"Decoded image with rgbaLength: ${rawRgbaBytes.length}, dimensions: $imageDimensions",
);
final result = MLResult.fromEnteFileID(enteFileID);
result.decodedImageSize = imageDimensions;
final decodeTime = DateTime.now();
final decodeMs = decodeTime.difference(startTime).inMilliseconds;
_logger.info(
"Decoded image at path: $imagePath, in $decodeMs ms",
);
String faceMsString = "", clipMsString = "";
final pipelines = await Future.wait([
runFaces