Compare commits

11 Commits

decoded_im...bg-ml

| Author | SHA1 | Date |
|---|---|---|
|  | 40d4b21be9 |  |
|  | 7027edb2d1 |  |
|  | 831720a49d |  |
|  | 621713d0b4 |  |
|  | 34813d2fae |  |
|  | d8e4418d78 |  |
|  | 9771a5bc5d |  |
|  | aa4207f878 |  |
|  | f2049ac7fa |  |
|  | ecf58c175b |  |
|  | cc0198f879 |  |
mobile/apps/.gitignore (vendored)

@@ -1 +0,0 @@
-surprise/
Greek (el) localization strings:

@@ -88,6 +88,8 @@
     "useRecoveryKey": "Χρήση κλειδιού ανάκτησης",
     "incorrectPasswordTitle": "Λάθος κωδικός πρόσβασης",
     "welcomeBack": "Καλωσορίσατε και πάλι!",
+    "emailAlreadyRegistered": "Το email είναι ήδη καταχωρημένο.",
+    "emailNotRegistered": "Το email δεν έχει καταχωρηθεί.",
     "madeWithLoveAtPrefix": "φτιαγμένη με ❤️ στο ",
     "supportDevs": "Εγγραφείτε στο <bold-green>ente</bold-green> για να μας υποστηρίξετε",
     "supportDiscount": "Χρησιμοποιήστε τον κωδικό κουπονιού \"AUTH\" για να λάβετε 10% έκπτωση για τον πρώτο χρόνο",
@@ -171,6 +173,7 @@
     "invalidQRCode": "Μη έγκυρος κωδικός QR",
     "noRecoveryKeyTitle": "Χωρίς κλειδί ανάκτησης;",
     "enterEmailHint": "Εισάγετε τη διεύθυνση email σας",
+    "enterNewEmailHint": "Εισάγετε την διεύθυνση ηλ. ταχυδρομείου σας",
     "invalidEmailTitle": "Μη έγκυρη διεύθυνση email",
     "invalidEmailMessage": "Παρακαλούμε εισάγετε μια έγκυρη διεύθυνση email.",
     "deleteAccount": "Διαγραφή λογαριασμού",
@@ -258,6 +261,10 @@
     "areYouSureYouWantToLogout": "Είστε σίγουροι ότι θέλετε να αποσυνδεθείτε;",
     "yesLogout": "Ναι, αποσύνδεση",
     "exit": "Εξοδος",
+    "theme": "Θέμα",
+    "lightTheme": "Φωτεινό",
+    "darkTheme": "Σκοτεινό",
+    "systemTheme": "Σύστημα",
     "verifyingRecoveryKey": "Επαλήθευση κλειδιού ανάκτησης...",
     "recoveryKeyVerified": "Το κλειδί ανάκτησης επαληθεύτηκε",
     "recoveryKeySuccessBody": "Τέλεια! Το κλειδί ανάκτησης σας είναι έγκυρο. Σας ευχαριστούμε για την επαλήθευση.\n\nΠαρακαλώ θυμηθείτε να κρατήσετε το κλειδί ανάκτησης σας και σε αντίγραφο ασφαλείας.",
@@ -490,5 +497,24 @@
     "appLockNotEnabled": "Το κλείδωμα εφαρμογής δεν είναι ενεργοποιημένο",
     "appLockNotEnabledDescription": "Παρακαλώ ενεργοποιήστε το κλείδωμα εφαρμογής μέσω της επιλογής Ασφάλεια > Κλείδωμα εφαρμογής",
     "authToViewPasskey": "Παρακαλώ πιστοποιηθείτε για να δείτε το κλειδί πρόσβασης",
-    "appLockOfflineModeWarning": "Έχετε επιλέξει να προχωρήσετε χωρίς αντίγραφα ασφαλείας. Αν ξεχάσετε τον κωδικό της εφαρμογής, θα κλειδωθείτε από την πρόσβαση στα δεδομένα σας."
+    "appLockOfflineModeWarning": "Έχετε επιλέξει να προχωρήσετε χωρίς αντίγραφα ασφαλείας. Αν ξεχάσετε τον κωδικό της εφαρμογής, θα κλειδωθείτε από την πρόσβαση στα δεδομένα σας.",
+    "duplicateCodes": "Διπλότυποι κωδικοί",
+    "noDuplicates": "✨ Δεν υπάρχουν διπλότυπα",
+    "youveNoDuplicateCodesThatCanBeCleared": "Δεν υπάρχουν διπλότυπα αρχεία που μπορούν να εκκαθαριστούν",
+    "deduplicateCodes": "Διπλότυποι κωδικοί",
+    "deselectAll": "Αποεπιλογή όλων",
+    "selectAll": "Επιλογή όλων",
+    "deleteDuplicates": "Διαγραφή διπλότυπων",
+    "plainHTML": "Απλό HTML",
+    "dropReviewiOS": "Αφήστε μια κριτική στο App Store",
+    "dropReviewAndroid": "Αφήστε μια κριτική στο Play Store",
+    "giveUsAStarOnGithub": "Δώστε μας ένα αστέρι στο Github",
+    "free5GB": "5GB δωρεάν στο <bold-green>ente</bold-green> Photos",
+    "freeStorageOffer": "10% έκπτωση στο <bold-green>ente</bold-green> photos",
+    "freeStorageOfferDescription": "Χρησιμοποιήστε τον κωδικό \"AUTH\" για να λάβετε 10% έκπτωση για τον πρώτο χρόνο",
+    "advanced": "Για προχωρημένους",
+    "algorithm": "Αλγόριθμος",
+    "type": "Τύπος",
+    "period": "Περίοδος",
+    "digits": "Ψηφία"
 }
Polish (pl) localization strings:

@@ -45,7 +45,7 @@
     "timeBasedKeyType": "Oparte na czasie (TOTP)",
     "counterBasedKeyType": "Oparte na liczniku (HOTP)",
     "saveAction": "Zapisz",
-    "nextTotpTitle": "następny",
+    "nextTotpTitle": "dalej",
     "deleteCodeTitle": "Usunąć kod?",
     "deleteCodeMessage": "Czy na pewno chcesz usunąć ten kod? Ta akcja jest nieodwracalna.",
     "trashCode": "Przenieść kod do kosza?",
Background task runner:

@@ -185,10 +185,13 @@ Future<void> _runMinimally(String taskId, TimeLogger tlog) async {
   // only runs for android
   await _homeWidgetSync(true);
 
-  // await MLService.instance.init();
-  // await PersonService.init(entityService, MLDataDB.instance, prefs);
-  // await MLService.instance.runAllML(force: true);
-  await smartAlbumsService.syncSmartAlbums();
+  final isDeviceHealthy = await computeController.isDeviceHealthyFuture();
+  if (isDeviceHealthy) {
+    await MLService.instance.init();
+    await PersonService.init(entityService, MLDataDB.instance, prefs);
+    await MLService.instance.runAllML(force: true);
+    await smartAlbumsService.syncSmartAlbums();
+  }
 }
 
 Future<void> _init(bool isBackground, {String via = ''}) async {
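The rewritten background path no longer leaves the ML calls commented out; it gates them behind an on-demand device-health check. A minimal, self-contained sketch of that guard pattern (the `isDeviceHealthyFuture` name is taken from the hunks; `HealthProbe` and `runIfHealthy` are illustrative only):

```dart
// Sketch only, not code from this repository: run heavy background work
// (ML indexing, smart-album sync, ...) only when a health probe passes.
abstract class HealthProbe {
  // Same method name that ComputeController exposes in the hunks below.
  Future<bool> isDeviceHealthyFuture();
}

Future<void> runIfHealthy(
  HealthProbe probe,
  Future<void> Function() heavyWork,
) async {
  if (!await probe.isDeviceHealthyFuture()) {
    return; // skip the work entirely on hot or low-battery devices
  }
  await heavyWork();
}
```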
ComputeController:

@@ -8,6 +8,7 @@ import "package:flutter/foundation.dart";
 import "package:logging/logging.dart";
 import "package:photos/core/event_bus.dart";
 import "package:photos/events/compute_control_event.dart";
+import "package:photos/main.dart";
 import "package:thermal/thermal.dart";
 
 enum _ComputeRunState {
@@ -42,6 +43,13 @@ class ComputeController {
 
   ComputeController() {
     _logger.info('ComputeController constructor');
+
+    // we don't need listeners to be initialized in background
+    if (isProcessBg) {
+      _logger.info('init done ');
+      return;
+    }
+
     _startInteractionTimer(kDefaultInteractionTimeout);
     if (Platform.isIOS) {
       if (kDebugMode) {
@@ -71,6 +79,8 @@ class ComputeController {
   }
 
   bool requestCompute({bool ml = false, bool stream = false}) {
+    // TODO: Remove after testing
+    return false;
     _logger.info("Requesting compute: ml: $ml, stream: $stream");
     if (!_isDeviceHealthy || !_canRunGivenUserInteraction()) {
       _logger.info("Device not healthy or user interacting, denying request.");
@@ -153,6 +163,7 @@ class ComputeController {
   }
 
   void _fireControlEvent() {
+    return;
     final shouldRunCompute = _isDeviceHealthy && _canRunGivenUserInteraction();
     if (shouldRunCompute != _canRunCompute) {
       _canRunCompute = shouldRunCompute;
@@ -175,6 +186,25 @@
     _startInteractionTimer(kDefaultInteractionTimeout);
   }
 
+  Future<bool> isDeviceHealthyFuture() async {
+    if (!isProcessBg) return isDeviceHealthy;
+
+    // Update Thermal status
+    _lastThermalStatus = await _thermal.thermalStatus;
+
+    // Update Battery info and device health
+    if (Platform.isIOS) {
+      _iosLastBatteryInfo = await BatteryInfoPlugin().iosBatteryInfo;
+      _isDeviceHealthy = _computeIsiOSDeviceHealthy();
+    } else {
+      _androidLastBatteryInfo = await BatteryInfoPlugin().androidBatteryInfo;
+      _isDeviceHealthy = _computeIsAndroidDeviceHealthy();
+    }
+
+    _logger.info("Device health status: $_isDeviceHealthy");
+    return _isDeviceHealthy;
+  }
+
   void _onAndroidBatteryStateUpdate(AndroidBatteryInfo? batteryInfo) {
     _androidLastBatteryInfo = batteryInfo;
     _logger.info("Battery info: ${batteryInfo!.toJson()}");
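`isDeviceHealthyFuture()` samples thermal and battery state on demand because, in the background process, the constructor returns early and never registers the usual listeners. The actual health policy lives in `_computeIsiOSDeviceHealthy()` / `_computeIsAndroidDeviceHealthy()`, which are outside this diff; the sketch below only illustrates the kind of rule such a check might apply, with invented names and thresholds:

```dart
// Illustrative only: every enum, name, and threshold here is an assumption,
// not the app's actual policy.
enum ThermalLevel { nominal, fair, serious, critical }

bool deviceHealthy({
  required ThermalLevel thermal,
  required int batteryPercent,
  required bool isCharging,
}) {
  final coolEnough =
      thermal == ThermalLevel.nominal || thermal == ThermalLevel.fair;
  final enoughPower = isCharging || batteryPercent >= 20; // assumed cutoff
  return coolEnough && enoughPower;
}
```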
FaceDetectionService:

@@ -54,12 +54,19 @@ class FaceDetectionService extends MlModel {
       'sessionAddress should be valid',
     );
 
+    _logger.info(
+      "Running face detection for image with size ${dimensions.width}x${dimensions.height}",
+    );
+
     final startTime = DateTime.now();
 
     final (inputImageList, scaledSize) = await preprocessImageYoloFace(
       dimensions,
       rawRgbaBytes,
     );
+    _logger.info(
+      "Preprocessed image to input list of size ${inputImageList.length} with scaled size $scaledSize",
+    );
     final preprocessingTime = DateTime.now();
     final preprocessingMs =
         preprocessingTime.difference(startTime).inMilliseconds;
@@ -68,8 +75,14 @@ class FaceDetectionService extends MlModel {
     List<List<List<double>>>? nestedResults = [];
     try {
       if (MlModel.usePlatformPlugin) {
+        _logger.info(
+          "Running inference using platform plugin",
+        );
         nestedResults = await _runPlatformPluginPredict(inputImageList);
       } else {
+        _logger.info(
+          "Running inference using ONNX runtime",
+        );
         nestedResults = _runFFIBasedPredict(
           sessionAddress,
           inputImageList,
@@ -116,9 +129,15 @@ class FaceDetectionService extends MlModel {
     final inputs = {'input': inputOrt};
     final runOptions = OrtRunOptions();
     final session = OrtSession.fromAddress(sessionAddress);
+    _logger.info(
+      "Running face detection using ONNX runtime with input size ${inputImageList.length}",
+    );
     final List<OrtValue?> outputs = session.run(runOptions, inputs);
     final result =
         outputs[0]?.value as List<List<List<double>>>; // [1, 25200, 16]
+    _logger.info(
+      "Finished running face detection using ONNX runtime",
+    );
     inputOrt.release();
     runOptions.release();
     for (var element in outputs) {
@@ -132,11 +151,18 @@ class FaceDetectionService extends MlModel {
     Float32List inputImageList,
   ) async {
     final OnnxDart plugin = OnnxDart();
+    _logger.info(
+      "Running face detection using OnnxDart plugin with input size ${inputImageList.length}",
+    );
     final result = await plugin.predict(
       inputImageList,
       _modelName,
     );
+
+    _logger.info(
+      "Finished running face detection using OnnxDart plugin",
+    );
 
     final int resultLength = result!.length;
     assert(resultLength % 25200 * 16 == 0);
     const int outerLength = 1;
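The added log lines bracket preprocessing and inference, alongside the existing `DateTime`-based timing. As a generic illustration (not code from this repository), the same measure-and-log step can be factored into a small helper:

```dart
// Generic sketch: time an async step and report how long it took.
// `label` and `step` are placeholders; the app logs via package:logging
// rather than print.
Future<T> timedStep<T>(String label, Future<T> Function() step) async {
  final start = DateTime.now();
  final value = await step();
  final ms = DateTime.now().difference(start).inMilliseconds;
  print('$label took $ms ms');
  return value;
}
```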
FaceRecognitionService:

@@ -81,6 +81,9 @@ class FaceRecognitionService {
     final faceResults = <FaceResult>[];
     final startTime = DateTime.now();
 
+    _logger.info(
+      "Starting runFacesPipeline with fileID $enteFileID",
+    );
     // Get the faces
     final List<FaceDetectionRelative> faceDetectionResult =
         await _detectFacesSync(
@@ -90,6 +93,9 @@ class FaceRecognitionService {
       faceDetectionAddress,
       faceResults,
     );
+    _logger.info(
+      "Detected ${faceDetectionResult.length} faces in image with fileID $enteFileID",
+    );
     final detectFacesTime = DateTime.now();
     final detectFacesMs = detectFacesTime.difference(startTime).inMilliseconds;
 
@@ -101,6 +107,9 @@ class FaceRecognitionService {
       return [];
     }
 
+    _logger.info(
+      "Detected ${faceDetectionResult.length} faces, proceeding to alignment and embedding",
+    );
     // Align the faces
     final Float32List faceAlignmentResult = await _alignFacesSync(
       dim,
@@ -112,6 +121,9 @@ class FaceRecognitionService {
     final alignFacesMs =
         alignFacesTime.difference(detectFacesTime).inMilliseconds;
 
+    _logger.info(
+      "Aligned ${faceDetectionResult.length} faces in image with fileID $enteFileID",
+    );
     // Get the embeddings of the faces
     await _embedFacesSync(
       faceAlignmentResult,
@@ -139,6 +151,9 @@ class FaceRecognitionService {
     List<FaceResult> faceResults,
   ) async {
     try {
+      _logger.info(
+        "Running face detection for fileID $fileID with interpreter at $interpreterAddress",
+      );
       // Get the bounding boxes of the faces
       final List<FaceDetectionRelative> faces =
           await FaceDetectionService.predict(
@@ -147,6 +162,10 @@ class FaceRecognitionService {
         interpreterAddress,
       );
 
+      _logger.info(
+        "Detected ${faces.length} faces in image with fileID $fileID",
+      );
+
      // Add detected faces to the faceResults
      for (var i = 0; i < faces.length; i++) {
        faceResults.add(
MLService:

@@ -10,6 +10,7 @@ import "package:photos/db/files_db.dart";
 import "package:photos/db/ml/db.dart";
 import "package:photos/events/compute_control_event.dart";
 import "package:photos/events/people_changed_event.dart";
+import "package:photos/main.dart";
 import "package:photos/models/ml/face/face.dart";
 import "package:photos/models/ml/ml_versions.dart";
 import "package:photos/service_locator.dart";
@@ -69,31 +70,36 @@ class MLService {
     _logger.info("client: $client");
 
     // Listen on ComputeController
-    Bus.instance.on<ComputeControlEvent>().listen((event) {
-      if (!flagService.hasGrantedMLConsent) {
-        return;
-      }
+    /// Only listen for events when in foreground,
+    /// so we don't waste resources when the app is in background
+    /// and we just do things sequentially
+    if (!isProcessBg) {
+      Bus.instance.on<ComputeControlEvent>().listen((event) {
+        if (!flagService.hasGrantedMLConsent) {
+          return;
+        }
 
       _mlControllerStatus = event.shouldRun;
       if (_mlControllerStatus) {
         if (_shouldPauseIndexingAndClustering) {
           _cancelPauseIndexingAndClustering();
           _logger.info(
             "MLController allowed running ML, faces indexing undoing previous pause",
           );
+        } else {
+          _logger.info(
+            "MLController allowed running ML, faces indexing starting",
+          );
+        }
+        unawaited(runAllML());
       } else {
         _logger.info(
-          "MLController allowed running ML, faces indexing starting",
+          "MLController stopped running ML, faces indexing will be paused (unless it's fetching embeddings)",
         );
+        pauseIndexingAndClustering();
       }
-      unawaited(runAllML());
-    } else {
-      _logger.info(
-        "MLController stopped running ML, faces indexing will be paused (unless it's fetching embeddings)",
-      );
-      pauseIndexingAndClustering();
-    }
-    });
+      });
+    }
 
     _isInitialized = true;
     _logger.info('init done');
@@ -136,7 +142,7 @@
       );
       await clusterAllImages();
     }
-    if (_mlControllerStatus == true) {
+    if (!isProcessBg && _mlControllerStatus == true) {
       // refresh discover section
       magicCacheService.updateCache(forced: force).ignore();
       // refresh memories section
@@ -148,7 +154,7 @@
     if ((await mlDataDB.getUnclusteredFaceCount()) > 0) {
       await clusterAllImages();
     }
-    if (_mlControllerStatus == true) {
+    if (!isProcessBg && _mlControllerStatus == true) {
       // refresh discover section
       magicCacheService.updateCache().ignore();
       // refresh memories section (only runs if forced is true)
@@ -160,8 +166,10 @@
     } finally {
       _logger.severe("ML finished running");
       _isRunningML = false;
-      computeController.releaseCompute(ml: true);
-      VideoPreviewService.instance.queueFiles();
+      if (!isProcessBg) {
+        computeController.releaseCompute(ml: true);
+        VideoPreviewService.instance.queueFiles();
+      }
     }
   }
 
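Several files in this compare add `import "package:photos/main.dart";` and branch on `isProcessBg`. The declaration itself is not shown in this view; presumably it is a process-wide flag that records whether the current Dart process was started by the background task or by the foreground app. A hypothetical sketch of such a flag:

```dart
// Hypothetical sketch; the real declaration in package:photos/main.dart is
// not part of this compare view.
bool isProcessBg = false;

void main() {
  // Foreground app entry point: the flag stays false.
}

@pragma('vm:entry-point')
void backgroundEntryPoint() {
  // Background task entry point marks the process as "background".
  isProcessBg = true;
}
```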
ClipImageEncoder:

@@ -86,6 +86,9 @@ class ClipImageEncoder extends MlModel {
     Float32List inputList,
   ) async {
     final OnnxDart plugin = OnnxDart();
+    _logger.info(
+      "Running Clip image predict using OnnxDart plugin with input size ${inputList.length}",
+    );
     final result = await plugin.predict(
       inputList,
       _modelName,
SemanticSearchService:

@@ -307,6 +307,9 @@ class SemanticSearchService {
     Uint8List rawRgbaBytes,
     int clipImageAddress,
   ) async {
+    _logger.info(
+      "Running Clip image encoding for file ID: $enteFileID",
+    );
     final embedding = await ClipImageEncoder.predict(
       dimensions,
       rawRgbaBytes,
@@ -314,6 +317,10 @@ class SemanticSearchService {
       enteFileID,
     );
 
+    _logger.info(
+      "Clip image encoding completed for file ID: $enteFileID",
+    );
+
     final clipResult = ClipResult(fileID: enteFileID, embedding: embedding);
 
     return clipResult;
RemoteSyncService:

@@ -95,6 +95,10 @@ class RemoteSyncService {
   }
 
   Future<void> sync({bool silently = false}) async {
+    // TODO: remove
+    // if (!isProcessBg) {
+    // return;
+    // }
     if (!_config.hasConfiguredAccount()) {
       _logger.info("Skipping remote sync since account is not configured");
       return;
@@ -377,10 +381,9 @@ class RemoteSyncService {
     localIDsToSync.removeAll(alreadyClaimedLocalIDs);
     if (alreadyClaimedLocalIDs.isNotEmpty && !_hasCleanupStaleEntry) {
       try {
         await _db.removeQueuedLocalFiles(alreadyClaimedLocalIDs);
-      } catch(e, s) {
-        _logger.severe("removeQueuedLocalFiles failed",e,s);
-
+      } catch (e, s) {
+        _logger.severe("removeQueuedLocalFiles failed", e, s);
       }
     }
   }
callbackDispatcher:

@@ -23,7 +23,8 @@ void callbackDispatcher() {
     try {
       BgTaskUtils.$.info('Task started $tlog');
       await runBackgroundTask(taskName, tlog).timeout(
-        Platform.isIOS ? kBGTaskTimeout : const Duration(hours: 1),
+        // TODO: For testing don't do seppuku
+        Platform.isIOS && false ? kBGTaskTimeout : const Duration(hours: 1),
         onTimeout: () async {
           BgTaskUtils.$.warning(
             "TLE, committing seppuku for taskID: $taskName",
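The background task is wrapped in `Future.timeout`, so a stuck run aborts itself; with `Platform.isIOS && false` the shorter iOS budget is never selected while testing. A generic sketch of the timeout pattern (durations and names are placeholders, not the app's values):

```dart
// Generic sketch of Future.timeout with an onTimeout fallback.
Future<void> runWithLimit(Future<void> Function() task) {
  return task().timeout(
    const Duration(minutes: 25), // placeholder budget
    onTimeout: () {
      // Invoked instead of throwing TimeoutException when the budget is hit.
      print('task exceeded its time budget, aborting');
    },
  );
}
```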
decodeImageFromPath:

@@ -83,11 +83,17 @@ Future<DecodedImage> decodeImageFromPath(
         "Failed to decode image from file: $imagePath using image package",
       );
     }
-    final bytes = imageData.getBytes(order: img_pkg.ChannelOrder.rgba);
+
+    Uint8List? bytes;
+    for (final order in img_pkg.ChannelOrder.values) {
+      bytes = imageData.getBytes(order: order);
+      _logger.info("Bytes length is: ${bytes.length}, for order: : $order");
+    }
     final dimensions = Dimensions(
       width: image!.width,
       height: image.height,
     );
+    _logger.info("Dimensions are: $dimensions");
     return DecodedImage(
       dimensions: dimensions,
       rawRgbaBytes: bytes,
@@ -111,6 +117,9 @@ Future<DecodedImage> decodeImageFromPath(
   }
 
   late Image image;
+  _logger.info(
+    'Decoding image at path: $imagePath, format: $format, includeRgbaBytes: $includeRgbaBytes',
+  );
   try {
     image = await decodeImageFromData(imageData);
   } catch (e, s) {
@@ -144,12 +153,18 @@ Future<DecodedImage> decodeImageFromPath(
       );
     }
   }
+  _logger.info(
+    "Decoded image at path: $imagePath [i]",
+  );
   if (!includeRgbaBytes) {
     return DecodedImage(
       dimensions: Dimensions(width: image.width, height: image.height),
       image: includeDartUiImage ? image : null,
     );
   }
+  _logger.info(
+    "Getting Raw RGBA",
+  );
   final rawRgbaBytes = await _getRawRgbaBytes(image);
   return DecodedImage(
     dimensions: Dimensions(width: image.width, height: image.height),
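The new loop logs the byte length for every `ChannelOrder`, overwriting `bytes` on each pass (so whichever order enumerates last is what ends up in `rawRgbaBytes`), which reads like a temporary debugging aid. For reference, a minimal standalone sketch of decoding a file and pulling RGBA-ordered bytes, assuming the `image` package (aliased `img_pkg` in the hunk) with its 4.x API:

```dart
// Sketch only: decode an image file and extract RGBA-ordered bytes using
// package:image (assumed 4.x API, as aliased img_pkg above).
import 'dart:io';
import 'dart:typed_data';

import 'package:image/image.dart' as img_pkg;

Future<Uint8List?> rgbaBytesFromFile(String path) async {
  final fileBytes = await File(path).readAsBytes();
  final decoded = img_pkg.decodeImage(fileBytes); // null if decoding fails
  if (decoded == null) return null;
  return decoded.getBytes(order: img_pkg.ChannelOrder.rgba);
}
```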
SuperIsolate:

@@ -128,6 +128,8 @@ abstract class SuperIsolate {
       final taskID = newIsolateTaskID(operation.name);
       _mainSendPort.send([taskID, operation.index, args, answerPort.sendPort]);
 
+      logger.info("Activity ${operation.name} started");
+
       answerPort.listen((receivedMessage) {
         if (receivedMessage['taskID'] != taskID) {
           logger.severe("Received isolate message with wrong taskID");
@@ -136,6 +138,7 @@ abstract class SuperIsolate {
         final logs = receivedMessage['logs'] as List<String>;
         IsolateLogger.handLogStringsToMainLogger(logs);
         final data = receivedMessage['data'];
+
         if (data is Map && data.containsKey('error')) {
           // Handle the error
           final errorMessage = data['error'];
@@ -143,11 +146,13 @@ abstract class SuperIsolate {
           final exception = Exception(errorMessage);
           final stackTrace = StackTrace.fromString(errorStackTrace);
           completer.completeError(exception, stackTrace);
+          logger.severe("Activity ${operation.name} failed");
         } else {
           completer.complete(data);
+          logger.info("Activity ${operation.name} completed");
         }
+        _activeTasks--;
       });
-      _activeTasks--;
 
       return completer.future;
     });
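Besides the new log lines, this hunk moves `_activeTasks--` inside the reply handler, so the counter now drops when the isolate actually answers rather than immediately after the request is sent. A generic sketch of the port-plus-Completer request/reply pattern used here (message shape and names are illustrative, not the app's exact protocol):

```dart
// Generic sketch: send a request to a worker isolate and complete a Future
// when the reply arrives; per-task bookkeeping belongs in the listener.
import 'dart:async';
import 'dart:isolate';

Future<Object?> sendRequest(SendPort worker, Object? payload) {
  final answerPort = ReceivePort();
  final completer = Completer<Object?>();
  worker.send([payload, answerPort.sendPort]);
  answerPort.listen((reply) {
    completer.complete(reply); // e.g. decrement an active-task counter here
    answerPort.close();
  });
  return completer.future;
}
```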
analyzeImageStatic:

@@ -7,6 +7,7 @@ import "package:photos/db/files_db.dart";
 import "package:photos/db/ml/db.dart";
 import "package:photos/db/ml/filedata.dart";
 import "package:photos/extensions/list.dart";
+import "package:photos/main.dart";
 import "package:photos/models/file/extensions/file_props.dart";
 import "package:photos/models/file/file.dart";
 import "package:photos/models/file/file_type.dart";
@@ -411,19 +412,28 @@ Future<MLResult> analyzeImageStatic(Map args) async {
     );
     final startTime = DateTime.now();
 
+    _logger.info("Decoding image at path: $imagePath");
     // Decode the image once to use for both face detection and alignment
     final decodedImage = await decodeImageFromPath(
       imagePath,
       includeRgbaBytes: true,
       includeDartUiImage: false,
+      inBackground: isProcessBg,
     );
     final rawRgbaBytes = decodedImage.rawRgbaBytes!;
     final imageDimensions = decodedImage.dimensions;
+    _logger.info(
+      "Decoded image with rgbaLength: ${rawRgbaBytes.length}, dimensions: $imageDimensions",
+    );
     final result = MLResult.fromEnteFileID(enteFileID);
     result.decodedImageSize = imageDimensions;
     final decodeTime = DateTime.now();
     final decodeMs = decodeTime.difference(startTime).inMilliseconds;
+
+    _logger.info(
+      "Decoded image at path: $imagePath, in $decodeMs ms",
+    );
 
     String faceMsString = "", clipMsString = "";
     final pipelines = await Future.wait([
       runFaces