ML logging (#3112)
## Description

- Regular logging now shows up in user logs
- Cleaned up logging around ML indexing
- Refactored all isolate services into a single interface (sketched below)
- Try converting to JPG on any failed decoding for ML on Android

## Tested

Tested in debug mode on my Pixel 8 phone.
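The isolate consolidation is the bulk of the diff: three hand-rolled isolate wrappers become subclasses of one `SuperIsolate` base class that owns spawning, logging, task IDs, and auto-disposal. A minimal sketch of the resulting shape, assuming the `SuperIsolate` and `IsolateOperation` definitions added below (`ExampleService` and `cluster()` are hypothetical names):

```dart
// Sketch only: mirrors what FaceClusteringService, MLComputer and
// MLIndexingIsolate now do; declare the isolate's traits, then call
// runInIsolate with an IsolateOperation and a map of primitive args.
class ExampleService extends SuperIsolate {
  @override
  Logger get logger => Logger("ExampleService");

  @override
  bool get isDartUiIsolate => false; // true would spawn a DartUI isolate

  @override
  String get isolateName => "ExampleIsolate";

  @override
  bool get shouldAutomaticDispose => true; // idle isolate is killed after 120s

  Future<dynamic> cluster(Map<String, dynamic> args) =>
      runInIsolate(IsolateOperation.linearIncrementalClustering, args);
}
```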
mobile/lib/core/error-reporting/isolate_logging.dart (new file, 59 lines)
@@ -0,0 +1,59 @@
import "dart:collection" show Queue;
import "dart:convert" show jsonEncode, jsonDecode;

import "package:logging/logging.dart";
import "package:photos/core/error-reporting/super_logging.dart";

class IsolateLogString {
  final String logString;
  final Object? error;

  IsolateLogString(this.logString, this.error);

  String toJsonString() => jsonEncode({
        'logString': logString,
        'error': error,
      });

  static IsolateLogString fromJsonString(String jsonString) {
    final json = jsonDecode(jsonString);
    return IsolateLogString(
      json['logString'] as String,
      json['error'],
    );
  }
}

class IsolateLogger {
  final Queue<IsolateLogString> fileQueueEntries = Queue();

  Future onLogRecordInIsolate(LogRecord rec) async {
    final str = rec.toPrettyString(null, true);

    // write to stdout
    SuperLogging.printLog(str);

    // push to log queue
    // store a JSON-safe copy of the error (null when there was none)
    fileQueueEntries.add(IsolateLogString(str, rec.error?.toString()));
  }

  /// WARNING: only call this from the isolate
  Queue<String> getLogStringsAndClear() {
    if (fileQueueEntries.isEmpty) return Queue<String>();
    final result = Queue<String>();
    while (fileQueueEntries.isNotEmpty) {
      final entry = fileQueueEntries.removeFirst();
      result.add(entry.toJsonString());
    }
    return result;
  }

  /// WARNING: only call this from the main thread
  static void handLogStringsToMainLogger(List<String> logs) {
    while (logs.isNotEmpty) {
      final logString = logs.removeAt(0);
      final log = IsolateLogString.fromJsonString(logString);
      SuperLogging.saveLogString(log.logString, log.error);
    }
  }
}
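To see the round trip the two classes above implement, here is a self-contained sketch (plain Dart, illustrative log values only): log records are pretty-printed and buffered inside the isolate, JSON-encoded per entry, shipped back as a `List<String>`, then decoded and replayed into the main logger.

```dart
import "dart:collection" show Queue;
import "dart:convert" show jsonDecode, jsonEncode;

void main() {
  // In the isolate: buffer each log record as a JSON string (IsolateLogger).
  final fileQueueEntries = Queue<String>()
    ..add(jsonEncode({
      'logString': '[FaceLinearClustering (in isolate)] [INFO] copied faces',
      'error': null,
    }))
    ..add(jsonEncode({
      'logString': '[MLIndexingIsolate (in isolate)] [SEVERE] decode failed',
      'error': 'FormatException',
    }));

  // Shipped over the SendPort: primitives only (here, a List<String>).
  final logs = List<String>.from(fileQueueEntries);
  fileQueueEntries.clear();

  // On the main thread (handLogStringsToMainLogger): decode and hand each
  // entry to the persistent logger (SuperLogging.saveLogString in the app).
  for (final logString in logs) {
    final log = jsonDecode(logString) as Map<String, dynamic>;
    print("save: ${log['logString']} (error: ${log['error']})");
  }
}
```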
@@ -38,8 +38,9 @@ extension SuperString on String {
 }

 extension SuperLogRecord on LogRecord {
-  String toPrettyString([String? extraLines]) {
-    final header = "[$loggerName] [$level] [$time]";
+  String toPrettyString([String? extraLines, bool inIsolate = false]) {
+    final header =
+        "[$loggerName${inIsolate ? " (in isolate)" : ""}] [$level] [$time]";

     var msg = "$header $message";

@@ -270,6 +271,10 @@ class SuperLogging {
     // write to stdout
     printLog(str);

+    saveLogString(str, rec.error);
+  }
+
+  static void saveLogString(String str, Object? error) {
     // push to log queue
     if (fileIsEnabled) {
       fileQueueEntries.add(str + '\n');
@@ -279,8 +284,8 @@ class SuperLogging {
     }

     // add error to sentry queue
-    if (sentryIsEnabled && rec.error != null) {
-      _sendErrorToSentry(rec.error!, null).ignore();
+    if (sentryIsEnabled && error != null) {
+      _sendErrorToSentry(error, null).ignore();
     }
   }

@@ -7,3 +7,7 @@ const clusterIDLength = 22;
 String newClusterID() {
   return "cluster_${customAlphabet(enteWhiteListedAlphabet, clusterIDLength)}";
 }
+
+String newIsolateTaskID(String task) {
+  return "${task}_${customAlphabet(enteWhiteListedAlphabet, clusterIDLength)}";
+}
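For reference, `newIsolateTaskID("analyzeImage")` produces something like `analyzeImage_` followed by 22 characters from the whitelisted alphabet; that suffix is what lets `runInIsolate` (in `isolate_service.dart` below) match each reply to the task that requested it.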
mobile/lib/services/isolate_functions.dart (new file, 129 lines)
@@ -0,0 +1,129 @@
import "dart:io" show File;
import 'dart:typed_data' show Uint8List;

import "package:photos/models/ml/face/box.dart";
import "package:photos/services/machine_learning/face_ml/face_clustering/face_clustering_service.dart";
import "package:photos/services/machine_learning/ml_model.dart";
import "package:photos/services/machine_learning/ml_result.dart";
import "package:photos/services/machine_learning/semantic_search/clip/clip_text_encoder.dart";
import "package:photos/services/machine_learning/semantic_search/clip/clip_text_tokenizer.dart";
import "package:photos/utils/image_ml_util.dart";
import "package:photos/utils/ml_util.dart";

enum IsolateOperation {
  /// [MLIndexingIsolate]
  analyzeImage,

  /// [MLIndexingIsolate]
  loadIndexingModels,

  /// [MLIndexingIsolate]
  releaseIndexingModels,

  /// [MLComputer]
  generateFaceThumbnails,

  /// [MLComputer]
  loadModel,

  /// [MLComputer]
  initializeClipTokenizer,

  /// [MLComputer]
  runClipText,

  /// [FaceClusteringService]
  linearIncrementalClustering
}

/// WARNING: Only return primitives unless you know the method is only going
/// to be used on regular isolates as opposed to DartUI and Flutter isolates
/// https://api.flutter.dev/flutter/dart-isolate/SendPort/send.html
Future<dynamic> isolateFunction(
  IsolateOperation function,
  Map<String, dynamic> args,
) async {
  switch (function) {
    /// Cases for MLIndexingIsolate start here

    /// MLIndexingIsolate
    case IsolateOperation.analyzeImage:
      final MLResult result = await analyzeImageStatic(args);
      return result.toJsonString();

    /// MLIndexingIsolate
    case IsolateOperation.loadIndexingModels:
      final modelNames = args['modelNames'] as List<String>;
      final modelPaths = args['modelPaths'] as List<String>;
      final addresses = <int>[];
      for (int i = 0; i < modelNames.length; i++) {
        final int address = await MlModel.loadModel(
          modelNames[i],
          modelPaths[i],
        );
        addresses.add(address);
      }
      return List<int>.from(addresses, growable: false);

    /// MLIndexingIsolate
    case IsolateOperation.releaseIndexingModels:
      final modelNames = args['modelNames'] as List<String>;
      final modelAddresses = args['modelAddresses'] as List<int>;
      for (int i = 0; i < modelNames.length; i++) {
        await MlModel.releaseModel(
          modelNames[i],
          modelAddresses[i],
        );
      }
      return true;

    /// Cases for MLIndexingIsolate stop here

    /// Cases for MLComputer start here

    /// MLComputer
    case IsolateOperation.generateFaceThumbnails:
      final imagePath = args['imagePath'] as String;
      final Uint8List imageData = await File(imagePath).readAsBytes();
      final faceBoxesJson = args['faceBoxesList'] as List<Map<String, dynamic>>;
      final List<FaceBox> faceBoxes =
          faceBoxesJson.map((json) => FaceBox.fromJson(json)).toList();
      final List<Uint8List> results = await generateFaceThumbnailsUsingCanvas(
        imageData,
        faceBoxes,
      );
      return List.from(results);

    /// MLComputer
    case IsolateOperation.loadModel:
      final modelName = args['modelName'] as String;
      final modelPath = args['modelPath'] as String;
      final int address = await MlModel.loadModel(
        modelName,
        modelPath,
      );
      return address;

    /// MLComputer
    case IsolateOperation.initializeClipTokenizer:
      final vocabPath = args["vocabPath"] as String;
      await ClipTextTokenizer.instance.init(vocabPath);
      return true;

    /// MLComputer
    case IsolateOperation.runClipText:
      final textEmbedding = await ClipTextEncoder.predict(args);
      return List<double>.from(textEmbedding, growable: false);

    /// Cases for MLComputer end here

    /// Cases for FaceClusteringService start here

    /// FaceClusteringService
    case IsolateOperation.linearIncrementalClustering:
      final ClusteringResult result = runLinearClustering(args);
      return result;

    /// Cases for FaceClusteringService end here
  }
}
mobile/lib/services/isolate_service.dart (new file, 170 lines)
@@ -0,0 +1,170 @@
import 'dart:async';
import 'dart:isolate';

import "package:dart_ui_isolate/dart_ui_isolate.dart";
import "package:flutter/foundation.dart" show kDebugMode;
import "package:logging/logging.dart";
import "package:photos/core/error-reporting/isolate_logging.dart";
import "package:photos/models/base/id.dart";
import "package:photos/services/isolate_functions.dart";
import "package:synchronized/synchronized.dart";

abstract class SuperIsolate {
  Logger get logger;

  Timer? _inactivityTimer;
  final Duration _inactivityDuration = const Duration(seconds: 120);
  int _activeTasks = 0;

  final _initIsolateLock = Lock();
  final _functionLock = Lock();

  bool get isDartUiIsolate;
  bool get shouldAutomaticDispose;
  String get isolateName;

  late dynamic _isolate;
  late ReceivePort _receivePort;
  late SendPort _mainSendPort;

  bool get isIsolateSpawned => _isIsolateSpawned;
  bool _isIsolateSpawned = false;

  Future<void> _initIsolate() async {
    return _initIsolateLock.synchronized(() async {
      if (_isIsolateSpawned) return;

      _receivePort = ReceivePort();

      try {
        _isolate = isDartUiIsolate
            ? await DartUiIsolate.spawn(
                _isolateMain,
                _receivePort.sendPort,
              )
            : await Isolate.spawn(
                _isolateMain,
                _receivePort.sendPort,
                debugName: isolateName,
              );
        _mainSendPort = await _receivePort.first as SendPort;

        if (shouldAutomaticDispose) _resetInactivityTimer();
        logger.info('initIsolate done');
        _isIsolateSpawned = true;
      } catch (e) {
        logger.severe('Could not spawn isolate', e);
        _isIsolateSpawned = false;
      }
    });
  }

  @pragma('vm:entry-point')
  static void _isolateMain(SendPort mainSendPort) async {
    Logger.root.level = kDebugMode ? Level.ALL : Level.INFO;
    final IsolateLogger isolateLogger = IsolateLogger();
    Logger.root.onRecord.listen(isolateLogger.onLogRecordInIsolate);
    final receivePort = ReceivePort();
    mainSendPort.send(receivePort.sendPort);

    receivePort.listen((message) async {
      final taskID = message[0] as String;
      final functionIndex = message[1] as int;
      final function = IsolateOperation.values[functionIndex];
      final args = message[2] as Map<String, dynamic>;
      final sendPort = message[3] as SendPort;

      late final Object data;
      try {
        data = await isolateFunction(function, args);
      } catch (e, stackTrace) {
        data = {
          'error': e.toString(),
          'stackTrace': stackTrace.toString(),
        };
      }
      final logs = List<String>.from(isolateLogger.getLogStringsAndClear());
      sendPort.send({"taskID": taskID, "data": data, "logs": logs});
    });
  }

  /// The common method to run any operation in the isolate.
  /// It sends the [operation] and [args] to [_isolateMain] and waits for the result.
  /// The actual function executed is [isolateFunction].
  Future<dynamic> runInIsolate(
    IsolateOperation operation,
    Map<String, dynamic> args,
  ) async {
    await _initIsolate();
    return _functionLock.synchronized(() async {
      if (shouldAutomaticDispose) _resetInactivityTimer();

      if (postFunctionlockStop(operation)) {
        return null;
      }

      final completer = Completer<dynamic>();
      final answerPort = ReceivePort();

      _activeTasks++;
      final taskID = newIsolateTaskID(operation.name);
      _mainSendPort.send([taskID, operation.index, args, answerPort.sendPort]);

      answerPort.listen((receivedMessage) {
        if (receivedMessage['taskID'] != taskID) {
          logger.severe("Received isolate message with wrong taskID");
          return;
        }
        final logs = receivedMessage['logs'] as List<String>;
        IsolateLogger.handLogStringsToMainLogger(logs);
        final data = receivedMessage['data'];
        if (data is Map && data.containsKey('error')) {
          // Handle the error
          final errorMessage = data['error'];
          final errorStackTrace = data['stackTrace'];
          final exception = Exception(errorMessage);
          final stackTrace = StackTrace.fromString(errorStackTrace);
          completer.completeError(exception, stackTrace);
        } else {
          completer.complete(data);
        }
      });
      _activeTasks--;

      return completer.future;
    });
  }

  bool postFunctionlockStop(IsolateOperation operation) => false;

  /// Resets a timer that kills the isolate after a certain amount of inactivity.
  ///
  /// Should be called after initialization (e.g. inside `init()`) and after every call to the isolate (e.g. inside `runInIsolate()`)
  void _resetInactivityTimer() {
    _inactivityTimer?.cancel();
    _inactivityTimer = Timer(_inactivityDuration, () {
      if (_activeTasks > 0) {
        logger.info('Tasks are still running. Delaying isolate disposal.');
        // Optionally, reschedule the timer to check again later.
        _resetInactivityTimer();
      } else {
        logger.info(
          'Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.',
        );
        _disposeIsolate();
      }
    });
  }

  Future<void> onDispose() async {}

  void _disposeIsolate() async {
    if (!_isIsolateSpawned) return;
    logger.info('Disposing isolate');
    await onDispose();
    _isIsolateSpawned = false;
    _isolate.kill();
    _receivePort.close();
    _inactivityTimer?.cancel();
  }
}
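The wire protocol that `runInIsolate` and `_isolateMain` agree on is a positional list `[taskID, operation.index, args, answerPort]` in one direction and a `{'taskID', 'data', 'logs'}` map back. A self-contained sketch of that round trip (a toy `Op` enum and `workerMain` stand in for `IsolateOperation` and `_isolateMain`):

```dart
import "dart:isolate";

enum Op { echo }

// Worker entry point: same handshake and message shape as _isolateMain.
void workerMain(SendPort mainSendPort) {
  final receivePort = ReceivePort();
  mainSendPort.send(receivePort.sendPort); // handshake: hand our port back
  receivePort.listen((message) {
    final taskID = message[0] as String;
    final op = Op.values[message[1] as int];
    final args = (message[2] as Map).cast<String, dynamic>();
    final answerPort = message[3] as SendPort;
    final data = op == Op.echo ? args['text'] : null;
    // The reply carries the taskID (so the caller can detect mismatched
    // replies) and a batch of serialized log lines (empty in this toy).
    answerPort.send({'taskID': taskID, 'data': data, 'logs': <String>[]});
  });
}

Future<void> main() async {
  final handshake = ReceivePort();
  final isolate = await Isolate.spawn(workerMain, handshake.sendPort);
  final workerPort = await handshake.first as SendPort;

  final answerPort = ReceivePort();
  workerPort.send(
    ['echo_abc123', Op.echo.index, {'text': 'hi'}, answerPort.sendPort],
  );
  final reply = await answerPort.first as Map;
  print('${reply['taskID']}: ${reply['data']}'); // echo_abc123: hi

  isolate.kill(priority: Isolate.immediate);
}
```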
@@ -1,6 +1,5 @@
 import "dart:async";
-import "dart:developer";
 import "dart:isolate";
 import "dart:typed_data" show Uint8List;

 import "package:computer/computer.dart";
@@ -10,10 +9,11 @@ import "package:ml_linalg/dtype.dart";
 import "package:ml_linalg/vector.dart";
 import "package:photos/generated/protos/ente/common/vector.pb.dart";
 import "package:photos/models/base/id.dart";
+import "package:photos/services/isolate_functions.dart";
+import "package:photos/services/isolate_service.dart";
 import "package:photos/services/machine_learning/face_ml/face_clustering/face_db_info_for_clustering.dart";
 import "package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart";
 import "package:photos/services/machine_learning/ml_result.dart";
-import "package:synchronized/synchronized.dart";

 class FaceInfo {
   final String faceID;
@@ -60,26 +60,27 @@ class ClusteringResult {
   }
 }

-class FaceClusteringService {
+class FaceClusteringService extends SuperIsolate {
+  @override
+  Logger get logger => _logger;
   final _logger = Logger("FaceLinearClustering");

   final _computer = Computer.shared();

-  Timer? _inactivityTimer;
-  final Duration _inactivityDuration = const Duration(minutes: 2);
-  int _activeTasks = 0;
-
-  final _initLock = Lock();
-
-  late Isolate _isolate;
-  late ReceivePort _receivePort = ReceivePort();
-  late SendPort _mainSendPort;
-
-  bool isSpawned = false;
   bool isRunning = false;

   static const kRecommendedDistanceThreshold = 0.24;
   static const kConservativeDistanceThreshold = 0.16;

+  @override
+  bool get isDartUiIsolate => false;
+
+  @override
+  String get isolateName => "FaceClusteringIsolate";
+
+  @override
+  bool get shouldAutomaticDispose => true;
+
   // singleton pattern
   FaceClusteringService._privateConstructor();

@@ -88,119 +89,7 @@ class FaceClusteringService {
   static final instance = FaceClusteringService._privateConstructor();
   factory FaceClusteringService() => instance;

-  Future<void> _initIsolate() async {
-    return _initLock.synchronized(() async {
-      if (isSpawned) return;
-
-      _receivePort = ReceivePort();
-
-      try {
-        _isolate = await Isolate.spawn(
-          _isolateMain,
-          _receivePort.sendPort,
-        );
-        _mainSendPort = await _receivePort.first as SendPort;
-        isSpawned = true;
-
-        _resetInactivityTimer();
-      } catch (e) {
-        _logger.severe('Could not spawn isolate', e);
-        isSpawned = false;
-      }
-    });
-  }
-
-  Future<void> _ensureSpawnedIsolate() async {
-    if (!isSpawned) {
-      await _initIsolate();
-    }
-  }
-
-  /// The main execution function of the isolate.
-  static void _isolateMain(SendPort mainSendPort) async {
-    final receivePort = ReceivePort();
-    mainSendPort.send(receivePort.sendPort);
-
-    receivePort.listen((message) async {
-      final functionIndex = message[0] as int;
-      final function = ClusterOperation.values[functionIndex];
-      final args = message[1] as Map<String, dynamic>;
-      final sendPort = message[2] as SendPort;
-
-      try {
-        switch (function) {
-          case ClusterOperation.linearIncrementalClustering:
-            final ClusteringResult result = _runLinearClustering(args);
-            sendPort.send(result);
-            break;
-        }
-      } catch (e, stackTrace) {
-        sendPort
-            .send({'error': e.toString(), 'stackTrace': stackTrace.toString()});
-      }
-    });
-  }
-
-  /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result.
-  Future<dynamic> _runInIsolate(
-    (ClusterOperation, Map<String, dynamic>) message,
-  ) async {
-    await _ensureSpawnedIsolate();
-    _resetInactivityTimer();
-    final completer = Completer<dynamic>();
-    final answerPort = ReceivePort();
-
-    _activeTasks++;
-    _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]);
-
-    answerPort.listen((receivedMessage) {
-      if (receivedMessage is Map && receivedMessage.containsKey('error')) {
-        // Handle the error
-        final errorMessage = receivedMessage['error'];
-        final errorStackTrace = receivedMessage['stackTrace'];
-        final exception = Exception(errorMessage);
-        final stackTrace = StackTrace.fromString(errorStackTrace);
-        _activeTasks--;
-        completer.completeError(exception, stackTrace);
-      } else {
-        _activeTasks--;
-        completer.complete(receivedMessage);
-      }
-    });
-
-    return completer.future;
-  }
-
-  /// Resets a timer that kills the isolate after a certain amount of inactivity.
-  ///
-  /// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`)
-  void _resetInactivityTimer() {
-    _inactivityTimer?.cancel();
-    _inactivityTimer = Timer(_inactivityDuration, () {
-      if (_activeTasks > 0) {
-        _logger.info('Tasks are still running. Delaying isolate disposal.');
-        // Optionally, reschedule the timer to check again later.
-        _resetInactivityTimer();
-      } else {
-        _logger.info(
-          'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.',
-        );
-        _dispose();
-      }
-    });
-  }
-
-  /// Disposes the isolate worker.
-  void _dispose() {
-    if (!isSpawned) return;
-
-    isSpawned = false;
-    _isolate.kill();
-    _receivePort.close();
-    _inactivityTimer?.cancel();
-  }
-
-  /// Runs the clustering algorithm [_runLinearClustering] on the given [input], in an isolate.
+  /// Runs the clustering algorithm [runLinearClustering] on the given [input], in an isolate.
   ///
   /// Returns the clustering result, which is a list of clusters, where each cluster is a list of indices of the dataset.
   Future<ClusteringResult?> predictLinearIsolate(
@@ -232,31 +121,27 @@ class FaceClusteringService {
       final stopwatchClustering = Stopwatch()..start();
-      // final Map<String, int> faceIdToCluster =
-      //     await _runLinearClusteringInComputer(input);
-      final ClusteringResult faceIdToCluster = await _runInIsolate(
-        (
-          ClusterOperation.linearIncrementalClustering,
-          {
-            'input': input,
-            'fileIDToCreationTime': fileIDToCreationTime,
-            'distanceThreshold': distanceThreshold,
-            'conservativeDistanceThreshold': conservativeDistanceThreshold,
-            'useDynamicThreshold': useDynamicThreshold,
-            'offset': offset,
-            'oldClusterSummaries': oldClusterSummaries,
-          }
-        ),
-      );
+      final ClusteringResult faceIdToCluster =
+          await runInIsolate(IsolateOperation.linearIncrementalClustering, {
+        'input': input,
+        'fileIDToCreationTime': fileIDToCreationTime,
+        'distanceThreshold': distanceThreshold,
+        'conservativeDistanceThreshold': conservativeDistanceThreshold,
+        'useDynamicThreshold': useDynamicThreshold,
+        'offset': offset,
+        'oldClusterSummaries': oldClusterSummaries,
+      });
       // return _runLinearClusteringInComputer(input);
       _logger.info(
         'predictLinear Clustering executed in ${stopwatchClustering.elapsed.inSeconds} seconds',
       );

-      isRunning = false;
       return faceIdToCluster;
     } catch (e, stackTrace) {
       _logger.severe('Error while running clustering', e, stackTrace);
-      isRunning = false;
       rethrow;
+    } finally {
+      isRunning = false;
     }
   }

@@ -302,7 +187,7 @@ class FaceClusteringService {
     }
   }

-  /// Runs the clustering algorithm [_runLinearClustering] on the given [input], in computer, without any dynamic thresholding
+  /// Runs the clustering algorithm [runLinearClustering] on the given [input], in computer, without any dynamic thresholding
   Future<ClusteringResult> predictLinearComputer(
     Map<String, Uint8List> input, {
    Map<int, int>? fileIDToCreationTime,
@@ -338,7 +223,7 @@ class FaceClusteringService {
         .toSet();
     final startTime = DateTime.now();
     final faceIdToCluster = await _computer.compute(
-      _runLinearClustering,
+      runLinearClustering,
       param: {
         "input": clusteringInput,
         "fileIDToCreationTime": fileIDToCreationTime,
@@ -407,7 +292,9 @@ class FaceClusteringService {
   }
 }

-ClusteringResult _runLinearClustering(Map args) {
+final _logger = Logger("FaceLinearClustering");
+
+ClusteringResult runLinearClustering(Map args) {
   // final input = args['input'] as Map<String, (int?, Uint8List)>;
   final input = args['input'] as Set<FaceDbInfoForClustering>;
   final fileIDToCreationTime = args['fileIDToCreationTime'] as Map<int, int>?;
@@ -419,8 +306,8 @@ ClusteringResult _runLinearClustering(Map args) {
   final oldClusterSummaries =
       args['oldClusterSummaries'] as Map<String, (Uint8List, int)>?;

-  log(
-    "[ClusterIsolate] ${DateTime.now()} Copied to isolate ${input.length} faces",
+  _logger.info(
+    "Copied to isolate ${input.length} faces",
   );

   // Organize everything into a list of FaceInfo objects
@@ -470,25 +357,20 @@ ClusteringResult _runLinearClustering(Map args) {
     }
   }
   final alreadyClusteredCount = facesWithClusterID.length;
+  final newToClusterCount = facesWithoutClusterID.length;
   final sortedFaceInfos = <FaceInfo>[];
   sortedFaceInfos.addAll(facesWithClusterID);
   sortedFaceInfos.addAll(facesWithoutClusterID);

-  log(
-    "[ClusterIsolate] ${DateTime.now()} Clustering ${facesWithoutClusterID.length} new faces without clusterId, and $alreadyClusteredCount faces with clusterId",
-  );
-
-  // Make sure the first face has a clusterId
-  final int totalFaces = sortedFaceInfos.length;
-  int dynamicThresholdCount = 0;
-
   if (sortedFaceInfos.isEmpty) {
     return ClusteringResult.empty();
   }
+  final int totalFaces = sortedFaceInfos.length;
+  int dynamicThresholdCount = 0;

   // Start actual clustering
-  log(
-    "[ClusterIsolate] ${DateTime.now()} Processing $totalFaces faces in total in this round ${offset != null ? "on top of ${offset + facesWithClusterID.length} earlier processed faces" : ""}",
+  _logger.info(
+    "[ClusterIsolate] ${DateTime.now()} Processing $totalFaces faces ($newToClusterCount new, $alreadyClusteredCount already done) in total in this round ${offset != null ? "on top of ${offset + facesWithClusterID.length} earlier processed faces" : ""}",
   );
   // set current epoch time as clusterID
   String clusterID = newClusterID();
@@ -517,7 +399,7 @@ ClusteringResult _runLinearClustering(Map args) {
       thresholdValue = distanceThreshold;
     }
     if (i % 250 == 0) {
-      log("[ClusterIsolate] ${DateTime.now()} Processed ${offset != null ? i + offset : i} faces");
+      _logger.info("Processed ${offset != null ? i + offset : i} faces");
     }
     // WARNING: The loop below is now O(n^2) so be very careful with anything you put in there!
     for (int j = i - 1; j >= 0; j--) {
@@ -536,8 +418,8 @@ ClusteringResult _runLinearClustering(Map args) {
     if (closestDistance < thresholdValue) {
       if (sortedFaceInfos[closestIdx].clusterId == null) {
         // Ideally this should never happen, but just in case log it
-        log(
-          " [ClusterIsolate] [WARNING] ${DateTime.now()} Found new cluster $clusterID",
+        _logger.severe(
+          "Found new cluster $clusterID, but closest face has no clusterId",
         );
         clusterID = newClusterID();
         sortedFaceInfos[closestIdx].clusterId = clusterID;
@@ -568,12 +450,12 @@ ClusteringResult _runLinearClustering(Map args) {
   }

   stopwatchClustering.stop();
-  log(
-    ' [ClusterIsolate] ${DateTime.now()} Clustering for ${sortedFaceInfos.length} embeddings executed in ${stopwatchClustering.elapsedMilliseconds}ms',
+  _logger.info(
+    'Clustering for ${sortedFaceInfos.length} embeddings executed in ${stopwatchClustering.elapsedMilliseconds}ms',
   );
   if (useDynamicThreshold) {
-    log(
-      "[ClusterIsolate] ${DateTime.now()} Dynamic thresholding: $dynamicThresholdCount faces had a low face score or low blur clarity",
+    _logger.info(
+      "Dynamic thresholding: $dynamicThresholdCount faces had a low face score or low blur clarity",
     );
   }

@@ -838,8 +720,8 @@ Map<String, (Uint8List, int)> _updateClusterSummaries({
       );
     }
   }
-  log(
-    "[ClusterIsolate] ${DateTime.now()} Calculated cluster summaries in ${DateTime.now().difference(calcSummariesStart).inMilliseconds}ms",
+  _logger.info(
+    "Calculated cluster summaries in ${DateTime.now().difference(calcSummariesStart).inMilliseconds}ms",
   );

   return newClusterSummaries;
@@ -1,4 +1,3 @@
-import 'dart:developer' as dev show log;
 import 'dart:math' as math show max, min;

 import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
@@ -33,9 +32,6 @@ List<FaceDetectionRelative> yoloOnnxFilterExtractDetections(
         maxScore = result[4];
       }
     }
-    dev.log(
-      'No face detections found above the minScoreSigmoidThreshold of $minScoreSigmoidThreshold. The max score was $maxScore.',
-    );
   }

   for (final List<double> rawDetection in output) {

@@ -1,5 +1,4 @@
 import "dart:async";
-import "dart:developer" as dev show log;
 import 'dart:typed_data' show ByteData, Float32List;
 import 'dart:ui' as ui show Image;

@@ -55,9 +54,8 @@ class FaceDetectionService extends MlModel {
       'sessionAddress should be valid',
     );

-    final stopwatch = Stopwatch()..start();
+    final startTime = DateTime.now();

-    final stopwatchPreprocessing = Stopwatch()..start();
     final (inputImageList, newSize) =
         await preprocessImageToFloat32ChannelsFirst(
       image,
@@ -67,17 +65,11 @@ class FaceDetectionService extends MlModel {
       requiredHeight: kInputHeight,
       maintainAspectRatio: true,
     );
-    stopwatchPreprocessing.stop();
-    dev.log(
-      'Face detection image preprocessing is finished, in ${stopwatchPreprocessing.elapsedMilliseconds}ms',
-    );
-    _logger.info(
-      'Image decoding and preprocessing is finished, in ${stopwatchPreprocessing.elapsedMilliseconds}ms',
-    );
+    final preprocessingTime = DateTime.now();
+    final preprocessingMs =
+        preprocessingTime.difference(startTime).inMilliseconds;

     // Run inference
-    final stopwatchInterpreter = Stopwatch()..start();
-
     List<List<List<double>>>? nestedResults = [];
     try {
       if (MlModel.usePlatformPlugin) {
@@ -88,23 +80,19 @@ class FaceDetectionService extends MlModel {
           inputImageList,
         ); // [1, 25200, 16]
       }
+      final inferenceTime = DateTime.now();
+      final inferenceMs =
+          inferenceTime.difference(preprocessingTime).inMilliseconds;
+      _logger.info(
+        'Face detection is finished, in ${inferenceTime.difference(startTime).inMilliseconds} ms (preprocessing: $preprocessingMs ms, inference: $inferenceMs ms)',
+      );
     } catch (e, s) {
-      dev.log('Error while running inference', error: e, stackTrace: s);
+      _logger.severe('Error while running inference (PlatformPlugin: ${MlModel.usePlatformPlugin})', e, s);
       throw YOLOFaceInterpreterRunException();
     }
-    stopwatchInterpreter.stop();
     try {
-      _logger.info(
-        'interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms',
-      );
-
       final relativeDetections =
           _yoloPostProcessOutputs(nestedResults!, newSize);
-      stopwatch.stop();
-      _logger.info(
-        'predict() face detection executed in ${stopwatch.elapsedMilliseconds}ms',
-      );

       return relativeDetections;
     } catch (e, s) {
       _logger.severe('Error while post processing', e, s);
@@ -135,9 +123,9 @@ class FaceDetectionService extends MlModel {
           outputs[0]?.value as List<List<List<double>>>; // [1, 25200, 16]
       inputOrt.release();
       runOptions.release();
-      outputs.forEach((element) {
+      for (var element in outputs) {
         element?.release();
-      });
+      }
     }

     return result;
   }

@@ -46,9 +46,11 @@ class FaceEmbeddingService extends MlModel {
       } else {
         return _runFFIBasedPredict(input, sessionAddress);
       }
-    } catch (e) {
-      _logger.info(
-        'MobileFaceNet (PlatformPlugin: $MlModel.usePlatformPlugin)Error while running inference: $e',
+    } catch (e, s) {
+      _logger.severe(
+        'Error while running inference (PlatformPlugin: ${MlModel.usePlatformPlugin})',
+        e,
+        s,
       );
       throw MobileFaceNetInterpreterRunException();
     }
@@ -58,8 +60,6 @@ class FaceEmbeddingService extends MlModel {
     Float32List input,
     int sessionAddress,
   ) {
-    final stopwatch = Stopwatch()..start();
-    _logger.info('MobileFaceNet interpreter.run is called');
     final runOptions = OrtRunOptions();
     final int numberOfFaces = input.length ~/ (kInputSize * kInputSize * 3);
     final inputOrt = OrtValueTensor.createTensorWithDataList(
@@ -76,11 +76,9 @@ class FaceEmbeddingService extends MlModel {
     }
     inputOrt.release();
     runOptions.release();
-    outputs.forEach((element) => element?.release());
-    stopwatch.stop();
-    _logger.info(
-      'MobileFaceNetFFI interpreter.run is finished, in ${stopwatch.elapsedMilliseconds}ms',
-    );
+    for (var element in outputs) {
+      element?.release();
+    }

     return embeddings;
   }

@@ -1,10 +1,7 @@
-import 'package:logging/logging.dart';
 import "package:photos/services/machine_learning/face_ml/face_detection/detection.dart";
 import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';

 class BlurDetectionService {
-  static final _logger = Logger('BlurDetectionService');
-
   // singleton pattern
   BlurDetectionService._privateConstructor();
   static final instance = BlurDetectionService._privateConstructor();
@@ -18,7 +15,6 @@ class BlurDetectionService {
     final List<List<int>> laplacian =
         _applyLaplacian(grayImage, faceDirection: faceDirection);
     final double variance = _calculateVariance(laplacian);
-    _logger.info('Variance: $variance');
     return (variance < threshold, variance);
   }

@@ -1,5 +1,4 @@
 import "dart:async" show unawaited;
-import "dart:developer" as dev show log;
 import "dart:typed_data" show ByteData, Float32List;
 import "dart:ui" show Image;

@@ -16,7 +15,7 @@ import "package:photos/services/machine_learning/ml_result.dart";
 import "package:photos/utils/image_ml_util.dart";

 class FaceRecognitionService {
-  final _logger = Logger("FaceRecognitionService");
+  static final _logger = Logger("FaceRecognitionService");

   // Singleton pattern
   FaceRecognitionService._privateConstructor();
@@ -76,8 +75,6 @@ class FaceRecognitionService {
     int faceEmbeddingAddress,
   ) async {
     final faceResults = <FaceResult>[];

-    final Stopwatch stopwatch = Stopwatch()..start();
+    final startTime = DateTime.now();

     // Get the faces
@@ -89,19 +86,17 @@ class FaceRecognitionService {
       faceDetectionAddress,
       faceResults,
     );
-    dev.log(
-        "${faceDetectionResult.length} faces detected with scores ${faceDetectionResult.map((e) => e.score).toList()}: completed `detectFacesSync` function, in "
-        "${stopwatch.elapsedMilliseconds} ms");
+    final detectFacesTime = DateTime.now();
+    final detectFacesMs = detectFacesTime.difference(startTime).inMilliseconds;

     // If no faces were detected, return a result with no faces. Otherwise, continue.
     if (faceDetectionResult.isEmpty) {
-      dev.log(
-          "No faceDetectionResult, Completed analyzing image with uploadedFileID $enteFileID, in "
-          "${stopwatch.elapsedMilliseconds} ms");
+      _logger.info(
+        "Finished runFacesPipeline with fileID $enteFileID in $detectFacesMs ms (${faceDetectionResult.length} faces, detectFaces: $detectFacesMs ms)",
+      );
       return [];
     }

-    stopwatch.reset();
     // Align the faces
     final Float32List faceAlignmentResult = await _alignFacesSync(
       image,
@@ -109,23 +104,24 @@ class FaceRecognitionService {
       faceDetectionResult,
       faceResults,
     );
-    dev.log("Completed `alignFacesSync` function, in "
-        "${stopwatch.elapsedMilliseconds} ms");
+    final alignFacesTime = DateTime.now();
+    final alignFacesMs =
+        alignFacesTime.difference(detectFacesTime).inMilliseconds;

-    stopwatch.reset();
     // Get the embeddings of the faces
-    final embeddings = await _embedFacesSync(
+    await _embedFacesSync(
       faceAlignmentResult,
       faceEmbeddingAddress,
       faceResults,
     );
-    dev.log("Completed `embedFacesSync` function, in "
-        "${stopwatch.elapsedMilliseconds} ms");
-    stopwatch.stop();
+    final embedFacesTime = DateTime.now();
+    final embedFacesMs =
+        embedFacesTime.difference(alignFacesTime).inMilliseconds;
+    final totalMs = DateTime.now().difference(startTime).inMilliseconds;

-    dev.log("Finished faces pipeline (${embeddings.length} faces) with "
-        "uploadedFileID $enteFileID, in "
-        "${DateTime.now().difference(startTime).inMilliseconds} ms");
+    _logger.info(
+      "Finished runFacesPipeline with fileID $enteFileID in $totalMs ms (${faceDetectionResult.length} faces, detectFaces: $detectFacesMs ms, alignFaces: $alignFacesMs ms, embedFaces: $embedFacesMs ms)",
+    );

     return faceResults;
   }
@@ -160,8 +156,8 @@ class FaceRecognitionService {
       return faces;
     } on YOLOFaceInterpreterRunException {
       throw CouldNotRunFaceDetector();
-    } catch (e) {
-      dev.log('[SEVERE] Face detection failed: $e');
+    } catch (e, s) {
+      _logger.severe('Face detection failed', e, s);
       throw GeneralFaceMlException('Face detection failed: $e');
     }
   }
@@ -184,6 +180,9 @@ class FaceRecognitionService {

       // Store the results
       if (alignmentResults.length != faces.length) {
+        _logger.severe(
+          "The amount of alignment results (${alignmentResults.length}) does not match the number of faces (${faces.length})",
+        );
         throw Exception(
           "The amount of alignment results (${alignmentResults.length}) does not match the number of faces (${faces.length})",
         );
@@ -195,7 +194,7 @@ class FaceRecognitionService {

       return alignedFaces;
     } catch (e, s) {
-      dev.log('[SEVERE] Face alignment failed: $e $s');
+      _logger.severe('Face alignment failed: $e $s');
       throw CouldNotWarpAffine();
     }
   }
@@ -214,6 +213,9 @@ class FaceRecognitionService {

       // Store the results
       if (embeddings.length != faceResults.length) {
+        _logger.severe(
+          "The amount of embeddings (${embeddings.length}) does not match the number of faces (${faceResults.length})",
+        );
         throw Exception(
           "The amount of embeddings (${embeddings.length}) does not match the number of faces (${faceResults.length})",
         );
@@ -225,8 +227,8 @@ class FaceRecognitionService {
       return embeddings;
     } on MobileFaceNetInterpreterRunException {
       throw CouldNotRunFaceEmbeddor();
-    } catch (e) {
-      dev.log('[SEVERE] Face embedding (batch) failed: $e');
+    } catch (e, s) {
+      _logger.severe('Face embedding (batch) failed', e, s);
       throw GeneralFaceMlException('Face embedding (batch) failed: $e');
     }
   }

@@ -1,142 +1,36 @@
 import 'dart:async';
-import "dart:io" show File;
-import 'dart:isolate';
 import 'dart:typed_data' show Uint8List;

-import "package:dart_ui_isolate/dart_ui_isolate.dart";
 import "package:logging/logging.dart";
 import "package:photos/models/ml/face/box.dart";
-import "package:photos/services/machine_learning/ml_model.dart";
+import "package:photos/services/isolate_functions.dart";
+import "package:photos/services/isolate_service.dart";
 import "package:photos/services/machine_learning/semantic_search/clip/clip_text_encoder.dart";
 import "package:photos/services/machine_learning/semantic_search/clip/clip_text_tokenizer.dart";
 import "package:photos/services/remote_assets_service.dart";
-import "package:photos/utils/image_ml_util.dart";
 import "package:synchronized/synchronized.dart";

-enum MLComputerOperation {
-  generateFaceThumbnails,
-  loadModel,
-  initializeClipTokenizer,
-  runClipText,
-}
-
-class MLComputer {
+class MLComputer extends SuperIsolate {
+  @override
+  Logger get logger => _logger;
   final _logger = Logger('MLComputer');

-  final _initLock = Lock();
-  final _functionLock = Lock();
   final _initModelLock = Lock();

-  late ReceivePort _receivePort = ReceivePort();
-  late SendPort _mainSendPort;
+  @override
+  bool get isDartUiIsolate => true;

-  bool isSpawned = false;
+  @override
+  String get isolateName => "MLComputerIsolate";
+
+  @override
+  bool get shouldAutomaticDispose => false;

   // Singleton pattern
   MLComputer._privateConstructor();
   static final MLComputer instance = MLComputer._privateConstructor();
   factory MLComputer() => instance;

-  Future<void> _init() async {
-    return _initLock.synchronized(() async {
-      if (isSpawned) return;
-
-      _receivePort = ReceivePort();
-
-      try {
-        await DartUiIsolate.spawn(
-          _isolateMain,
-          _receivePort.sendPort,
-        );
-        _mainSendPort = await _receivePort.first as SendPort;
-        isSpawned = true;
-      } catch (e) {
-        _logger.severe('Could not spawn isolate', e);
-        isSpawned = false;
-      }
-    });
-  }
-
-  @pragma('vm:entry-point')
-  static void _isolateMain(SendPort mainSendPort) async {
-    final receivePort = ReceivePort();
-    mainSendPort.send(receivePort.sendPort);
-
-    receivePort.listen((message) async {
-      final functionIndex = message[0] as int;
-      final function = MLComputerOperation.values[functionIndex];
-      final args = message[1] as Map<String, dynamic>;
-      final sendPort = message[2] as SendPort;
-
-      try {
-        switch (function) {
-          case MLComputerOperation.generateFaceThumbnails:
-            final imagePath = args['imagePath'] as String;
-            final Uint8List imageData = await File(imagePath).readAsBytes();
-            final faceBoxesJson =
-                args['faceBoxesList'] as List<Map<String, dynamic>>;
-            final List<FaceBox> faceBoxes =
-                faceBoxesJson.map((json) => FaceBox.fromJson(json)).toList();
-            final List<Uint8List> results =
-                await generateFaceThumbnailsUsingCanvas(
-              imageData,
-              faceBoxes,
-            );
-            sendPort.send(List.from(results));
-          case MLComputerOperation.loadModel:
-            final modelName = args['modelName'] as String;
-            final modelPath = args['modelPath'] as String;
-            final int address = await MlModel.loadModel(
-              modelName,
-              modelPath,
-            );
-            sendPort.send(address);
-            break;
-          case MLComputerOperation.initializeClipTokenizer:
-            final vocabPath = args["vocabPath"] as String;
-            await ClipTextTokenizer.instance.init(vocabPath);
-            sendPort.send(true);
-            break;
-          case MLComputerOperation.runClipText:
-            final textEmbedding = await ClipTextEncoder.predict(args);
-            sendPort.send(List<double>.from(textEmbedding, growable: false));
-            break;
-        }
-      } catch (e, stackTrace) {
-        sendPort
-            .send({'error': e.toString(), 'stackTrace': stackTrace.toString()});
-      }
-    });
-  }
-
-  /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result.
-  Future<dynamic> _runInIsolate(
-    (MLComputerOperation, Map<String, dynamic>) message,
-  ) async {
-    await _init();
-    return _functionLock.synchronized(() async {
-      final completer = Completer<dynamic>();
-      final answerPort = ReceivePort();
-
-      _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]);
-
-      answerPort.listen((receivedMessage) {
-        if (receivedMessage is Map && receivedMessage.containsKey('error')) {
-          // Handle the error
-          final errorMessage = receivedMessage['error'];
-          final errorStackTrace = receivedMessage['stackTrace'];
-          final exception = Exception(errorMessage);
-          final stackTrace = StackTrace.fromString(errorStackTrace);
-          completer.completeError(exception, stackTrace);
-        } else {
-          completer.complete(receivedMessage);
-        }
-      });
-
-      return completer.future;
-    });
-  }
-
   /// Generates face thumbnails for all [faceBoxes] in [imageData].
   ///
   /// Uses [generateFaceThumbnailsUsingCanvas] inside the isolate.
@@ -146,14 +40,12 @@ class MLComputer {
   ) async {
     final List<Map<String, dynamic>> faceBoxesJson =
         faceBoxes.map((box) => box.toJson()).toList();
-    return await _runInIsolate(
-      (
-        MLComputerOperation.generateFaceThumbnails,
-        {
-          'imagePath': imagePath,
-          'faceBoxesList': faceBoxesJson,
-        },
-      ),
+    return await runInIsolate(
+      IsolateOperation.generateFaceThumbnails,
+      {
+        'imagePath': imagePath,
+        'faceBoxesList': faceBoxesJson,
+      },
     ).then((value) => value.cast<Uint8List>());
   }

@@ -161,15 +53,10 @@ class MLComputer {
     try {
       await _ensureLoadedClipTextModel();
       final int clipAddress = ClipTextEncoder.instance.sessionAddress;
-      final textEmbedding = await _runInIsolate(
-        (
-          MLComputerOperation.runClipText,
-          {
-            "text": query,
-            "address": clipAddress,
-          }
-        ),
-      ) as List<double>;
+      final textEmbedding = await runInIsolate(IsolateOperation.runClipText, {
+        "text": query,
+        "address": clipAddress,
+      }) as List<double>;
       return textEmbedding;
     } catch (e, s) {
       _logger.severe("Could not run clip text in isolate", e, s);
@@ -186,27 +73,24 @@ class MLComputer {
           ClipTextEncoder.instance.vocabRemotePath;
       final String tokenizerVocabPath = await RemoteAssetsService.instance
           .getAssetPath(tokenizerRemotePath);
-      await _runInIsolate(
-        (
-          MLComputerOperation.initializeClipTokenizer,
-          {'vocabPath': tokenizerVocabPath},
-        ),
+      await runInIsolate(
+        IsolateOperation.initializeClipTokenizer,
+        {'vocabPath': tokenizerVocabPath},
       );

       // Load ClipText model
       final String modelName = ClipTextEncoder.instance.modelName;
-      final String? modelPath = await ClipTextEncoder.instance.downloadModelSafe();
+      final String? modelPath =
+          await ClipTextEncoder.instance.downloadModelSafe();
       if (modelPath == null) {
         throw Exception("Could not download clip text model, no wifi");
       }
-      final address = await _runInIsolate(
-        (
-          MLComputerOperation.loadModel,
-          {
-            'modelName': modelName,
-            'modelPath': modelPath,
-          },
-        ),
+      final address = await runInIsolate(
+        IsolateOperation.loadModel,
+        {
+          'modelName': modelName,
+          'modelPath': modelPath,
+        },
       ) as int;
       ClipTextEncoder.instance.storeSessionAddress(address);
     } catch (e, s) {

@@ -1,36 +1,43 @@
 import "dart:async";
-import "dart:isolate";
-
-import "package:dart_ui_isolate/dart_ui_isolate.dart";
-import "package:flutter/foundation.dart" show debugPrint, kDebugMode;
+import "package:flutter/foundation.dart" show debugPrint;
 import "package:logging/logging.dart";
 import "package:photos/core/error-reporting/super_logging.dart";
+import "package:photos/services/isolate_functions.dart";
+import "package:photos/services/isolate_service.dart";
 import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_service.dart';
 import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_service.dart';
 import "package:photos/services/machine_learning/ml_model.dart";
 import "package:photos/services/machine_learning/ml_models_overview.dart";
 import 'package:photos/services/machine_learning/ml_result.dart';
 import "package:photos/services/machine_learning/semantic_search/clip/clip_image_encoder.dart";
 import "package:photos/utils/ml_util.dart";
-import "package:synchronized/synchronized.dart";

-enum MLIndexingOperation { analyzeImage, loadModels, releaseModels }
-
-class MLIndexingIsolate {
-  static final _logger = Logger("MLIndexingIsolate");
+class MLIndexingIsolate extends SuperIsolate {
+  @override
+  Logger get logger => _logger;
+  final _logger = Logger("MLIndexingIsolate");

-  Timer? _inactivityTimer;
-  final Duration _inactivityDuration = const Duration(seconds: 120);
-  int _activeTasks = 0;
+  @override
+  bool get isDartUiIsolate => true;

-  final _functionLock = Lock();
-  final _initIsolateLock = Lock();
+  @override
+  String get isolateName => "MLIndexingIsolate";

-  late DartUiIsolate _isolate;
-  late ReceivePort _receivePort = ReceivePort();
-  late SendPort _mainSendPort;
+  @override
+  bool get shouldAutomaticDispose => true;

-  bool _isIsolateSpawned = false;
+  @override
+  Future<void> onDispose() async {
+    await _releaseModels();
+  }
+
+  @override
+  bool postFunctionlockStop(IsolateOperation operation) {
+    if (operation == IsolateOperation.analyzeImage &&
+        shouldPauseIndexingAndClustering) {
+      return true;
+    }
+    return false;
+  }

   bool shouldPauseIndexingAndClustering = false;

@@ -39,178 +46,24 @@ class MLIndexingIsolate {
   static final instance = MLIndexingIsolate._privateConstructor();
   factory MLIndexingIsolate() => instance;

-  Future<void> _initIsolate() async {
-    return _initIsolateLock.synchronized(() async {
-      if (_isIsolateSpawned) return;
-      _logger.info("initIsolate called");
-
-      _receivePort = ReceivePort();
-
-      try {
-        _isolate = await DartUiIsolate.spawn(
-          _isolateMain,
-          _receivePort.sendPort,
-        );
-        _mainSendPort = await _receivePort.first as SendPort;
-        _isIsolateSpawned = true;
-
-        _resetInactivityTimer();
-        _logger.info('initIsolate done');
-      } catch (e) {
-        _logger.severe('Could not spawn isolate', e);
-        _isIsolateSpawned = false;
-      }
-    });
-  }
-
-  /// The main execution function of the isolate.
-  @pragma('vm:entry-point')
-  static void _isolateMain(SendPort mainSendPort) async {
-    Logger.root.level = kDebugMode ? Level.ALL : Level.INFO;
-    Logger.root.onRecord.listen((LogRecord rec) {
-      debugPrint('[MLIsolate] ${rec.toPrettyString()}');
-    });
-    final receivePort = ReceivePort();
-    mainSendPort.send(receivePort.sendPort);
-    receivePort.listen((message) async {
-      final functionIndex = message[0] as int;
-      final function = MLIndexingOperation.values[functionIndex];
-      final args = message[1] as Map<String, dynamic>;
-      final sendPort = message[2] as SendPort;
-
-      try {
-        switch (function) {
-          case MLIndexingOperation.analyzeImage:
-            final time = DateTime.now();
-            final MLResult result = await analyzeImageStatic(args);
-            _logger.info(
-              "`analyzeImageSync` function executed in ${DateTime.now().difference(time).inMilliseconds} ms",
-            );
-            sendPort.send(result.toJsonString());
-            break;
-          case MLIndexingOperation.loadModels:
-            final modelNames = args['modelNames'] as List<String>;
-            final modelPaths = args['modelPaths'] as List<String>;
-            final addresses = <int>[];
-            for (int i = 0; i < modelNames.length; i++) {
-              final int address = await MlModel.loadModel(
-                modelNames[i],
-                modelPaths[i],
-              );
-              addresses.add(address);
-            }
-            sendPort.send(List<int>.from(addresses, growable: false));
-            break;
-          case MLIndexingOperation.releaseModels:
-            final modelNames = args['modelNames'] as List<String>;
-            final modelAddresses = args['modelAddresses'] as List<int>;
-            for (int i = 0; i < modelNames.length; i++) {
-              await MlModel.releaseModel(
-                modelNames[i],
-                modelAddresses[i],
-              );
-            }
-            sendPort.send(true);
-            break;
-        }
-      } catch (e, s) {
-        _logger.severe("Error in FaceML isolate", e, s);
-        sendPort.send({'error': e.toString(), 'stackTrace': s.toString()});
-      }
-    });
-  }
-
-  /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result.
-  Future<dynamic> _runInIsolate(
-    (MLIndexingOperation, Map<String, dynamic>) message,
-  ) async {
-    await _initIsolate();
-    return _functionLock.synchronized(() async {
-      _resetInactivityTimer();
-
-      if (message.$1 == MLIndexingOperation.analyzeImage &&
-          shouldPauseIndexingAndClustering) {
-        return null;
-      }
-
-      final completer = Completer<dynamic>();
-      final answerPort = ReceivePort();
-
-      _activeTasks++;
-      _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]);
-
-      answerPort.listen((receivedMessage) {
-        if (receivedMessage is Map && receivedMessage.containsKey('error')) {
-          // Handle the error
-          final errorMessage = receivedMessage['error'];
-          final errorStackTrace = receivedMessage['stackTrace'];
-          final exception = Exception(errorMessage);
-          final stackTrace = StackTrace.fromString(errorStackTrace);
-          completer.completeError(exception, stackTrace);
-        } else {
-          completer.complete(receivedMessage);
-        }
-      });
-      _activeTasks--;
-
-      return completer.future;
-    });
-  }
-
-  /// Resets a timer that kills the isolate after a certain amount of inactivity.
-  ///
-  /// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`)
-  void _resetInactivityTimer() {
-    _inactivityTimer?.cancel();
-    _inactivityTimer = Timer(_inactivityDuration, () {
-      if (_activeTasks > 0) {
-        _logger.info('Tasks are still running. Delaying isolate disposal.');
-        // Optionally, reschedule the timer to check again later.
-        _resetInactivityTimer();
-      } else {
-        _logger.info(
-          'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.',
-        );
-        _dispose();
-      }
-    });
-  }
-
-  void _dispose() async {
-    if (!_isIsolateSpawned) return;
-    _logger.info('Disposing isolate and models');
-    await _releaseModels();
-    _isIsolateSpawned = false;
-    _isolate.kill();
-    _receivePort.close();
-    _inactivityTimer?.cancel();
-  }
-
-  /// Analyzes the given image data by running the full pipeline for faces, using [_analyzeImageSync] in the isolate.
+  /// Analyzes the given image data by running the full pipeline for faces, using [analyzeImageStatic] in the isolate.
   Future<MLResult?> analyzeImage(
     FileMLInstruction instruction,
     String filePath,
   ) async {
     final Stopwatch stopwatch = Stopwatch()..start();
     late MLResult result;

     try {
-      final resultJsonString = await _runInIsolate(
-        (
-          MLIndexingOperation.analyzeImage,
-          {
-            "enteFileID": instruction.file.uploadedFileID ?? -1,
-            "filePath": filePath,
-            "runFaces": instruction.shouldRunFaces,
-            "runClip": instruction.shouldRunClip,
-            "faceDetectionAddress":
-                FaceDetectionService.instance.sessionAddress,
-            "faceEmbeddingAddress":
-                FaceEmbeddingService.instance.sessionAddress,
-            "clipImageAddress": ClipImageEncoder.instance.sessionAddress,
-          }
-        ),
-      ) as String?;
+      final resultJsonString =
+          await runInIsolate(IsolateOperation.analyzeImage, {
+        "enteFileID": instruction.file.uploadedFileID ?? -1,
+        "filePath": filePath,
+        "runFaces": instruction.shouldRunFaces,
+        "runClip": instruction.shouldRunClip,
+        "faceDetectionAddress": FaceDetectionService.instance.sessionAddress,
+        "faceEmbeddingAddress": FaceEmbeddingService.instance.sessionAddress,
+        "clipImageAddress": ClipImageEncoder.instance.sessionAddress,
+      }) as String?;
       if (resultJsonString == null) {
         if (!shouldPauseIndexingAndClustering) {
           _logger.severe('Analyzing image in isolate is giving back null');
@@ -225,15 +78,10 @@ class MLIndexingIsolate {
|
||||
s,
|
||||
);
|
||||
debugPrint(
|
||||
"This image with ID ${instruction.file.uploadedFileID} has name ${instruction.file.displayName}.",
|
||||
"This image with fileID ${instruction.file.uploadedFileID} has name ${instruction.file.displayName}.",
|
||||
);
|
||||
rethrow;
|
||||
}
|
||||
stopwatch.stop();
|
||||
_logger.info(
|
||||
"Finished Analyze image with uploadedFileID ${instruction.file.uploadedFileID}, in "
|
||||
"${stopwatch.elapsedMilliseconds} ms (including time waiting for inference engine availability)",
|
||||
);
|
||||
|
||||
return result;
|
||||
}
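The error handling above is the receiving half of a protocol: exceptions cannot cross isolate boundaries, so the worker catches them and ships them as a map with 'error' and 'stackTrace' keys, which the listener in _runInIsolate turns back into a completeError. The sending half is not part of this diff; a plausible sketch (doWork and handleRequest are illustrative stand-ins):

import "dart:isolate";

Future<dynamic> doWork(dynamic payload) async => payload; // stand-in operation

Future<void> handleRequest(dynamic payload, SendPort answerPort) async {
  try {
    answerPort.send(await doWork(payload));
  } catch (e, s) {
    // Serialize the failure, matching the 'error'/'stackTrace' keys
    // that _runInIsolate checks for on the receiving side.
    answerPort.send({'error': e.toString(), 'stackTrace': s.toString()});
  }
}
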
@@ -265,15 +113,11 @@ class MLIndexingIsolate {
    }

    try {
      final addresses = await _runInIsolate(
        (
          MLIndexingOperation.loadModels,
          {
            "modelNames": modelNames,
            "modelPaths": modelPaths,
          }
        ),
      ) as List<int>;
      final addresses =
          await runInIsolate(IsolateOperation.loadIndexingModels, {
        "modelNames": modelNames,
        "modelPaths": modelPaths,
      }) as List<int>;
      for (int i = 0; i < models.length; i++) {
        final model = models[i].model;
        final address = addresses[i];
@@ -300,15 +144,10 @@ class MLIndexingIsolate {
    }
    if (modelNames.isEmpty) return;
    try {
      await _runInIsolate(
        (
          MLIndexingOperation.releaseModels,
          {
            "modelNames": modelNames,
            "modelAddresses": modelAddresses,
          }
        ),
      );
      await runInIsolate(IsolateOperation.releaseIndexingModels, {
        "modelNames": modelNames,
        "modelAddresses": modelAddresses,
      });
      for (final model in models) {
        model.model.releaseSessionAddress();
      }

@@ -9,7 +9,7 @@ import "package:photos/utils/network_util.dart";
import "package:synchronized/synchronized.dart";

abstract class MlModel {
  static final Logger isolateLogger = Logger("MlModelInIsolate");
  static final Logger isolateLogger = Logger("MlModel");
  Logger get logger;

  String get kModelBucketEndpoint => "https://models.ente.io/";
@@ -95,10 +95,28 @@ abstract class MlModel {
    String modelName,
    String modelPath,
  ) async {
    if (usePlatformPlugin) {
      return await _loadModelWithPlatformPlugin(modelName, modelPath);
    } else {
      return await _loadModelWithFFI(modelName, modelPath);
    isolateLogger
        .info('Start loading $modelName (platformPlugin: $usePlatformPlugin)');
    final time = DateTime.now();
    try {
      late int result;
      if (usePlatformPlugin) {
        result = await _loadModelWithPlatformPlugin(modelName, modelPath);
      } else {
        result = await _loadModelWithFFI(modelName, modelPath);
      }
      final timeMs = DateTime.now().difference(time).inMilliseconds;
      isolateLogger.info(
        "$modelName model loaded in $timeMs ms (platformPlugin: $usePlatformPlugin)",
      );
      return result;
    } catch (e, s) {
      isolateLogger.severe(
        "Failed to load model $modelName (platformPlugin: $usePlatformPlugin)",
        e,
        s,
      );
      rethrow;
    }
  }
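Since loadModel now logs and rethrows instead of swallowing failures, a caller that wants a soft failure has to catch explicitly. A hedged sketch, assuming loadModel is the static method shown above (tryLoadModel is an illustrative helper, not part of the codebase):

// Hypothetical helper: returns null instead of throwing when loading fails.
Future<int?> tryLoadModel(String modelName, String modelPath) async {
  try {
    return await MlModel.loadModel(modelName, modelPath);
  } catch (_) {
    // loadModel has already logged the failure before rethrowing.
    return null;
  }
}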

@@ -106,18 +124,12 @@ abstract class MlModel {
    String modelName,
    String modelPath,
  ) async {
    final startTime = DateTime.now();
    isolateLogger.info('Initializing $modelName with EntePlugin');
    final OnnxDart plugin = OnnxDart();
    final bool? initResult = await plugin.init(modelName, modelPath);
    if (initResult == null || !initResult) {
      isolateLogger.severe("Failed to initialize $modelName with EntePlugin.");
      throw Exception("Failed to initialize $modelName with EntePlugin.");
    }
    final endTime = DateTime.now();
    isolateLogger.info(
      "$modelName loaded via EntePlugin in ${endTime.difference(startTime).inMilliseconds}ms",
    );
    return 0;
  }

@@ -125,10 +137,8 @@ abstract class MlModel {
    String modelName,
    String modelPath,
  ) async {
    isolateLogger.info('Initializing $modelName with FFI');
    ONNXEnvFFI.instance.initONNX(modelName);
    try {
      final startTime = DateTime.now();
      final sessionOptions = OrtSessionOptions()
        ..setInterOpNumThreads(1)
        ..setIntraOpNumThreads(1)
@@ -136,21 +146,26 @@ abstract class MlModel {
          GraphOptimizationLevel.ortEnableAll,
        );
      final session = OrtSession.fromFile(File(modelPath), sessionOptions);
      final endTime = DateTime.now();
      isolateLogger.info(
        "$modelName loaded with FFI, took: ${endTime.difference(startTime).inMilliseconds}ms",
      );
      return session.address;
    } catch (e) {
    } catch (e, s) {
      isolateLogger.severe("Failed to load model $modelName with FFI", e, s);
      rethrow;
    }
  }

  static Future<void> releaseModel(String modelName, int sessionAddress) async {
    if (usePlatformPlugin) {
      await _releaseModelWithPlatformPlugin(modelName);
    } else {
      await _releaseModelWithFFI(modelName, sessionAddress);
    try {
      if (usePlatformPlugin) {
        await _releaseModelWithPlatformPlugin(modelName);
      } else {
        await _releaseModelWithFFI(modelName, sessionAddress);
      }
    } catch (e, s) {
      isolateLogger.severe(
        "Failed to release model $modelName (platformPlugin: $usePlatformPlugin)",
        e,
        s,
      );
    }
  }

@@ -158,7 +173,6 @@ abstract class MlModel {
    final OnnxDart plugin = OnnxDart();
    final bool? initResult = await plugin.release(modelName);
    if (initResult == null || !initResult) {
      isolateLogger.severe("Failed to release $modelName with PlatformPlugin.");
      throw Exception("Failed to release $modelName with PlatformPlugin.");
    }
  }

@@ -385,9 +385,6 @@ class MLService {
  }

  Future<bool> processImage(FileMLInstruction instruction) async {
    _logger.info(
      "`processImage` start processing image with uploadedFileID: ${instruction.file.uploadedFileID}",
    );
    bool actuallyRanML = false;

    try {
@@ -420,7 +417,6 @@
      if (result.facesRan) {
        if (result.faces!.isEmpty) {
          faces.add(Face.empty(result.fileId));
          _logger.info("no face detected, storing empty for ${result.fileId}");
        }
        if (result.faces!.isNotEmpty) {
          for (int i = 0; i < result.faces!.length; ++i) {
@@ -432,7 +428,6 @@
            ),
          );
        }
        _logger.info("storing ${faces.length} faces for ${result.fileId}");
      }
      dataEntity.putFace(
        RemoteFaceEmbedding(
@@ -459,7 +454,7 @@
        instruction.file,
        dataEntity,
      );
      _logger.info("Results for file ${result.fileId} stored on remote");
      _logger.info("ML results for fileID ${result.fileId} stored on remote");
      // Storing results locally
      if (result.facesRan) await MLDataDB.instance.bulkInsertFaces(faces);
      if (result.clipRan) {
@@ -467,7 +462,7 @@
          result.clip!,
        );
      }
      _logger.info("Results for file ${result.fileId} stored locally");
      _logger.info("ML results for fileID ${result.fileId} stored locally");
      return actuallyRanML;
    } catch (e, s) {
      final String errorString = e.toString();
@@ -480,7 +475,7 @@
          errorString.contains('FileSizeTooLargeForMobileIndexing');
      if (acceptedIssue) {
        _logger.severe(
          '$errorString with ID ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size), storing empty results so indexing does not get stuck',
          '$errorString for fileID ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size), storing empty results so indexing does not get stuck',
          e,
          s,
        );
@@ -493,7 +488,7 @@
        return true;
      }
      _logger.severe(
        "Failed to index file with ID: ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size). Not storing any results locally, which means it will be automatically retried later.",
        "Failed to index file for fileID ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size). Not storing any results locally, which means it will be automatically retried later.",
        e,
        s,
      );

@@ -4,7 +4,6 @@ import "dart:ui" show Image;
import "package:logging/logging.dart";
import "package:onnx_dart/onnx_dart.dart";
import "package:onnxruntime/onnxruntime.dart";
import "package:photos/extensions/stop_watch.dart";
import "package:photos/services/machine_learning/ml_model.dart";
import "package:photos/utils/image_ml_util.dart";
import "package:photos/utils/ml_util.dart";
@@ -31,21 +30,42 @@ class ClipImageEncoder extends MlModel {
  static Future<List<double>> predict(
    Image image,
    ByteData imageByteData,
    int sessionAddress,
  ) async {
    int sessionAddress, [
    int? enteFileID,
  ]) async {
    final startTime = DateTime.now();
    final inputList = await preprocessImageClip(image, imageByteData);
    if (MlModel.usePlatformPlugin) {
      return await _runPlatformPluginPredict(inputList);
    } else {
      return _runFFIBasedPredict(inputList, sessionAddress);
    final preprocessingTime = DateTime.now();
    final preprocessingMs =
        preprocessingTime.difference(startTime).inMilliseconds;
    late List<double> result;
    try {
      if (MlModel.usePlatformPlugin) {
        result = await _runPlatformPluginPredict(inputList);
      } else {
        result = _runFFIBasedPredict(inputList, sessionAddress);
      }
    } catch (e, stackTrace) {
      _logger.severe(
        "Clip image inference failed${enteFileID != null ? " with fileID $enteFileID" : ""} (PlatformPlugin: ${MlModel.usePlatformPlugin})",
        e,
        stackTrace,
      );
      rethrow;
    }
    final inferTime = DateTime.now();
    final inferenceMs = inferTime.difference(preprocessingTime).inMilliseconds;
    final totalMs = inferTime.difference(startTime).inMilliseconds;
    _logger.info(
      "Clip image predict took $totalMs ms${enteFileID != null ? " with fileID $enteFileID" : ""} (inference: $inferenceMs ms, preprocessing: $preprocessingMs ms)",
    );
    return result;
  }

  static List<double> _runFFIBasedPredict(
    Float32List inputList,
    int sessionAddress,
  ) {
    final w = EnteWatch("ClipImageEncoder._runFFIBasedPredict")..start();
    final inputOrt =
        OrtValueTensor.createTensorWithDataList(inputList, [1, 3, 256, 256]);
    final inputs = {'input': inputOrt};
@@ -59,14 +79,12 @@ class ClipImageEncoder extends MlModel {
      element?.release();
    }
    normalizeEmbedding(embedding);
    w.stopWithLog("done");
    return embedding;
  }
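Both predict paths finish with normalizeEmbedding before returning. That helper is not part of this diff; assuming it is a standard in-place L2 normalization (so that cosine similarity later reduces to a plain dot product), it would look roughly like:

import "dart:math" show sqrt;

// Sketch of an in-place L2 normalization; the real helper is imported above.
void normalizeEmbedding(List<double> embedding) {
  double norm = 0;
  for (final v in embedding) {
    norm += v * v;
  }
  norm = sqrt(norm);
  if (norm == 0) return; // degenerate embedding: avoid division by zero
  for (int i = 0; i < embedding.length; i++) {
    embedding[i] /= norm;
  }
}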

  static Future<List<double>> _runPlatformPluginPredict(
    Float32List inputImageList,
  ) async {
    final w = EnteWatch("ClipImageEncoder._runEntePlugin")..start();
    final OnnxDart plugin = OnnxDart();
    final result = await plugin.predict(
      inputImageList,
@@ -74,7 +92,6 @@ class ClipImageEncoder extends MlModel {
    );
    final List<double> embedding = result!.sublist(0, 512);
    normalizeEmbedding(embedding);
    w.stopWithLog("done");
    return embedding;
  }
}

@@ -34,12 +34,32 @@ class ClipTextEncoder extends MlModel {
  static Future<List<double>> predict(Map args) async {
    final text = args["text"] as String;
    final address = args["address"] as int;
    final startTime = DateTime.now();
    final List<int> tokenize = await ClipTextTokenizer.instance.tokenize(text);
    final int32list = Int32List.fromList(tokenize);
    if (MlModel.usePlatformPlugin) {
      return await _runPlatformPluginPredict(int32list);
    } else {
      return _runFFIBasedPredict(int32list, address);
    final tokenizeTime = DateTime.now();
    final tokenizeMs = tokenizeTime.difference(startTime).inMilliseconds;
    try {
      late List<double> embedding;
      if (MlModel.usePlatformPlugin) {
        embedding = await _runPlatformPluginPredict(int32list);
      } else {
        embedding = _runFFIBasedPredict(int32list, address);
      }
      final inferTime = DateTime.now();
      final inferMs = inferTime.difference(tokenizeTime).inMilliseconds;
      final totalMs = inferTime.difference(startTime).inMilliseconds;
      _logger.info(
        "Clip text predict took $totalMs ms (predict: $inferMs ms, tokenize: $tokenizeMs ms) for text: '$text'",
      );
      return embedding;
    } catch (e, s) {
      _logger.severe(
        "Clip text inference failed (PlatformPlugin: ${MlModel.usePlatformPlugin})",
        e,
        s,
      );
      rethrow;
    }
  }

@@ -56,7 +76,9 @@
    final embedding = (outputs[0]?.value as List<List<double>>)[0];
    inputOrt.release();
    runOptions.release();
    outputs.forEach((element) => element?.release());
    for (var element in outputs) {
      element?.release();
    }
    normalizeEmbedding(embedding);
    return embedding;
  }
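With both image and text embeddings L2-normalized, ranking images against a text query comes down to a dot product per pair. A minimal sketch of how such embeddings are typically compared (not code from this commit):

// Cosine similarity of two already-normalized embeddings.
double similarity(List<double> a, List<double> b) {
  assert(a.length == b.length);
  double dot = 0;
  for (int i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
  }
  return dot;
}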

@@ -3,9 +3,12 @@ import "dart:io" show File;
import "dart:math";

import "package:html_unescape/html_unescape.dart";
import "package:logging/logging.dart";
import "package:tuple/tuple.dart";

class ClipTextTokenizer {
  final _logger = Logger("ClipTextTokenizer");

  static const int totalTokens = 77;

  late String vocabulary;
@@ -75,6 +78,7 @@ class ClipTextTokenizer {

    sot = encoder['<|startoftext|>']!;
    eot = encoder['<|endoftext|>']!;
    _logger.info("Clip text tokenizer initialized");
    _isInitialized = true;
  }


@@ -24,7 +24,7 @@ import "package:photos/services/machine_learning/semantic_search/clip/clip_image
import "package:shared_preferences/shared_preferences.dart";

class SemanticSearchService {
  final _logger = Logger("SemanticSearchService");
  static final _logger = Logger("SemanticSearchService");
  SemanticSearchService._privateConstructor();

  static final SemanticSearchService instance =
@@ -293,18 +293,15 @@ class SemanticSearchService {
    ByteData imageByteData,
    int clipImageAddress,
  ) async {
    final startTime = DateTime.now();
    final embedding = await ClipImageEncoder.predict(
      image,
      imageByteData,
      clipImageAddress,
      enteFileID,
    );

    final clipResult = ClipResult(fileID: enteFileID, embedding: embedding);

    dev.log('Finished running ClipImage for $enteFileID in '
        '${DateTime.now().difference(startTime).inMilliseconds} ms');

    return clipResult;
  }
}

@@ -29,26 +29,27 @@ Future<(Image, ByteData)> decodeImageFromPath(String imagePath) async {
    return (image, imageByteData);
  } catch (e, s) {
    final format = imagePath.split('.').last;
    if ((format == 'heic' || format == 'heif') && Platform.isAndroid) {
      _logger.info('Cannot decode $format, converting to JPG format');
    if (Platform.isAndroid) {
      _logger.info('Cannot decode $format, converting to JPG on Android');
      final String? jpgPath =
          await HeifConverter.convert(imagePath, format: 'jpg');
      if (jpgPath == null) {
        _logger.severe('Error converting $format to jpg:', e, s);
        throw Exception('InvalidImageFormatException: Error decoding image of format $format');
      if (jpgPath != null) {
        _logger.info('Conversion successful, decoding JPG');
        final imageData = await File(jpgPath).readAsBytes();
        final image = await decodeImageFromData(imageData);
        final ByteData imageByteData = await getByteDataFromImage(image);
        return (image, imageByteData);
      }
      final imageData = await File(jpgPath).readAsBytes();
      final image = await decodeImageFromData(imageData);
      final ByteData imageByteData = await getByteDataFromImage(image);
      return (image, imageByteData);
    } else {
      _logger.severe(
        'Error decoding image of format $format:',
        e,
        s,
      );
      throw Exception('InvalidImageFormatException: Error decoding image of format $format');
      _logger.info('Unable to convert $format to JPG');
    }
    _logger.severe(
      'Error decoding image of format $format (Android: ${Platform.isAndroid})',
      e,
      s,
    );
    throw Exception(
      'InvalidImageFormatException: Error decoding image of format $format',
    );
  }
}
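After this change a caller of decodeImageFromPath sees a single failure mode: it either gets a decoded (Image, ByteData) record, possibly via the JPG round-trip on Android, or an exception whose message starts with 'InvalidImageFormatException'. A hedged usage sketch (tryDecode is illustrative, not part of the codebase):

Future<void> tryDecode(String path) async {
  try {
    final (image, byteData) = await decodeImageFromPath(path);
    print('Decoded ${image.width}x${image.height}, ${byteData.lengthInBytes} bytes');
  } catch (e) {
    // Presumably matched upstream by substring, as with the acceptedIssue
    // check in MLService.processImage.
    print('Skipping undecodable file: $e');
  }
}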

@@ -105,10 +106,9 @@ Future<List<Uint8List>> generateFaceThumbnailsUsingCanvas(
  Uint8List imageData,
  List<FaceBox> faceBoxes,
) async {
  final Image img = await decodeImageFromData(imageData);
  int i = 0;

  int i = 0; // Index of the faceBox, initialized here for logging purposes
  try {
    final Image img = await decodeImageFromData(imageData);
    final futureFaceThumbnails = <Future<Uint8List>>[];
    for (final faceBox in faceBoxes) {
      // Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first
@@ -149,9 +149,12 @@ Future<List<Uint8List>> generateFaceThumbnailsUsingCanvas(
    final List<Uint8List> faceThumbnails =
        await Future.wait(futureFaceThumbnails);
    return faceThumbnails;
  } catch (e) {
    log('[ImageMlUtils] Error generating face thumbnails: $e');
    log('[ImageMlUtils] cropImage problematic input argument: ${faceBoxes[i]}');
  } catch (e, s) {
    _logger.severe(
      'Error generating face thumbnails. cropImage problematic input argument: ${faceBoxes[i]}',
      e,
      s,
    );
    return [];
  }
}
@@ -255,8 +258,6 @@ Future<(Float32List, List<AlignmentResult>, List<bool>, List<double>, Size)>
  int width = 112,
  int height = 112,
}) async {
  final stopwatch = Stopwatch()..start();

  final Size originalSize =
      Size(image.width.toDouble(), image.height.toDouble());

@@ -278,7 +279,9 @@ Future<(Float32List, List<AlignmentResult>, List<bool>, List<double>, Size)>
    final (alignmentResult, correctlyEstimated) =
        SimilarityTransform.estimate(face.allKeypoints);
    if (!correctlyEstimated) {
      log('Face alignment failed because not able to estimate SimilarityTransform, for face: $face');
      _logger.severe(
        'Face alignment failed because not able to estimate SimilarityTransform, for face: $face',
      );
      throw Exception(
        'Face alignment failed because not able to estimate SimilarityTransform',
      );
@@ -293,31 +296,20 @@ Future<(Float32List, List<AlignmentResult>, List<bool>, List<double>, Size)>
      alignedImageIndex,
    );

    final blurDetectionStopwatch = Stopwatch()..start();
    final faceGrayMatrix = _createGrayscaleIntMatrixFromNormalized2List(
      alignedImagesFloat32List,
      alignedImageIndex,
    );

    alignedImageIndex += 3 * width * height;
    final grayscalems = blurDetectionStopwatch.elapsedMilliseconds;
    log('creating grayscale matrix took $grayscalems ms');
    final (isBlur, blurValue) =
        await BlurDetectionService.predictIsBlurGrayLaplacian(
      faceGrayMatrix,
      faceDirection: face.getFaceDirection(),
    );
    final blurms = blurDetectionStopwatch.elapsedMilliseconds - grayscalems;
    log('blur detection took $blurms ms');
    log(
      'total blur detection took ${blurDetectionStopwatch.elapsedMilliseconds} ms',
    );
    blurDetectionStopwatch.stop();
    isBlurs.add(isBlur);
    blurValues.add(blurValue);
  }
  stopwatch.stop();
  log("Face Alignment took: ${stopwatch.elapsedMilliseconds} ms");
  return (
    alignedImagesFloat32List,
    alignmentResults,

@@ -332,7 +332,7 @@ Future<String> getImagePathForML(EnteFile enteFile) async {
  imagePath = file?.path;
  stopwatch.stop();
  _logger.info(
    "Getting file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
    "Getting file data for fileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
  );

  if (imagePath == null) {
@@ -388,18 +388,18 @@ Future<MLResult> analyzeImageStatic(Map args) async {
    final int clipImageAddress = args["clipImageAddress"] as int;

    _logger.info(
      "Start analyzing image with uploadedFileID: $enteFileID inside the isolate",
      "Start analyzeImageStatic for fileID $enteFileID (runFaces: $runFaces, runClip: $runClip)",
    );
    final time = DateTime.now();
    final startTime = DateTime.now();

    // Decode the image once to use for both face detection and alignment
    final (image, imageByteData) = await decodeImageFromPath(imagePath);
    _logger.info('Reading and decoding image took '
        '${DateTime.now().difference(time).inMilliseconds} ms');
    final decodedImageSize =
        Dimensions(height: image.height, width: image.width);
    final result = MLResult.fromEnteFileID(enteFileID);
    result.decodedImageSize = decodedImageSize;
    final decodeTime = DateTime.now();
    final decodeMs = decodeTime.difference(startTime).inMilliseconds;

    if (runFaces) {
      final resultFaces = await FaceRecognitionService.runFacesPipeline(
@@ -415,6 +415,9 @@ Future<MLResult> analyzeImageStatic(Map args) async {
        result.faces = resultFaces;
      }
    }
    final facesTime = DateTime.now();
    final facesMs = facesTime.difference(decodeTime).inMilliseconds;
    final faceMsString = runFaces ? ", faces: $facesMs ms" : "";

    if (runClip) {
      final clipResult = await SemanticSearchService.runClipImage(
@@ -425,6 +428,15 @@ Future<MLResult> analyzeImageStatic(Map args) async {
      );
      result.clip = clipResult;
    }
    final clipTime = DateTime.now();
    final clipMs = clipTime.difference(facesTime).inMilliseconds;
    final clipMsString = runClip ? ", clip: $clipMs ms" : "";

    final endTime = DateTime.now();
    final totalMs = endTime.difference(startTime).inMilliseconds;
    _logger.info(
      'Finished analyzeImageStatic for fileID $enteFileID, in $totalMs ms (decode: $decodeMs ms$faceMsString$clipMsString)',
    );

    return result;
  } catch (e, s) {

@@ -201,7 +201,7 @@ class OnnxDartPlugin: FlutterPlugin, MethodCallHandler {
            inputs["input"] = inputTensor
        }
        val outputs = session.run(inputs)
        Log.d(TAG, "Output shape: ${outputs.size()}")
        // Log.d(TAG, "Output shape: ${outputs.size()}")
        if (modelType == ModelType.YOLOv5Face) {
            val outputTensor = (outputs[0].value as Array<Array<FloatArray>>).get(0)

            val flatList = outputTensor.flattenToFloatArray()