[mob][photos] Cleaner logging
@@ -1,4 +1,3 @@
-import 'dart:developer' as dev show log;
 import 'dart:math' as math show max, min;
 
 import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
@@ -33,9 +32,6 @@ List<FaceDetectionRelative> yoloOnnxFilterExtractDetections(
         maxScore = result[4];
       }
     }
-    dev.log(
-      'No face detections found above the minScoreSigmoidThreshold of $minScoreSigmoidThreshold. The max score was $maxScore.',
-    );
   }
 
   for (final List<double> rawDetection in output) {
@@ -66,9 +66,8 @@ class FaceDetectionService extends MlModel {
       maintainAspectRatio: true,
     );
     final preprocessingTime = DateTime.now();
-    _logger.info(
-      'Face detection preprocessing is finished, in ${preprocessingTime.difference(startTime).inMilliseconds} ms',
-    );
+    final preprocessingMs =
+        preprocessingTime.difference(startTime).inMilliseconds;
 
     // Run inference
     List<List<List<double>>>? nestedResults = [];
@@ -81,11 +80,14 @@ class FaceDetectionService extends MlModel {
           inputImageList,
         ); // [1, 25200, 16]
       }
+      final inferenceTime = DateTime.now();
+      final inferenceMs =
+          inferenceTime.difference(preprocessingTime).inMilliseconds;
       _logger.info(
-        'inference is finished, in ${DateTime.now().difference(preprocessingTime).inMilliseconds} ms',
+        'Face detection is finished, in ${inferenceTime.difference(startTime).inMilliseconds} ms (preprocessing: $preprocessingMs ms, inference: $inferenceMs ms)',
       );
     } catch (e, s) {
-      _logger.severe('Error while running inference', e, s);
+      _logger.severe('Error while running inference (PlatformPlugin: ${MlModel.usePlatformPlugin})', e, s);
       throw YOLOFaceInterpreterRunException();
     }
     try {
@@ -121,9 +123,9 @@ class FaceDetectionService extends MlModel {
         outputs[0]?.value as List<List<List<double>>>; // [1, 25200, 16]
     inputOrt.release();
     runOptions.release();
-    outputs.forEach((element) {
+    for (var element in outputs) {
      element?.release();
-    });
+    }
 
    return result;
  }
@@ -51,7 +51,6 @@ class MLIndexingIsolate extends SuperIsolate {
    FileMLInstruction instruction,
    String filePath,
  ) async {
-    final Stopwatch stopwatch = Stopwatch()..start();
    late MLResult result;
 
    try {
@@ -79,15 +78,10 @@ class MLIndexingIsolate extends SuperIsolate {
        s,
      );
      debugPrint(
-        "This image with ID ${instruction.file.uploadedFileID} has name ${instruction.file.displayName}.",
+        "This image with fileID ${instruction.file.uploadedFileID} has name ${instruction.file.displayName}.",
      );
      rethrow;
    }
-    stopwatch.stop();
-    _logger.info(
-      "Finished Analyze image with uploadedFileID ${instruction.file.uploadedFileID}, in "
-      "${stopwatch.elapsedMilliseconds} ms (including time waiting for inference engine availability)",
-    );
 
    return result;
  }
@@ -385,9 +385,6 @@ class MLService {
  }
 
  Future<bool> processImage(FileMLInstruction instruction) async {
-    _logger.info(
-      "`processImage` start processing image with uploadedFileID: ${instruction.file.uploadedFileID}",
-    );
    bool actuallyRanML = false;
 
    try {
@@ -420,7 +417,6 @@ class MLService {
      if (result.facesRan) {
        if (result.faces!.isEmpty) {
          faces.add(Face.empty(result.fileId));
-          _logger.info("no face detected, storing empty for ${result.fileId}");
        }
        if (result.faces!.isNotEmpty) {
          for (int i = 0; i < result.faces!.length; ++i) {
@@ -432,7 +428,6 @@
            ),
          );
        }
-        _logger.info("storing ${faces.length} faces for ${result.fileId}");
      }
      dataEntity.putFace(
        RemoteFaceEmbedding(
@@ -459,7 +454,7 @@
        instruction.file,
        dataEntity,
      );
-      _logger.info("Results for file ${result.fileId} stored on remote");
+      _logger.info("ML results for fileID ${result.fileId} stored on remote");
      // Storing results locally
      if (result.facesRan) await MLDataDB.instance.bulkInsertFaces(faces);
      if (result.clipRan) {
@@ -467,7 +462,7 @@
          result.clip!,
        );
      }
-      _logger.info("Results for file ${result.fileId} stored locally");
+      _logger.info("ML results for fileID ${result.fileId} stored locally");
      return actuallyRanML;
    } catch (e, s) {
      final String errorString = e.toString();
@@ -480,7 +475,7 @@
          errorString.contains('FileSizeTooLargeForMobileIndexing');
      if (acceptedIssue) {
        _logger.severe(
-          '$errorString with ID ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size), storing empty results so indexing does not get stuck',
+          '$errorString for fileID ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size), storing empty results so indexing does not get stuck',
          e,
          s,
        );
@@ -493,7 +488,7 @@
        return true;
      }
      _logger.severe(
-        "Failed to index file with ID: ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size). Not storing any results locally, which means it will be automatically retried later.",
+        "Failed to index file for fileID ${instruction.file.uploadedFileID} (format $format, type $fileType, size $size). Not storing any results locally, which means it will be automatically retried later.",
        e,
        s,
      );
@@ -40,16 +40,25 @@ class ClipImageEncoder extends MlModel {
    final preprocessingMs =
        preprocessingTime.difference(startTime).inMilliseconds;
    late List<double> result;
-    if (MlModel.usePlatformPlugin) {
-      result = await _runPlatformPluginPredict(inputList);
-    } else {
-      result = _runFFIBasedPredict(inputList, sessionAddress);
+    try {
+      if (MlModel.usePlatformPlugin) {
+        result = await _runPlatformPluginPredict(inputList);
+      } else {
+        result = _runFFIBasedPredict(inputList, sessionAddress);
+      }
+    } catch (e, stackTrace) {
+      _logger.severe(
+        "Clip image inference failed${enteFileID != null ? " with fileID $enteFileID" : ""} (PlatformPlugin: ${MlModel.usePlatformPlugin})",
+        e,
+        stackTrace,
+      );
+      rethrow;
    }
    final inferTime = DateTime.now();
    final inferenceMs = inferTime.difference(preprocessingTime).inMilliseconds;
    final totalMs = inferTime.difference(startTime).inMilliseconds;
    _logger.info(
-      "Clip predict took $totalMs ms${enteFileID != null ? " with fileID $enteFileID" : ""} (nference: $inferenceMs ms, preprocessing: $preprocessingMs ms)",
+      "Clip image predict took $totalMs ms${enteFileID != null ? " with fileID $enteFileID" : ""} (inference: $inferenceMs ms, preprocessing: $preprocessingMs ms)",
    );
    return result;
  }
@@ -36,10 +36,19 @@ class ClipTextEncoder extends MlModel {
    final address = args["address"] as int;
    final List<int> tokenize = await ClipTextTokenizer.instance.tokenize(text);
    final int32list = Int32List.fromList(tokenize);
-    if (MlModel.usePlatformPlugin) {
-      return await _runPlatformPluginPredict(int32list);
-    } else {
-      return _runFFIBasedPredict(int32list, address);
+    try {
+      if (MlModel.usePlatformPlugin) {
+        return await _runPlatformPluginPredict(int32list);
+      } else {
+        return _runFFIBasedPredict(int32list, address);
+      }
+    } catch (e, s) {
+      _logger.severe(
+        "Clip text inference failed (PlatformPlugin: ${MlModel.usePlatformPlugin})",
+        e,
+        s,
+      );
+      rethrow;
    }
  }
 
@@ -259,7 +259,6 @@ Future<(Float32List, List<AlignmentResult>, List<bool>, List<double>, Size)>
  int width = 112,
  int height = 112,
}) async {
-  final stopwatch = Stopwatch()..start();
 
  final Size originalSize =
      Size(image.width.toDouble(), image.height.toDouble());
@@ -299,31 +298,20 @@
      alignedImageIndex,
    );
 
-    final blurDetectionStopwatch = Stopwatch()..start();
    final faceGrayMatrix = _createGrayscaleIntMatrixFromNormalized2List(
      alignedImagesFloat32List,
      alignedImageIndex,
    );
 
    alignedImageIndex += 3 * width * height;
-    final grayscalems = blurDetectionStopwatch.elapsedMilliseconds;
-    log('creating grayscale matrix took $grayscalems ms');
    final (isBlur, blurValue) =
        await BlurDetectionService.predictIsBlurGrayLaplacian(
      faceGrayMatrix,
      faceDirection: face.getFaceDirection(),
    );
-    final blurms = blurDetectionStopwatch.elapsedMilliseconds - grayscalems;
-    log('blur detection took $blurms ms');
-    log(
-      'total blur detection took ${blurDetectionStopwatch.elapsedMilliseconds} ms',
-    );
-    blurDetectionStopwatch.stop();
    isBlurs.add(isBlur);
    blurValues.add(blurValue);
  }
-  stopwatch.stop();
-  log("Face Alignment took: ${stopwatch.elapsedMilliseconds} ms");
  return (
    alignedImagesFloat32List,
    alignmentResults,
@@ -332,7 +332,7 @@ Future<String> getImagePathForML(EnteFile enteFile) async {
  imagePath = file?.path;
  stopwatch.stop();
  _logger.info(
-    "Getting file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
+    "Getting file data for fileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
  );
 
  if (imagePath == null) {
@@ -388,7 +388,7 @@ Future<MLResult> analyzeImageStatic(Map args) async {
  final int clipImageAddress = args["clipImageAddress"] as int;
 
  _logger.info(
-    "Start analyzeImageStatic with fileID: $enteFileID (runFaces: $runFaces, runClip: $runClip)",
+    "Start analyzeImageStatic for fileID $enteFileID (runFaces: $runFaces, runClip: $runClip)",
  );
  final startTime = DateTime.now();
 
@@ -399,9 +399,7 @@ Future<MLResult> analyzeImageStatic(Map args) async {
  final result = MLResult.fromEnteFileID(enteFileID);
  result.decodedImageSize = decodedImageSize;
  final decodeTime = DateTime.now();
-  _logger.info(
-    'Reading and decoding image took ${decodeTime.difference(startTime).inMilliseconds} ms (fileID: $enteFileID)',
-  );
+  final decodeMs = decodeTime.difference(startTime).inMilliseconds;
 
  if (runFaces) {
    final resultFaces = await FaceRecognitionService.runFacesPipeline(
@@ -417,6 +415,9 @@ Future<MLResult> analyzeImageStatic(Map args) async {
      result.faces = resultFaces;
    }
  }
+  final facesTime = DateTime.now();
+  final facesMs = facesTime.difference(decodeTime).inMilliseconds;
+  final faceMsString = runFaces ? ", faces: $facesMs ms" : "";
 
  if (runClip) {
    final clipResult = await SemanticSearchService.runClipImage(
@@ -427,10 +428,14 @@ Future<MLResult> analyzeImageStatic(Map args) async {
    );
    result.clip = clipResult;
  }
+  final clipTime = DateTime.now();
+  final clipMs = clipTime.difference(facesTime).inMilliseconds;
+  final clipMsString = runClip ? ", clip: $clipMs ms" : "";
 
  final endTime = DateTime.now();
+  final totalMs = endTime.difference(startTime).inMilliseconds;
  _logger.info(
-    'Finished analyzeImageStatic with fileID: $enteFileID, in ${endTime.difference(startTime).inMilliseconds} ms',
+    'Finished analyzeImageStatic for fileID $enteFileID, in $totalMs ms (decode: $decodeMs ms$faceMsString$clipMsString)',
  );
 
  return result;