Compare commits

...

53 Commits

Author SHA1 Message Date
Prateek Sunal
0939d4c0d1 fix(auth/connectivity): try to fix certificate issue on windows 11 2024-07-17 16:00:01 +05:30
Neeraj Gupta
2242bdb2f8 fix(auth/import): remove cross button from import page (#2469)
## Description

## Tests
2024-07-17 15:57:43 +05:30
Prateek Sunal
c794eabf23 Merge remote-tracking branch 'origin/main' into auth-fixes 2024-07-17 15:41:40 +05:30
Prateek Sunal
de8a48287b chore(auth): lint fixes 2024-07-17 15:40:21 +05:30
Prateek Sunal
d9471e406c fix(auth/import): remove cross button from import page 2024-07-17 15:38:51 +05:30
Neeraj Gupta
70162c17d1 [mob] Use video probe data for getting location during upload (#2468)
## Description

## Tests
2024-07-17 15:16:09 +05:30
Neeraj Gupta
62f0825522 generated strings 2024-07-17 15:14:09 +05:30
Neeraj Gupta
826e549462 bump version 2024-07-17 15:13:51 +05:30
Neeraj Gupta
dac589b6b8 Backfill video location when fileInfo is viewed 2024-07-17 15:13:35 +05:30
Neeraj Gupta
600cbc15d2 Switch back to new video player 2024-07-17 15:03:48 +05:30
Neeraj Gupta
f33cea212f Use ffprobe for getting video location in Android 2024-07-17 15:01:26 +05:30
Neeraj Gupta
95369097a5 Merge metadata streams 2024-07-17 14:57:54 +05:30
Neeraj Gupta
087e84b7ba Remove border 2024-07-17 14:18:09 +05:30
Neeraj Gupta
7744b8101b Refactor 2024-07-17 14:10:07 +05:30
Neeraj Gupta
5d078b06be [mob] Show video metadata inside fileInfo (#2466)
## Description

## Tests
2024-07-16 18:02:29 +05:30
Neeraj Gupta
24aff0b9c1 bump version 2024-07-16 18:00:19 +05:30
Neeraj Gupta
fd001a9181 Fix lint 2024-07-16 17:59:56 +05:30
Neeraj Gupta
d6a970274c Format fps value 2024-07-16 17:57:02 +05:30
Neeraj Gupta
428b3e2cd6 Enable drag to close 2024-07-16 17:49:49 +05:30
Neeraj Gupta
1c5154ac26 Show dims and fps in the video info dialog 2024-07-16 17:47:10 +05:30
Neeraj Gupta
2b7dc88281 iOS build changes 2024-07-16 17:39:02 +05:30
Neeraj Gupta
95c92b0572 Show fps, bitrate and dim for video 2024-07-16 17:38:52 +05:30
Neeraj Gupta
c28b4934c4 Parse make, model and location for iPhone Videos 2024-07-16 16:52:55 +05:30
Neeraj Gupta
92b188bc21 Improve UI 2024-07-16 15:44:36 +05:30
Neeraj Gupta
daaa1d7423 Format creation time 2024-07-16 15:35:46 +05:30
Neeraj Gupta
f2ed6802d2 [mob] Rename 2024-07-16 14:48:38 +05:30
Neeraj Gupta
aeeed9cd11 [mob] Rename 2024-07-16 14:45:34 +05:30
Neeraj Gupta
e2ef2eacc4 [mob] Display video metadata in file info 2024-07-16 14:12:46 +05:30
Manav Rathi
9dc8a054f8 Fix typo (#2462) 2024-07-15 22:00:35 +05:30
Manav Rathi
c1f4a9f0d6 Fix typo 2024-07-15 21:59:16 +05:30
Ashil
b2e43624ba [mob][photos] Init magic cache service only if internal user (#2445) 2024-07-15 18:23:30 +05:30
Neeraj Gupta
8f5210d0a2 Fix grammar in CLI readme (#2449) 2024-07-15 18:21:29 +05:30
Neeraj Gupta
803e8b3a14 [auth] Add new icons (#2452)
Add Instant Gaming, Teleport and Mistral icons.

<!--- Describe your changes in detail -->

## Type of Change

<!--- Put an `x` in all the boxes that apply: -->

- [x] 🖼️ New icon
- [ ]  New feature (non-breaking change which adds functionality)
- [ ] 🛠️ Bug fix (non-breaking change which fixes an issue)
- [ ]  Breaking change (fix or feature that would cause existing
functionality to change)
- [ ] 🧹 Code refactor
- [ ]  Build configuration change
- [ ] 📝 Documentation
- [ ] 🗑️ Chore
2024-07-15 18:21:10 +05:30
Neeraj Gupta
177bab2961 [mobile] New translations (#2454)
New translations from
[Crowdin](https://crowdin.com/project/ente-photos-app)
2024-07-15 18:20:34 +05:30
Neeraj Gupta
f2894d1d62 [auth] New translations (#2455)
New translations from
[Crowdin](https://crowdin.com/project/ente-authenticator-app)
2024-07-15 18:20:01 +05:30
Manav Rathi
1a39d2b648 [desktop] ML new derived data format - Part 1/x (#2460) 2024-07-15 16:22:49 +05:30
Manav Rathi
d6908ffda3 Fix 2024-07-15 16:16:17 +05:30
Manav Rathi
73ee6f5284 Mark bad images as failed instead of retrying indefinitely 2024-07-15 16:11:39 +05:30
Manav Rathi
32750599b9 Outline 2024-07-15 14:41:39 +05:30
Manav Rathi
e054247359 Rely on ML flag 2024-07-15 14:35:07 +05:30
Manav Rathi
6dca5c2a8b Fix p nesting warning 2024-07-15 14:28:50 +05:30
Manav Rathi
3941c05229 Simplify UI 2024-07-15 14:25:07 +05:30
Manav Rathi
1a61c02602 Simplify 2024-07-15 14:13:53 +05:30
Manav Rathi
bc45261efa Handle the API quirk 2024-07-15 13:35:03 +05:30
Manav Rathi
bd884c9db8 [web] [desktop] Enable Polish translations (#2459)
A big hats off to the person who did 0-99% 🇵🇱 over the weekend!
2024-07-15 12:28:16 +05:30
Manav Rathi
326b347bc6 [web] [desktop] Enable Polish translations
A big hats off to the person who did 0-99% over the weekend!
2024-07-15 11:59:40 +05:30
Neeraj Gupta
74f4698fd6 [mob] Format brand 2024-07-15 10:21:19 +05:30
Crowdin Bot
b7b5977b85 New Crowdin translations by GitHub Action 2024-07-15 01:16:52 +00:00
Crowdin Bot
a3cbfc52c7 New Crowdin translations by GitHub Action 2024-07-15 01:04:12 +00:00
Eseltwift
a8be3be3b4 Add Instant Gaming, Teleport and Mistral icons. 2024-07-13 22:05:33 +02:00
dnred
faee7cc642 fix typo in CLI readme 2024-07-13 13:02:41 +02:00
ashilkn
89ff4cf43e [mob][photos] Initialise MagicCacheService only if internal user 2024-07-13 14:26:07 +05:30
Neeraj Gupta
c941783fd3 [mob] Expose and log video metadata 2024-07-12 17:45:30 +05:30
43 changed files with 3465 additions and 518 deletions

View File

@@ -202,6 +202,10 @@
{
"title": "ING"
},
{
"title": "Instant Gaming",
"slug": "instant_gaming"
},
{
"title": "Instagram"
},
@@ -297,6 +301,13 @@
{
"title": "Mintos"
},
{
"title": "Mistral",
"altNames": [
"Mistral AI",
"MistralAI"
]
},
{
"title": "Mozilla"
},
@@ -472,6 +483,13 @@
"Techlore Forums"
]
},
{
"title": "Teleport",
"altNames": [
"Gravitational Teleport",
"GoTeleport"
]
},
{
"title": "Termius",
"hex": "858585"

View File

@@ -0,0 +1,19 @@
<svg width="500" height="500" viewBox="0 0 500 500" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M42.3044 77.0441C33.8578 77.0441 25.6439 72.6653 21.1175 64.8302C14.3699 53.1508 18.3751 38.2158 30.0626 31.4721L78.9366 3.27575C90.6254 -3.46792 105.571 0.534679 112.319 12.2141C119.066 23.8936 115.062 38.8286 103.374 45.5723L54.5009 73.7686C50.7944 75.9145 46.5873 77.0444 42.3044 77.0441Z" fill="url(#paint0_linear_154_309)"/>
<path d="M408.844 235.902L66.7177 433.284V348.692L335.121 193.755L286.49 165.698L30.0615 313.446C22.5003 317.808 17.8424 325.869 17.8424 334.593V475.58C17.8433 479.868 18.973 484.079 21.118 487.792C23.2629 491.504 26.3475 494.586 30.0615 496.728C33.7768 498.873 37.9911 500.001 42.2807 500C46.5703 500 50.7844 498.872 54.4999 496.728L469.94 257.051C477.501 252.689 482.159 244.627 482.159 235.902C482.159 227.178 477.501 219.117 469.94 214.756L348.167 144.501C347.907 144.344 347.643 144.192 347.377 144.046L225.564 73.7685C218.003 69.4069 208.689 69.4069 201.127 73.7685L30.0629 172.459C18.3741 179.203 14.3702 194.136 21.1178 205.817C25.6442 213.651 33.8581 218.031 42.3046 218.031C46.587 218.03 50.7935 216.901 54.4999 214.756L213.344 123.114L408.844 235.904V235.902Z" fill="url(#paint1_linear_154_309)"/>
<path d="M408.844 235.902L66.7177 433.284V348.692L82.1938 339.759L54.4972 299.366L30.0615 313.446C22.5003 317.808 17.8424 325.869 17.8424 334.593V475.58C17.8433 479.868 18.973 484.079 21.118 487.792C23.2629 491.504 26.3475 494.586 30.0615 496.728C33.7768 498.873 37.9911 500.001 42.2807 500C46.5703 500 50.7844 498.872 54.4999 496.728L469.94 257.051C477.501 252.689 482.159 244.627 482.159 235.902C482.159 227.178 477.501 219.117 469.94 214.756L348.167 144.501C347.907 144.344 347.643 144.192 347.377 144.046L225.564 73.7685C218.003 69.4069 208.689 69.4069 201.127 73.7685L30.0629 172.459C18.3741 179.203 14.3702 194.136 21.1178 205.817C25.6442 213.651 33.8581 218.031 42.3046 218.031C46.587 218.03 50.7935 216.901 54.4999 214.756L213.344 123.114L408.844 235.902Z" fill="url(#paint2_linear_154_309)"/>
<defs>
<linearGradient id="paint0_linear_154_309" x1="31.2204" y1="111.73" x2="108.384" y2="0.000287497" gradientUnits="userSpaceOnUse">
<stop offset="0.26" stop-color="#FF670D"/>
<stop offset="1" stop-color="#FF5317"/>
</linearGradient>
<linearGradient id="paint1_linear_154_309" x1="86.5912" y1="285.221" x2="187.325" y2="109.662" gradientUnits="userSpaceOnUse">
<stop stop-color="#FF7207"/>
<stop offset="1" stop-color="#FF7208" stop-opacity="0"/>
</linearGradient>
<linearGradient id="paint2_linear_154_309" x1="35.1941" y1="500" x2="482.158" y2="227.227" gradientUnits="userSpaceOnUse">
<stop stop-color="#FF8000"/>
<stop offset="1" stop-color="#FF4020"/>
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 2.7 KiB

View File

@@ -0,0 +1,8 @@
<svg width="500" height="500" viewBox="0 0 500 500" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M0 22.72H90.9091V113.629V113.629V204.538V295.447H0V204.538V113.629V113.629V22.72ZM363.636 22.72H454.545V113.629H363.636V22.72ZM363.636 113.629L363.636 204.538H272.727V113.629L363.636 113.629ZM90.9091 295.447H0V386.357V477.266H90.9091V386.357V295.447ZM181.818 295.447H272.727V386.357H181.818V295.447ZM454.545 295.447H363.636V386.357V477.266H454.545V386.357V295.447Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M409.091 386.356H500V477.266H409.091V386.356ZM45.4551 386.356H136.364V477.266H45.4551V386.356Z" fill="#EA3326"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M45.4551 295.448H136.364V386.357H45.4551V295.448ZM318.182 295.448H227.273V386.357H318.182V295.448ZM500 295.448H409.091V386.357H500V295.448Z" fill="#EB5829"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M45.4551 204.538H136.364H136.364H227.273H318.182H409.091H500V295.447H409.091H318.182H227.273H136.364H136.364H45.4551V204.538Z" fill="#EE792F"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M45.4551 113.629H136.364L227.273 113.629V204.538L136.364 204.538L136.364 204.538L45.4551 204.538V113.629ZM409.091 113.629H500V204.538H409.091H318.182V113.629H409.091Z" fill="#F2A73B"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M45.4551 22.72H136.364V113.629H45.4551V22.72ZM409.091 22.72H500V113.629H409.091V22.72Z" fill="#F7D046"/>
</svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@@ -0,0 +1,10 @@
<svg width="500" height="500" viewBox="0 0 500 500" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_154_102)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M297.647 4.78775C299.464 5.07025 301.17 5.85037 302.58 7.04373C303.991 8.23707 305.052 9.79823 305.649 11.5583L305.999 12.85L318.014 75.3062C334.084 81.5487 349.235 89.9877 363.046 100.389L368.086 104.347L428.753 83.3683C432.661 81.6601 436.838 82.8893 439.78 86.285C460.64 109.972 477.591 138.554 487.424 168.574C488.164 170.248 488.382 172.11 488.047 173.912C487.713 175.714 486.842 177.369 485.553 178.657L484.483 179.552L436.345 221.01C438.072 230.28 438.566 240.3 438.566 250.05C438.566 257.904 438.238 265.862 437.229 273.465L436.345 279.112L484.483 320.611C486.153 321.812 487.359 323.564 487.896 325.567C488.433 327.568 488.266 329.698 487.424 331.589C477.613 361.63 460.682 390.17 439.801 413.836C437.167 416.877 433.588 418.19 430.07 417.232L428.753 416.774L368.107 395.774C354.797 406.629 339.923 415.565 324.041 422.357L318.034 424.816L305.999 487.313C305.584 489.358 304.561 491.226 303.069 492.668C301.577 494.108 299.685 495.052 297.647 495.375C281.93 498.292 266.213 500 250.023 500C233.813 500 218.095 498.292 202.378 495.375C200.559 495.092 198.851 494.312 197.438 493.119C196.023 491.926 194.958 490.366 194.354 488.605L194.026 487.313L182.011 424.816C165.665 418.593 150.284 410.026 136.341 399.379L131.919 395.774L71.2717 416.774C67.363 418.482 63.187 417.252 60.2246 413.836C39.3439 390.17 22.4131 361.63 12.6001 331.589C11.8637 329.914 11.6483 328.054 11.9828 326.252C12.3172 324.45 13.1856 322.795 14.4722 321.506L15.542 320.611L63.6601 279.112C62.0471 269.513 61.3034 259.786 61.4383 250.05C61.4383 242.258 61.7469 234.28 62.755 226.655L63.6601 221.03L15.5214 179.552C13.8514 178.351 12.6452 176.599 12.1087 174.596C11.5722 172.594 11.7386 170.465 12.5796 168.574C22.3925 138.554 39.3233 109.972 60.204 86.285C61.3417 84.8444 62.8613 83.7618 64.5845 83.1642C66.3078 82.5664 68.1636 82.4783 69.9346 82.9101L71.2717 83.3475L131.919 104.326C145.41 93.2005 160.453 84.1583 176.56 77.4936L182.011 75.2854L194.026 12.8292C194.435 10.7811 195.457 8.9099 196.949 7.46812C198.444 6.02634 200.338 5.08349 202.378 4.76692C233.817 -1.58897 266.188 -1.58897 297.627 4.76692L297.647 4.78775ZM250.002 103.722C170.017 103.722 105.257 169.219 105.257 250.008C105.257 330.798 170.059 396.295 250.002 396.295C329.946 396.295 394.747 330.798 394.747 250.008C394.747 169.219 329.946 103.722 249.982 103.722H250.002ZM341.61 229.676V186.136H158.395V229.676H228.196V334.256H271.809V229.676H341.61Z" fill="#512FC9"/>
</g>
<defs>
<clipPath id="clip0_154_102">
<rect width="500" height="500" fill="white"/>
</clipPath>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 2.7 KiB

View File

@@ -1,6 +1,7 @@
import 'dart:io';
import 'package:dio/dio.dart';
import 'package:dio/io.dart';
import 'package:ente_auth/core/configuration.dart';
import 'package:ente_auth/core/event_bus.dart';
import 'package:ente_auth/events/endpoint_updated_event.dart';
@@ -36,6 +37,11 @@ class Network {
),
);
(_dio.httpClientAdapter as IOHttpClientAdapter).createHttpClient = () =>
HttpClient()
..badCertificateCallback =
(X509Certificate cert, String host, int port) => true;
_enteDio = Dio(
BaseOptions(
baseUrl: endpoint,
@@ -50,6 +56,11 @@ class Network {
},
),
);
(_enteDio.httpClientAdapter as IOHttpClientAdapter).createHttpClient = () =>
HttpClient()
..badCertificateCallback =
(X509Certificate cert, String host, int port) => true;
_setupInterceptors(endpoint);
Bus.instance.on<EndpointUpdatedEvent>().listen((event) {

View File

@@ -230,5 +230,10 @@
"thisDevice": "این دستگاه",
"editCodeAuthMessage": "احراز هویت برای ویرایش کد",
"deleteCodeAuthMessage": "احراز هویت برای حذف کد",
"showQRAuthMessage": "احراز هویت برای نمایش کد QR"
"showQRAuthMessage": "احراز هویت برای نمایش کد QR",
"tags": "برچسب‌ها",
"createNewTag": "ایجاد برچسب جدید",
"tag": "برچسب",
"create": "ایجاد",
"editTag": "ویرایش برچسب"
}

View File

@@ -1,6 +1,5 @@
import 'package:ente_auth/l10n/l10n.dart';
import 'package:ente_auth/theme/ente_theme.dart';
import 'package:ente_auth/ui/components/buttons/icon_button_widget.dart';
import 'package:ente_auth/ui/components/captioned_text_widget.dart';
import 'package:ente_auth/ui/components/divider_widget.dart';
import 'package:ente_auth/ui/components/menu_item_widget.dart';
@@ -68,18 +67,7 @@ class ImportCodePage extends StatelessWidget {
title: context.l10n.importCodes,
),
flexibleSpaceCaption: "Import source",
actionIcons: [
IconButtonWidget(
icon: Icons.close_outlined,
iconButtonType: IconButtonType.secondary,
onTap: () {
Navigator.pop(context);
if (Navigator.canPop(context)) {
Navigator.pop(context);
}
},
),
],
actionIcons: const [],
),
SliverList(
delegate: SliverChildBuilderDelegate(

View File

@@ -2,7 +2,7 @@
The Ente CLI is a Command Line Utility for exporting data from
[Ente](https://ente.io). It also does a few more things, for example, you can
use it to decrypting the export from Ente Auth.
use it to decrypt the export from Ente Auth.
## Install

View File

@@ -2,6 +2,7 @@
## v1.7.3 (Unreleased)
- Support Polish translations.
- .
## v1.7.2

View File

@@ -9,7 +9,7 @@ Conceptually, the release is straightforward:
3. The download links on our website, and existing apps already check the
latest GitHub release and update automatically.
The complication comes by the fact that electron-builder's auto updater (the
The complication comes from the fact that electron-builder's auto updater (the
mechanism that we use for auto updates) doesn't work with monorepos. So we need
to keep a separate repository just for holding the releases.

View File

@@ -0,0 +1 @@
ente - ذخیره‌سازی عکس رمزگذاری شده

View File

@@ -199,6 +199,8 @@ PODS:
- shared_preferences_foundation (0.0.1):
- Flutter
- FlutterMacOS
- smart_auth (0.0.1):
- Flutter
- sqflite (0.0.3):
- Flutter
- FlutterMacOS
@@ -277,6 +279,7 @@ DEPENDENCIES:
- sentry_flutter (from `.symlinks/plugins/sentry_flutter/ios`)
- share_plus (from `.symlinks/plugins/share_plus/ios`)
- shared_preferences_foundation (from `.symlinks/plugins/shared_preferences_foundation/darwin`)
- smart_auth (from `.symlinks/plugins/smart_auth/ios`)
- sqflite (from `.symlinks/plugins/sqflite/darwin`)
- sqlite3_flutter_libs (from `.symlinks/plugins/sqlite3_flutter_libs/ios`)
- uni_links (from `.symlinks/plugins/uni_links/ios`)
@@ -397,6 +400,8 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/share_plus/ios"
shared_preferences_foundation:
:path: ".symlinks/plugins/shared_preferences_foundation/darwin"
smart_auth:
:path: ".symlinks/plugins/smart_auth/ios"
sqflite:
:path: ".symlinks/plugins/sqflite/darwin"
sqlite3_flutter_libs:
@@ -466,7 +471,7 @@ SPEC CHECKSUMS:
package_info_plus: 115f4ad11e0698c8c1c5d8a689390df880f47e85
path_provider_foundation: 3784922295ac71e43754bd15e0653ccfd36a147c
permission_handler_apple: 9878588469a2b0d0fc1e048d9f43605f92e6cec2
photo_manager: 4f6810b7dfc4feb03b461ac1a70dacf91fba7604
photo_manager: ff695c7a1dd5bc379974953a2b5c0a293f7c4c8a
PromisesObjC: f5707f49cb48b9636751c5b2e7d227e43fba9f47
receive_sharing_intent: 6837b01768e567fe8562182397bf43d63d8c6437
screen_brightness_ios: 715ca807df953bf676d339f11464e438143ee625
@@ -477,6 +482,7 @@ SPEC CHECKSUMS:
SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe
share_plus: 8875f4f2500512ea181eef553c3e27dba5135aad
shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695
smart_auth: 4bedbc118723912d0e45a07e8ab34039c19e04f2
sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec
sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a
sqlite3_flutter_libs: af0e8fe9bce48abddd1ffdbbf839db0302d72d80

View File

@@ -329,6 +329,7 @@
"${BUILT_PRODUCTS_DIR}/sentry_flutter/sentry_flutter.framework",
"${BUILT_PRODUCTS_DIR}/share_plus/share_plus.framework",
"${BUILT_PRODUCTS_DIR}/shared_preferences_foundation/shared_preferences_foundation.framework",
"${BUILT_PRODUCTS_DIR}/smart_auth/smart_auth.framework",
"${BUILT_PRODUCTS_DIR}/sqflite/sqflite.framework",
"${BUILT_PRODUCTS_DIR}/sqlite3/sqlite3.framework",
"${BUILT_PRODUCTS_DIR}/sqlite3_flutter_libs/sqlite3_flutter_libs.framework",
@@ -420,6 +421,7 @@
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/sentry_flutter.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/share_plus.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/shared_preferences_foundation.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/smart_auth.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/sqflite.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/sqlite3.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/sqlite3_flutter_libs.framework",

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,5 @@
{
"@@locale ": "en",
"enterYourEmailAddress": "Gib deine E-Mail-Adresse ein",
"accountWelcomeBack": "Willkommen zurück!",
"email": "E-Mail",

View File

@@ -1,4 +1,5 @@
{
"@@locale ": "en",
"enterYourEmailAddress": "Voer uw e-mailadres in",
"accountWelcomeBack": "Welkom terug!",
"email": "E-mail",

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,5 @@
{
"@@locale ": "en",
"enterYourEmailAddress": "Insira o seu endereço de e-mail",
"accountWelcomeBack": "Bem-vindo de volta!",
"email": "E-mail",

View File

@@ -1,4 +1,5 @@
{
"@@locale ": "en",
"enterYourEmailAddress": "请输入您的电子邮件地址",
"accountWelcomeBack": "欢迎回来!",
"email": "电子邮件地址",

View File

@@ -307,7 +307,9 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
preferences,
);
MagicCacheService.instance.init(preferences);
if (flagService.internalUser) {
MagicCacheService.instance.init(preferences);
}
initComplete = true;
_logger.info("Initialization done");

View File

@@ -28,6 +28,7 @@ class FFProbeKeys {
static const date = 'date';
static const disposition = 'disposition';
static const duration = 'duration';
static const quickTimeLocation ="com.apple.quicktime.location.ISO6709";
static const durationMicros = 'duration_us';
static const encoder = 'encoder';
static const extraDataSize = 'extradata_size';

View File

@@ -1,5 +1,7 @@
// Adapted from: https://github.com/deckerst/aves
import "dart:developer";
import "package:collection/collection.dart";
import "package:intl/intl.dart";
import "package:photos/models/ffmpeg/channel_layouts.dart";
@@ -10,197 +12,140 @@ import "package:photos/models/ffmpeg/mp4.dart";
import "package:photos/models/location/location.dart";
class FFProbeProps {
final double? captureFps;
final String? androidManufacturer;
final String? androidModel;
final String? androidVersion;
final String? bitRate;
final String? bitsPerRawSample;
final String? byteCount;
final String? channelLayout;
final String? chromaLocation;
final String? codecName;
final String? codecPixelFormat;
final int? codedHeight;
final int? codedWidth;
final String? colorPrimaries;
final String? colorRange;
final String? colorSpace;
final String? colorTransfer;
final String? colorProfile;
final String? compatibleBrands;
final String? creationTime;
final String? displayAspectRatio;
final DateTime? date;
final String? duration;
final String? durationMicros;
final String? extraDataSize;
final String? fieldOrder;
final String? fpsDen;
final int? frameCount;
final String? handlerName;
final bool? hasBFrames;
final int? height;
final String? language;
final Location? location;
final String? majorBrand;
final String? mediaFormat;
final String? mediaType;
final String? minorVersion;
final String? nalLengthSize;
final String? quicktimeLocationAccuracyHorizontal;
final int? rFrameRate;
final String? rotate;
final String? sampleFormat;
final String? sampleRate;
final String? sampleAspectRatio;
final String? sarDen;
final int? segmentCount;
final String? sourceOshash;
final String? startMicros;
final String? startPts;
final String? startTime;
final String? statisticsWritingApp;
final String? statisticsWritingDateUtc;
final String? timeBase;
final String? track;
final String? vendorId;
final int? width;
final String? xiaomiSlowMoment;
Map<String, dynamic>? prodData;
Location? location;
DateTime? creationTimeUTC;
String? bitrate;
String? majorBrand;
String? fps;
String? codecWidth;
String? codecHeight;
FFProbeProps({
required this.captureFps,
required this.androidManufacturer,
required this.androidModel,
required this.androidVersion,
required this.bitRate,
required this.bitsPerRawSample,
required this.byteCount,
required this.channelLayout,
required this.chromaLocation,
required this.codecName,
required this.codecPixelFormat,
required this.codedHeight,
required this.codedWidth,
required this.colorPrimaries,
required this.colorRange,
required this.colorSpace,
required this.colorTransfer,
required this.colorProfile,
required this.compatibleBrands,
required this.creationTime,
required this.displayAspectRatio,
required this.date,
required this.duration,
required this.durationMicros,
required this.extraDataSize,
required this.fieldOrder,
required this.fpsDen,
required this.frameCount,
required this.handlerName,
required this.hasBFrames,
required this.height,
required this.language,
required this.location,
required this.majorBrand,
required this.mediaFormat,
required this.mediaType,
required this.minorVersion,
required this.nalLengthSize,
required this.quicktimeLocationAccuracyHorizontal,
required this.rFrameRate,
required this.rotate,
required this.sampleFormat,
required this.sampleRate,
required this.sampleAspectRatio,
required this.sarDen,
required this.segmentCount,
required this.sourceOshash,
required this.startMicros,
required this.startPts,
required this.startTime,
required this.statisticsWritingApp,
required this.statisticsWritingDateUtc,
required this.timeBase,
required this.track,
required this.vendorId,
required this.width,
required this.xiaomiSlowMoment,
});
// dot separated bitrate, fps, codecWidth, codecHeight. Ignore null value
String get videoInfo {
final List<String> info = [];
if (bitrate != null) info.add('$bitrate');
if (fps != null) info.add('ƒ/$fps');
if (codecWidth != null && codecHeight != null) {
info.add('$codecWidth x $codecHeight');
}
return info.join(' * ');
}
factory FFProbeProps.fromJson(Map<dynamic, dynamic>? json) {
return FFProbeProps(
captureFps:
double.tryParse(json?[FFProbeKeys.androidCaptureFramerate] ?? ""),
androidManufacturer: json?[FFProbeKeys.androidManufacturer],
androidModel: json?[FFProbeKeys.androidModel],
androidVersion: json?[FFProbeKeys.androidVersion],
bitRate: _formatMetric(
json?[FFProbeKeys.bitrate] ?? json?[FFProbeKeys.bps],
'b/s',
),
bitsPerRawSample: json?[FFProbeKeys.bitsPerRawSample],
byteCount: _formatFilesize(json?[FFProbeKeys.byteCount]),
channelLayout: _formatChannelLayout(json?[FFProbeKeys.channelLayout]),
chromaLocation: json?[FFProbeKeys.chromaLocation],
codecName: _formatCodecName(json?[FFProbeKeys.codecName]),
codecPixelFormat:
(json?[FFProbeKeys.codecPixelFormat] as String?)?.toUpperCase(),
codedHeight: int.tryParse(json?[FFProbeKeys.codedHeight] ?? ""),
codedWidth: int.tryParse(json?[FFProbeKeys.codedWidth] ?? ""),
colorPrimaries:
(json?[FFProbeKeys.colorPrimaries] as String?)?.toUpperCase(),
colorRange: (json?[FFProbeKeys.colorRange] as String?)?.toUpperCase(),
colorSpace: (json?[FFProbeKeys.colorSpace] as String?)?.toUpperCase(),
colorTransfer:
(json?[FFProbeKeys.colorTransfer] as String?)?.toUpperCase(),
colorProfile: json?[FFProbeKeys.colorTransfer],
compatibleBrands: json?[FFProbeKeys.compatibleBrands],
creationTime: _formatDate(json?[FFProbeKeys.creationTime] ?? ""),
displayAspectRatio: json?[FFProbeKeys.dar],
date: DateTime.tryParse(json?[FFProbeKeys.date] ?? ""),
duration: _formatDuration(json?[FFProbeKeys.durationMicros]),
durationMicros: formatPreciseDuration(
Duration(
microseconds:
int.tryParse(json?[FFProbeKeys.durationMicros] ?? "") ?? 0,
),
),
extraDataSize: json?[FFProbeKeys.extraDataSize],
fieldOrder: json?[FFProbeKeys.fieldOrder],
fpsDen: json?[FFProbeKeys.fpsDen],
frameCount: int.tryParse(json?[FFProbeKeys.frameCount] ?? ""),
handlerName: json?[FFProbeKeys.handlerName],
hasBFrames: json?[FFProbeKeys.hasBFrames],
height: int.tryParse(json?[FFProbeKeys.height] ?? ""),
language: json?[FFProbeKeys.language],
location: _formatLocation(json?[FFProbeKeys.location]),
majorBrand: json?[FFProbeKeys.majorBrand],
mediaFormat: json?[FFProbeKeys.mediaFormat],
mediaType: json?[FFProbeKeys.mediaType],
minorVersion: json?[FFProbeKeys.minorVersion],
nalLengthSize: json?[FFProbeKeys.nalLengthSize],
quicktimeLocationAccuracyHorizontal:
json?[FFProbeKeys.quicktimeLocationAccuracyHorizontal],
rFrameRate: int.tryParse(json?[FFProbeKeys.rFrameRate] ?? ""),
rotate: json?[FFProbeKeys.rotate],
sampleFormat: json?[FFProbeKeys.sampleFormat],
sampleRate: json?[FFProbeKeys.sampleRate],
sampleAspectRatio: json?[FFProbeKeys.sar],
sarDen: json?[FFProbeKeys.sarDen],
segmentCount: int.tryParse(json?[FFProbeKeys.segmentCount] ?? ""),
sourceOshash: json?[FFProbeKeys.sourceOshash],
startMicros: json?[FFProbeKeys.startMicros],
startPts: json?[FFProbeKeys.startPts],
startTime: _formatDuration(json?[FFProbeKeys.startTime]),
statisticsWritingApp: json?[FFProbeKeys.statisticsWritingApp],
statisticsWritingDateUtc: json?[FFProbeKeys.statisticsWritingDateUtc],
timeBase: json?[FFProbeKeys.timeBase],
track: json?[FFProbeKeys.title],
vendorId: json?[FFProbeKeys.vendorId],
width: int.tryParse(json?[FFProbeKeys.width] ?? ""),
xiaomiSlowMoment: json?[FFProbeKeys.xiaomiSlowMoment],
);
// toString() method
@override
String toString() {
final buffer = StringBuffer();
for (final key in prodData!.keys) {
final value = prodData![key];
if (value != null) {
buffer.writeln('$key: $value');
}
}
return buffer.toString();
}
static parseData(Map<dynamic, dynamic>? json) {
final Map<String, dynamic> parsedData = {};
final FFProbeProps result = FFProbeProps();
for (final key in json!.keys) {
final stringKey = key.toString();
switch (stringKey) {
case FFProbeKeys.bitrate:
case FFProbeKeys.bps:
result.bitrate = _formatMetric(json[key], 'b/s');
parsedData[stringKey] = result.bitrate;
break;
case FFProbeKeys.byteCount:
parsedData[stringKey] = _formatFilesize(json[key]);
break;
case FFProbeKeys.channelLayout:
parsedData[stringKey] = _formatChannelLayout(json[key]);
break;
case FFProbeKeys.codecName:
parsedData[stringKey] = _formatCodecName(json[key]);
break;
case FFProbeKeys.codecPixelFormat:
case FFProbeKeys.colorPrimaries:
case FFProbeKeys.colorRange:
case FFProbeKeys.colorSpace:
case FFProbeKeys.colorTransfer:
parsedData[stringKey] = (json[key] as String?)?.toUpperCase();
break;
case FFProbeKeys.creationTime:
parsedData[stringKey] = _formatDate(json[key] ?? "");
result.creationTimeUTC = _getUTCDateTime(json[key] ?? "");
break;
case FFProbeKeys.durationMicros:
parsedData[stringKey] = formatPreciseDuration(
Duration(microseconds: int.tryParse(json[key] ?? "") ?? 0),
);
break;
case FFProbeKeys.duration:
parsedData[stringKey] = _formatDuration(json[key]);
case FFProbeKeys.location:
result.location = _formatLocation(json[key]);
if (result.location != null) {
parsedData[stringKey] =
'${result.location!.latitude}, ${result.location!.longitude}';
}
break;
case FFProbeKeys.quickTimeLocation:
result.location =
_formatLocation(json[FFProbeKeys.quickTimeLocation]);
if (result.location != null) {
parsedData[FFProbeKeys.location] =
'${result.location!.latitude}, ${result.location!.longitude}';
}
break;
case FFProbeKeys.majorBrand:
result.majorBrand = _formatBrand(json[key]);
parsedData[stringKey] = result.majorBrand;
break;
case FFProbeKeys.startTime:
parsedData[stringKey] = _formatDuration(json[key]);
break;
default:
parsedData[stringKey] = json[key];
}
}
// iterate through the streams
final List<dynamic> streams = json["streams"];
final List<dynamic> newStreams = [];
final Map<String, dynamic> metadata = {};
for (final stream in streams) {
if (stream['type'] == 'metadata') {
for (final key in stream.keys) {
if (key == FFProbeKeys.frameCount && stream[key]?.toString() == "1") {
continue;
}
metadata[key] = stream[key];
}
metadata.remove(FFProbeKeys.index);
} else {
newStreams.add(stream);
}
for (final key in stream.keys) {
if (key == FFProbeKeys.rFrameRate) {
result.fps = _formatFPS(stream[key]);
parsedData[key] = result.fps;
} else if (key == FFProbeKeys.codedWidth) {
result.codecWidth = stream[key].toString();
parsedData[key] = result.codecWidth;
} else if (key == FFProbeKeys.codedHeight) {
result.codecHeight = stream[key].toString();
parsedData[key] = result.codecHeight;
}
}
}
if (metadata.isNotEmpty) {
newStreams.add(metadata);
}
parsedData["streams"] = newStreams;
result.prodData = parsedData;
return result;
}
static String _formatBrand(String value) => Mp4.brands[value] ?? value;
@@ -235,11 +180,23 @@ class FFProbeProps {
// input example: '2021-04-12T09:14:37.000000Z'
static String? _formatDate(String value) {
final date = DateTime.tryParse(value);
if (date == null) return value;
final dateInUtc = DateTime.tryParse(value);
if (dateInUtc == null) return value;
final epoch = DateTime.fromMillisecondsSinceEpoch(0, isUtc: true);
if (date == epoch) return null;
return date.toIso8601String();
if (dateInUtc == epoch) return null;
final newDate =
DateTime.fromMicrosecondsSinceEpoch(dateInUtc.microsecondsSinceEpoch);
return formatDateTime(newDate, 'en_US', false);
}
static DateTime? _getUTCDateTime(String value) {
final dateInUtc = DateTime.tryParse(value);
if (dateInUtc == null) return null;
final epoch = DateTime.fromMillisecondsSinceEpoch(0, isUtc: true);
if (dateInUtc == epoch) return null;
return DateTime.fromMicrosecondsSinceEpoch(
dateInUtc.microsecondsSinceEpoch,
);
}
// input example: '00:00:05.408000000' or '5.408000'
@@ -281,7 +238,7 @@ class FFProbeProps {
static String? _formatDuration(String? value) {
if (value == null) return null;
final duration = _parseDuration(value);
return duration != null ? formatPreciseDuration(duration) : value;
return duration != null ? formatFriendlyDuration(duration) : value;
}
static String? _formatFilesize(dynamic value) {
@@ -291,6 +248,20 @@ class FFProbeProps {
return size != null ? formatFileSize(asciiLocale, size) : value;
}
static String? _formatFPS(dynamic value) {
if (value == null) return null;
final int? t = int.tryParse(value.split('/')[0]);
final int? b = int.tryParse(value.split('/')[1]);
if (t != null && b != null) {
// return the value upto 2 decimal places. ignore even two decimal places
// if t is perfectly divisible by b
return (t % b == 0)
? (t / b).toStringAsFixed(0)
: (t / b).toStringAsFixed(2);
}
return value;
}
static String _formatLanguage(String value) {
final language = Language.living639_2
.firstWhereOrNull((language) => language.iso639_2 == value);
@@ -315,6 +286,7 @@ class FFProbeProps {
longitude: coordinates[1],
);
} catch (e) {
log('failed to parse location: $value', error: e);
return null;
}
}

View File

@@ -14,6 +14,8 @@ extension FilePropsExtn on EnteFile {
bool get isOwner =>
(ownerID == null) || (ownerID == Configuration.instance.getUserID());
bool get isVideo => fileType == FileType.video;
bool get canEditMetaInfo => isUploaded && isOwner;
bool get isTrash => this is TrashFile;

View File

@@ -77,11 +77,12 @@ class MagicCacheService {
MagicCacheService._privateConstructor();
void init(SharedPreferences preferences) {
_logger.info("Initializing MagicCacheService");
_prefs = preferences;
_updateCacheIfTheTimeHasCome();
}
Future<void> resetLastMagicCacheUpdateTime() async {
Future<void> _resetLastMagicCacheUpdateTime() async {
await _prefs.setInt(
_lastMagicCacheUpdateTime,
DateTime.now().millisecondsSinceEpoch,
@@ -130,14 +131,14 @@ class MagicCacheService {
try {
_logger.info("updating magic cache");
final magicPromptsData = await _loadMagicPrompts();
final magicCaches = await nonEmptyMagicResults(magicPromptsData);
final magicCaches = await _nonEmptyMagicResults(magicPromptsData);
final file = File(await _getCachePath());
if (!file.existsSync()) {
file.createSync(recursive: true);
}
file.writeAsBytesSync(MagicCache.encodeListToJson(magicCaches).codeUnits);
unawaited(
resetLastMagicCacheUpdateTime().onError((error, stackTrace) {
_resetLastMagicCacheUpdateTime().onError((error, stackTrace) {
_logger.warning(
"Error resetting last magic cache update time",
error,
@@ -194,7 +195,7 @@ class MagicCacheService {
///Returns random non-empty magic results from magicPromptsData
///Length is capped at [limit], can be less than [limit] if there are not enough
///non-empty results
Future<List<MagicCache>> nonEmptyMagicResults(
Future<List<MagicCache>> _nonEmptyMagicResults(
List<dynamic> magicPromptsData,
) async {
//Show all magic prompts to internal users for feedback on results

View File

@@ -1,17 +1,22 @@
import "dart:async";
import "dart:developer";
import "dart:io";
import "package:exif/exif.dart";
import "package:ffmpeg_kit_flutter_min/ffprobe_kit.dart";
import "package:flutter/foundation.dart";
import "package:flutter/material.dart";
import "package:logging/logging.dart";
import "package:photos/core/configuration.dart";
import "package:photos/core/event_bus.dart";
import "package:photos/events/people_changed_event.dart";
import "package:photos/generated/l10n.dart";
import "package:photos/models/ffmpeg/ffprobe_props.dart";
import "package:photos/models/file/extensions/file_props.dart";
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/file_type.dart';
import "package:photos/models/location/location.dart";
import "package:photos/models/metadata/file_magic.dart";
import "package:photos/service_locator.dart";
import "package:photos/services/file_magic_service.dart";
import 'package:photos/theme/ente_theme.dart';
import 'package:photos/ui/components/buttons/icon_button_widget.dart';
@@ -26,8 +31,8 @@ import 'package:photos/ui/viewer/file_details/exif_item_widgets.dart';
import "package:photos/ui/viewer/file_details/faces_item_widget.dart";
import "package:photos/ui/viewer/file_details/file_properties_item_widget.dart";
import "package:photos/ui/viewer/file_details/location_tags_widget.dart";
import "package:photos/ui/viewer/file_details/video_exif_item.dart";
import "package:photos/utils/exif_util.dart";
import "package:photos/utils/ffprobe_util.dart";
import "package:photos/utils/file_util.dart";
import "package:photos/utils/local_settings.dart";
@@ -44,7 +49,6 @@ class FileDetailsWidget extends StatefulWidget {
}
class _FileDetailsWidgetState extends State<FileDetailsWidget> {
final ValueNotifier<Map<String, IfdTag>?> _exifNotifier = ValueNotifier(null);
final Map<String, dynamic> _exifData = {
"focalLength": null,
"fNumber": null,
@@ -64,8 +68,11 @@ class _FileDetailsWidgetState extends State<FileDetailsWidget> {
bool _isImage = false;
late int _currentUserID;
bool showExifListTile = false;
final ValueNotifier<Map<String, IfdTag>?> _exifNotifier = ValueNotifier(null);
final ValueNotifier<bool> hasLocationData = ValueNotifier(false);
final Logger _logger = Logger("_FileDetailsWidgetState");
final ValueNotifier<FFProbeProps?> _videoMetadataNotifier =
ValueNotifier(null);
@override
void initState() {
@@ -81,7 +88,15 @@ class _FileDetailsWidgetState extends State<FileDetailsWidget> {
_exifNotifier.addListener(() {
if (_exifNotifier.value != null && !widget.file.hasLocation) {
_updateLocationFromExif(_exifNotifier.value!).ignore();
_updateLocationFromExif(locationFromExif(_exifNotifier.value!))
.ignore();
}
});
_videoMetadataNotifier.addListener(() {
if (_videoMetadataNotifier.value?.location != null &&
!widget.file.hasLocation) {
_updateLocationFromExif(_videoMetadataNotifier.value?.location)
.ignore();
}
});
@@ -96,7 +111,7 @@ class _FileDetailsWidgetState extends State<FileDetailsWidget> {
_exifData["exposureTime"] != null ||
_exifData["ISO"] != null;
});
} else {
} else if (flagService.internalUser && widget.file.isVideo) {
getMediaInfo();
}
getExif(widget.file).then((exif) {
@@ -109,25 +124,19 @@ class _FileDetailsWidgetState extends State<FileDetailsWidget> {
Future<void> getMediaInfo() async {
final File? originFile = await getFile(widget.file, isOrigin: true);
if (originFile == null) return;
final session = await FFprobeKit.getMediaInformation(originFile.path);
final mediaInfo = session.getMediaInformation();
if (mediaInfo == null) {
final failStackTrace = await session.getFailStackTrace();
final output = await session.getOutput();
_logger.severe(
'failed to get video metadata failStackTrace=$failStackTrace, output=$output',
);
return;
final properties = await getVideoPropsAsync(originFile);
_videoMetadataNotifier.value = properties;
if (kDebugMode) {
log("videoCustomProps ${properties.toString()}");
log("PropData ${properties?.prodData.toString()}");
}
//todo:(neeraj) Use probe data for back filling location
final _ = await FFProbeUtil.getProperties(mediaInfo);
setState(() {});
}
@override
void dispose() {
_exifNotifier.dispose();
_videoMetadataNotifier.dispose();
_peopleChangedEvent.cancel();
super.dispose();
}
@@ -256,6 +265,20 @@ class _FileDetailsWidgetState extends State<FileDetailsWidget> {
},
),
]);
} else if (flagService.internalUser && widget.file.isVideo) {
fileDetailsTiles.addAll([
ValueListenableBuilder(
valueListenable: _videoMetadataNotifier,
builder: (context, value, _) {
return Column(
children: [
VideoExifRowItem(file, value),
const FileDetailsDivider(),
],
);
},
),
]);
}
if (LocalSettings.instance.isFaceIndexingEnabled) {
@@ -317,14 +340,13 @@ class _FileDetailsWidgetState extends State<FileDetailsWidget> {
//This code is for updating the location of files in which location data is
//missing and the EXIF has location data. This is only happens for a
//certain specific minority of devices.
Future<void> _updateLocationFromExif(Map<String, IfdTag> exif) async {
Future<void> _updateLocationFromExif(Location? locationDataFromExif) async {
// If the file is not uploaded or the file is not owned by the current user
// then we don't need to update the location.
if (!widget.file.isUploaded || widget.file.ownerID! != _currentUserID) {
return;
}
try {
final locationDataFromExif = locationFromExif(exif);
if (locationDataFromExif?.latitude != null &&
locationDataFromExif?.longitude != null) {
widget.file.location = locationDataFromExif;

View File

@@ -1,7 +1,11 @@
import "dart:io";
import "package:flutter/foundation.dart";
import 'package:flutter/material.dart';
import 'package:logging/logging.dart';
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/file_type.dart';
import "package:photos/ui/viewer/file/video_widget.dart";
import "package:photos/ui/viewer/file/video_widget_new.dart";
import "package:photos/ui/viewer/file/zoomable_live_image_new.dart";
@@ -38,6 +42,15 @@ class FileWidget extends StatelessWidget {
key: key ?? ValueKey(fileKey),
);
} else if (file.fileType == FileType.video) {
// use old video widget on iOS simulator as the new one crashes while
// playing certain videos on iOS simulator
if (kDebugMode && Platform.isIOS) {
return VideoWidget(
file,
tagPrefix: tagPrefix,
playbackCallback: playbackCallback,
);
}
return VideoWidgetNew(
file,
tagPrefix: tagPrefix,

View File

@@ -0,0 +1,150 @@
import 'package:flutter/material.dart';
import "package:photos/models/ffmpeg/ffprobe_keys.dart";
import "package:photos/theme/ente_theme.dart";
class VideoExifDialog extends StatelessWidget {
final Map<String, dynamic> probeData;
const VideoExifDialog({Key? key, required this.probeData}) : super(key: key);
@override
Widget build(BuildContext context) {
return SingleChildScrollView(
child: Padding(
padding: const EdgeInsets.all(16.0),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
_buildGeneralInfo(context),
const SizedBox(height: 8),
_buildSection(context, 'Streams', _buildStreamsList(context)),
],
),
),
);
}
Widget _buildSection(BuildContext context, String title, Widget content) {
return Theme(
data: Theme.of(context).copyWith(dividerColor: Colors.transparent),
child: ExpansionTile(
initiallyExpanded: false,
title: Text(title, style: getEnteTextTheme(context).largeFaint),
childrenPadding: EdgeInsets.zero, // Remove padding around children
tilePadding: EdgeInsets.zero,
collapsedShape: const Border(), // Remove border when collapsed
shape: const Border(),
children: [content],
),
);
}
Widget _buildGeneralInfo(BuildContext context) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
_buildInfoRow(context, 'Creation Time', probeData, 'creation_time'),
_buildInfoRow(context, 'Duration', probeData, 'duration'),
_buildInfoRow(context, 'Location', probeData, 'location'),
_buildInfoRow(context, 'Bitrate', probeData, 'bitrate'),
_buildInfoRow(context, 'Frame Rate', probeData, FFProbeKeys.rFrameRate),
_buildInfoRow(context, 'Width', probeData, FFProbeKeys.codedWidth),
_buildInfoRow(context, 'Height', probeData, FFProbeKeys.codedHeight),
_buildInfoRow(context, 'Model', probeData, 'com.apple.quicktime.model'),
_buildInfoRow(context, 'OS', probeData, 'com.apple.quicktime.software'),
_buildInfoRow(context, 'Major Brand', probeData, 'major_brand'),
_buildInfoRow(context, 'Format', probeData, 'format'),
],
);
}
Widget _buildStreamsList(BuildContext context) {
final List<dynamic> streams = probeData['streams'];
final List<Map<String, dynamic>> data = [];
for (final stream in streams) {
final Map<String, dynamic> streamData = {};
for (final key in stream.keys) {
final dynamic value = stream[key];
if (value is List) {
continue;
}
// print type of value
if (value is int ||
value is double ||
value is String ||
value is bool) {
streamData[key] = stream[key];
} else {
streamData[key] = stream[key].toString();
}
}
data.add(streamData);
}
return Column(
children:
data.map((stream) => _buildStreamInfo(context, stream)).toList(),
);
}
Widget _buildStreamInfo(BuildContext context, Map<String, dynamic> stream) {
String titleString = stream['type']?.toString().toUpperCase() ?? '';
final codeName = stream['codec_name']?.toString().toUpperCase() ?? '';
if (codeName != 'NULL' && codeName.isNotEmpty) {
titleString += ' - $codeName';
}
return ExpansionTile(
title: Text(
titleString,
style: getEnteTextTheme(context).smallBold,
),
childrenPadding: const EdgeInsets.symmetric(vertical: 0, horizontal: 4),
tilePadding: const EdgeInsets.symmetric(vertical: 4),
children: [
Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: stream.entries
.map(
(entry) => _buildInfoRow(context, entry.key, stream, entry.key),
)
.toList(),
),
],
);
}
Widget _buildInfoRow(
BuildContext context,
String rowName,
Map<String, dynamic> data,
String dataKey,
) {
rowName = rowName.replaceAll('_', ' ');
rowName = rowName[0].toUpperCase() + rowName.substring(1);
try {
final value = data[dataKey];
if (value == null) {
return Container(); // Return an empty container if there's no data for the key.
}
return Padding(
padding: const EdgeInsets.symmetric(vertical: 4.0),
child: Row(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
SizedBox(
width: 150,
child: Text(
rowName,
style: getEnteTextTheme(context).smallMuted,
),
),
Expanded(child: Text(value.toString())),
],
),
);
} catch (e, _) {
return const SizedBox.shrink();
}
}
}

View File

@@ -0,0 +1,86 @@
import "package:flutter/material.dart";
import "package:modal_bottom_sheet/modal_bottom_sheet.dart";
import "package:photos/generated/l10n.dart";
import "package:photos/models/ffmpeg/ffprobe_props.dart";
import 'package:photos/models/file/file.dart';
import "package:photos/theme/colors.dart";
import "package:photos/theme/ente_theme.dart";
import "package:photos/ui/components/info_item_widget.dart";
import "package:photos/ui/viewer/file/video_exif_dialog.dart";
import "package:photos/utils/toast_util.dart";
class VideoExifRowItem extends StatefulWidget {
final EnteFile file;
final FFProbeProps? props;
const VideoExifRowItem(
this.file,
this.props, {
super.key,
});
@override
State<VideoExifRowItem> createState() => _VideoProbeInfoState();
}
class _VideoProbeInfoState extends State<VideoExifRowItem> {
VoidCallback? _onTap;
@override
void initState() {
super.initState();
}
@override
Widget build(BuildContext context) {
return InfoItemWidget(
leadingIcon: Icons.text_snippet_outlined,
title: "Video Info",
subtitleSection:
_exifButton(context, widget.file, widget.props?.prodData),
onTap: _onTap,
);
}
Future<List<Widget>> _exifButton(
BuildContext context,
EnteFile file,
Map<String, dynamic>? exif,
) async {
late final String label;
late final VoidCallback? onTap;
if (exif == null) {
label = S.of(context).loadingExifData;
onTap = null;
} else if (exif.isNotEmpty) {
label = "${widget.props?.videoInfo ?? ''} ..";
onTap = () => showBarModalBottomSheet(
context: context,
builder: (BuildContext context) {
return VideoExifDialog(
probeData: exif,
);
},
shape: const RoundedRectangleBorder(
side: BorderSide(width: 0),
borderRadius: BorderRadius.vertical(
top: Radius.circular(5),
),
),
topControl: const SizedBox.shrink(),
backgroundColor: getEnteColorScheme(context).backgroundElevated,
barrierColor: backdropFaintDark,
enableDrag: true,
);
} else {
label = S.of(context).noExifData;
onTap =
() => showShortToast(context, S.of(context).thisImageHasNoExifData);
}
setState(() {
_onTap = onTap;
});
return Future.value([
Text(label, style: getEnteTextTheme(context).miniBoldMuted),
]);
}
}

View File

@@ -1,12 +1,20 @@
import "dart:async";
import "dart:developer";
import "dart:io";
import "package:computer/computer.dart";
import 'package:exif/exif.dart';
import "package:ffmpeg_kit_flutter_min/ffprobe_kit.dart";
import "package:ffmpeg_kit_flutter_min/media_information.dart";
import "package:ffmpeg_kit_flutter_min/media_information_session.dart";
import "package:flutter/foundation.dart";
import 'package:intl/intl.dart';
import 'package:logging/logging.dart';
import "package:photos/models/ffmpeg/ffprobe_props.dart";
import 'package:photos/models/file/file.dart';
import "package:photos/models/location/location.dart";
import "package:photos/services/location_service.dart";
import "package:photos/utils/ffprobe_util.dart";
import 'package:photos/utils/file_util.dart';
const kDateTimeOriginal = "EXIF DateTimeOriginal";
@@ -48,6 +56,52 @@ Future<Map<String, IfdTag>?> getExifFromSourceFile(File originFile) async {
}
}
Future<FFProbeProps?> getVideoPropsAsync(File originalFile) async {
try {
final Map<int, StringBuffer> logs = {};
final completer = Completer<MediaInformation?>();
final session = await FFprobeKit.getMediaInformationAsync(
originalFile.path,
(MediaInformationSession session) async {
// This callback is called when the session is complete
final mediaInfo = session.getMediaInformation();
if (mediaInfo == null) {
_logger.warning("Failed to get video metadata");
final failStackTrace = await session.getFailStackTrace();
final output = await session.getOutput();
_logger.warning(
'Failed to get video metadata. failStackTrace=$failStackTrace, output=$output',
);
}
completer.complete(mediaInfo);
},
(log) {
// put log messages into a map
logs.putIfAbsent(log.getSessionId(), () => StringBuffer());
logs[log.getSessionId()]!.write(log.getMessage());
},
);
// Wait for the session to complete
await session.getReturnCode();
final mediaInfo = await completer.future;
if (kDebugMode) {
logs.forEach((key, value) {
log("log for session $key: $value", name: "FFprobeKit");
});
}
if (mediaInfo == null) {
return null;
}
final properties = await FFProbeUtil.getProperties(mediaInfo);
return properties;
} catch (e, s) {
_logger.severe("Failed to getVideoProps", e, s);
return null;
}
}
Future<DateTime?> getCreationTimeFromEXIF(
File? file,
Map<String, IfdTag>? exifData,

View File

@@ -14,10 +14,10 @@ class FFProbeUtil {
static Future<FFProbeProps> getProperties(
MediaInformation mediaInformation,
) async {
final properties = await _getMetadata(mediaInformation);
final properties = await getMetadata(mediaInformation);
try {
return FFProbeProps.fromJson(properties);
return FFProbeProps.parseData(properties);
} catch (e, stackTrace) {
_logger.severe(
"Error parsing FFProbe properties: $properties",
@@ -28,7 +28,7 @@ class FFProbeUtil {
}
}
static Future<Map> _getMetadata(MediaInformation information) async {
static Future<Map> getMetadata(MediaInformation information) async {
final props = information.getAllProperties();
if (props == null) return {};

View File

@@ -15,12 +15,15 @@ import 'package:photo_manager/photo_manager.dart';
import 'package:photos/core/configuration.dart';
import 'package:photos/core/constants.dart';
import 'package:photos/core/errors.dart';
import "package:photos/models/ffmpeg/ffprobe_props.dart";
import "package:photos/models/file/extensions/file_props.dart";
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/file_type.dart';
import "package:photos/models/location/location.dart";
import "package:photos/models/metadata/file_magic.dart";
import "package:photos/services/file_magic_service.dart";
import 'package:photos/utils/crypto_util.dart';
import "package:photos/utils/exif_util.dart";
import 'package:photos/utils/file_util.dart';
import "package:uuid/uuid.dart";
import 'package:video_thumbnail/video_thumbnail.dart';
@@ -110,7 +113,7 @@ Future<MediaUploadData> _getMediaUploadDataFromAssetFile(EnteFile file) async {
);
}
// h4ck to fetch location data if missing (thank you Android Q+) lazily only during uploads
await _decorateEnteFileData(file, asset);
await _decorateEnteFileData(file, asset, sourceFile);
fileHash = CryptoUtil.bin2base64(await CryptoUtil.getHash(sourceFile));
if (file.fileType == FileType.livePhoto && Platform.isIOS) {
@@ -266,7 +269,11 @@ void _assertFileType(AssetEntity asset, EnteFile file) {
);
}
Future<void> _decorateEnteFileData(EnteFile file, AssetEntity asset) async {
Future<void> _decorateEnteFileData(
EnteFile file,
AssetEntity asset,
File sourceFile,
) async {
// h4ck to fetch location data if missing (thank you Android Q+) lazily only during uploads
if (file.location == null ||
(file.location!.latitude == 0 && file.location!.longitude == 0)) {
@@ -274,7 +281,12 @@ Future<void> _decorateEnteFileData(EnteFile file, AssetEntity asset) async {
file.location =
Location(latitude: latLong.latitude, longitude: latLong.longitude);
}
if (!file.hasLocation && file.isVideo && Platform.isAndroid) {
final FFProbeProps? props = await getVideoPropsAsync(sourceFile);
if (props != null && props.location != null) {
file.location = props.location;
}
}
if (file.title == null || file.title!.isEmpty) {
_logger.warning("Title was missing ${file.tag}");
file.title = await asset.titleAsync;

View File

@@ -12,7 +12,7 @@ description: ente photos application
# Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
version: 0.9.7+907
version: 0.9.9+909
publish_to: none
environment:

View File

@@ -167,5 +167,7 @@ const localeName = (locale: SupportedLocale) => {
return "Brazilian Portuguese";
case "ru-RU":
return "Russian";
case "pl-PL":
return "Polish";
}
};

View File

@@ -1,8 +1,14 @@
import { FILE_TYPE } from "@/media/file-type";
import { isMLSupported, mlStatusSnapshot } from "@/new/photos/services/ml";
import {
isMLEnabled,
isMLSupported,
mlStatusSnapshot,
} from "@/new/photos/services/ml";
import { clipMatches } from "@/new/photos/services/ml/clip";
import type { Person } from "@/new/photos/services/ml/people";
import { EnteFile } from "@/new/photos/types/file";
import { isDesktop } from "@/next/app";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import * as chrono from "chrono-node";
import { t } from "i18next";
@@ -174,8 +180,7 @@ export async function getMLStatusSuggestion(): Promise<Suggestion> {
const status = mlStatusSnapshot();
if (!status || status.phase == "disabled" || status.phase == "paused")
return undefined;
if (!status || status.phase == "disabled") return undefined;
let label: string;
switch (status.phase) {
@@ -366,14 +371,12 @@ async function searchLocationTag(searchPhrase: string): Promise<LocationTag[]> {
}
const searchClip = async (
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_searchPhrase: string,
searchPhrase: string,
): Promise<ClipSearchScores | undefined> => {
// TODO-ML: clip-test
return undefined;
// const matches = await clipMatches(searchPhrase, ensureElectron());
// log.debug(() => ["clip/scores", matches]);
// return matches;
if (!isMLEnabled()) return undefined;
const matches = await clipMatches(searchPhrase, ensureElectron());
log.debug(() => ["clip/scores", matches]);
return matches;
};
function convertSuggestionToSearchQuery(option: Suggestion): Search {

View File

@@ -1,12 +1,8 @@
import {
disableML,
enableML,
getIsMLEnabledRemote,
isMLEnabled,
mlStatusSnapshot,
mlStatusSubscribe,
pauseML,
resumeML,
type MLStatus,
} from "@/new/photos/services/ml";
import { EnteDrawer } from "@/new/shared/components/EnteDrawer";
@@ -72,21 +68,7 @@ export const MLSettings: React.FC<MLSettingsProps> = ({
else onClose();
};
const handleEnableML = async () => {
startLoading();
try {
if (!(await getIsMLEnabledRemote())) {
setOpenFaceConsent(true);
} else {
await enableML();
}
} catch (e) {
log.error("Failed to enable or resume ML", e);
somethingWentWrong();
} finally {
finishLoading();
}
};
const handleEnableML = () => setOpenFaceConsent(true);
const handleConsent = async () => {
startLoading();
@@ -322,9 +304,6 @@ const ManageML: React.FC<ManageMLProps> = ({
let status: string;
switch (phase) {
case "paused":
status = pt("Paused");
break;
case "indexing":
status = pt("Indexing");
break;
@@ -338,17 +317,11 @@ const ManageML: React.FC<ManageMLProps> = ({
}
const processed = `${nSyncedFiles} / ${nTotalFiles}`;
const handleToggleLocal = () => (isMLEnabled() ? pauseML() : resumeML());
const confirmDisableML = () => {
setDialogBoxAttributesV2({
title: pt("Disable ML search"),
content: (
<Typography>
{pt(
"Do you want to disable ML search on all your devices?",
)}
</Typography>
content: pt(
"Do you want to disable ML search on all your devices?",
),
close: { text: t("CANCEL") },
proceed: {
@@ -371,14 +344,6 @@ const ManageML: React.FC<ManageMLProps> = ({
onClick={confirmDisableML}
/>
</MenuItemGroup>
<MenuItemGroup>
<EnteMenuItem
label={pt("On this device")}
variant="toggle"
checked={phase != "paused"}
onClick={handleToggleLocal}
/>
</MenuItemGroup>
</Stack>
<Paper variant="outlined">
<Stack>

View File

@@ -15,25 +15,13 @@ export interface ImageBitmapAndData {
}
/**
* Return an {@link ImageBitmap} and its {@link ImageData}.
*
* @param enteFile The {@link EnteFile} to index.
*
* @param uploadItem If we're called during the upload process, then this will
* be set to the {@link UploadItem} that was uploaded. This way, we can directly
* use the on-disk file instead of needing to download the original from remote.
*
* @param electron The {@link MLWorkerElectron} instance that allows us to call
* our Node.js layer for various functionality.
* Create an {@link ImageBitmap} from the given {@link imageBlob}, and return
* both the image bitmap and its {@link ImageData}.
*/
export const imageBitmapAndData = async (
enteFile: EnteFile,
uploadItem: UploadItem | undefined,
electron: MLWorkerElectron,
imageBlob: Blob,
): Promise<ImageBitmapAndData> => {
const imageBitmap = uploadItem
? await renderableUploadItemImageBitmap(enteFile, uploadItem, electron)
: await renderableImageBitmap(enteFile);
const imageBitmap = await createImageBitmap(imageBlob);
const { width, height } = imageBitmap;
@@ -47,14 +35,34 @@ export const imageBitmapAndData = async (
};
/**
* Return a {@link ImageBitmap} that downloads the source image corresponding to
* {@link enteFile} from remote.
* Return a {@link Blob} that can be used to create an {@link ImageBitmap}.
*
* The blob for the relevant image component is constructed using the given
* {@link uploadItem} if present; otherwise it is downloaded from remote.
*
* - For images the original is used.
* - For live photos the original image component is used.
* - For videos the thumbnail is used.
*
* @param enteFile The {@link EnteFile} to index.
*
* @param uploadItem If we're called during the upload process, then this will
* be set to the {@link UploadItem} that was uploaded. This way, we can directly
* use the on-disk file instead of needing to download the original from remote.
*
* @param electron The {@link MLWorkerElectron} instance that allows us to call
* our Node.js layer for various functionality.
*/
export const renderableImageBitmap = async (enteFile: EnteFile) => {
export const renderableBlob = async (
enteFile: EnteFile,
uploadItem: UploadItem | undefined,
electron: MLWorkerElectron,
): Promise<Blob> =>
uploadItem
? await renderableUploadItemBlob(enteFile, uploadItem, electron)
: await renderableEnteFileBlob(enteFile);
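Editor's note: with bitmap creation split out, indexing callers are expected to chain the two steps. A sketch of the assumed flow (the `bitmap` field name on ImageBitmapAndData is an assumption here; the worker changes later in this diff show the real call site):

```ts
// Sketch of the two-step flow after this refactor: pick the bytes, then
// decode them. Field access on the result is assumed, not shown above.
const indexOne = async (
    enteFile: EnteFile,
    uploadItem: UploadItem | undefined,
    electron: MLWorkerElectron,
) => {
    const blob = await renderableBlob(enteFile, uploadItem, electron);
    const image = await imageBitmapAndData(blob);
    // ... face / CLIP inference on `image`, Exif extraction from `blob` ...
};
```

Keeping the blob and the decoded bitmap as separate values is what lets non-ML consumers (like Exif extraction) reuse the bytes without a second download.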
export const renderableEnteFileBlob = async (enteFile: EnteFile) => {
const fileType = enteFile.metadata.fileType;
let blob: Blob | undefined;
if (fileType == FILE_TYPE.VIDEO) {
@@ -63,17 +71,10 @@ export const renderableImageBitmap = async (enteFile: EnteFile) => {
} else {
blob = await fetchRenderableBlob(enteFile);
}
return createImageBitmap(ensure(blob));
return ensure(blob);
};
/**
* Variant of {@link renderableImageBitmap} that uses the given
* {@link uploadItem} to construct the image bitmap instead of downloading the
* original from remote.
*
* For videos the thumbnail is still downloaded from remote.
*/
export const renderableUploadItemImageBitmap = async (
const renderableUploadItemBlob = async (
enteFile: EnteFile,
uploadItem: UploadItem,
electron: MLWorkerElectron,
@@ -87,7 +88,7 @@ export const renderableUploadItemImageBitmap = async (
const file = await readNonVideoUploadItem(uploadItem, electron);
blob = await renderableImageBlob(enteFile.metadata.title, file);
}
return createImageBitmap(ensure(blob));
return ensure(blob);
};
/**

View File

@@ -1,6 +1,6 @@
import type { EnteFile } from "@/new/photos/types/file";
import type { Electron } from "@/next/types/ipc";
import type { ImageBitmapAndData } from "./bitmap";
import type { ImageBitmapAndData } from "./blob";
import { clipIndexes } from "./db";
import { pixelRGBBicubic } from "./image";
import { dotProduct, norm } from "./math";

View File

@@ -1,7 +1,7 @@
import { blobCache } from "@/next/blob-cache";
import { ensure } from "@/utils/ensure";
import type { EnteFile } from "../../types/file";
import { renderableImageBitmap } from "./bitmap";
import { renderableEnteFileBlob } from "./blob";
import { type Box, type FaceIndex } from "./face";
import { clamp } from "./math";
@@ -26,7 +26,9 @@ export const regenerateFaceCrops = async (
enteFile: EnteFile,
faceIndex: FaceIndex,
) => {
const imageBitmap = await renderableImageBitmap(enteFile);
const imageBitmap = await createImageBitmap(
await renderableEnteFileBlob(enteFile),
);
try {
await saveFaceCrops(imageBitmap, faceIndex);
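Editor's note: the hunk is truncated here, but since createImageBitmap holds decoded pixel data, the usual lifecycle (a sketch, not necessarily this file's exact code) is to close the bitmap once the crops are saved:

```ts
// Sketch of the expected bitmap lifecycle (assumed; the hunk above is
// truncated before any cleanup).
try {
    await saveFaceCrops(imageBitmap, faceIndex);
} finally {
    imageBitmap.close(); // release the decoded pixels promptly
}
```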

View File

@@ -18,7 +18,7 @@ import {
translate,
type Matrix as TransformationMatrix,
} from "transformation-matrix";
import type { ImageBitmapAndData } from "./bitmap";
import type { ImageBitmapAndData } from "./blob";
import { saveFaceCrops } from "./crop";
import {
grayscaleIntMatrixFromNormalized2List,

View File

@@ -19,25 +19,18 @@ import { clearMLDB, faceIndex, indexableAndIndexedCounts } from "./db";
import { MLWorker } from "./worker";
/**
* In-memory flag that tracks if ML is enabled locally.
* In-memory flag that tracks if ML is enabled.
*
* - On app start, this is read from local storage during {@link initML}.
*
* - It gets updated if the user enables/disables ML (remote) or if they
* pause/resume ML (local).
* - It gets updated when we sync with remote (so if the user enables/disables
* ML on a different device, this in-memory value follows suit after the next
* sync).
*
* - It gets updated when the user enables/disables ML on this device.
*
* - It is cleared in {@link logoutML}.
*/
let _isMLEnabledLocal = false;
/**
* In-memory flag that tracks if the remote flag for ML is set.
*
* - It is updated each time we sync the status with remote.
*
* - It is cleared in {@link logoutML}.
*/
let _isMLEnabledRemote: boolean | undefined;
let _isMLEnabled = false;
/** Cached instance of the {@link ComlinkWorker} that wraps our web worker. */
let _comlinkWorker: ComlinkWorker<typeof MLWorker> | undefined;
@@ -121,16 +114,16 @@ export const canEnableML = async () =>
* Initialize the ML subsystem if the user has enabled it in preferences.
*/
export const initML = () => {
_isMLEnabledLocal = isMLEnabledLocally();
_isMLEnabled = isMLEnabledLocal();
};
export const logoutML = async () => {
// `terminateMLWorker` is conceptually also part of this, but for the
// reasons mentioned in [Note: Caching IDB instances in separate execution
// contexts], it gets called first in the logout sequence, and then this
// function (`logoutML`) gets called at a later point in time.
_isMLEnabledLocal = false;
_isMLEnabledRemote = undefined;
// `terminateMLWorker` is conceptually also part of this sequence, but for
// the reasons mentioned in [Note: Caching IDB instances in separate
// execution contexts], it gets called first in the logout sequence, and
// then this function (`logoutML`) gets called at a later point in time.
_isMLEnabled = false;
_mlStatusListeners = [];
_mlStatusSnapshot = undefined;
await clearMLDB();
@@ -139,22 +132,13 @@ export const logoutML = async () => {
/**
* Return true if the user has enabled machine learning in their preferences.
*
* [Note: ML preferences]
*
* The user may enable ML. This enables in both locally by persisting a local
* storage flag, and sets a flag on remote so that the user's other devices can
* also enable it if they wish.
*
* The user may pause ML locally. This does not modify the remote flag, but it
* unsets the local flag. Subsequently resuming ML (locally) will set the local
* flag again.
*
* ML related operations are driven by the {@link isMLEnabled} property. This is
* true if ML is enabled locally (which implies it is also enabled on remote).
* Enabling ML enables it both locally, by persisting a local storage flag,
* and on remote, by setting a flag so that the user's other devices can also
* enable it when they next sync with remote.
*/
export const isMLEnabled = () =>
// Implementation note: Keep it fast, it might be called frequently.
_isMLEnabledLocal;
_isMLEnabled;
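Editor's note: the snapshot/subscribe pair imported by the settings UI earlier in this diff suggests this flag is meant to be observed as an external store. A sketch, assuming mlStatusSubscribe follows the `(onChange) => unsubscribe` contract that React's useSyncExternalStore expects:

```ts
// Sketch (assumption, not part of this diff): observing ML status from
// React via the snapshot/subscribe pair exported by this module.
import { useSyncExternalStore } from "react";
import {
    mlStatusSnapshot,
    mlStatusSubscribe,
} from "@/new/photos/services/ml";

const useMLStatus = () =>
    useSyncExternalStore(mlStatusSubscribe, mlStatusSnapshot);
```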
/**
* Enable ML.
@@ -163,9 +147,8 @@ export const isMLEnabled = () =>
*/
export const enableML = async () => {
await updateIsMLEnabledRemote(true);
setIsMLEnabledLocally(true);
_isMLEnabledRemote = true;
_isMLEnabledLocal = true;
setIsMLEnabledLocal(true);
_isMLEnabled = true;
setInterimScheduledStatus();
triggerStatusUpdate();
triggerMLSync();
@@ -179,40 +162,14 @@ export const enableML = async () => {
*/
export const disableML = async () => {
await updateIsMLEnabledRemote(false);
setIsMLEnabledLocal(false);
_isMLEnabled = false;
terminateMLWorker();
setIsMLEnabledLocally(false);
_isMLEnabledRemote = false;
_isMLEnabledLocal = false;
triggerStatusUpdate();
};
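Editor's note: both enableML and disableML write the remote flag first and only flip local state after that await resolves, so a failed network call leaves local state untouched. A sketch of a call site relying on this ordering:

```ts
// Sketch of an assumed call site. If updateIsMLEnabledRemote rejects,
// enableML rethrows before touching local state, so nothing needs undoing.
try {
    await enableML();
} catch (e) {
    log.error("Failed to enable ML", e);
    // Local flag was never flipped; the UI stays in the disabled state.
}
```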
/**
* Pause ML on this device.
*
* Stop any in-progress ML tasks, and persist the user's local preference.
*/
export const pauseML = () => {
terminateMLWorker();
setIsMLEnabledLocally(false);
_isMLEnabledLocal = false;
triggerStatusUpdate();
};
/**
* Resume ML on this device.
*
* Persist the user's preference locally, and trigger a sync.
*/
export const resumeML = () => {
setIsMLEnabledLocally(true);
_isMLEnabledLocal = true;
setInterimScheduledStatus();
triggerStatusUpdate();
triggerMLSync();
};
/**
* Return true if ML is enabled locally.
* Return true if our local persistence thinks that ML is enabled.
*
* This setting is persisted locally (in local storage). It is not synced with
* remote and only tracks if ML is enabled locally.
@@ -220,13 +177,14 @@ export const resumeML = () => {
* The remote status is tracked with a separate {@link isMLEnabledRemote} flag
* that is synced with remote.
*/
const isMLEnabledLocally = () =>
const isMLEnabledLocal = () =>
// TODO-ML: Rename this flag
localStorage.getItem("faceIndexingEnabled") == "1";
/**
* Update the (locally stored) value of {@link isMLEnabledLocally}.
* Update the (locally stored) value of {@link isMLEnabledLocal}.
*/
const setIsMLEnabledLocally = (enabled: boolean) =>
const setIsMLEnabledLocal = (enabled: boolean) =>
enabled
? localStorage.setItem("faceIndexingEnabled", "1")
: localStorage.removeItem("faceIndexingEnabled");
@@ -242,10 +200,10 @@ const mlRemoteKey = "faceSearchEnabled";
/**
* Return `true` if the flag to enable ML is set on remote.
*/
export const getIsMLEnabledRemote = () => getRemoteFlag(mlRemoteKey);
const getIsMLEnabledRemote = () => getRemoteFlag(mlRemoteKey);
/**
* Update the remote flag that tracks ML status across the user's devices.
* Update the remote flag that tracks the user's ML preference.
*/
const updateIsMLEnabledRemote = (enabled: boolean) =>
updateRemoteFlag(mlRemoteKey, enabled);
@@ -255,23 +213,20 @@ const updateIsMLEnabledRemote = (enabled: boolean) =>
*
* This is called during the global sync sequence.
*
* First we check again with remote ML flag is set. If it is not set, then we
* disable ML locally too.
* * It checks with remote if the ML flag is set, and updates our local flag to
* reflect that value.
*
* Otherwise, and if ML is enabled locally also, then we use this as a signal to
* pull embeddings from remote, and start backfilling if needed.
*
* This function does not wait for these processes to run to completion, and
* returns immediately.
* * If ML is enabled, it pulls any missing embeddings from remote and starts
* indexing to backfill any missing values.
*/
export const triggerMLSync = () => void mlSync();
const mlSync = async () => {
_isMLEnabledRemote = await getIsMLEnabledRemote();
if (!_isMLEnabledRemote) _isMLEnabledLocal = false;
_isMLEnabled = await getIsMLEnabledRemote();
setIsMLEnabledLocal(_isMLEnabled);
triggerStatusUpdate();
if (_isMLEnabledLocal) void worker().then((w) => w.sync());
if (_isMLEnabled) void worker().then((w) => w.sync());
};
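Editor's note: mlSync thus makes remote authoritative on every sync, overwriting both the in-memory flag and its persisted value before any work is kicked off. A sketch of where this slots in (the surrounding sync function is hypothetical):

```ts
// Sketch (hypothetical call site): triggerMLSync is fire-and-forget, so the
// global sync sequence doesn't block on embeddings being pulled.
export const syncEverything = async () => {
    await syncCollectionsAndFiles(); // hypothetical: the app's regular sync
    triggerMLSync(); // returns immediately; mlSync runs in the background
};
```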
/**
@@ -290,7 +245,7 @@ const mlSync = async () => {
* image part of the live photo that was uploaded.
*/
export const indexNewUpload = (enteFile: EnteFile, uploadItem: UploadItem) => {
if (!_isMLEnabledLocal) return;
if (!_isMLEnabled) return;
if (enteFile.metadata.fileType !== FILE_TYPE.IMAGE) return;
log.debug(() => ["ml/liveq", { enteFile, uploadItem }]);
void worker().then((w) => w.onUpload(enteFile, uploadItem));
@@ -303,9 +258,9 @@ export type MLStatus =
* Which phase we are in within the indexing pipeline when viewed across the
* user's entire library:
*
* - "paused": ML is currently paused on this device.
*
* - "scheduled": There are files we know of that have not been indexed.
* - "scheduled": An ML job is scheduled. Likely there are files we
* know of that have not been indexed, but this is also the state before
* the first run of the indexer after app start.
*
* - "indexing": The indexer is currently running.
*
@@ -315,7 +270,7 @@ export type MLStatus =
* - "done": ML indexing and face clustering is complete for the user's
* library.
*/
phase: "paused" | "scheduled" | "indexing" | "clustering" | "done";
phase: "scheduled" | "indexing" | "clustering" | "done";
/** The number of files that have already been indexed. */
nSyncedFiles: number;
/** The total number of files that are eligible for indexing. */
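Editor's note: with "paused" gone, consumers of MLStatus only need to map four active phases (plus "disabled"). A sketch mirroring the ManageML switch earlier in this diff; the label strings are illustrative, and the app wraps them in pt():

```ts
// Sketch: exhaustive mapping of the reduced phase set (labels illustrative).
type ActivePhase = "scheduled" | "indexing" | "clustering" | "done";

const phaseLabel = (phase: ActivePhase): string => {
    switch (phase) {
        case "scheduled":
            return "Scheduled";
        case "indexing":
            return "Indexing";
        case "clustering":
            return "Clustering";
        case "done":
            return "Done";
    }
};
```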
@@ -372,27 +327,19 @@ const setMLStatusSnapshot = (snapshot: MLStatus) => {
};
/**
* Return the current state of the ML subsystem.
*
* Precondition: ML must be enabled on remote, though it is fine if it is paused
* locally.
* Compute the current state of the ML subsystem.
*/
const getMLStatus = async (): Promise<MLStatus> => {
if (!_isMLEnabledRemote) return { phase: "disabled" };
if (!_isMLEnabled) return { phase: "disabled" };
const { indexedCount, indexableCount } = await indexableAndIndexedCounts();
let phase: MLStatus["phase"];
if (!_isMLEnabledLocal) {
phase = "paused";
} else {
if (indexableCount > 0) {
const isIndexing = await (await worker()).isIndexing();
if (indexableCount > 0) {
phase = !isIndexing ? "scheduled" : "indexing";
} else {
phase = "done";
}
phase = !isIndexing ? "scheduled" : "indexing";
} else {
phase = "done";
}
return {
@@ -407,10 +354,10 @@ const getMLStatus = async (): Promise<MLStatus> => {
*
* So this is an intermediate state with possibly incorrect counts (but correct
* phase) that is set immediately to trigger a UI update. It uses the counts
* from the last known status, just updates the phase.
* from the last known status, and just updates the phase.
*
* Once the worker is initialized and the correct counts fetched, this will
* update to the correct state (should take less than one second).
* update to the correct state (should take less than a second).
*/
const setInterimScheduledStatus = () => {
let nSyncedFiles = 0,

View File

@@ -11,7 +11,11 @@ import { expose } from "comlink";
import downloadManager from "../download";
import { getAllLocalFiles, getLocalTrashedFiles } from "../files";
import type { UploadItem } from "../upload/types";
import { imageBitmapAndData, type ImageBitmapAndData } from "./bitmap";
import {
imageBitmapAndData,
renderableBlob,
type ImageBitmapAndData,
} from "./blob";
import { indexCLIP, type CLIPIndex } from "./clip";
import {
indexableFileIDs,
@@ -357,6 +361,18 @@ const syncWithLocalFilesAndGetFilesToIndex = async (
* downloaded and decrypted from remote.
*
* @param userAgent The UA of the client that is doing the indexing (us).
*
* ---
*
* [Note: ML indexing does more ML]
*
* Nominally, and primarily, indexing a file involves computing its various ML
* embeddings: faces and CLIP. However, since this is an occasion where we have
* the original file in memory, it is a great time to also compute other derived
* data related to the file (instead of having to re-download it later).
*
* So this index function also does things that are not related to ML:
* extracting and updating Exif.
*/
const index = async (
enteFile: EnteFile,
@@ -367,7 +383,20 @@ const index = async (
const f = fileLogID(enteFile);
const startTime = Date.now();
const image = await imageBitmapAndData(enteFile, uploadItem, electron);
const imageBlob = await renderableBlob(enteFile, uploadItem, electron);
let image: ImageBitmapAndData;
try {
image = await imageBitmapAndData(imageBlob);
} catch (e) {
// If we cannot get the raw image data for the file, then retrying
// won't help. It'd only make sense to retry later if we modify
// `renderableBlob` to do something different for this type of file.
log.error(`Failed to get image data for indexing ${f}`, e);
await markIndexingFailed(enteFile.id);
throw e;
}
const res = await Promise.allSettled([
_indexFace(f, enteFile, image, electron, userAgent),
// TODO-ML: clip-test
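Editor's note: the [Note: ML indexing does more ML] above is the rationale for the Promise.allSettled just visible here: independent derived-data subtasks run over the same in-memory image, and one failing shouldn't abort the rest. A sketch of the pattern; updateExif is a hypothetical stand-in for the non-ML work the note mentions:

```ts
// Sketch (updateExif is hypothetical): allSettled lets each derived-data
// subtask succeed or fail independently over the same in-memory image.
const results = await Promise.allSettled([
    _indexFace(f, enteFile, image, electron, userAgent),
    updateExif(enteFile, imageBlob), // hypothetical non-ML subtask
]);
```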

View File

@@ -5,11 +5,11 @@ import { z } from "zod";
/**
* Fetch the value for the given {@link key} from remote store.
*
* If the key is not present in the remote store, return `undefined`.
* If the key is not present in the remote store, return {@link defaultValue}.
*/
export const getRemoteValue = async (key: string) => {
export const getRemoteValue = async (key: string, defaultValue: string) => {
const url = await apiURL("/remote-store");
const params = new URLSearchParams({ key });
const params = new URLSearchParams({ key, defaultValue });
const res = await fetch(`${url}?${params.toString()}`, {
headers: await authenticatedRequestHeaders(),
});
@@ -23,7 +23,7 @@ const GetRemoteStoreResponse = z.object({ value: z.string() }).nullable();
* Convenience wrapper over {@link getRemoteValue} that returns booleans.
*/
export const getRemoteFlag = async (key: string) =>
(await getRemoteValue(key)) == "true";
(await getRemoteValue(key, "false")) == "true";
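Editor's note: in practice this means a flag the user never set now resolves cleanly instead of surfacing a null response. For example, with the ML key defined elsewhere in this diff:

```ts
// "faceSearchEnabled" is the mlRemoteKey used by the ML service in this
// same diff; an unset key now yields false rather than a null response.
const mlOnRemote = await getRemoteFlag("faceSearchEnabled");
// equivalent to: (await getRemoteValue("faceSearchEnabled", "false")) == "true"
```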
/**
* Update or insert {@link value} for the given {@link key} into remote store.

View File

@@ -28,6 +28,7 @@ export const supportedLocales = [
"es-ES" /* Spanish */,
"pt-BR" /* Portuguese, Brazilian */,
"ru-RU" /* Russian */,
"pl-PL" /* Polish */,
] as const;
/** The type of {@link supportedLocales}. */
@@ -224,6 +225,8 @@ const closestSupportedLocale = (
return "pt-BR";
} else if (ls.startsWith("ru")) {
return "ru-RU";
} else if (ls.startsWith("pl")) {
return "pl-PL";
}
}
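Editor's note: a worked example of the new branch, assuming closestSupportedLocale receives the user's preferred locale strings (its exact signature is truncated in the hunk above):

```ts
// Assumed usage; the function's signature is truncated in the hunk above.
closestSupportedLocale(["pl"]); // → "pl-PL"
closestSupportedLocale(["pl-PL", "en-US"]); // → "pl-PL"
```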