diff --git a/.github/workflows/auth-lint.yml b/.github/workflows/auth-lint.yml index 6504e0646a..e7c42e1a6b 100644 --- a/.github/workflows/auth-lint.yml +++ b/.github/workflows/auth-lint.yml @@ -3,7 +3,7 @@ name: "Lint (auth)" on: # Run on every push to a branch other than main that changes auth/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "auth/**" - ".github/workflows/auth-lint.yml" diff --git a/.github/workflows/auth-release.yml b/.github/workflows/auth-release.yml index 174b6c1d33..cf3749ae6a 100644 --- a/.github/workflows/auth-release.yml +++ b/.github/workflows/auth-release.yml @@ -85,30 +85,21 @@ jobs: - name: Install dependencies for desktop build run: | sudo apt-get update -y - sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 + sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu' - - name: Install appimagetool - run: | - wget -O appimagetool "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage" - chmod +x appimagetool - mv appimagetool /usr/local/bin/ - - name: Build desktop app run: | flutter config --enable-linux-desktop dart pub global activate flutter_distributor flutter_distributor package --platform=linux --targets=deb --skip-clean - flutter_distributor package --platform=linux --targets=rpm --skip-clean - flutter_distributor package --platform=linux --targets=appimage --skip-clean mv dist/**/*-*-linux.deb artifacts/ente-${{ github.ref_name }}-x86_64.deb - mv dist/**/*-*-linux.rpm artifacts/ente-${{ github.ref_name }}-x86_64.rpm - mv dist/**/*-*-linux.AppImage artifacts/ente-${{ 
github.ref_name }}-x86_64.AppImage env: LIBSODIUM_USE_PKGCONFIG: 1 - - name: Generate checksums - run: sha256sum artifacts/ente-* > artifacts/sha256sum + - name: Generate checksums and push to artifacts + run: | + sha256sum artifacts/ente-* > artifacts/sha256sum-apk-deb - name: Create a draft GitHub release uses: ncipollo/release-action@v1 @@ -128,6 +119,61 @@ jobs: releaseFiles: auth/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab track: internal + build-fedora-etc: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: auth + + steps: + - name: Checkout code and submodules + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install Flutter ${{ env.FLUTTER_VERSION }} + uses: subosito/flutter-action@v2 + with: + channel: "stable" + flutter-version: ${{ env.FLUTTER_VERSION }} + cache: true + + - name: Create artifacts directory + run: mkdir artifacts + + - name: Install dependencies for desktop build + run: | + sudo apt-get update -y + sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate libayatana-appindicator3-dev libffi-dev libtiff5 + sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu' + + - name: Install appimagetool + run: | + wget -O appimagetool "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage" + chmod +x appimagetool + mv appimagetool /usr/local/bin/ + + - name: Build desktop app + run: | + flutter config --enable-linux-desktop + dart pub global activate flutter_distributor + flutter_distributor package --platform=linux --targets=rpm --skip-clean + flutter_distributor package --platform=linux --targets=appimage --skip-clean + mv dist/**/*-*-linux.rpm artifacts/ente-${{ github.ref_name }}-x86_64.rpm + mv dist/**/*-*-linux.AppImage artifacts/ente-${{ github.ref_name }}-x86_64.AppImage + + - name: Generate checksums + run: sha256sum 
artifacts/ente-* >> artifacts/sha256sum-rpm-appimage + + - name: Create a draft GitHub release + uses: ncipollo/release-action@v1 + with: + artifacts: "auth/artifacts/*" + draft: true + allowUpdates: true + updateOnlyUnreleased: true + build-windows: runs-on: windows-latest diff --git a/.github/workflows/desktop-lint.yml b/.github/workflows/desktop-lint.yml index 0b8263f3d3..d1cfda884d 100644 --- a/.github/workflows/desktop-lint.yml +++ b/.github/workflows/desktop-lint.yml @@ -3,7 +3,7 @@ name: "Lint (desktop)" on: # Run on every push to a branch other than main that changes desktop/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "desktop/**" - ".github/workflows/desktop-lint.yml" diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml index 01b0c2254a..b824fe5c32 100644 --- a/.github/workflows/docs-deploy.yml +++ b/.github/workflows/docs-deploy.yml @@ -37,11 +37,8 @@ jobs: run: yarn build - name: Publish - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: help - directory: docs/docs/.vitepress/dist - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=help docs/docs/.vitepress/dist diff --git a/.github/workflows/docs-verify-build.yml b/.github/workflows/docs-verify-build.yml index a57f71c868..addb52a059 100644 --- a/.github/workflows/docs-verify-build.yml +++ b/.github/workflows/docs-verify-build.yml @@ -6,7 +6,7 @@ name: "Verify build (docs)" on: # Run on every push to a branch other than main that changes docs/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "docs/**" - ".github/workflows/docs-verify-build.yml" diff --git a/.github/workflows/mobile-internal-release.yml b/.github/workflows/mobile-internal-release.yml index 4ee7367424..fac4eb1d2f 100644 --- 
a/.github/workflows/mobile-internal-release.yml +++ b/.github/workflows/mobile-internal-release.yml @@ -1,10 +1,10 @@ -name: "Internal Release - Photos" +name: "Internal release (photos)" on: workflow_dispatch: # Allow manually running the action env: - FLUTTER_VERSION: "3.19.3" + FLUTTER_VERSION: "3.22.0" jobs: build: @@ -54,4 +54,3 @@ jobs: packageName: io.ente.photos releaseFiles: mobile/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab track: internal - changesNotSentForReview: true diff --git a/.github/workflows/mobile-lint.yml b/.github/workflows/mobile-lint.yml index 57b2ca4dbd..3a43924a35 100644 --- a/.github/workflows/mobile-lint.yml +++ b/.github/workflows/mobile-lint.yml @@ -3,13 +3,13 @@ name: "Lint (mobile)" on: # Run on every push to a branch other than main that changes mobile/ push: - branches-ignore: [main, f-droid, "deploy/**"] + branches-ignore: [main, f-droid] paths: - "mobile/**" - ".github/workflows/mobile-lint.yml" env: - FLUTTER_VERSION: "3.19.5" + FLUTTER_VERSION: "3.22.0" jobs: lint: diff --git a/.github/workflows/server-lint.yml b/.github/workflows/server-lint.yml index d25f2adcc8..c051d02901 100644 --- a/.github/workflows/server-lint.yml +++ b/.github/workflows/server-lint.yml @@ -3,7 +3,7 @@ name: "Lint (server)" on: # Run on every push to a branch other than main that changes server/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "server/**" - ".github/workflows/server-lint.yml" diff --git a/.github/workflows/server-publish.yml b/.github/workflows/server-publish.yml index 1ba1935171..b5aabbb8a2 100644 --- a/.github/workflows/server-publish.yml +++ b/.github/workflows/server-publish.yml @@ -38,3 +38,8 @@ jobs: tags: ${{ inputs.commit }}, latest username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + + - name: Tag as server/ghcr + run: | + git tag -f server/ghcr + git push -f origin server/ghcr diff --git a/.github/workflows/web-deploy-accounts.yml 
b/.github/workflows/web-deploy-accounts.yml deleted file mode 100644 index 33da5ee6f1..0000000000 --- a/.github/workflows/web-deploy-accounts.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (accounts)" - -on: - push: - # Run workflow on pushes to the deploy/accounts - branches: [deploy/accounts, deploy-f/accounts] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build accounts - run: yarn build:accounts - - - name: Publish accounts - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/accounts - directory: web/apps/accounts/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-auth.yml b/.github/workflows/web-deploy-auth.yml deleted file mode 100644 index d195b62f8c..0000000000 --- a/.github/workflows/web-deploy-auth.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (auth)" - -on: - push: - # Run workflow on pushes to the deploy/auth - branches: [deploy/auth] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build auth - run: yarn build:auth - - - name: Publish auth - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: 
ente - branch: deploy/auth - directory: web/apps/auth/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-cast.yml b/.github/workflows/web-deploy-cast.yml deleted file mode 100644 index 01e17486d0..0000000000 --- a/.github/workflows/web-deploy-cast.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (cast)" - -on: - push: - # Run workflow on pushes to the deploy/cast - branches: [deploy/cast, deploy-f/cast] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build cast - run: yarn build:cast - - - name: Publish cast - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/cast - directory: web/apps/cast/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-one.yml b/.github/workflows/web-deploy-one.yml new file mode 100644 index 0000000000..77c338513d --- /dev/null +++ b/.github/workflows/web-deploy-one.yml @@ -0,0 +1,61 @@ +name: "Deploy one (web)" + +on: + workflow_dispatch: + inputs: + app: + description: "App to build and deploy" + type: choice + required: true + default: "photos" + options: + - "accounts" + - "auth" + - "cast" + - "payments" + - "photos" + +jobs: + deploy: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: web + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "web/yarn.lock" + + - name: Install dependencies + run: yarn install + + - name: 
Build ${{ inputs.app }} + run: yarn build:${{ inputs.app }} + + - name: Publish ${{ inputs.app }} to preview + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + # [Note: Wrangler commit-dirty] + # + # Without the --commit-dirty flag, running the wrangler-action + # always prints a warning when used: + # + # Warning: Your working directory is a git repo and has uncommitted changes + # To silence this warning, pass in --commit-dirty=true + # + # There is no clear documentation of if passing this is + # harmless, but all indications and in-practice tests seem to + # indicate so. + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/${{ inputs.app }} web/apps/${{ inputs.app }}/out diff --git a/.github/workflows/web-deploy-payments.yml b/.github/workflows/web-deploy-payments.yml deleted file mode 100644 index 367e1db186..0000000000 --- a/.github/workflows/web-deploy-payments.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (payments)" - -on: - push: - # Run workflow on pushes to the deploy/payments - branches: [deploy/payments] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build payments - run: yarn build:payments - - - name: Publish payments - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/payments - directory: web/apps/payments/dist - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-photos.yml b/.github/workflows/web-deploy-photos.yml deleted file mode 
100644 index cb3a9db86d..0000000000 --- a/.github/workflows/web-deploy-photos.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (photos)" - -on: - push: - # Run workflow on pushes to the deploy/photos - branches: [deploy/photos] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build photos - run: yarn build:photos - - - name: Publish photos - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/photos - directory: web/apps/photos/out - wranglerVersion: "3" diff --git a/.github/workflows/web-preview.yml b/.github/workflows/web-deploy-preview.yml similarity index 84% rename from .github/workflows/web-preview.yml rename to .github/workflows/web-deploy-preview.yml index 8f39c02474..4bb1870726 100644 --- a/.github/workflows/web-preview.yml +++ b/.github/workflows/web-deploy-preview.yml @@ -1,4 +1,4 @@ -name: "Preview (web)" +name: "Deploy preview (web)" on: workflow_dispatch: @@ -43,11 +43,8 @@ jobs: run: yarn build:${{ inputs.app }} - name: Publish ${{ inputs.app }} to preview - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: preview - directory: web/apps/${{ inputs.app }}/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=preview web/apps/${{ inputs.app }}/out diff --git a/.github/workflows/web-deploy-staff.yml b/.github/workflows/web-deploy-staff.yml index 4d386344df..854e163644 100644 --- 
a/.github/workflows/web-deploy-staff.yml +++ b/.github/workflows/web-deploy-staff.yml @@ -38,11 +38,8 @@ jobs: run: yarn build:staff - name: Publish staff - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/staff - directory: web/apps/staff/dist - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/staff web/apps/staff/dist diff --git a/.github/workflows/web-deploy-staging.yml b/.github/workflows/web-deploy-staging.yml new file mode 100644 index 0000000000..ca3a6142b2 --- /dev/null +++ b/.github/workflows/web-deploy-staging.yml @@ -0,0 +1,86 @@ +name: "Deploy staging (web)" + +on: + schedule: + # Run everyday at ~3:00 PM IST + # + # See: [Note: Run workflow every 24 hours] + - cron: "25 9 * * *" + # Also allow manually running the workflow + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: web + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "web/yarn.lock" + + - name: Install dependencies + run: yarn install + + - name: Build photos + run: yarn build:photos + env: + NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh + + - name: Publish photos + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-photos web/apps/photos/out + + - name: Build accounts + run: yarn build:accounts + + - name: Publish accounts + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + 
command: pages deploy --project-name=ente --commit-dirty=true --branch=n-accounts web/apps/accounts/out + + - name: Build auth + run: yarn build:auth + + - name: Publish auth + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-auth web/apps/auth/out + + - name: Build cast + run: yarn build:cast + + - name: Publish cast + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-cast web/apps/cast/out + + - name: Build payments + run: yarn build:payments + + - name: Publish payments + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-payments web/apps/payments/dist diff --git a/.github/workflows/web-nightly.yml b/.github/workflows/web-deploy.yml similarity index 60% rename from .github/workflows/web-nightly.yml rename to .github/workflows/web-deploy.yml index 9497382924..6f6a113f24 100644 --- a/.github/workflows/web-nightly.yml +++ b/.github/workflows/web-deploy.yml @@ -1,17 +1,21 @@ -name: "Nightly (web)" +name: "Deploy (web)" on: schedule: # [Note: Run workflow every 24 hours] # - # Run every 24 hours - First field is minute, second is hour of the day - # This runs 23:15 UTC everyday - 1 and 15 are just arbitrary offset to - # avoid scheduling it on the exact hour, as suggested by GitHub. + # Run everyday at ~8:00 AM IST (except Sundays). + # + # First field is minute, second is hour of the day. Last is day of week, + # 0 being Sunday. + # + # Add a few minutes of offset to avoid scheduling on exact hourly + # boundaries (recommended by GitHub to avoid congestion). 
# # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule # https://crontab.guru/ # - - cron: "15 23 * * *" + - cron: "25 2 * * 1-6" # Also allow manually running the workflow workflow_dispatch: @@ -39,69 +43,52 @@ jobs: - name: Install dependencies run: yarn install + - name: Build photos + run: yarn build:photos + + - name: Publish photos + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/photos web/apps/photos/out + - name: Build accounts run: yarn build:accounts - name: Publish accounts - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-accounts - directory: web/apps/accounts/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/accounts web/apps/accounts/out - name: Build auth run: yarn build:auth - name: Publish auth - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-auth - directory: web/apps/auth/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/auth web/apps/auth/out - name: Build cast run: yarn build:cast - name: Publish cast - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-cast - directory: web/apps/cast/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/cast web/apps/cast/out - name: Build payments run: yarn build:payments - name: 
Publish payments - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-payments - directory: web/apps/payments/dist - wranglerVersion: "3" - - - name: Build photos - run: yarn build:photos - env: - NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh - - - name: Publish photos - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-photos - directory: web/apps/photos/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/payments web/apps/payments/dist diff --git a/.github/workflows/web-lint.yml b/.github/workflows/web-lint.yml index 0dc11aa0e7..7f5d270029 100644 --- a/.github/workflows/web-lint.yml +++ b/.github/workflows/web-lint.yml @@ -3,7 +3,7 @@ name: "Lint (web)" on: # Run on every push to a branch other than main that changes web/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "web/**" - ".github/workflows/web-lint.yml" diff --git a/.gitignore b/.gitignore index 35ef93d427..0901b55d64 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ -# Let folks use their custom .vscode settings +# Let folks use their custom editor settings .vscode +.idea # macOS .DS_Store diff --git a/auth/android/app/build.gradle b/auth/android/app/build.gradle index 5621b08b6f..a0179af5b4 100644 --- a/auth/android/app/build.gradle +++ b/auth/android/app/build.gradle @@ -1,3 +1,9 @@ +plugins { + id "com.android.application" + id "kotlin-android" + id "dev.flutter.flutter-gradle-plugin" +} + def localProperties = new Properties() def localPropertiesFile = rootProject.file('local.properties') if (localPropertiesFile.exists()) { @@ -6,11 +12,6 @@ if (localPropertiesFile.exists()) { } } -def flutterRoot = 
localProperties.getProperty('flutter.sdk') -if (flutterRoot == null) { - throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.") -} - def flutterVersionCode = localProperties.getProperty('flutter.versionCode') if (flutterVersionCode == null) { flutterVersionCode = '1' @@ -21,10 +22,6 @@ if (flutterVersionName == null) { flutterVersionName = '1.0' } -apply plugin: 'com.android.application' -apply plugin: 'kotlin-android' -apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" - def keystoreProperties = new Properties() def keystorePropertiesFile = rootProject.file('key.properties') if (keystorePropertiesFile.exists()) { @@ -32,7 +29,18 @@ if (keystorePropertiesFile.exists()) { } android { - compileSdkVersion 34 + namespace "io.ente.auth" + compileSdk 34 + ndkVersion flutter.ndkVersion + + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } + + kotlinOptions { + jvmTarget = '1.8' + } sourceSets { main.java.srcDirs += 'src/main/kotlin' @@ -46,6 +54,8 @@ android { defaultConfig { applicationId "io.ente.auth" + // You can update the following values to match your application needs. + // For more information, see: https://docs.flutter.dev/deployment/android#reviewing-the-gradle-build-configuration. minSdkVersion 21 targetSdkVersion 33 versionCode flutterVersionCode.toInteger() @@ -105,13 +115,4 @@ flutter { source '../..' 
} -dependencies { - implementation 'io.sentry:sentry-android:2.0.0' - implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" - implementation 'com.android.support:multidex:1.0.3' - implementation 'com.google.guava:guava:28.2-android' - implementation 'com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava' - testImplementation 'junit:junit:4.12' - androidTestImplementation 'androidx.test:runner:1.1.1' - androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1' -} +dependencies {} diff --git a/auth/android/app/src/debug/AndroidManifest.xml b/auth/android/app/src/debug/AndroidManifest.xml index 68e4e89c47..399f6981d5 100644 --- a/auth/android/app/src/debug/AndroidManifest.xml +++ b/auth/android/app/src/debug/AndroidManifest.xml @@ -1,6 +1,6 @@ - - diff --git a/auth/android/app/src/main/AndroidManifest.xml b/auth/android/app/src/main/AndroidManifest.xml index abe72b5650..7c7a8ba5f0 100644 --- a/auth/android/app/src/main/AndroidManifest.xml +++ b/auth/android/app/src/main/AndroidManifest.xml @@ -1,8 +1,7 @@ + xmlns:tools="http://schemas.android.com/tools"> - + - diff --git a/auth/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-hdpi/ic_launcher.png deleted file mode 100644 index fbfe92399f..0000000000 Binary files a/auth/android/app/src/main/res/mipmap-hdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png index 6fbcb6df90..be000c8b34 100644 Binary files a/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-hdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-mdpi/ic_launcher.png deleted file mode 100644 index 6105c4a2b8..0000000000 Binary files a/auth/android/app/src/main/res/mipmap-mdpi/ic_launcher.png and /dev/null 
differ diff --git a/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png index 13fdf3b88d..f49d34bb5a 100644 Binary files a/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-mdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png deleted file mode 100644 index b34272b611..0000000000 Binary files a/auth/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png index 5f852e4a38..ef950b6e9d 100644 Binary files a/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-xhdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png deleted file mode 100644 index faa2e9c60d..0000000000 Binary files a/auth/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png index 5c82f386a1..e97eba5d2b 100644 Binary files a/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-xxhdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/auth/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png deleted file mode 100644 index 9814894c66..0000000000 Binary files a/auth/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png and /dev/null differ diff --git a/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png b/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png index 
3bea3482ca..a37c745ae8 100644 Binary files a/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png and b/auth/android/app/src/main/res/mipmap-xxxhdpi/launcher_icon.png differ diff --git a/auth/android/app/src/main/res/values-night-v31/styles.xml b/auth/android/app/src/main/res/values-night-v31/styles.xml index 2c379953fa..c4a573dfe2 100644 --- a/auth/android/app/src/main/res/values-night-v31/styles.xml +++ b/auth/android/app/src/main/res/values-night-v31/styles.xml @@ -4,7 +4,10 @@ diff --git a/auth/android/build.gradle b/auth/android/build.gradle index 47890036d0..bc157bd1a1 100644 --- a/auth/android/build.gradle +++ b/auth/android/build.gradle @@ -1,16 +1,3 @@ -buildscript { - ext.kotlin_version = '1.8.22' - repositories { - google() - mavenCentral() - } - - dependencies { - classpath 'com.android.tools.build:gradle:7.1.2' - classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" - } -} - allprojects { repositories { google() @@ -21,6 +8,8 @@ allprojects { rootProject.buildDir = '../build' subprojects { project.buildDir = "${rootProject.buildDir}/${project.name}" +} +subprojects { project.evaluationDependsOn(':app') } diff --git a/auth/android/gradle.properties b/auth/android/gradle.properties index 94adc3a3f9..598d13fee4 100644 --- a/auth/android/gradle.properties +++ b/auth/android/gradle.properties @@ -1,3 +1,3 @@ -org.gradle.jvmargs=-Xmx1536M +org.gradle.jvmargs=-Xmx4G android.useAndroidX=true android.enableJetifier=true diff --git a/auth/android/gradle/wrapper/gradle-wrapper.properties b/auth/android/gradle/wrapper/gradle-wrapper.properties index cc5527d781..e1ca574ef0 100644 --- a/auth/android/gradle/wrapper/gradle-wrapper.properties +++ b/auth/android/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Fri Jun 23 08:50:38 CEST 2017 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists 
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.3-all.zip diff --git a/auth/android/settings.gradle b/auth/android/settings.gradle index 44e62bcf06..748caceba7 100644 --- a/auth/android/settings.gradle +++ b/auth/android/settings.gradle @@ -1,11 +1,26 @@ -include ':app' +pluginManagement { + def flutterSdkPath = { + def properties = new Properties() + file("local.properties").withInputStream { properties.load(it) } + def flutterSdkPath = properties.getProperty("flutter.sdk") + assert flutterSdkPath != null, "flutter.sdk not set in local.properties" + return flutterSdkPath + } + settings.ext.flutterSdkPath = flutterSdkPath() -def localPropertiesFile = new File(rootProject.projectDir, "local.properties") -def properties = new Properties() + includeBuild("${settings.ext.flutterSdkPath}/packages/flutter_tools/gradle") -assert localPropertiesFile.exists() -localPropertiesFile.withReader("UTF-8") { reader -> properties.load(reader) } + repositories { + google() + mavenCentral() + gradlePluginPortal() + } +} -def flutterSdkPath = properties.getProperty("flutter.sdk") -assert flutterSdkPath != null, "flutter.sdk not set in local.properties" -apply from: "$flutterSdkPath/packages/flutter_tools/gradle/app_plugin_loader.gradle" +plugins { + id "dev.flutter.flutter-plugin-loader" version "1.0.0" + id "com.android.application" version "7.3.0" apply false + id "org.jetbrains.kotlin.android" version "1.8.22" apply false +} + +include ":app" diff --git a/auth/assets/generation-icons/icon-light-adaptive-bg.png b/auth/assets/generation-icons/icon-light-adaptive-bg.png new file mode 100644 index 0000000000..d7bde2bdd1 Binary files /dev/null and b/auth/assets/generation-icons/icon-light-adaptive-bg.png differ diff --git a/auth/assets/generation-icons/icon-light-adaptive-fg.png b/auth/assets/generation-icons/icon-light-adaptive-fg.png index c3899f4468..6c1121a49e 100644 Binary files 
a/auth/assets/generation-icons/icon-light-adaptive-fg.png and b/auth/assets/generation-icons/icon-light-adaptive-fg.png differ diff --git a/auth/assets/generation-icons/icon-light.png b/auth/assets/generation-icons/icon-light.png index 5ef7b5a8a6..cccf23a2c7 100644 Binary files a/auth/assets/generation-icons/icon-light.png and b/auth/assets/generation-icons/icon-light.png differ diff --git a/auth/assets/icons/auth-icon.ico b/auth/assets/icons/auth-icon.ico index 38bb22bcf8..40b9b4837b 100644 Binary files a/auth/assets/icons/auth-icon.ico and b/auth/assets/icons/auth-icon.ico differ diff --git a/auth/assets/icons/auth-icon.png b/auth/assets/icons/auth-icon.png index 3db75740bb..ba6adca5b5 100644 Binary files a/auth/assets/icons/auth-icon.png and b/auth/assets/icons/auth-icon.png differ diff --git a/auth/assets/splash-screen-dark.png b/auth/assets/splash-screen-dark.png deleted file mode 100644 index 5401a47adc..0000000000 Binary files a/auth/assets/splash-screen-dark.png and /dev/null differ diff --git a/auth/assets/splash-screen-light.png b/auth/assets/splash-screen-light.png deleted file mode 100644 index a97df13b30..0000000000 Binary files a/auth/assets/splash-screen-light.png and /dev/null differ diff --git a/auth/assets/splash/splash-icon-fg-12.png b/auth/assets/splash/splash-icon-fg-12.png new file mode 100644 index 0000000000..1a82d32f2b Binary files /dev/null and b/auth/assets/splash/splash-icon-fg-12.png differ diff --git a/auth/assets/splash/splash-icon-fg.png b/auth/assets/splash/splash-icon-fg.png new file mode 100644 index 0000000000..58139acb24 Binary files /dev/null and b/auth/assets/splash/splash-icon-fg.png differ diff --git a/auth/assets/svg/button-tint.svg b/auth/assets/svg/button-tint.svg new file mode 100644 index 0000000000..1751aece14 --- /dev/null +++ b/auth/assets/svg/button-tint.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/auth/assets/svg/pin-active.svg b/auth/assets/svg/pin-active.svg new file mode 100644 index 
0000000000..3ba870f5df --- /dev/null +++ b/auth/assets/svg/pin-active.svg @@ -0,0 +1,4 @@ + + + + diff --git a/auth/assets/svg/pin-card.svg b/auth/assets/svg/pin-card.svg new file mode 100644 index 0000000000..59b6e15e40 --- /dev/null +++ b/auth/assets/svg/pin-card.svg @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/auth/assets/svg/pin-inactive.svg b/auth/assets/svg/pin-inactive.svg new file mode 100644 index 0000000000..2cc59a362e --- /dev/null +++ b/auth/assets/svg/pin-inactive.svg @@ -0,0 +1,3 @@ + + + diff --git a/auth/ios/Podfile.lock b/auth/ios/Podfile.lock index 991f52b42a..814568fba4 100644 --- a/auth/ios/Podfile.lock +++ b/auth/ios/Podfile.lock @@ -6,35 +6,35 @@ PODS: - ReachabilitySwift - device_info_plus (0.0.1): - Flutter - - DKImagePickerController/Core (4.3.4): + - DKImagePickerController/Core (4.3.9): - DKImagePickerController/ImageDataManager - DKImagePickerController/Resource - - DKImagePickerController/ImageDataManager (4.3.4) - - DKImagePickerController/PhotoGallery (4.3.4): + - DKImagePickerController/ImageDataManager (4.3.9) + - DKImagePickerController/PhotoGallery (4.3.9): - DKImagePickerController/Core - DKPhotoGallery - - DKImagePickerController/Resource (4.3.4) - - DKPhotoGallery (0.0.17): - - DKPhotoGallery/Core (= 0.0.17) - - DKPhotoGallery/Model (= 0.0.17) - - DKPhotoGallery/Preview (= 0.0.17) - - DKPhotoGallery/Resource (= 0.0.17) + - DKImagePickerController/Resource (4.3.9) + - DKPhotoGallery (0.0.19): + - DKPhotoGallery/Core (= 0.0.19) + - DKPhotoGallery/Model (= 0.0.19) + - DKPhotoGallery/Preview (= 0.0.19) + - DKPhotoGallery/Resource (= 0.0.19) - SDWebImage - SwiftyGif - - DKPhotoGallery/Core (0.0.17): + - DKPhotoGallery/Core (0.0.19): - DKPhotoGallery/Model - DKPhotoGallery/Preview - SDWebImage - SwiftyGif - - DKPhotoGallery/Model (0.0.17): + - DKPhotoGallery/Model (0.0.19): - SDWebImage - SwiftyGif - - DKPhotoGallery/Preview (0.0.17): + - DKPhotoGallery/Preview (0.0.19): - DKPhotoGallery/Model - DKPhotoGallery/Resource - 
SDWebImage - SwiftyGif - - DKPhotoGallery/Resource (0.0.17): + - DKPhotoGallery/Resource (0.0.19): - SDWebImage - SwiftyGif - file_picker (0.0.1): @@ -81,17 +81,15 @@ PODS: - qr_code_scanner (0.2.0): - Flutter - MTBBarcodeScanner - - ReachabilitySwift (5.2.1) - - SDWebImage (5.19.0): - - SDWebImage/Core (= 5.19.0) - - SDWebImage/Core (5.19.0) - - Sentry/HybridSDK (8.21.0): - - SentryPrivate (= 8.21.0) - - sentry_flutter (7.19.0): + - ReachabilitySwift (5.2.2) + - SDWebImage (5.19.2): + - SDWebImage/Core (= 5.19.2) + - SDWebImage/Core (5.19.2) + - Sentry/HybridSDK (8.25.0) + - sentry_flutter (7.20.1): - Flutter - FlutterMacOS - - Sentry/HybridSDK (= 8.21.0) - - SentryPrivate (8.21.0) + - Sentry/HybridSDK (= 8.25.0) - share_plus (0.0.1): - Flutter - shared_preferences_foundation (0.0.1): @@ -102,23 +100,23 @@ PODS: - sqflite (0.0.3): - Flutter - FlutterMacOS - - sqlite3 (3.45.1): - - sqlite3/common (= 3.45.1) - - sqlite3/common (3.45.1) - - sqlite3/fts5 (3.45.1): + - "sqlite3 (3.45.3+1)": + - "sqlite3/common (= 3.45.3+1)" + - "sqlite3/common (3.45.3+1)" + - "sqlite3/fts5 (3.45.3+1)": - sqlite3/common - - sqlite3/perf-threadsafe (3.45.1): + - "sqlite3/perf-threadsafe (3.45.3+1)": - sqlite3/common - - sqlite3/rtree (3.45.1): + - "sqlite3/rtree (3.45.3+1)": - sqlite3/common - sqlite3_flutter_libs (0.0.1): - Flutter - - sqlite3 (~> 3.45.1) + - "sqlite3 (~> 3.45.3+1)" - sqlite3/fts5 - sqlite3/perf-threadsafe - sqlite3/rtree - - SwiftyGif (5.4.4) - - Toast (4.1.0) + - SwiftyGif (5.4.5) + - Toast (4.1.1) - url_launcher_ios (0.0.1): - Flutter @@ -160,7 +158,6 @@ SPEC REPOS: - ReachabilitySwift - SDWebImage - Sentry - - SentryPrivate - sqlite3 - SwiftyGif - Toast @@ -225,19 +222,19 @@ SPEC CHECKSUMS: app_links: e70ca16b4b0f88253b3b3660200d4a10b4ea9795 connectivity_plus: bf0076dd84a130856aa636df1c71ccaff908fa1d device_info_plus: c6fb39579d0f423935b0c9ce7ee2f44b71b9fce6 - DKImagePickerController: b512c28220a2b8ac7419f21c491fc8534b7601ac - DKPhotoGallery: 
fdfad5125a9fdda9cc57df834d49df790dbb4179 + DKImagePickerController: 946cec48c7873164274ecc4624d19e3da4c1ef3c + DKPhotoGallery: b3834fecb755ee09a593d7c9e389d8b5d6deed60 file_picker: 15fd9539e4eb735dc54bae8c0534a7a9511a03de file_saver: 503e386464dbe118f630e17b4c2e1190fa0cf808 fk_user_agent: 1f47ec39291e8372b1d692b50084b0d54103c545 Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7 - flutter_email_sender: 02d7443217d8c41483223627972bfdc09f74276b + flutter_email_sender: 10a22605f92809a11ef52b2f412db806c6082d40 flutter_inappwebview_ios: 97215cf7d4677db55df76782dbd2930c5e1c1ea0 flutter_local_authentication: 1172a4dd88f6306dadce067454e2c4caf07977bb flutter_local_notifications: 4cde75091f6327eb8517fa068a0a5950212d2086 flutter_native_splash: edf599c81f74d093a4daf8e17bd7a018854bc778 flutter_secure_storage: 23fc622d89d073675f2eaa109381aefbcf5a49be - fluttertoast: 31b00dabfa7fb7bacd9e7dbee580d7a2ff4bf265 + fluttertoast: 9f2f8e81bb5ce18facb9748d7855bf5a756fe3db local_auth_darwin: c7e464000a6a89e952235699e32b329457608d98 move_to_background: 39a5b79b26d577b0372cbe8a8c55e7aa9fcd3a2d MTBBarcodeScanner: f453b33c4b7dfe545d8c6484ed744d55671788cb @@ -246,19 +243,18 @@ SPEC CHECKSUMS: path_provider_foundation: 3784922295ac71e43754bd15e0653ccfd36a147c privacy_screen: 1a131c052ceb3c3659934b003b0d397c2381a24e qr_code_scanner: bb67d64904c3b9658ada8c402e8b4d406d5d796e - ReachabilitySwift: 5ae15e16814b5f9ef568963fb2c87aeb49158c66 - SDWebImage: 981fd7e860af070920f249fd092420006014c3eb - Sentry: ebc12276bd17613a114ab359074096b6b3725203 - sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5 - SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe + ReachabilitySwift: 2128f3a8c9107e1ad33574c6e58e8285d460b149 + SDWebImage: dfe95b2466a9823cf9f0c6d01217c06550d7b29a + Sentry: cd86fc55628f5b7c572cabe66cc8f95a9d2f165a + sentry_flutter: 4cb24c1055c556d7b27262ab2e179d1e5a0b9b0c share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5 shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695 
sodium_libs: 1faae17af662384acbd13e41867a0008cd2e2318 sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec - sqlite3: 73b7fc691fdc43277614250e04d183740cb15078 - sqlite3_flutter_libs: af0e8fe9bce48abddd1ffdbbf839db0302d72d80 - SwiftyGif: 93a1cc87bf3a51916001cf8f3d63835fb64c819f - Toast: ec33c32b8688982cecc6348adeae667c1b9938da + sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a + sqlite3_flutter_libs: 9bfe005308998aeca155330bbc2ea6dddf834a3b + SwiftyGif: 706c60cf65fa2bc5ee0313beece843c8eb8194d4 + Toast: 1f5ea13423a1e6674c4abdac5be53587ae481c4e url_launcher_ios: 6116280ddcfe98ab8820085d8d76ae7449447586 PODFILE CHECKSUM: b4e3a7eabb03395b66e81fc061789f61526ee6bb diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png index 23ac5355ea..c3d5e06752 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png index 233c57d841..92a287d035 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png index 8dfb32a974..73c2972e76 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png index 780cae73ae..45a215602d 100644 Binary 
files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png index 09f8c298d4..8a871c8e1b 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png index d198bb0829..3655056e3c 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png index 90060839db..3cdcbe9237 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png index 8dfb32a974..73c2972e76 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png index fe8e47ed38..7bf74dea01 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png and 
b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png index 14e9af73df..6cb3e22cd7 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@1x.png new file mode 100644 index 0000000000..8fb6f13c68 Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@2x.png new file mode 100644 index 0000000000..63c4f03dba Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-50x50@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@1x.png new file mode 100644 index 0000000000..6ab8f0dc21 Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@2x.png new file mode 100644 index 0000000000..9d2b175ed6 Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-57x57@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png index 14e9af73df..6cb3e22cd7 100644 Binary files 
a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png index 21b297f8d7..5c75eab747 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@1x.png new file mode 100644 index 0000000000..f36ab4838d Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@2x.png new file mode 100644 index 0000000000..8dc12384bb Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-72x72@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png index f7ef5fa1bc..cccb2c4fe5 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png index e2ed1b2839..1355c5b74c 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png differ diff --git 
a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png index 450115a343..15e1f2c686 100644 Binary files a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/ItunesArtwork@2x.png b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/ItunesArtwork@2x.png new file mode 100644 index 0000000000..f04fe3978e Binary files /dev/null and b/auth/ios/Runner/Assets.xcassets/AppIcon.appiconset/ItunesArtwork@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json index fa3132785b..8bb185b107 100644 --- a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json +++ b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/Contents.json @@ -2,8 +2,7 @@ "images" : [ { "filename" : "background.png", - "idiom" : "universal", - "scale" : "1x" + "idiom" : "universal" }, { "appearances" : [ @@ -13,36 +12,7 @@ } ], "filename" : "darkbackground.png", - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "scale" : "2x" - }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "idiom" : "universal", - "scale" : "2x" - }, - { - "idiom" : "universal", - "scale" : "3x" - }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "idiom" : "universal", - "scale" : "3x" + "idiom" : "universal" } ], "info" : { diff --git a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png index e29b3b59f9..3107d37fa5 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png and 
b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/background.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png index 1b5df34e7b..71e9c817e8 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png and b/auth/ios/Runner/Assets.xcassets/LaunchBackground.imageset/darkbackground.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json index f3387d4ae7..00cabce836 100644 --- a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json +++ b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json @@ -5,48 +5,15 @@ "idiom" : "universal", "scale" : "1x" }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "filename" : "LaunchImageDark.png", - "idiom" : "universal", - "scale" : "1x" - }, { "filename" : "LaunchImage@2x.png", "idiom" : "universal", "scale" : "2x" }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "filename" : "LaunchImageDark@2x.png", - "idiom" : "universal", - "scale" : "2x" - }, { "filename" : "LaunchImage@3x.png", "idiom" : "universal", "scale" : "3x" - }, - { - "appearances" : [ - { - "appearance" : "luminosity", - "value" : "dark" - } - ], - "filename" : "LaunchImageDark@3x.png", - "idiom" : "universal", - "scale" : "3x" } ], "info" : { diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png index 899cecf22c..91acb41ae9 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png and b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png 
b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png index 4bb7a5751b..9a7c72afa9 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png and b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png index 176f0c723b..5b4d99582b 100644 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png and b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark.png deleted file mode 100644 index 87f84c70e6..0000000000 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark.png and /dev/null differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@2x.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@2x.png deleted file mode 100644 index ce01bec05c..0000000000 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@2x.png and /dev/null differ diff --git a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@3x.png b/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@3x.png deleted file mode 100644 index 75f4b1f3c5..0000000000 Binary files a/auth/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImageDark@3x.png and /dev/null differ diff --git a/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard b/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard index 8d2b7d51a3..9e6bc010be 100644 --- a/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard +++ b/auth/ios/Runner/Base.lproj/LaunchScreen.storyboard @@ -38,7 +38,7 @@ - + diff --git a/auth/ios/Runner/Info.plist b/auth/ios/Runner/Info.plist 
index 35921ba0cc..87b959646b 100644 --- a/auth/ios/Runner/Info.plist +++ b/auth/ios/Runner/Info.plist @@ -1,86 +1,86 @@ - - CADisableMinimumFrameDurationOnPhone - - CFBundleDevelopmentRegion - $(DEVELOPMENT_LANGUAGE) - CFBundleDisplayName - auth - CFBundleExecutable - $(EXECUTABLE_NAME) - CFBundleIdentifier - $(PRODUCT_BUNDLE_IDENTIFIER) - CFBundleInfoDictionaryVersion - 6.0 - CFBundleLocalizations - - en - es - - CFBundleName - auth - CFBundlePackageType - APPL - CFBundleShortVersionString - $(FLUTTER_BUILD_NAME) - CFBundleSignature - ???? - CFBundleURLTypes - - - CFBundleTypeRole - Editor - CFBundleURLName - $(PRODUCT_BUNDLE_IDENTIFIER) - CFBundleURLSchemes - - otpauth - enteauth - - - - CFBundleVersion - $(FLUTTER_BUILD_NUMBER) - ITSAppUsesNonExemptEncryption - - LSRequiresIPhoneOS - - MinimumOSVersion - 12.0 - NSCameraUsageDescription - This app needs camera access to scan QR codes - NSFaceIDUsageDescription - Please allow auth to lock itself with FaceID or TouchID - NSPhotoLibraryUsageDescription - Please allow auth to pick a file to import data from - UIApplicationSupportsIndirectInputEvents - - UILaunchStoryboardName - LaunchScreen - UIMainStoryboardFile - Main - UIStatusBarHidden - - UISupportedInterfaceOrientations - - UIInterfaceOrientationPortrait - UIInterfaceOrientationLandscapeLeft - UIInterfaceOrientationLandscapeRight - - UISupportedInterfaceOrientations~ipad - - UIInterfaceOrientationPortrait - UIInterfaceOrientationPortraitUpsideDown - UIInterfaceOrientationLandscapeLeft - UIInterfaceOrientationLandscapeRight - - UIViewControllerBasedStatusBarAppearance - - LSSupportsOpeningDocumentsInPlace - - UIFileSharingEnabled - - + + CADisableMinimumFrameDurationOnPhone + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleDisplayName + auth + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleLocalizations + + en + es + + CFBundleName + Auth + 
CFBundlePackageType + APPL + CFBundleShortVersionString + $(FLUTTER_BUILD_NAME) + CFBundleSignature + ???? + CFBundleURLTypes + + + CFBundleTypeRole + Editor + CFBundleURLName + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleURLSchemes + + otpauth + enteauth + + + + CFBundleVersion + $(FLUTTER_BUILD_NUMBER) + ITSAppUsesNonExemptEncryption + + LSRequiresIPhoneOS + + MinimumOSVersion + 12.0 + NSCameraUsageDescription + This app needs camera access to scan QR codes + NSFaceIDUsageDescription + Please allow auth to lock itself with FaceID or TouchID + NSPhotoLibraryUsageDescription + Please allow auth to pick a file to import data from + UIApplicationSupportsIndirectInputEvents + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UIStatusBarHidden + + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UIViewControllerBasedStatusBarAppearance + + LSSupportsOpeningDocumentsInPlace + + UIFileSharingEnabled + + diff --git a/auth/lib/app/view/app.dart b/auth/lib/app/view/app.dart index 3bd9d4e734..5053f70f12 100644 --- a/auth/lib/app/view/app.dart +++ b/auth/lib/app/view/app.dart @@ -189,7 +189,7 @@ class _AppState extends State with WindowListener, TrayListener { windowManager.show(); break; case 'exit_app': - windowManager.close(); + windowManager.destroy(); break; } } diff --git a/auth/lib/ente_theme_data.dart b/auth/lib/ente_theme_data.dart index 0316d014f9..2eb19bf27c 100644 --- a/auth/lib/ente_theme_data.dart +++ b/auth/lib/ente_theme_data.dart @@ -427,6 +427,10 @@ extension CustomColorScheme on ColorScheme { ? const Color.fromRGBO(246, 246, 246, 1) : const Color.fromRGBO(40, 40, 40, 0.6); + Color get primaryColor => brightness == Brightness.light + ? 
const Color(0xFF9610D6) + : const Color(0xFF9610D6); + EnteTheme get enteTheme => brightness == Brightness.light ? lightTheme : darkTheme; @@ -493,7 +497,7 @@ ElevatedButtonThemeData buildElevatedButtonThemeData({ ), padding: const EdgeInsets.symmetric(vertical: 18), shape: const RoundedRectangleBorder( - borderRadius: BorderRadius.all(Radius.circular(8)), + borderRadius: BorderRadius.all(Radius.circular(4)), ), ), ); diff --git a/auth/lib/l10n/arb/app_en.arb b/auth/lib/l10n/arb/app_en.arb index c22bac930d..e4d1a07a50 100644 --- a/auth/lib/l10n/arb/app_en.arb +++ b/auth/lib/l10n/arb/app_en.arb @@ -20,6 +20,8 @@ "codeIssuerHint": "Issuer", "codeSecretKeyHint": "Secret Key", "codeAccountHint": "Account (you@domain.com)", + "codeTagHint": "Tag", + "accountKeyType": "Type of key", "sessionExpired": "Session expired", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" @@ -156,6 +158,7 @@ } } }, + "invalidQRCode": "Invalid QR code", "noRecoveryKeyTitle": "No recovery key?", "enterEmailHint": "Enter your email address", "invalidEmailTitle": "Invalid email address", @@ -420,5 +423,18 @@ "invalidEndpoint": "Invalid endpoint", "invalidEndpointMessage": "Sorry, the endpoint you entered is invalid. Please enter a valid endpoint and try again.", "endpointUpdatedMessage": "Endpoint updated successfully", - "customEndpoint": "Connected to {endpoint}" + "customEndpoint": "Connected to {endpoint}", + "pinText": "Pin", + "unpinText": "Unpin", + "pinnedCodeMessage": "{code} has been pinned", + "unpinnedCodeMessage": "{code} has been unpinned", + "tags": "Tags", + "createNewTag": "Create New Tag", + "tag": "Tag", + "create": "Create", + "editTag": "Edit Tag", + "deleteTagTitle": "Delete tag?", + "deleteTagMessage": "Are you sure you want to delete this tag? 
This action is irreversible.", + "somethingWentWrongParsingCode": "We were unable to parse {x} codes.", + "updateNotAvailable": "Update not available" } \ No newline at end of file diff --git a/auth/lib/l10n/arb/app_pl.arb b/auth/lib/l10n/arb/app_pl.arb index 3132f66608..796623def2 100644 --- a/auth/lib/l10n/arb/app_pl.arb +++ b/auth/lib/l10n/arb/app_pl.arb @@ -185,6 +185,8 @@ "recoveryKeySaveDescription": "Nie przechowujemy tego klucza, proszę zachować ten 24 wyrazowy klucz w bezpiecznym miejscu.", "doThisLater": "Zrób To Później", "saveKey": "Zapisz klucz", + "save": "Zapisz", + "send": "Wyślij", "back": "Wstecz", "createAccount": "Utwórz konto", "passwordStrength": "Siła hasła: {passwordStrengthValue}", @@ -335,6 +337,10 @@ "@androidBiometricNotRecognized": { "description": "Message to let the user know that authentication was failed. It is used on Android side. Maximum 60 characters." }, + "androidCancelButton": "Anuluj", + "@androidCancelButton": { + "description": "Message showed on a button that the user can click to leave the current dialog. It is used on Android side. Maximum 30 characters." + }, "androidSignInTitle": "Wymagana autoryzacja", "@androidSignInTitle": { "description": "Message showed as a title in a dialog which indicates the user that they need to scan biometric to continue. It is used on Android side. Maximum 60 characters." 
diff --git a/auth/lib/l10n/arb/app_pt.arb b/auth/lib/l10n/arb/app_pt.arb index 9b1f5b1b0a..3f92822d9a 100644 --- a/auth/lib/l10n/arb/app_pt.arb +++ b/auth/lib/l10n/arb/app_pt.arb @@ -20,6 +20,8 @@ "codeIssuerHint": "Emissor", "codeSecretKeyHint": "Chave secreta", "codeAccountHint": "Conta (voce@dominio.com)", + "codeTagHint": "Etiqueta", + "accountKeyType": "Tipo de chave", "sessionExpired": "Sessão expirada", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" @@ -156,6 +158,7 @@ } } }, + "invalidQRCode": "QR Code inválido", "noRecoveryKeyTitle": "Sem chave de recuperação?", "enterEmailHint": "Insira o seu endereço de e-mail", "invalidEmailTitle": "Endereço de e-mail inválido", @@ -420,5 +423,16 @@ "invalidEndpoint": "Endpoint inválido", "invalidEndpointMessage": "Desculpe, o endpoint que você inseriu é inválido. Por favor, insira um endpoint válido e tente novamente.", "endpointUpdatedMessage": "Endpoint atualizado com sucesso", - "customEndpoint": "Conectado a {endpoint}" + "customEndpoint": "Conectado a {endpoint}", + "pinText": "Fixar", + "pinnedCodeMessage": "{code} foi fixado", + "tags": "Etiquetas", + "createNewTag": "Criar etiqueta", + "tag": "Etiqueta", + "create": "Criar", + "editTag": "Editar etiqueta", + "deleteTagTitle": "Excluir etiqueta?", + "deleteTagMessage": "Tem certeza de que deseja excluir esta etiqueta? 
Essa ação é irreversível.", + "somethingWentWrongParsingCode": "Não foi possível analisar os códigos {x}.", + "updateNotAvailable": "Atualização não está disponível" } \ No newline at end of file diff --git a/auth/lib/l10n/arb/app_ru.arb b/auth/lib/l10n/arb/app_ru.arb index ca98611ee1..42571a166b 100644 --- a/auth/lib/l10n/arb/app_ru.arb +++ b/auth/lib/l10n/arb/app_ru.arb @@ -188,6 +188,8 @@ "recoveryKeySaveDescription": "Мы не храним этот ключ, пожалуйста, сохраните этот ключ в безопасном месте.", "doThisLater": "Сделать позже", "saveKey": "Сохранить ключ", + "save": "Сохранить", + "send": "Отправить", "back": "Вернуться", "createAccount": "Создать аккаунт", "passwordStrength": "Мощность пароля: {passwordStrengthValue}", @@ -394,5 +396,13 @@ "signOutOtherDevices": "Выйти из других устройств", "doNotSignOut": "Не выходить", "hearUsWhereTitle": "Как вы узнали о Ente? (необязательно)", - "hearUsExplanation": "Будет полезно, если вы укажете, где нашли нас, так как мы не отслеживаем установки приложения" + "hearUsExplanation": "Будет полезно, если вы укажете, где нашли нас, так как мы не отслеживаем установки приложения", + "waitingForVerification": "Ожидание подтверждения...", + "developerSettingsWarning": "Вы уверены, что хотите изменить настройки разработчика?", + "developerSettings": "Настройки разработчика", + "serverEndpoint": "Конечная точка сервера", + "invalidEndpoint": "Неверная конечная точка", + "invalidEndpointMessage": "Извините, введенная вами конечная точка неверна. 
Пожалуйста, введите корректную конечную точку и повторите попытку.", + "endpointUpdatedMessage": "Конечная точка успешно обновлена", + "customEndpoint": "Подключено к {endpoint}" } \ No newline at end of file diff --git a/auth/lib/l10n/arb/app_zh.arb b/auth/lib/l10n/arb/app_zh.arb index c50e76c1dd..1e58bd2b98 100644 --- a/auth/lib/l10n/arb/app_zh.arb +++ b/auth/lib/l10n/arb/app_zh.arb @@ -20,6 +20,8 @@ "codeIssuerHint": "发行人", "codeSecretKeyHint": "私钥", "codeAccountHint": "账户 (you@domain.com)", + "codeTagHint": "标签", + "accountKeyType": "密钥类型", "sessionExpired": "会话已过期", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" @@ -156,6 +158,7 @@ } } }, + "invalidQRCode": "二维码无效", "noRecoveryKeyTitle": "没有恢复密钥吗?", "enterEmailHint": "请输入您的电子邮件地址", "invalidEmailTitle": "无效的电子邮件地址", @@ -420,5 +423,18 @@ "invalidEndpoint": "端点无效", "invalidEndpointMessage": "抱歉,您输入的端点无效。请输入有效的端点,然后重试。", "endpointUpdatedMessage": "端点更新成功", - "customEndpoint": "已连接至 {endpoint}" + "customEndpoint": "已连接至 {endpoint}", + "pinText": "置顶", + "unpinText": "取消置顶", + "pinnedCodeMessage": "{code} 已被置顶", + "unpinnedCodeMessage": "{code} 已被取消置顶", + "tags": "标签", + "createNewTag": "创建新标签", + "tag": "标签", + "create": "创建", + "editTag": "编辑标签", + "deleteTagTitle": "要删除标签吗?", + "deleteTagMessage": "您确定要删除此标签吗?此操作是不可逆的。", + "somethingWentWrongParsingCode": "我们无法解析 {x} 代码。", + "updateNotAvailable": "更新不可用" } \ No newline at end of file diff --git a/auth/lib/main.dart b/auth/lib/main.dart index d8d22ca4fe..9fa2841ff0 100644 --- a/auth/lib/main.dart +++ b/auth/lib/main.dart @@ -17,6 +17,7 @@ import 'package:ente_auth/services/update_service.dart'; import 'package:ente_auth/services/user_remote_flag_service.dart'; import 'package:ente_auth/services/user_service.dart'; import 'package:ente_auth/services/window_listener_service.dart'; +import 'package:ente_auth/store/code_display_store.dart'; import 'package:ente_auth/store/code_store.dart'; import 
'package:ente_auth/ui/tools/app_lock.dart'; import 'package:ente_auth/ui/tools/lock_screen.dart'; @@ -145,6 +146,7 @@ Future _init(bool bool, {String? via}) async { await PreferenceService.instance.init(); await CodeStore.instance.init(); + await CodeDisplayStore.instance.init(); await Configuration.instance.init(); await Network.instance.init(); await UserService.instance.init(); @@ -157,7 +159,7 @@ Future _init(bool bool, {String? via}) async { } Future _setupPrivacyScreen() async { - if (!PlatformUtil.isMobile()) return; + if (!PlatformUtil.isMobile() || kDebugMode) return; final brightness = SchedulerBinding.instance.platformDispatcher.platformBrightness; bool isInDarkMode = brightness == Brightness.dark; diff --git a/auth/lib/models/code.dart b/auth/lib/models/code.dart index bd6077326c..696d3f2fc1 100644 --- a/auth/lib/models/code.dart +++ b/auth/lib/models/code.dart @@ -1,3 +1,6 @@ +import 'dart:convert'; + +import 'package:ente_auth/models/code_display.dart'; import 'package:ente_auth/utils/totp_util.dart'; class Code { @@ -13,10 +16,19 @@ class Code { final String secret; final Algorithm algorithm; final Type type; + + /// otpauth url in the code final String rawData; final int counter; bool? hasSynced; + final CodeDisplay display; + + bool get isPinned => display.pinned; + + final Object? err; + bool get hasError => err != null; + Code( this.account, this.issuer, @@ -28,8 +40,26 @@ class Code { this.counter, this.rawData, { this.generatedID, + required this.display, + this.err, }); + factory Code.withError(Object error, String rawData) { + return Code( + "", + "", + 0, + 0, + "", + Algorithm.sha1, + Type.totp, + 0, + rawData, + err: error, + display: CodeDisplay(), + ); + } + Code copyWith({ String? account, String? issuer, @@ -39,6 +69,7 @@ class Code { Algorithm? algorithm, Type? type, int? counter, + CodeDisplay? display, }) { final String updateAccount = account ?? this.account; final String updateIssuer = issuer ?? 
this.issuer; @@ -48,6 +79,7 @@ class Code { final Algorithm updatedAlgo = algorithm ?? this.algorithm; final Type updatedType = type ?? this.type; final int updatedCounter = counter ?? this.counter; + final CodeDisplay updatedDisplay = display ?? this.display; return Code( updateAccount, @@ -62,6 +94,7 @@ class Code { "&digits=$updatedDigits&issuer=$updateIssuer" "&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}", generatedID: generatedID, + display: updatedDisplay, ); } @@ -70,6 +103,7 @@ class Code { String account, String issuer, String secret, + CodeDisplay? display, int digits, ) { return Code( @@ -82,30 +116,33 @@ class Code { type, 0, "otpauth://${type.name}/$issuer:$account?algorithm=SHA1&digits=$digits&issuer=$issuer&period=30&secret=$secret", + display: display ?? CodeDisplay(), ); } - static Code fromRawData(String rawData) { + static Code fromOTPAuthUrl(String rawData, {CodeDisplay? display}) { Uri uri = Uri.parse(rawData); final issuer = _getIssuer(uri); try { - return Code( + final code = Code( _getAccount(uri), issuer, - _getDigits(uri, issuer), + _getDigits(uri), _getPeriod(uri), getSanitizedSecret(uri.queryParameters['secret']!), _getAlgorithm(uri), _getType(uri), _getCounter(uri), rawData, + display: CodeDisplay.fromUri(uri) ?? 
CodeDisplay(), ); + return code; } catch (e) { // if account name contains # without encoding, // rest of the url are treated as url fragment if (rawData.contains("#")) { - return Code.fromRawData(rawData.replaceAll("#", '%23')); + return Code.fromOTPAuthUrl(rawData.replaceAll("#", '%23')); } else { rethrow; } @@ -129,6 +166,23 @@ class Code { } } + static Code fromExportJson(Map rawJson) { + Code resultCode = Code.fromOTPAuthUrl( + rawJson['rawData'], + display: CodeDisplay.fromJson(rawJson['display']), + ); + return resultCode; + } + + String toOTPAuthUrlFormat() { + final uri = Uri.parse(rawData.replaceAll("#", '%23')); + final query = {...uri.queryParameters}; + query["codeDisplay"] = jsonEncode(display.toJson()); + + final newUri = uri.replace(queryParameters: query); + return jsonEncode(newUri.toString()); + } + static String _getIssuer(Uri uri) { try { if (uri.queryParameters.containsKey("issuer")) { @@ -147,11 +201,11 @@ class Code { } } - static int _getDigits(Uri uri, String issuer) { + static int _getDigits(Uri uri) { try { return int.parse(uri.queryParameters['digits']!); } catch (e) { - if (issuer.toLowerCase() == "steam") { + if (uri.host == "steam") { return steamDigits; } return defaultDigits; diff --git a/auth/lib/models/code_display.dart b/auth/lib/models/code_display.dart new file mode 100644 index 0000000000..6bbf78f1fc --- /dev/null +++ b/auth/lib/models/code_display.dart @@ -0,0 +1,118 @@ +import 'dart:convert'; + +import 'package:flutter/foundation.dart'; +import 'package:logging/logging.dart'; + +/// Used to store the display settings of a code. +class CodeDisplay { + final bool pinned; + final bool trashed; + final int lastUsedAt; + final int tapCount; + final List tags; + + CodeDisplay({ + this.pinned = false, + this.trashed = false, + this.lastUsedAt = 0, + this.tapCount = 0, + this.tags = const [], + }); + + // copyWith + CodeDisplay copyWith({ + bool? pinned, + bool? trashed, + int? lastUsedAt, + int? tapCount, + List? 
tags, + }) { + final bool updatedPinned = pinned ?? this.pinned; + final bool updatedTrashed = trashed ?? this.trashed; + final int updatedLastUsedAt = lastUsedAt ?? this.lastUsedAt; + final int updatedTapCount = tapCount ?? this.tapCount; + final List updatedTags = tags ?? this.tags; + + return CodeDisplay( + pinned: updatedPinned, + trashed: updatedTrashed, + lastUsedAt: updatedLastUsedAt, + tapCount: updatedTapCount, + tags: updatedTags, + ); + } + + factory CodeDisplay.fromJson(Map? json) { + if (json == null) { + return CodeDisplay(); + } + return CodeDisplay( + pinned: json['pinned'] ?? false, + trashed: json['trashed'] ?? false, + lastUsedAt: json['lastUsedAt'] ?? 0, + tapCount: json['tapCount'] ?? 0, + tags: List.from(json['tags'] ?? []), + ); + } + + /// Converts the [CodeDisplay] to a json object. + /// When [safeParsing] is true, the json will be parsed safely. + /// If we fail to parse the json, we will return an empty [CodeDisplay]. + static CodeDisplay? fromUri(Uri uri, {bool safeParsing = false}) { + if (!uri.queryParameters.containsKey("codeDisplay")) return null; + final String codeDisplay = + uri.queryParameters['codeDisplay']!.replaceAll('%2C', ','); + return _parseCodeDisplayJson(codeDisplay, safeParsing); + } + + static CodeDisplay _parseCodeDisplayJson(String json, bool safeParsing) { + try { + final decodedDisplay = jsonDecode(json); + return CodeDisplay.fromJson(decodedDisplay); + } catch (e, s) { + Logger("CodeDisplay") + .severe("Could not parse code display from json", e, s); + // (ng/prateek) Handle the case where we have fragment in the rawDataUrl + if (!json.endsWith("}") && json.contains("}#")) { + Logger("CodeDisplay").warning("ignoring code display as it's invalid"); + return CodeDisplay(); + } + if (safeParsing) { + return CodeDisplay(); + } else { + rethrow; + } + } + } + + Map toJson() { + return { + 'pinned': pinned, + 'trashed': trashed, + 'lastUsedAt': lastUsedAt, + 'tapCount': tapCount, + 'tags': tags, + }; + } + + @override 
+ bool operator ==(Object other) { + if (identical(this, other)) return true; + + return other is CodeDisplay && + other.pinned == pinned && + other.trashed == trashed && + other.lastUsedAt == lastUsedAt && + other.tapCount == tapCount && + listEquals(other.tags, tags); + } + + @override + int get hashCode { + return pinned.hashCode ^ + trashed.hashCode ^ + lastUsedAt.hashCode ^ + tapCount.hashCode ^ + tags.hashCode; + } +} diff --git a/auth/lib/onboarding/model/tag_enums.dart b/auth/lib/onboarding/model/tag_enums.dart new file mode 100644 index 0000000000..6661b6770a --- /dev/null +++ b/auth/lib/onboarding/model/tag_enums.dart @@ -0,0 +1,10 @@ +enum TagChipState { + selected, + unselected, +} + +enum TagChipAction { + none, + menu, + check, +} diff --git a/auth/lib/onboarding/view/common/add_chip.dart b/auth/lib/onboarding/view/common/add_chip.dart new file mode 100644 index 0000000000..39971f416e --- /dev/null +++ b/auth/lib/onboarding/view/common/add_chip.dart @@ -0,0 +1,26 @@ +import "package:ente_auth/theme/ente_theme.dart"; +import "package:flutter/material.dart"; + +class AddChip extends StatelessWidget { + final VoidCallback? 
onTap; + + const AddChip({ + super.key, + this.onTap, + }); + + @override + Widget build(BuildContext context) { + return GestureDetector( + onTap: onTap, + child: Padding( + padding: const EdgeInsets.symmetric(vertical: 8.0), + child: Icon( + Icons.add_circle_outline, + size: 30, + color: getEnteColorScheme(context).iconButtonColor, + ), + ), + ); + } +} diff --git a/auth/lib/onboarding/view/common/add_tag.dart b/auth/lib/onboarding/view/common/add_tag.dart new file mode 100644 index 0000000000..716515ad48 --- /dev/null +++ b/auth/lib/onboarding/view/common/add_tag.dart @@ -0,0 +1,78 @@ +import "package:ente_auth/l10n/l10n.dart"; +import "package:flutter/material.dart"; + +class AddTagDialog extends StatefulWidget { + const AddTagDialog({ + super.key, + required this.onTap, + }); + + final void Function(String) onTap; + + @override + State createState() => _AddTagDialogState(); +} + +class _AddTagDialogState extends State { + String _tag = ""; + + @override + Widget build(BuildContext context) { + final l10n = context.l10n; + return AlertDialog( + title: Text(l10n.createNewTag), + content: SingleChildScrollView( + child: Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + TextFormField( + maxLength: 100, + decoration: InputDecoration( + hintText: l10n.tag, + hintStyle: const TextStyle( + color: Colors.white30, + ), + contentPadding: const EdgeInsets.all(12), + ), + onChanged: (value) { + setState(() { + _tag = value; + }); + }, + autocorrect: false, + initialValue: _tag, + autofocus: true, + ), + ], + ), + ), + actions: [ + TextButton( + child: Text( + l10n.cancel, + style: const TextStyle( + color: Colors.redAccent, + ), + ), + onPressed: () { + Navigator.pop(context); + }, + ), + TextButton( + child: Text( + l10n.create, + style: const TextStyle( + color: Colors.purple, + ), + ), + onPressed: () { + if (_tag.trim().isEmpty) return; + + widget.onTap(_tag); + }, + ), + ], + ); + } +} diff --git 
a/auth/lib/onboarding/view/common/edit_tag.dart b/auth/lib/onboarding/view/common/edit_tag.dart new file mode 100644 index 0000000000..3885312d22 --- /dev/null +++ b/auth/lib/onboarding/view/common/edit_tag.dart @@ -0,0 +1,90 @@ +import "package:ente_auth/l10n/l10n.dart"; +import 'package:ente_auth/store/code_display_store.dart'; +import 'package:ente_auth/utils/dialog_util.dart'; +import 'package:flutter/material.dart'; + +class EditTagDialog extends StatefulWidget { + const EditTagDialog({ + super.key, + required this.tag, + }); + + final String tag; + + @override + State createState() => _EditTagDialogState(); +} + +class _EditTagDialogState extends State { + late String _tag = widget.tag; + + @override + Widget build(BuildContext context) { + final l10n = context.l10n; + return AlertDialog( + title: Text(l10n.editTag), + content: SingleChildScrollView( + child: Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + TextFormField( + maxLength: 100, + decoration: InputDecoration( + hintText: l10n.tag, + hintStyle: const TextStyle( + color: Colors.white30, + ), + contentPadding: const EdgeInsets.all(12), + ), + onChanged: (value) { + setState(() { + _tag = value; + }); + }, + autocorrect: false, + initialValue: _tag, + autofocus: true, + ), + ], + ), + ), + actions: [ + TextButton( + child: Text( + l10n.cancel, + style: const TextStyle( + color: Colors.redAccent, + ), + ), + onPressed: () { + Navigator.pop(context); + }, + ), + TextButton( + child: Text( + l10n.saveAction, + style: const TextStyle( + color: Colors.purple, + ), + ), + onPressed: () async { + if (_tag.trim().isEmpty) return; + + final dialog = createProgressDialog( + context, + context.l10n.pleaseWait, + ); + await dialog.show(); + + await CodeDisplayStore.instance.editTag(widget.tag, _tag); + + await dialog.hide(); + + Navigator.pop(context); + }, + ), + ], + ); + } +} diff --git a/auth/lib/onboarding/view/common/tag_chip.dart 
b/auth/lib/onboarding/view/common/tag_chip.dart new file mode 100644 index 0000000000..7f71e68b84 --- /dev/null +++ b/auth/lib/onboarding/view/common/tag_chip.dart @@ -0,0 +1,132 @@ +import "package:ente_auth/l10n/l10n.dart"; +import "package:ente_auth/onboarding/model/tag_enums.dart"; +import "package:ente_auth/store/code_display_store.dart"; +import "package:ente_auth/theme/ente_theme.dart"; +import "package:flutter/material.dart"; +import "package:gradient_borders/box_borders/gradient_box_border.dart"; + +class TagChip extends StatelessWidget { + final String label; + final VoidCallback? onTap; + final TagChipState state; + final TagChipAction action; + + const TagChip({ + super.key, + required this.label, + this.state = TagChipState.unselected, + this.action = TagChipAction.none, + this.onTap, + }); + + @override + Widget build(BuildContext context) { + final colorScheme = getEnteColorScheme(context); + + return GestureDetector( + onTap: onTap, + child: Container( + decoration: BoxDecoration( + color: state == TagChipState.selected + ? colorScheme.tagChipSelectedColor + : colorScheme.tagChipUnselectedColor, + borderRadius: BorderRadius.circular(100), + border: GradientBoxBorder( + gradient: LinearGradient( + colors: state == TagChipState.selected + ? colorScheme.tagChipSelectedGradient + : colorScheme.tagChipUnselectedGradient, + begin: Alignment.topLeft, + end: Alignment.bottomRight, + ), + ), + ), + margin: const EdgeInsets.symmetric(vertical: 4), + padding: const EdgeInsets.symmetric(vertical: 6, horizontal: 16) + .copyWith(right: 0), + child: Row( + mainAxisSize: MainAxisSize.min, + children: [ + Text( + label, + style: TextStyle( + color: state == TagChipState.selected || + Theme.of(context).brightness == Brightness.dark + ? 
Colors.white + : colorScheme.tagTextUnselectedColor, + ), + ), + if (state == TagChipState.selected && + action == TagChipAction.check) ...[ + const SizedBox(width: 16), + const Icon( + Icons.check, + size: 16, + color: Colors.white, + ), + const SizedBox(width: 16), + ] else if (state == TagChipState.selected && + action == TagChipAction.menu) ...[ + SizedBox( + width: 48, + child: PopupMenuButton( + iconSize: 16, + padding: const EdgeInsets.symmetric(horizontal: 16), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(8), + ), + surfaceTintColor: Theme.of(context).cardColor, + iconColor: Colors.white, + initialValue: -1, + onSelected: (value) { + if (value == 0) { + CodeDisplayStore.instance.showEditDialog(context, label); + } else if (value == 1) { + CodeDisplayStore.instance + .showDeleteTagDialog(context, label); + } + }, + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + child: Row( + children: [ + const Icon(Icons.edit_outlined, size: 16), + const SizedBox(width: 12), + Text(context.l10n.edit), + ], + ), + value: 0, + ), + PopupMenuItem( + child: Row( + children: [ + Icon( + Icons.delete_outline, + size: 16, + color: colorScheme.deleteTagIconColor, + ), + const SizedBox(width: 12), + Text( + context.l10n.delete, + style: TextStyle( + color: colorScheme.deleteTagTextColor, + ), + ), + ], + ), + value: 1, + ), + ]; + }, + ), + ), + ] else ...[ + const SizedBox(width: 16), + ], + ], + ), + ), + ); + } +} diff --git a/auth/lib/onboarding/view/setup_enter_secret_key_page.dart b/auth/lib/onboarding/view/setup_enter_secret_key_page.dart index 57edcc2e1a..b4ab8bfd0f 100644 --- a/auth/lib/onboarding/view/setup_enter_secret_key_page.dart +++ b/auth/lib/onboarding/view/setup_enter_secret_key_page.dart @@ -1,5 +1,15 @@ +import 'dart:async'; + +import 'package:ente_auth/core/event_bus.dart'; +import 'package:ente_auth/events/codes_updated_event.dart'; import "package:ente_auth/l10n/l10n.dart"; import 
'package:ente_auth/models/code.dart'; +import 'package:ente_auth/models/code_display.dart'; +import 'package:ente_auth/onboarding/model/tag_enums.dart'; +import 'package:ente_auth/onboarding/view/common/add_chip.dart'; +import 'package:ente_auth/onboarding/view/common/add_tag.dart'; +import 'package:ente_auth/onboarding/view/common/tag_chip.dart'; +import 'package:ente_auth/store/code_display_store.dart'; import 'package:ente_auth/ui/components/buttons/button_widget.dart'; import 'package:ente_auth/ui/components/models/button_result.dart'; import 'package:ente_auth/utils/dialog_util.dart'; @@ -21,6 +31,9 @@ class _SetupEnterSecretKeyPageState extends State { late TextEditingController _accountController; late TextEditingController _secretController; late bool _secretKeyObscured; + late List tags = [...?widget.code?.display.tags]; + List allTags = []; + StreamSubscription? _streamSubscription; @override void initState() { @@ -35,9 +48,26 @@ class _SetupEnterSecretKeyPageState extends State { text: widget.code?.secret, ); _secretKeyObscured = widget.code != null; + _loadTags(); + _streamSubscription = Bus.instance.on().listen((event) { + _loadTags(); + }); super.initState(); } + @override + void dispose() { + _streamSubscription?.cancel(); + super.dispose(); + } + + Future _loadTags() async { + allTags = await CodeDisplayStore.instance.getAllTags(); + if (mounted) { + setState(() {}); + } + } + @override Widget build(BuildContext context) { final l10n = context.l10n; @@ -50,6 +80,7 @@ class _SetupEnterSecretKeyPageState extends State { child: Padding( padding: const EdgeInsets.symmetric(vertical: 40.0, horizontal: 40), child: Column( + crossAxisAlignment: CrossAxisAlignment.start, children: [ TextFormField( // The validator receives the text that the user has entered. 
@@ -115,6 +146,65 @@ class _SetupEnterSecretKeyPageState extends State { controller: _accountController, ), const SizedBox(height: 40), + const SizedBox( + height: 20, + ), + Text( + l10n.tags, + style: const TextStyle( + fontWeight: FontWeight.bold, + ), + ), + const SizedBox(height: 10), + Wrap( + spacing: 12, + alignment: WrapAlignment.start, + children: [ + ...allTags.map( + (e) => TagChip( + label: e, + action: TagChipAction.check, + state: tags.contains(e) + ? TagChipState.selected + : TagChipState.unselected, + onTap: () { + if (tags.contains(e)) { + tags.remove(e); + } else { + tags.add(e); + } + setState(() {}); + }, + ), + ), + AddChip( + onTap: () { + showDialog( + context: context, + builder: (BuildContext context) { + return AddTagDialog( + onTap: (tag) { + if (allTags.contains(tag) && + tags.contains(tag)) { + return; + } + allTags.add(tag); + tags.add(tag); + setState(() {}); + Navigator.pop(context); + }, + ); + }, + barrierColor: Colors.black.withOpacity(0.85), + barrierDismissible: false, + ); + }, + ), + ], + ), + const SizedBox( + height: 40, + ), SizedBox( width: 400, child: OutlinedButton( @@ -134,13 +224,7 @@ class _SetupEnterSecretKeyPageState extends State { } await _saveCode(); }, - child: Padding( - padding: const EdgeInsets.symmetric( - horizontal: 16.0, - vertical: 4, - ), - child: Text(l10n.saveAction), - ), + child: Text(l10n.saveAction), ), ), ], @@ -156,7 +240,7 @@ class _SetupEnterSecretKeyPageState extends State { final account = _accountController.text.trim(); final issuer = _issuerController.text.trim(); final secret = _secretController.text.trim().replaceAll(' ', ''); - final isStreamCode = issuer.toLowerCase() == "steam"; + final isStreamCode = issuer.toLowerCase() == "steam" || issuer.toLowerCase().contains('steampowered.com'); if (widget.code != null && widget.code!.secret != secret) { ButtonResult? 
result = await showChoiceActionSheet( context, @@ -171,18 +255,22 @@ class _SetupEnterSecretKeyPageState extends State { return; } } + final CodeDisplay display = + widget.code?.display.copyWith(tags: tags) ?? CodeDisplay(tags: tags); final Code newCode = widget.code == null ? Code.fromAccountAndSecret( isStreamCode ? Type.steam : Type.totp, account, issuer, secret, + display, isStreamCode ? Code.steamDigits : Code.defaultDigits, ) : widget.code!.copyWith( account: account, issuer: issuer, secret: secret, + display: display, ); // Verify the validity of the code getOTP(newCode); diff --git a/auth/lib/store/code_display_store.dart b/auth/lib/store/code_display_store.dart new file mode 100644 index 0000000000..74972f5a22 --- /dev/null +++ b/auth/lib/store/code_display_store.dart @@ -0,0 +1,112 @@ +import 'package:ente_auth/l10n/l10n.dart'; +import 'package:ente_auth/models/code.dart'; +import 'package:ente_auth/onboarding/view/common/edit_tag.dart'; +import 'package:ente_auth/services/authenticator_service.dart'; +import 'package:ente_auth/store/code_store.dart'; +import 'package:ente_auth/utils/dialog_util.dart'; +import 'package:flutter/material.dart'; + +class CodeDisplayStore { + static final CodeDisplayStore instance = + CodeDisplayStore._privateConstructor(); + + CodeDisplayStore._privateConstructor(); + + late CodeStore _codeStore; + + Future init() async { + _codeStore = CodeStore.instance; + } + + Future> getAllTags({ + AccountMode? accountMode, + List? allCodes, + }) async { + final codes = allCodes ?? 
+ await _codeStore.getAllCodes( + accountMode: accountMode, + sortCodes: false, + ); + final tags = {}; + for (final code in codes) { + if (code.hasError) continue; + tags.addAll(code.display.tags); + } + return tags.toList(); + } + + Future showDeleteTagDialog(BuildContext context, String tag) async { + FocusScope.of(context).requestFocus(); + final l10n = context.l10n; + + await showChoiceActionSheet( + context, + title: l10n.deleteTagTitle, + body: l10n.deleteTagMessage, + firstButtonLabel: l10n.delete, + isCritical: true, + firstButtonOnTap: () async { + // traverse through all the codes and edit this tag's value + final relevantCodes = await _getCodesByTag(tag); + + final tasks = []; + + for (final code in relevantCodes) { + final tags = code.display.tags; + tags.remove(tag); + tasks.add( + _codeStore.addCode( + code.copyWith( + display: code.display.copyWith(tags: tags), + ), + ), + ); + } + + await Future.wait(tasks); + }, + ); + } + + Future showEditDialog(BuildContext context, String tag) async { + await showDialog( + context: context, + builder: (BuildContext context) { + return EditTagDialog(tag: tag); + }, + barrierColor: Colors.black.withOpacity(0.85), + barrierDismissible: false, + ); + } + + Future> _getCodesByTag(String tag) async { + final codes = await _codeStore.getAllCodes(sortCodes: false); + return codes + .where( + (element) => !element.hasError && element.display.tags.contains(tag), + ) + .toList(); + } + + Future editTag(String previousTag, String updatedTag) async { + // traverse through all the codes and edit this tag's value + final relevantCodes = await _getCodesByTag(previousTag); + + final tasks = []; + + for (final code in relevantCodes) { + final tags = code.display.tags; + tags.remove(previousTag); + tags.add(updatedTag); + tasks.add( + CodeStore.instance.addCode( + code.copyWith( + display: code.display.copyWith(tags: tags), + ), + ), + ); + } + + await Future.wait(tasks); + } +} diff --git a/auth/lib/store/code_store.dart 
b/auth/lib/store/code_store.dart index 9b199f1656..449bb93166 100644 --- a/auth/lib/store/code_store.dart +++ b/auth/lib/store/code_store.dart @@ -22,27 +22,52 @@ class CodeStore { _authenticatorService = AuthenticatorService.instance; } - Future> getAllCodes({AccountMode? accountMode}) async { + Future> getAllCodes({ + AccountMode? accountMode, + bool sortCodes = true, + }) async { final mode = accountMode ?? _authenticatorService.getAccountMode(); final List entities = await _authenticatorService.getEntities(mode); final List codes = []; + for (final entity in entities) { - final decodeJson = jsonDecode(entity.rawData); - final code = Code.fromRawData(decodeJson); + late Code code; + try { + final decodeJson = jsonDecode(entity.rawData); + + if (decodeJson is String && decodeJson.startsWith('otpauth://')) { + code = Code.fromOTPAuthUrl(decodeJson); + } else { + code = Code.fromExportJson(decodeJson); + } + } catch (e, s) { + code = Code.withError(e, entity.rawData); + _logger.severe("Could not parse code", e, s); + } code.generatedID = entity.generatedID; code.hasSynced = entity.hasSynced; codes.add(code); } - // sort codes by issuer,account - codes.sort((a, b) { - final issuerComparison = compareAsciiLowerCaseNatural(a.issuer, b.issuer); - if (issuerComparison != 0) { - return issuerComparison; - } - return compareAsciiLowerCaseNatural(a.account, b.account); - }); + if (sortCodes) { + // sort codes by issuer,account + codes.sort((firstCode, secondCode) { + if (secondCode.isPinned && !firstCode.isPinned) return 1; + if (!secondCode.isPinned && firstCode.isPinned) return -1; + + final issuerComparison = + compareAsciiLowerCaseNatural(firstCode.issuer, secondCode.issuer); + if (issuerComparison != 0) { + return issuerComparison; + } + return compareAsciiLowerCaseNatural( + firstCode.account, + secondCode.account, + ); + }); + } + return codes; } @@ -52,30 +77,36 @@ class CodeStore { AccountMode? accountMode, }) async { final mode = accountMode ?? 
_authenticatorService.getAccountMode(); - final codes = await getAllCodes(accountMode: mode); + final allCodes = await getAllCodes(accountMode: mode); bool isExistingCode = false; - for (final existingCode in codes) { - if (existingCode == code) { - _logger.info("Found duplicate code, skipping add"); - return AddResult.duplicate; - } else if (existingCode.generatedID == code.generatedID) { + bool hasSameCode = false; + for (final existingCode in allCodes) { + if (existingCode.hasError) continue; + if (code.generatedID != null && + existingCode.generatedID == code.generatedID) { isExistingCode = true; break; } + if (existingCode == code) { + hasSameCode = true; + } + } + if (!isExistingCode && hasSameCode) { + return AddResult.duplicate; } late AddResult result; if (isExistingCode) { result = AddResult.updateCode; await _authenticatorService.updateEntry( code.generatedID!, - jsonEncode(code.rawData), + code.toOTPAuthUrlFormat(), shouldSync, mode, ); } else { result = AddResult.newCode; code.generatedID = await _authenticatorService.addEntry( - jsonEncode(code.rawData), + code.toOTPAuthUrlFormat(), shouldSync, mode, ); @@ -93,7 +124,7 @@ class CodeStore { bool _isOfflineImportRunning = false; Future importOfflineCodes() async { - if(_isOfflineImportRunning) { + if (_isOfflineImportRunning) { return; } _isOfflineImportRunning = true; @@ -107,8 +138,10 @@ class CodeStore { } logger.info('start import'); - List offlineCodes = await CodeStore.instance - .getAllCodes(accountMode: AccountMode.offline); + List offlineCodes = (await CodeStore.instance + .getAllCodes(accountMode: AccountMode.offline)) + .where((element) => !element.hasError) + .toList(); if (offlineCodes.isEmpty) { return; } @@ -117,8 +150,10 @@ class CodeStore { logger.info("skip as online sync is not done"); return; } - final List onlineCodes = - await CodeStore.instance.getAllCodes(accountMode: AccountMode.online); + final List onlineCodes = (await CodeStore.instance + .getAllCodes(accountMode: 
AccountMode.online)) + .where((element) => !element.hasError) + .toList(); logger.info( 'importing ${offlineCodes.length} offline codes with ${onlineCodes.length} online codes', ); diff --git a/auth/lib/theme/colors.dart b/auth/lib/theme/colors.dart index 9ac9d2d7e2..278c00777b 100644 --- a/auth/lib/theme/colors.dart +++ b/auth/lib/theme/colors.dart @@ -1,5 +1,3 @@ -import 'dart:ui'; - import 'package:flutter/material.dart'; class EnteColorScheme { @@ -41,6 +39,8 @@ class EnteColorScheme { final Color primary400; final Color primary300; + final Color iconButtonColor; + final Color warning700; final Color warning500; final Color warning400; @@ -48,6 +48,28 @@ class EnteColorScheme { final Color caution500; final List avatarColors; + + // Tags + final Color tagChipSelectedColor; + final Color tagChipUnselectedColor; + final List tagChipSelectedGradient; + final List tagChipUnselectedGradient; + final Color tagTextUnselectedColor; + final Color deleteTagIconColor; + final Color deleteTagTextColor; + + // Code Widget + final Color errorCodeProgressColor; + final Color infoIconColor; + final Color errorCardTextColor; + final Color deleteCodeTextColor; + final List pinnedCardBoxShadow; + final Color pinnedBgColor; + + // Gradient Button + final Color gradientButtonBgColor; + final List gradientButtonBgColors; + const EnteColorScheme( this.backgroundBase, this.backgroundElevated, @@ -70,7 +92,23 @@ class EnteColorScheme { this.blurStrokeBase, this.blurStrokeFaint, this.blurStrokePressed, - this.avatarColors, { + this.avatarColors, + this.iconButtonColor, + this.tagChipUnselectedColor, + this.tagChipSelectedGradient, + this.tagChipUnselectedGradient, + this.pinnedBgColor, { + this.tagChipSelectedColor = _tagChipSelectedColor, + this.tagTextUnselectedColor = _tagTextUnselectedColor, + this.deleteTagIconColor = _deleteTagIconColor, + this.deleteTagTextColor = _deleteTagTextColor, + this.errorCodeProgressColor = _errorCodeProgressColor, + this.infoIconColor = _infoIconColor, 
+ this.errorCardTextColor = _errorCardTextColor, + this.deleteCodeTextColor = _deleteCodeTextColor, + this.pinnedCardBoxShadow = _pinnedCardBoxShadow, + this.gradientButtonBgColor = _gradientButtonBgColor, + this.gradientButtonBgColors = _gradientButtonBgColors, this.primaryGreen = _primaryGreen, this.primary700 = _primary700, this.primary500 = _primary500, @@ -107,6 +145,11 @@ const EnteColorScheme lightScheme = EnteColorScheme( blurStrokeFaintLight, blurStrokePressedLight, avatarLight, + _iconButtonBrightColor, + _tagChipUnselectedColorLight, + _tagChipSelectedGradientLight, + _tagChipUnselectedGradientLight, + _pinnedBgColorLight, ); const EnteColorScheme darkScheme = EnteColorScheme( @@ -132,6 +175,11 @@ const EnteColorScheme darkScheme = EnteColorScheme( blurStrokeFaintDark, blurStrokePressedDark, avatarDark, + _iconButtonDarkColor, + _tagChipUnselectedColorDark, + _tagChipSelectedGradientDark, + _tagChipUnselectedGradientDark, + _pinnedBgColorDark, ); // Background Colors @@ -200,7 +248,10 @@ const Color _primary500 = Color.fromARGB(255, 204, 10, 101); const Color _primary400 = Color.fromARGB(255, 122, 41, 193); const Color _primary300 = Color.fromARGB(255, 152, 77, 244); -const Color _warning700 = Color.fromRGBO(234, 63, 63, 1); +const Color _iconButtonBrightColor = Color.fromRGBO(130, 50, 225, 1); +const Color _iconButtonDarkColor = Color.fromRGBO(255, 150, 16, 1); + +const Color _warning700 = Color.fromRGBO(245, 52, 52, 1); const Color _warning500 = Color.fromRGBO(255, 101, 101, 1); const Color _warning800 = Color(0xFFF53434); const Color warning500 = Color.fromRGBO(255, 101, 101, 1); @@ -260,3 +311,64 @@ const List avatarDark = [ Color.fromRGBO(209, 132, 132, 1), Color.fromRGBO(120, 181, 167, 1), ]; + +// Tags +const Color _tagChipUnselectedColorLight = Color(0xFFFCF5FF); +const Color _tagChipUnselectedColorDark = Color(0xFF1C0F22); +const List _tagChipUnselectedGradientLight = [ + Color(0x33AD00FF), + Color(0x338609C2), +]; +const List 
_tagChipUnselectedGradientDark = [ + Color(0xFFAD00FF), + Color(0x87A269BD), +]; +const Color _tagChipSelectedColor = Color(0xFF722ED1); +const List _tagChipSelectedGradientLight = [ + Color(0xFFB37FEB), + Color(0xFFAE40E3), +]; +const List _tagChipSelectedGradientDark = [ + Color(0xFFB37FEB), + Color(0x87AE40E3), +]; +const Color _tagTextUnselectedColor = Color(0xFF8232E1); +const Color _deleteTagIconColor = Color(0xFFF53434); +const Color _deleteTagTextColor = Color(0xFFF53434); + +// Code Widget +const Color _pinnedBgColorLight = Color(0xFFF9ECFF); +const Color _pinnedBgColorDark = Color(0xFF390C4F); +const Color _errorCodeProgressColor = Color(0xFFF53434); +const Color _infoIconColor = Color(0xFFF53434); +const Color _errorCardTextColor = Color(0xFFF53434); +const Color _deleteCodeTextColor = Color(0xFFFE4A49); +const List _pinnedCardBoxShadow = [ + BoxShadow( + color: Color(0x08000000), + blurRadius: 2, + offset: Offset(0, 7), + ), + BoxShadow( + color: Color(0x17000000), + blurRadius: 2, + offset: Offset(0, 4), + ), + BoxShadow( + color: Color(0x29000000), + blurRadius: 1, + offset: Offset(0, 1), + ), + BoxShadow( + color: Color(0x2E000000), + blurRadius: 1, + offset: Offset(0, 0), + ), +]; + +// Gradient Button +const Color _gradientButtonBgColor = Color(0xFF531DAB); +const List _gradientButtonBgColors = [ + Color(0xFFB37FEB), + Color(0xFF22075E), +]; diff --git a/auth/lib/ui/code_error_widget.dart b/auth/lib/ui/code_error_widget.dart new file mode 100644 index 0000000000..ec532ccba2 --- /dev/null +++ b/auth/lib/ui/code_error_widget.dart @@ -0,0 +1,111 @@ +import 'package:ente_auth/ente_theme_data.dart'; +import 'package:ente_auth/l10n/l10n.dart'; +import 'package:ente_auth/theme/ente_theme.dart'; +import 'package:ente_auth/ui/common/gradient_button.dart'; +import 'package:ente_auth/ui/linear_progress_widget.dart'; +import 'package:ente_auth/utils/dialog_util.dart'; +import 'package:flutter/material.dart'; + +class CodeErrorWidget extends StatelessWidget { + 
const CodeErrorWidget({ + super.key, + required this.errors, + }); + + final int errors; + + @override + Widget build(BuildContext context) { + final colorScheme = getEnteColorScheme(context); + + return Container( + height: 132, + width: double.infinity, + decoration: BoxDecoration( + color: Theme.of(context).colorScheme.codeCardBackgroundColor, + borderRadius: BorderRadius.circular(8), + ), + margin: const EdgeInsets.only( + left: 16, + right: 16, + bottom: 8, + top: 8, + ), + child: ClipRRect( + borderRadius: BorderRadius.circular(8), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + SizedBox( + height: 3, + child: LinearProgressWidget( + color: colorScheme.errorCodeProgressColor, + fractionOfStorage: 1, + ), + ), + const SizedBox(height: 16), + Row( + children: [ + const SizedBox(width: 8), + Align( + alignment: Alignment.center, + child: Icon( + Icons.info, + size: 18, + color: colorScheme.infoIconColor, + ), + ), + const SizedBox(width: 8), + Text( + context.l10n.error, + style: TextStyle( + fontSize: 18, + fontWeight: FontWeight.w600, + color: colorScheme.errorCardTextColor, + ), + ), + ], + ), + const SizedBox(height: 8), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 8.0), + child: Text( + context.l10n.somethingWentWrongParsingCode(errors), + style: const TextStyle( + fontSize: 14, + fontWeight: FontWeight.w500, + ), + ), + ), + const Spacer(), + Row( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + SizedBox( + width: 102, + height: 28, + child: GradientButton( + text: context.l10n.contactSupport, + fontSize: 10, + onTap: () async { + await showErrorDialog( + context, + context.l10n.contactSupport, + context.l10n + .contactSupportViaEmailMessage("support@ente.io"), + ); + }, + borderWidth: 0.6, + borderRadius: 6, + ), + ), + const SizedBox(width: 6), + ], + ), + const SizedBox(height: 12), + ], + ), + ), + ); + } +} diff --git a/auth/lib/ui/code_timer_progress.dart 
b/auth/lib/ui/code_timer_progress.dart index b524a0c238..a215f0ca02 100644 --- a/auth/lib/ui/code_timer_progress.dart +++ b/auth/lib/ui/code_timer_progress.dart @@ -1,3 +1,4 @@ +import 'package:ente_auth/theme/ente_theme.dart'; import 'package:ente_auth/ui/linear_progress_widget.dart'; import 'package:flutter/material.dart'; import 'package:flutter/scheduler.dart'; @@ -47,9 +48,14 @@ class _CodeTimerProgressState extends State @override Widget build(BuildContext context) { - return LinearProgressWidget( - color: _progress > 0.4 ? Colors.green : Colors.orange, - fractionOfStorage: _progress, + return SizedBox( + height: 3, + child: LinearProgressWidget( + color: _progress > 0.4 + ? getEnteColorScheme(context).primary700 + : Colors.orange, + fractionOfStorage: _progress, + ), ); } } diff --git a/auth/lib/ui/code_widget.dart b/auth/lib/ui/code_widget.dart index d989edf18f..cb8b274cad 100644 --- a/auth/lib/ui/code_widget.dart +++ b/auth/lib/ui/code_widget.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:io'; +import 'dart:ui' as ui; import 'package:clipboard/clipboard.dart'; import 'package:ente_auth/core/configuration.dart'; @@ -11,6 +12,7 @@ import 'package:ente_auth/onboarding/view/view_qr_page.dart'; import 'package:ente_auth/services/local_authentication_service.dart'; import 'package:ente_auth/services/preference_service.dart'; import 'package:ente_auth/store/code_store.dart'; +import 'package:ente_auth/theme/ente_theme.dart'; import 'package:ente_auth/ui/code_timer_progress.dart'; import 'package:ente_auth/ui/utils/icon_utils.dart'; import 'package:ente_auth/utils/dialog_util.dart'; @@ -20,13 +22,17 @@ import 'package:ente_auth/utils/totp_util.dart'; import 'package:flutter/material.dart'; import 'package:flutter_context_menu/flutter_context_menu.dart'; import 'package:flutter_slidable/flutter_slidable.dart'; +import 'package:flutter_svg/flutter_svg.dart'; import 'package:logging/logging.dart'; import 'package:move_to_background/move_to_background.dart'; 
class CodeWidget extends StatefulWidget { final Code code; - const CodeWidget(this.code, {super.key}); + const CodeWidget( + this.code, { + super.key, + }); @override State createState() => _CodeWidgetState(); @@ -71,6 +77,7 @@ class _CodeWidgetState extends State { @override Widget build(BuildContext context) { + final colorScheme = getEnteColorScheme(context); if (isMaskingEnabled != PreferenceService.instance.shouldHideCodes()) { isMaskingEnabled = PreferenceService.instance.shouldHideCodes(); _hideCode = isMaskingEnabled; @@ -84,6 +91,100 @@ class _CodeWidgetState extends State { _isInitialized = true; } final l10n = context.l10n; + + Widget getCardContents(AppLocalizations l10n) { + return Stack( + children: [ + if (widget.code.isPinned) + Align( + alignment: Alignment.topRight, + child: CustomPaint( + painter: PinBgPainter( + color: colorScheme.pinnedBgColor, + ), + size: const Size(39, 39), + ), + ), + Column( + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + if (widget.code.type.isTOTPCompatible) + CodeTimerProgress( + period: widget.code.period, + ), + const SizedBox(height: 16), + Row( + children: [ + _shouldShowLargeIcon ? _getIcon() : const SizedBox.shrink(), + Expanded( + child: Column( + children: [ + _getTopRow(), + const SizedBox(height: 4), + _getBottomRow(l10n), + ], + ), + ), + ], + ), + const SizedBox( + height: 20, + ), + ], + ), + if (widget.code.isPinned) ...[ + Align( + alignment: Alignment.topRight, + child: Padding( + padding: const EdgeInsets.only(right: 6, top: 6), + child: SvgPicture.asset("assets/svg/pin-card.svg"), + ), + ), + ], + ], + ); + } + + Widget clippedCard(AppLocalizations l10n) { + return Container( + height: 132, + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(8), + color: Theme.of(context).colorScheme.codeCardBackgroundColor, + boxShadow: + widget.code.isPinned ? 
colorScheme.pinnedCardBoxShadow : [], + ), + child: ClipRRect( + borderRadius: BorderRadius.circular(8), + child: Material( + color: Colors.transparent, + child: InkWell( + customBorder: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(10), + ), + onTap: () { + _copyCurrentOTPToClipboard(); + }, + onDoubleTap: isMaskingEnabled + ? () { + setState( + () { + _hideCode = !_hideCode; + }, + ); + } + : null, + onLongPress: () { + _copyCurrentOTPToClipboard(); + }, + child: getCardContents(l10n), + ), + ), + ), + ); + } + return Container( margin: const EdgeInsets.only(left: 16, right: 16, bottom: 8, top: 8), child: Builder( @@ -97,6 +198,13 @@ class _CodeWidgetState extends State { icon: Icons.qr_code_2_outlined, onSelected: () => _onShowQrPressed(null), ), + MenuItem( + label: widget.code.isPinned ? l10n.unpinText : l10n.pinText, + icon: widget.code.isPinned + ? Icons.push_pin + : Icons.push_pin_outlined, + onSelected: () => _onPinPressed(null), + ), MenuItem( label: l10n.edit, icon: Icons.edit, @@ -112,23 +220,23 @@ class _CodeWidgetState extends State { ], padding: const EdgeInsets.all(8.0), ), - child: _clippedCard(l10n), + child: clippedCard(l10n), ); } return Slidable( key: ValueKey(widget.code.hashCode), endActionPane: ActionPane( - extentRatio: 0.60, + extentRatio: 0.90, motion: const ScrollMotion(), children: [ const SizedBox( - width: 4, + width: 14, ), SlidableAction( onPressed: _onShowQrPressed, backgroundColor: Colors.grey.withOpacity(0.1), - borderRadius: const BorderRadius.all(Radius.circular(12.0)), + borderRadius: const BorderRadius.all(Radius.circular(8)), foregroundColor: Theme.of(context).colorScheme.inverseBackgroundColor, icon: Icons.qr_code_2_outlined, @@ -137,12 +245,48 @@ class _CodeWidgetState extends State { spacing: 8, ), const SizedBox( - width: 4, + width: 14, + ), + CustomSlidableAction( + onPressed: _onPinPressed, + backgroundColor: Colors.grey.withOpacity(0.1), + borderRadius: const BorderRadius.all(Radius.circular(8)), + 
foregroundColor: + Theme.of(context).colorScheme.inverseBackgroundColor, + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + if (widget.code.isPinned) + SvgPicture.asset( + "assets/svg/pin-active.svg", + colorFilter: ui.ColorFilter.mode( + Theme.of(context).colorScheme.primary, + BlendMode.srcIn, + ), + ) + else + SvgPicture.asset( + "assets/svg/pin-inactive.svg", + colorFilter: ui.ColorFilter.mode( + Theme.of(context).colorScheme.primary, + BlendMode.srcIn, + ), + ), + const SizedBox(height: 8), + Text( + widget.code.isPinned ? l10n.unpinText : l10n.pinText, + ), + ], + ), + padding: const EdgeInsets.only(left: 4, right: 0), + ), + const SizedBox( + width: 14, ), SlidableAction( onPressed: _onEditPressed, backgroundColor: Colors.grey.withOpacity(0.1), - borderRadius: const BorderRadius.all(Radius.circular(12.0)), + borderRadius: const BorderRadius.all(Radius.circular(8)), foregroundColor: Theme.of(context).colorScheme.inverseBackgroundColor, icon: Icons.edit_outlined, @@ -151,13 +295,13 @@ class _CodeWidgetState extends State { spacing: 8, ), const SizedBox( - width: 4, + width: 14, ), SlidableAction( onPressed: _onDeletePressed, backgroundColor: Colors.grey.withOpacity(0.1), - borderRadius: const BorderRadius.all(Radius.circular(12.0)), - foregroundColor: const Color(0xFFFE4A49), + borderRadius: const BorderRadius.all(Radius.circular(8)), + foregroundColor: colorScheme.deleteCodeTextColor, icon: Icons.delete, label: l10n.delete, padding: const EdgeInsets.only(left: 0, right: 0), @@ -166,7 +310,7 @@ class _CodeWidgetState extends State { ], ), child: Builder( - builder: (context) => _clippedCard(l10n), + builder: (context) => clippedCard(l10n), ), ); }, @@ -174,74 +318,6 @@ class _CodeWidgetState extends State { ); } - Widget _clippedCard(AppLocalizations l10n) { - return ClipRRect( - borderRadius: BorderRadius.circular(8), - child: Container( - color: Theme.of(context).colorScheme.codeCardBackgroundColor, - child: Material( - color: 
Colors.transparent, - child: InkWell( - customBorder: RoundedRectangleBorder( - borderRadius: BorderRadius.circular(10), - ), - onTap: () { - _copyCurrentOTPToClipboard(); - }, - onDoubleTap: isMaskingEnabled - ? () { - setState( - () { - _hideCode = !_hideCode; - }, - ); - } - : null, - onLongPress: () { - _copyCurrentOTPToClipboard(); - }, - child: _getCardContents(l10n), - ), - ), - ), - ); - } - - Widget _getCardContents(AppLocalizations l10n) { - return SizedBox( - child: Column( - crossAxisAlignment: CrossAxisAlignment.start, - mainAxisAlignment: MainAxisAlignment.center, - children: [ - if (widget.code.type.isTOTPCompatible) - CodeTimerProgress( - period: widget.code.period, - ), - const SizedBox( - height: 16, - ), - Row( - children: [ - _shouldShowLargeIcon ? _getIcon() : const SizedBox.shrink(), - Expanded( - child: Column( - children: [ - _getTopRow(), - const SizedBox(height: 4), - _getBottomRow(l10n), - ], - ), - ), - ], - ), - const SizedBox( - height: 20, - ), - ], - ), - ); - } - Widget _getBottomRow(AppLocalizations l10n) { return Container( padding: const EdgeInsets.only(left: 16, right: 16), @@ -422,7 +498,9 @@ class _CodeWidgetState extends State { final Code? code = await Navigator.of(context).push( MaterialPageRoute( builder: (BuildContext context) { - return SetupEnterSecretKeyPage(code: widget.code); + return SetupEnterSecretKeyPage( + code: widget.code, + ); }, ), ); @@ -448,6 +526,24 @@ class _CodeWidgetState extends State { ); } + Future _onPinPressed(_) async { + bool currentlyPinned = widget.code.isPinned; + final display = widget.code.display; + final Code code = widget.code.copyWith( + display: display.copyWith(pinned: !currentlyPinned), + ); + unawaited( + CodeStore.instance.addCode(code).then( + (value) => showToast( + context, + !currentlyPinned + ? 
context.l10n.pinnedCodeMessage(widget.code.issuer) + : context.l10n.unpinnedCodeMessage(widget.code.issuer), + ), + ), + ); + } + void _onDeletePressed(_) async { bool isAuthSuccessful = await LocalAuthenticationService.instance.requestLocalAuthentication( @@ -491,7 +587,7 @@ class _CodeWidgetState extends State { String _getFormattedCode(String code) { if (_hideCode) { // replace all digits with • - code = code.replaceAll(RegExp(r'\d'), '•'); + code = code.replaceAll(RegExp(r'\S'), '•'); } if (code.length == 6) { return "${code.substring(0, 3)} ${code.substring(3, 6)}"; @@ -499,3 +595,36 @@ class _CodeWidgetState extends State { return code; } } + +class PinBgPainter extends CustomPainter { + final Color color; + final PaintingStyle paintingStyle; + + PinBgPainter({ + this.color = Colors.black, + this.paintingStyle = PaintingStyle.fill, + }); + + @override + void paint(Canvas canvas, Size size) { + Paint paint = Paint() + ..color = color + ..style = paintingStyle; + + canvas.drawPath(getTrianglePath(size.width, size.height), paint); + } + + Path getTrianglePath(double x, double y) { + return Path() + ..moveTo(0, 0) + ..lineTo(x, 0) + ..lineTo(x, y) + ..lineTo(0, 0); + } + + @override + bool shouldRepaint(PinBgPainter oldDelegate) { + return oldDelegate.color != color || + oldDelegate.paintingStyle != paintingStyle; + } +} diff --git a/auth/lib/ui/common/gradient_button.dart b/auth/lib/ui/common/gradient_button.dart index 8a24c68325..436e1bfb9b 100644 --- a/auth/lib/ui/common/gradient_button.dart +++ b/auth/lib/ui/common/gradient_button.dart @@ -1,7 +1,9 @@ +import 'package:ente_auth/theme/ente_theme.dart'; import 'package:flutter/material.dart'; +import 'package:flutter_svg/flutter_svg.dart'; +import 'package:gradient_borders/box_borders/gradient_box_border.dart'; -class GradientButton extends StatelessWidget { - final List linearGradientColors; +class GradientButton extends StatefulWidget { final Function? 
onTap; // text is ignored if child is specified @@ -13,33 +15,39 @@ class GradientButton extends StatelessWidget { // padding between the text and icon final double paddingValue; - // used when two icons are in row - final bool reversedGradient; + final double fontSize; + final double borderRadius; + final double borderWidth; const GradientButton({ super.key, - this.linearGradientColors = const [ - Color.fromARGB(255, 133, 44, 210), - Color.fromARGB(255, 187, 26, 93), - ], - this.reversedGradient = false, this.onTap, this.text = '', this.iconData, this.paddingValue = 0.0, + this.fontSize = 18, + this.borderRadius = 4, + this.borderWidth = 1, }); + @override + State createState() => _GradientButtonState(); +} + +class _GradientButtonState extends State { + bool isTapped = false; + @override Widget build(BuildContext context) { Widget buttonContent; - if (iconData == null) { + if (widget.iconData == null) { buttonContent = Text( - text, - style: const TextStyle( + widget.text, + style: TextStyle( color: Colors.white, fontWeight: FontWeight.w600, fontFamily: 'Inter-SemiBold', - fontSize: 18, + fontSize: widget.fontSize, ), ); } else { @@ -48,38 +56,79 @@ class GradientButton extends StatelessWidget { crossAxisAlignment: CrossAxisAlignment.center, children: [ Icon( - iconData, + widget.iconData, size: 20, color: Colors.white, ), const Padding(padding: EdgeInsets.symmetric(horizontal: 6)), Text( - text, - style: const TextStyle( + widget.text, + style: TextStyle( color: Colors.white, fontWeight: FontWeight.w600, fontFamily: 'Inter-SemiBold', - fontSize: 18, + fontSize: widget.fontSize, ), ), ], ); } + final colorScheme = getEnteColorScheme(context); + return InkWell( - onTap: onTap as void Function()?, - child: Container( - height: 56, - decoration: BoxDecoration( - gradient: LinearGradient( - begin: const Alignment(0.1, -0.9), - end: const Alignment(-0.6, 0.9), - colors: reversedGradient - ? 
linearGradientColors.reversed.toList() - : linearGradientColors, + onTapDown: (_) { + setState(() { + isTapped = true; + }); + }, + onTapUp: (_) { + setState(() { + isTapped = false; + }); + }, + onTapCancel: () { + setState(() { + isTapped = false; + }); + }, + borderRadius: BorderRadius.circular(widget.borderRadius), + onTap: widget.onTap as void Function()?, + child: Stack( + children: [ + Container( + height: 56, + width: double.infinity, + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(widget.borderRadius), + color: colorScheme.gradientButtonBgColor, + ), ), - borderRadius: BorderRadius.circular(8), - ), - child: Center(child: buttonContent), + if (!isTapped) + ClipRRect( + borderRadius: BorderRadius.circular(widget.borderRadius), + child: SvgPicture.asset( + 'assets/svg/button-tint.svg', + fit: BoxFit.fill, + width: double.infinity, + height: 56, + ), + ), + Container( + height: 56, + decoration: BoxDecoration( + border: GradientBoxBorder( + width: widget.borderWidth, + gradient: LinearGradient( + colors: colorScheme.gradientButtonBgColors, + begin: Alignment.topLeft, + end: Alignment.bottomRight, + ), + ), + borderRadius: BorderRadius.circular(widget.borderRadius), + ), + child: Center(child: buttonContent), + ), + ], ), ); } diff --git a/auth/lib/ui/home_page.dart b/auth/lib/ui/home_page.dart index c3397d79af..4110a5f88e 100644 --- a/auth/lib/ui/home_page.dart +++ b/auth/lib/ui/home_page.dart @@ -2,6 +2,7 @@ import 'dart:async'; import 'dart:io'; import 'package:app_links/app_links.dart'; +import 'package:collection/collection.dart'; import 'package:ente_auth/core/configuration.dart'; import 'package:ente_auth/core/event_bus.dart'; import 'package:ente_auth/ente_theme_data.dart'; @@ -10,11 +11,15 @@ import 'package:ente_auth/events/icons_changed_event.dart'; import 'package:ente_auth/events/trigger_logout_event.dart'; import "package:ente_auth/l10n/l10n.dart"; import 'package:ente_auth/models/code.dart'; +import 
'package:ente_auth/onboarding/model/tag_enums.dart'; +import 'package:ente_auth/onboarding/view/common/tag_chip.dart'; import 'package:ente_auth/onboarding/view/setup_enter_secret_key_page.dart'; import 'package:ente_auth/services/preference_service.dart'; import 'package:ente_auth/services/user_service.dart'; +import 'package:ente_auth/store/code_display_store.dart'; import 'package:ente_auth/store/code_store.dart'; import 'package:ente_auth/ui/account/logout_dialog.dart'; +import 'package:ente_auth/ui/code_error_widget.dart'; import 'package:ente_auth/ui/code_widget.dart'; import 'package:ente_auth/ui/common/loading_widget.dart'; import 'package:ente_auth/ui/home/coach_mark_widget.dart'; @@ -54,11 +59,13 @@ class _HomePageState extends State { final FocusNode searchInputFocusNode = FocusNode(); bool _showSearchBox = false; String _searchText = ""; - List _codes = []; + List? _allCodes; + List tags = []; List _filteredCodes = []; StreamSubscription? _streamSubscription; StreamSubscription? _triggerLogoutEvent; StreamSubscription? _iconsChangedEvent; + String selectedTag = ""; @override void initState() { @@ -96,14 +103,26 @@ class _HomePageState extends State { void _loadCodes() { CodeStore.instance.getAllCodes().then((codes) { - _codes = codes; - _hasLoaded = true; - _applyFilteringAndRefresh(); + _allCodes = codes; + + CodeDisplayStore.instance.getAllTags(allCodes: _allCodes).then((value) { + tags = value; + + if (mounted) { + if (!tags.contains(selectedTag)) { + selectedTag = ""; + } + _hasLoaded = true; + _applyFilteringAndRefresh(); + } + }); + }).onError((error, stackTrace) { + _logger.severe('Error while loading codes', error, stackTrace); }); } void _applyFilteringAndRefresh() { - if (_searchText.isNotEmpty && _showSearchBox) { + if (_searchText.isNotEmpty && _showSearchBox && _allCodes != null) { final String val = _searchText.toLowerCase(); // Prioritize issuer match above account for better UX while searching // for a specific TOTP for email providers. 
Searching for "emailProvider" like (gmail, proton) should @@ -112,17 +131,31 @@ class _HomePageState extends State { final List issuerMatch = []; final List accountMatch = []; - for (final Code code in _codes) { - if (code.issuer.toLowerCase().contains(val)) { - issuerMatch.add(code); - } else if (code.account.toLowerCase().contains(val)) { - accountMatch.add(code); + for (final Code codeState in _allCodes!) { + if (codeState.hasError || + selectedTag != "" && + !codeState.display.tags.contains(selectedTag)) { + continue; + } + + if (codeState.issuer.toLowerCase().contains(val)) { + issuerMatch.add(codeState); + } else if (codeState.account.toLowerCase().contains(val)) { + accountMatch.add(codeState); } } _filteredCodes = issuerMatch; _filteredCodes.addAll(accountMatch); } else { - _filteredCodes = _codes; + _filteredCodes = _allCodes + ?.where( + (element) => + !element.hasError && + (selectedTag == "" || + element.display.tags.contains(selectedTag)), + ) + .toList() ?? + []; } if (mounted) { setState(() {}); @@ -149,7 +182,7 @@ class _HomePageState extends State { if (code != null) { await CodeStore.instance.addCode(code); // Focus the new code by searching - if (_codes.length > 2) { + if ((_allCodes?.where((e) => !e.hasError).length ?? 0) > 2) { _focusNewCode(code); } } @@ -171,6 +204,7 @@ class _HomePageState extends State { @override Widget build(BuildContext context) { final l10n = context.l10n; + return PopScope( onPopInvoked: (_) async { if (_isSettingsOpen) { @@ -217,6 +251,7 @@ class _HomePageState extends State { focusedBorder: InputBorder.none, ), ), + centerTitle: true, actions: [ IconButton( icon: _showSearchBox @@ -241,7 +276,7 @@ class _HomePageState extends State { ], ), floatingActionButton: !_hasLoaded || - _codes.isEmpty || + (_allCodes?.isEmpty ?? true) || !PreferenceService.instance.hasShownCoachMark() ? 
null : _getFab(), @@ -258,18 +293,86 @@ class _HomePageState extends State { onManuallySetupTap: _redirectToManualEntryPage, ); } else { - final list = AlignedGridView.count( - crossAxisCount: (MediaQuery.sizeOf(context).width ~/ 400) - .clamp(1, double.infinity) - .toInt(), - itemBuilder: ((context, index) { - try { - return ClipRect(child: CodeWidget(_filteredCodes[index])); - } catch (e) { - return const Text("Failed"); - } - }), - itemCount: _filteredCodes.length, + final anyCodeHasError = + _allCodes?.firstWhereOrNull((element) => element.hasError) != null; + final indexOffset = anyCodeHasError ? 1 : 0; + + final list = Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + if (!anyCodeHasError) + SizedBox( + height: 48, + child: ListView.separated( + scrollDirection: Axis.horizontal, + padding: + const EdgeInsets.symmetric(horizontal: 16, vertical: 2), + separatorBuilder: (context, index) => + const SizedBox(width: 8), + itemCount: tags.length + 1, + itemBuilder: (context, index) { + if (index == 0) { + return TagChip( + label: "All", + state: selectedTag == "" + ? TagChipState.selected + : TagChipState.unselected, + onTap: () { + selectedTag = ""; + setState(() {}); + _applyFilteringAndRefresh(); + }, + ); + } + return TagChip( + label: tags[index - 1], + action: TagChipAction.menu, + state: selectedTag == tags[index - 1] + ? 
TagChipState.selected + : TagChipState.unselected, + onTap: () { + if (selectedTag == tags[index - 1]) { + selectedTag = ""; + setState(() {}); + _applyFilteringAndRefresh(); + return; + } + selectedTag = tags[index - 1]; + setState(() {}); + _applyFilteringAndRefresh(); + }, + ); + }, + ), + ), + Expanded( + child: AlignedGridView.count( + crossAxisCount: (MediaQuery.sizeOf(context).width ~/ 400) + .clamp(1, double.infinity) + .toInt(), + physics: const AlwaysScrollableScrollPhysics(), + padding: const EdgeInsets.only(bottom: 80), + itemBuilder: ((context, index) { + if (index == 0 && anyCodeHasError) { + return CodeErrorWidget( + errors: _allCodes + ?.where((element) => element.hasError) + .length ?? + 0, + ); + } + final newIndex = index - indexOffset; + + return ClipRect( + child: CodeWidget( + _filteredCodes[newIndex], + ), + ); + }), + itemCount: _filteredCodes.length + indexOffset, + ), + ), + ], ); if (!PreferenceService.instance.hasShownCoachMark()) { return Stack( @@ -288,22 +391,12 @@ class _HomePageState extends State { (MediaQuery.sizeOf(context).width ~/ 400) .clamp(1, double.infinity) .toInt(), + padding: const EdgeInsets.only(bottom: 80), itemBuilder: ((context, index) { - Code? code; - try { - code = _filteredCodes[index]; - return CodeWidget(code); - } catch (e, s) { - _logger.severe("code widget error", e, s); - return Center( - child: Padding( - padding: const EdgeInsets.all(8.0), - child: Text( - l10n.sorryUnableToGenCode(code?.issuer ?? 
""), - ), - ), - ); - } + final codeState = _filteredCodes[index]; + return CodeWidget( + codeState, + ); }), itemCount: _filteredCodes.length, ) @@ -360,7 +453,7 @@ class _HomePageState extends State { } if (mounted && link.toLowerCase().startsWith("otpauth://")) { try { - final newCode = Code.fromRawData(link); + final newCode = Code.fromOTPAuthUrl(link); getNextTotp(newCode); CodeStore.instance.addCode(newCode); _focusNewCode(newCode); diff --git a/auth/lib/ui/scanner_page.dart b/auth/lib/ui/scanner_page.dart index 6a77936316..a0f88b7c87 100644 --- a/auth/lib/ui/scanner_page.dart +++ b/auth/lib/ui/scanner_page.dart @@ -2,6 +2,7 @@ import 'dart:io'; import 'package:ente_auth/l10n/l10n.dart'; import 'package:ente_auth/models/code.dart'; +import 'package:ente_auth/utils/toast_util.dart'; import 'package:flutter/material.dart'; import 'package:qr_code_scanner/qr_code_scanner.dart'; @@ -66,11 +67,12 @@ class ScannerPageState extends State { } controller.scannedDataStream.listen((scanData) { try { - final code = Code.fromRawData(scanData.code!); + final code = Code.fromOTPAuthUrl(scanData.code!); controller.dispose(); Navigator.of(context).pop(code); } catch (e) { // Log + showToast(context, context.l10n.invalidQRCode); } }); } diff --git a/auth/lib/ui/settings/data/export_widget.dart b/auth/lib/ui/settings/data/export_widget.dart index ef438301cf..0df7482898 100644 --- a/auth/lib/ui/settings/data/export_widget.dart +++ b/auth/lib/ui/settings/data/export_widget.dart @@ -171,10 +171,12 @@ Future _exportCodes(BuildContext context, String fileContent) async { } Future _getAuthDataForExport() async { - final codes = await CodeStore.instance.getAllCodes(); + final allCodes = await CodeStore.instance.getAllCodes(); String data = ""; - for (final code in codes) { - data += "${code.rawData}\n"; + for (final code in allCodes) { + if (code.hasError) continue; + data += "${code.rawData.replaceAll(',', '%2C')}\n"; } + return data; } diff --git 
a/auth/lib/ui/settings/data/import/aegis_import.dart b/auth/lib/ui/settings/data/import/aegis_import.dart index b801e64a5f..f6dd872522 100644 --- a/auth/lib/ui/settings/data/import/aegis_import.dart +++ b/auth/lib/ui/settings/data/import/aegis_import.dart @@ -2,8 +2,8 @@ import 'dart:async'; import 'dart:convert'; import 'dart:io'; import 'dart:typed_data'; -import 'package:convert/convert.dart'; +import 'package:convert/convert.dart'; import 'package:ente_auth/l10n/l10n.dart'; import 'package:ente_auth/models/code.dart'; import 'package:ente_auth/services/authenticator_service.dart'; @@ -150,7 +150,7 @@ Future _processAegisExportFile( } else { throw Exception('Invalid OTP type'); } - parsedCodes.add(Code.fromRawData(otpUrl)); + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings/data/import/bitwarden_import.dart b/auth/lib/ui/settings/data/import/bitwarden_import.dart index 7a562d82b5..6878fa9f05 100644 --- a/auth/lib/ui/settings/data/import/bitwarden_import.dart +++ b/auth/lib/ui/settings/data/import/bitwarden_import.dart @@ -86,7 +86,7 @@ Future _processBitwardenExportFile( Code code; if (totp.contains("otpauth://")) { - code = Code.fromRawData(totp); + code = Code.fromOTPAuthUrl(totp); } else { var issuer = item['name']; var account = item['login']['username']; @@ -96,6 +96,7 @@ Future _processBitwardenExportFile( account, issuer, totp, + null, Code.defaultDigits, ); } diff --git a/auth/lib/ui/settings/data/import/encrypted_ente_import.dart b/auth/lib/ui/settings/data/import/encrypted_ente_import.dart index 511c9bbf96..3d7896f88e 100644 --- a/auth/lib/ui/settings/data/import/encrypted_ente_import.dart +++ b/auth/lib/ui/settings/data/import/encrypted_ente_import.dart @@ -110,7 +110,7 @@ Future _decryptExportData( final parsedCodes = []; for (final code in splitCodes) { try { - parsedCodes.add(Code.fromRawData(code)); + parsedCodes.add(Code.fromOTPAuthUrl(code)); } catch (e) { 
Logger('EncryptedText').severe("Could not parse code", e); } diff --git a/auth/lib/ui/settings/data/import/google_auth_import.dart b/auth/lib/ui/settings/data/import/google_auth_import.dart index 12df41a142..c14752fa47 100644 --- a/auth/lib/ui/settings/data/import/google_auth_import.dart +++ b/auth/lib/ui/settings/data/import/google_auth_import.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:convert'; + import 'package:base32/base32.dart'; import 'package:ente_auth/l10n/l10n.dart'; import 'package:ente_auth/models/code.dart'; @@ -124,7 +125,7 @@ List parseGoogleAuth(String qrCodeData) { } else { throw Exception('Invalid OTP type'); } - codes.add(Code.fromRawData(otpUrl)); + codes.add(Code.fromOTPAuthUrl(otpUrl)); } return codes; } catch (e, s) { diff --git a/auth/lib/ui/settings/data/import/lastpass_import.dart b/auth/lib/ui/settings/data/import/lastpass_import.dart index 53f8b453d2..8c36f02536 100644 --- a/auth/lib/ui/settings/data/import/lastpass_import.dart +++ b/auth/lib/ui/settings/data/import/lastpass_import.dart @@ -89,8 +89,8 @@ Future _processLastpassExportFile( // Build the OTP URL String otpUrl = - 'otpauth://totp/$issuer:$account?secret=$secret&issuer=$issuer&algorithm=$algorithm&digits=$digits&period=$timer'; - parsedCodes.add(Code.fromRawData(otpUrl)); + 'otpauth://totp/$issuer:$account?secret=$secret&issuer=$issuer&algorithm=$algorithm&digits=$digits&period=$timer'; + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings/data/import/plain_text_import.dart b/auth/lib/ui/settings/data/import/plain_text_import.dart index 03bc50dcea..6867584b0f 100644 --- a/auth/lib/ui/settings/data/import/plain_text_import.dart +++ b/auth/lib/ui/settings/data/import/plain_text_import.dart @@ -13,12 +13,15 @@ import 'package:file_picker/file_picker.dart'; import 'package:flutter/material.dart'; import 'package:logging/logging.dart'; +final _logger = Logger('PlainText'); + class PlainTextImport extends 
StatelessWidget { const PlainTextImport({super.key}); @override Widget build(BuildContext context) { final l10n = context.l10n; + return Column( children: [ Text( @@ -101,20 +104,35 @@ Future _pickImportFile(BuildContext context) async { final progressDialog = createProgressDialog(context, l10n.pleaseWait); await progressDialog.show(); try { + final parsedCodes = []; File file = File(result.files.single.path!); final codes = await file.readAsString(); - List splitCodes = codes.split(","); - if (splitCodes.length == 1) { - splitCodes = const LineSplitter().convert(codes); - } - final parsedCodes = []; - for (final code in splitCodes) { - try { - parsedCodes.add(Code.fromRawData(code)); - } catch (e) { - Logger('PlainText').severe("Could not parse code", e); + + if (codes.startsWith('otpauth://')) { + List splitCodes = codes.split(","); + if (splitCodes.length == 1) { + splitCodes = const LineSplitter().convert(codes); + } + for (final code in splitCodes) { + try { + parsedCodes.add(Code.fromOTPAuthUrl(code)); + } catch (e) { + Logger('PlainText').severe("Could not parse code", e); + } + } + } else { + final decodedCodes = jsonDecode(codes); + List splitCodes = List.from(decodedCodes["items"]); + + for (final code in splitCodes) { + try { + parsedCodes.add(Code.fromExportJson(code)); + } catch (e) { + _logger.severe("Could not parse code", e); + } } } + for (final code in parsedCodes) { await CodeStore.instance.addCode(code, shouldSync: false); } diff --git a/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart b/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart index 48fc748887..3590a38b37 100644 --- a/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart +++ b/auth/lib/ui/settings/data/import/raivo_plain_text_import.dart @@ -57,7 +57,7 @@ Future _pickRaivoJsonFile(BuildContext context) async { String path = result.files.single.path!; int? 
count = await _processRaivoExportFile(context, path); await progressDialog.hide(); - if(count != null) { + if (count != null) { await importSuccessDialog(context, count); } } catch (e) { @@ -70,9 +70,9 @@ Future _pickRaivoJsonFile(BuildContext context) async { } } -Future _processRaivoExportFile(BuildContext context,String path) async { +Future _processRaivoExportFile(BuildContext context, String path) async { File file = File(path); - if(path.endsWith('.zip')) { + if (path.endsWith('.zip')) { await showErrorDialog( context, context.l10n.sorry, @@ -105,7 +105,7 @@ Future _processRaivoExportFile(BuildContext context,String path) async { } else { throw Exception('Invalid OTP type'); } - parsedCodes.add(Code.fromRawData(otpUrl)); + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings/data/import/two_fas_import.dart b/auth/lib/ui/settings/data/import/two_fas_import.dart index ae5a05b0bb..710d898d44 100644 --- a/auth/lib/ui/settings/data/import/two_fas_import.dart +++ b/auth/lib/ui/settings/data/import/two_fas_import.dart @@ -158,7 +158,7 @@ Future _process2FasExportFile( } else { throw Exception('Invalid OTP type'); } - parsedCodes.add(Code.fromRawData(otpUrl)); + parsedCodes.add(Code.fromOTPAuthUrl(otpUrl)); } for (final code in parsedCodes) { diff --git a/auth/lib/ui/settings_page.dart b/auth/lib/ui/settings_page.dart index 48fd6467ca..0e99a1ea36 100644 --- a/auth/lib/ui/settings_page.dart +++ b/auth/lib/ui/settings_page.dart @@ -108,8 +108,9 @@ class SettingsPage extends StatelessWidget { await handleExportClick(context); } else { if (result.action == ButtonAction.second) { - bool hasCodes = - (await CodeStore.instance.getAllCodes()).isNotEmpty; + bool hasCodes = (await CodeStore.instance.getAllCodes()) + .where((element) => !element.hasError) + .isNotEmpty; if (hasCodes) { final hasAuthenticated = await LocalAuthenticationService .instance diff --git a/auth/lib/utils/email_util.dart 
b/auth/lib/utils/email_util.dart index 582449edbb..8b04122289 100644 --- a/auth/lib/utils/email_util.dart +++ b/auth/lib/utils/email_util.dart @@ -146,7 +146,7 @@ Future getZippedLogsFile(BuildContext context) async { final encoder = ZipFileEncoder(); encoder.create(zipFilePath); await encoder.addDirectory(logsDirectory); - encoder.close(); + await encoder.close(); await dialog.hide(); return zipFilePath; } diff --git a/auth/lib/utils/totp_util.dart b/auth/lib/utils/totp_util.dart index 0d6a8bd68f..61c7f20e92 100644 --- a/auth/lib/utils/totp_util.dart +++ b/auth/lib/utils/totp_util.dart @@ -1,8 +1,12 @@ import 'package:ente_auth/models/code.dart'; import 'package:flutter/foundation.dart'; import 'package:otp/otp.dart' as otp; +import 'package:steam_totp/steam_totp.dart'; String getOTP(Code code) { + if (code.type == Type.steam) { + return _getSteamCode(code); + } if (code.type == Type.hotp) { return _getHOTPCode(code); } @@ -26,7 +30,18 @@ String _getHOTPCode(Code code) { ); } +String _getSteamCode(Code code, [bool isNext = false]) { + final SteamTOTP steamtotp = SteamTOTP(secret: code.secret); + + return steamtotp.generate( + DateTime.now().millisecondsSinceEpoch ~/ 1000 + (isNext ? 
code.period : 0), + ); +} + String getNextTotp(Code code) { + if (code.type == Type.steam) { + return _getSteamCode(code, true); + } return otp.OTP.generateTOTPCodeString( getSanitizedSecret(code.secret), DateTime.now().millisecondsSinceEpoch + code.period * 1000, diff --git a/auth/linux/packaging/appimage/make_config.yaml b/auth/linux/packaging/appimage/make_config.yaml index 90db9c5879..9a3004dcd6 100644 --- a/auth/linux/packaging/appimage/make_config.yaml +++ b/auth/linux/packaging/appimage/make_config.yaml @@ -24,5 +24,6 @@ startup_notify: false # include: # - libcurl.so.4 include: - - libffi.so.7 + - libffi.so.8 - libtiff.so.5 + - libjpeg.so.8 diff --git a/auth/macos/Podfile.lock b/auth/macos/Podfile.lock index a5b6eb77c9..92d05104e1 100644 --- a/auth/macos/Podfile.lock +++ b/auth/macos/Podfile.lock @@ -26,40 +26,36 @@ PODS: - path_provider_foundation (0.0.1): - Flutter - FlutterMacOS - - ReachabilitySwift (5.0.0) + - ReachabilitySwift (5.2.2) - screen_retriever (0.0.1): - FlutterMacOS - - Sentry/HybridSDK (8.21.0): - - SentryPrivate (= 8.21.0) - - sentry_flutter (0.0.1): + - Sentry/HybridSDK (8.25.0) + - sentry_flutter (7.20.1): - Flutter - FlutterMacOS - - Sentry/HybridSDK (= 8.21.0) - - SentryPrivate (8.21.0) + - Sentry/HybridSDK (= 8.25.0) - share_plus (0.0.1): - FlutterMacOS - shared_preferences_foundation (0.0.1): - Flutter - FlutterMacOS - - smart_auth (0.0.1): - - FlutterMacOS - sodium_libs (2.2.1): - FlutterMacOS - sqflite (0.0.3): - Flutter - FlutterMacOS - - sqlite3 (3.45.1): - - sqlite3/common (= 3.45.1) - - sqlite3/common (3.45.1) - - sqlite3/fts5 (3.45.1): + - "sqlite3 (3.45.3+1)": + - "sqlite3/common (= 3.45.3+1)" + - "sqlite3/common (3.45.3+1)" + - "sqlite3/fts5 (3.45.3+1)": - sqlite3/common - - sqlite3/perf-threadsafe (3.45.1): + - "sqlite3/perf-threadsafe (3.45.3+1)": - sqlite3/common - - sqlite3/rtree (3.45.1): + - "sqlite3/rtree (3.45.3+1)": - sqlite3/common - sqlite3_flutter_libs (0.0.1): - FlutterMacOS - - sqlite3 (~> 3.45.1) + - "sqlite3 
(~> 3.45.3+1)" - sqlite3/fts5 - sqlite3/perf-threadsafe - sqlite3/rtree @@ -87,7 +83,6 @@ DEPENDENCIES: - sentry_flutter (from `Flutter/ephemeral/.symlinks/plugins/sentry_flutter/macos`) - share_plus (from `Flutter/ephemeral/.symlinks/plugins/share_plus/macos`) - shared_preferences_foundation (from `Flutter/ephemeral/.symlinks/plugins/shared_preferences_foundation/darwin`) - - smart_auth (from `Flutter/ephemeral/.symlinks/plugins/smart_auth/macos`) - sodium_libs (from `Flutter/ephemeral/.symlinks/plugins/sodium_libs/macos`) - sqflite (from `Flutter/ephemeral/.symlinks/plugins/sqflite/darwin`) - sqlite3_flutter_libs (from `Flutter/ephemeral/.symlinks/plugins/sqlite3_flutter_libs/macos`) @@ -100,7 +95,6 @@ SPEC REPOS: - OrderedSet - ReachabilitySwift - Sentry - - SentryPrivate - sqlite3 EXTERNAL SOURCES: @@ -136,8 +130,6 @@ EXTERNAL SOURCES: :path: Flutter/ephemeral/.symlinks/plugins/share_plus/macos shared_preferences_foundation: :path: Flutter/ephemeral/.symlinks/plugins/shared_preferences_foundation/darwin - smart_auth: - :path: Flutter/ephemeral/.symlinks/plugins/smart_auth/macos sodium_libs: :path: Flutter/ephemeral/.symlinks/plugins/sodium_libs/macos sqflite: @@ -165,22 +157,20 @@ SPEC CHECKSUMS: OrderedSet: aaeb196f7fef5a9edf55d89760da9176ad40b93c package_info_plus: 02d7a575e80f194102bef286361c6c326e4c29ce path_provider_foundation: 3784922295ac71e43754bd15e0653ccfd36a147c - ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825 + ReachabilitySwift: 2128f3a8c9107e1ad33574c6e58e8285d460b149 screen_retriever: 59634572a57080243dd1bf715e55b6c54f241a38 - Sentry: ebc12276bd17613a114ab359074096b6b3725203 - sentry_flutter: dff1df05dc39c83d04f9330b36360fc374574c5e - SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe + Sentry: cd86fc55628f5b7c572cabe66cc8f95a9d2f165a + sentry_flutter: 4cb24c1055c556d7b27262ab2e179d1e5a0b9b0c share_plus: 76dd39142738f7a68dd57b05093b5e8193f220f7 shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695 - smart_auth: 
b38e3ab4bfe089eacb1e233aca1a2340f96c28e9 sodium_libs: d39bd76697736cb11ce4a0be73b9b4bc64466d6f sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec - sqlite3: 73b7fc691fdc43277614250e04d183740cb15078 - sqlite3_flutter_libs: 06a05802529659a272beac4ee1350bfec294f386 + sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a + sqlite3_flutter_libs: 8d204ef443cf0d5c1c8b058044eab53f3943a9c5 tray_manager: 9064e219c56d75c476e46b9a21182087930baf90 url_launcher_macos: d2691c7dd33ed713bf3544850a623080ec693d95 window_manager: 3a1844359a6295ab1e47659b1a777e36773cd6e8 PODFILE CHECKSUM: f401c31c8f7c5571f6f565c78915d54338812dab -COCOAPODS: 1.14.3 +COCOAPODS: 1.15.2 diff --git a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/1024-mac.png b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/1024-mac.png index bbf24e4364..840c8bfc3a 100644 Binary files a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/1024-mac.png and b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/1024-mac.png differ diff --git a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/128-mac.png b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/128-mac.png index 2a210095a5..331be75f38 100644 Binary files a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/128-mac.png and b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/128-mac.png differ diff --git a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/16-mac.png b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/16-mac.png index fb83d3abe9..b1d5492fbd 100644 Binary files a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/16-mac.png and b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/16-mac.png differ diff --git a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/256-mac.png b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/256-mac.png index f64b470b01..c4a7d049b6 100644 Binary files a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/256-mac.png and b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/256-mac.png 
differ diff --git a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/32-mac.png b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/32-mac.png index c72e503af5..2cd08ec25c 100644 Binary files a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/32-mac.png and b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/32-mac.png differ diff --git a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/512-mac.png b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/512-mac.png index 07f8c930f9..53ff9127dd 100644 Binary files a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/512-mac.png and b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/512-mac.png differ diff --git a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/64-mac.png b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/64-mac.png index d7c149e3d1..daecae1f30 100644 Binary files a/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/64-mac.png and b/auth/macos/Runner/Assets.xcassets/AppIcon.appiconset/64-mac.png differ diff --git a/auth/pubspec.lock b/auth/pubspec.lock index 7724160420..b3a643b0be 100644 --- a/auth/pubspec.lock +++ b/auth/pubspec.lock @@ -45,10 +45,10 @@ packages: dependency: "direct main" description: name: archive - sha256: "22600aa1e926be775fa5fe7e6894e7fb3df9efda8891c73f70fb3262399a432d" + sha256: "0763b45fa9294197a2885c8567927e2830ade852e5c896fd4ab7e0e348d0f373" url: "https://pub.dev" source: hosted - version: "3.4.10" + version: "3.5.0" args: dependency: transitive description: @@ -318,10 +318,10 @@ packages: dependency: "direct main" description: name: dio - sha256: "639179e1cc0957779e10dd5b786ce180c477c4c0aca5aaba5d1700fa2e834801" + sha256: "11e40df547d418cc0c4900a9318b26304e665da6fa4755399a9ff9efd09034b5" url: "https://pub.dev" source: hosted - version: "5.4.3" + version: "5.4.3+1" dotted_border: dependency: "direct main" description: @@ -468,10 +468,10 @@ packages: dependency: "direct main" description: name: flutter_email_sender - sha256: 
"5001e9158f91a8799140fb30a11ad89cd587244f30b4f848d87085985c49b60f" + sha256: fb515d4e073d238d0daf1d765e5318487b6396d46b96e0ae9745dbc9a133f97a url: "https://pub.dev" source: hosted - version: "6.0.2" + version: "6.0.3" flutter_inappwebview: dependency: "direct main" description: @@ -565,10 +565,10 @@ packages: dependency: transitive description: name: flutter_local_notifications_platform_interface - sha256: "7cf643d6d5022f3baed0be777b0662cce5919c0a7b86e700299f22dc4ae660ef" + sha256: "340abf67df238f7f0ef58f4a26d2a83e1ab74c77ab03cd2b2d5018ac64db30b7" url: "https://pub.dev" source: hosted - version: "7.0.0+1" + version: "7.1.0" flutter_localizations: dependency: "direct main" description: flutter @@ -685,10 +685,10 @@ packages: dependency: "direct main" description: name: fluttertoast - sha256: dfdde255317af381bfc1c486ed968d5a43a2ded9c931e87cbecd88767d6a71c1 + sha256: "81b68579e23fcbcada2db3d50302813d2371664afe6165bc78148050ab94bf66" url: "https://pub.dev" source: hosted - version: "8.2.4" + version: "8.2.5" freezed_annotation: dependency: transitive description: @@ -721,6 +721,14 @@ packages: url: "https://pub.dev" source: hosted version: "5.0.6" + gradient_borders: + dependency: "direct main" + description: + name: gradient_borders + sha256: "69eeaff519d145a4c6c213ada1abae386bcc8981a4970d923e478ce7ba19e309" + url: "https://pub.dev" + source: hosted + version: "1.0.0" graphs: dependency: transitive description: @@ -737,6 +745,22 @@ packages: url: "https://pub.dev" source: hosted version: "2.1.0" + hashlib: + dependency: transitive + description: + name: hashlib + sha256: "67e640e19cc33070113acab3125cd48ebe480a0300e15554dec089b8878a729f" + url: "https://pub.dev" + source: hosted + version: "1.16.0" + hashlib_codecs: + dependency: transitive + description: + name: hashlib_codecs + sha256: "49e2a471f74b15f1854263e58c2ac11f2b631b5b12c836f9708a35397d36d626" + url: "https://pub.dev" + source: hosted + version: "2.2.0" hex: dependency: transitive description: @@ -813,18 
+837,18 @@ packages: dependency: "direct main" description: name: json_annotation - sha256: b10a7b2ff83d83c777edba3c6a0f97045ddadd56c944e1a23a3fdf43a1bf4467 + sha256: "1ce844379ca14835a50d2f019a3099f419082cfdd231cd86a142af94dd5c6bb1" url: "https://pub.dev" source: hosted - version: "4.8.1" + version: "4.9.0" json_serializable: dependency: "direct dev" description: name: json_serializable - sha256: aa1f5a8912615733e0fdc7a02af03308933c93235bdc8d50d0b0c8a8ccb0b969 + sha256: ea1432d167339ea9b5bb153f0571d0039607a873d6e04e0117af043f14a1fd4b url: "https://pub.dev" source: hosted - version: "6.7.1" + version: "6.8.0" leak_tracker: dependency: transitive description: @@ -869,10 +893,10 @@ packages: dependency: "direct main" description: name: local_auth_android - sha256: "3bcd732dda7c75fcb7ddaef12e131230f53dcc8c00790d0d6efb3aa0fbbeda57" + sha256: e0e5b1ea247c5a0951c13a7ee13dc1beae69750e6a2e1910d1ed6a3cd4d56943 url: "https://pub.dev" source: hosted - version: "1.0.37" + version: "1.0.38" local_auth_darwin: dependency: "direct main" description: @@ -1133,10 +1157,10 @@ packages: dependency: "direct main" description: name: pointycastle - sha256: "70fe966348fe08c34bf929582f1d8247d9d9408130723206472b4687227e4333" + sha256: "79fbafed02cfdbe85ef3fd06c7f4bc2cbcba0177e61b765264853d4253b21744" url: "https://pub.dev" source: hosted - version: "3.8.0" + version: "3.9.0" pool: dependency: transitive description: @@ -1221,18 +1245,18 @@ packages: dependency: "direct main" description: name: sentry - sha256: fe99a06970b909a491b7f89d54c9b5119772e3a48a400308a6e129625b333f5b + sha256: e572d33a3ff1d69549f33ee828a8ff514047d43ca8eea4ab093d72461205aa3e url: "https://pub.dev" source: hosted - version: "7.19.0" + version: "7.20.1" sentry_flutter: dependency: "direct main" description: name: sentry_flutter - sha256: fc013d4a753447320f62989b1871fdc1f20c77befcc8be3e38774dd7402e7a62 + sha256: ac8cf6bb849f3560353ae33672e17b2713809a4e8de0d3cf372e9e9c42013757 url: "https://pub.dev" source: hosted - 
version: "7.19.0" + version: "7.20.1" share_plus: dependency: "direct main" description: @@ -1419,10 +1443,10 @@ packages: dependency: "direct main" description: name: sqlite3_flutter_libs - sha256: d6c31c8511c441d1f12f20b607343df1afe4eddf24a1cf85021677c8eea26060 + sha256: fb2a106a2ea6042fe57de2c47074cc31539a941819c91e105b864744605da3f5 url: "https://pub.dev" source: hosted - version: "0.5.20" + version: "0.5.21" stack_trace: dependency: transitive description: @@ -1431,6 +1455,14 @@ packages: url: "https://pub.dev" source: hosted version: "1.11.1" + steam_totp: + dependency: "direct main" + description: + name: steam_totp + sha256: "3c09143c983f6bb05bb53e9232f9d40bbcc01c596ba0273c3e6bb246729abfa1" + url: "https://pub.dev" + source: hosted + version: "0.0.1" step_progress_indicator: dependency: "direct main" description: @@ -1499,10 +1531,10 @@ packages: dependency: transitive description: name: timezone - sha256: "1cfd8ddc2d1cfd836bc93e67b9be88c3adaeca6f40a00ca999104c30693cdca0" + sha256: a6ccda4a69a442098b602c44e61a1e2b4bf6f5516e875bbf0f427d5df14745d5 url: "https://pub.dev" source: hosted - version: "0.9.2" + version: "0.9.3" timing: dependency: transitive description: @@ -1595,10 +1627,10 @@ packages: dependency: transitive description: name: url_launcher_web - sha256: "3692a459204a33e04bc94f5fb91158faf4f2c8903281ddd82915adecdb1a901d" + sha256: "8d9e750d8c9338601e709cd0885f95825086bd8b642547f26bda435aade95d8a" url: "https://pub.dev" source: hosted - version: "2.3.0" + version: "2.3.1" url_launcher_windows: dependency: transitive description: @@ -1683,18 +1715,18 @@ packages: dependency: "direct main" description: name: win32 - sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a" + sha256: "0eaf06e3446824099858367950a813472af675116bf63f008a4c2a75ae13e9cb" url: "https://pub.dev" source: hosted - version: "5.4.0" + version: "5.5.0" win32_registry: dependency: transitive description: name: win32_registry - sha256: 
"41fd8a189940d8696b1b810efb9abcf60827b6cbfab90b0c43e8439e3a39d85a" + sha256: "10589e0d7f4e053f2c61023a31c9ce01146656a70b7b7f0828c0b46d7da2a9bb" url: "https://pub.dev" source: hosted - version: "1.1.2" + version: "1.1.3" window_manager: dependency: "direct main" description: diff --git a/auth/pubspec.yaml b/auth/pubspec.yaml index b7a35b6996..3a127cee31 100644 --- a/auth/pubspec.yaml +++ b/auth/pubspec.yaml @@ -1,6 +1,6 @@ name: ente_auth description: ente two-factor authenticator -version: 2.0.57+257 +version: 3.0.4+304 publish_to: none environment: @@ -14,7 +14,7 @@ dependencies: bip39: ^1.0.6 #done bloc: ^8.1.2 clipboard: ^0.1.3 - collection: # dart + collection: ^1.18.0 # dart confetti: ^0.7.0 connectivity_plus: ^5.0.2 convert: ^3.1.1 @@ -62,6 +62,7 @@ dependencies: flutter_svg: ^2.0.5 fluttertoast: ^8.1.1 google_nav_bar: ^5.0.5 #supported + gradient_borders: ^1.0.0 http: ^1.1.0 intl: ^0.18.0 json_annotation: ^4.5.0 @@ -93,6 +94,7 @@ dependencies: sqflite_common_ffi: ^2.3.0+4 sqlite3: ^2.1.0 sqlite3_flutter_libs: ^0.5.19+1 + steam_totp: ^0.0.1 step_progress_indicator: ^1.0.2 styled_text: ^8.1.0 tray_manager: ^0.2.1 @@ -129,6 +131,7 @@ flutter: - assets/simple-icons/_data/ - assets/custom-icons/icons/ - assets/custom-icons/_data/ + - assets/svg/ fonts: - family: Inter @@ -145,16 +148,38 @@ flutter: flutter_icons: android: "launcher_icon" adaptive_icon_foreground: "assets/generation-icons/icon-light-adaptive-fg.png" - adaptive_icon_background: "#ffffff" + adaptive_icon_background: "assets/generation-icons/icon-light-adaptive-bg.png" ios: true image_path: "assets/generation-icons/icon-light.png" remove_alpha_ios: true flutter_native_splash: - color: "#ffffff" + color: "#FFFFFF" color_dark: "#000000" - image: assets/splash-screen-light.png - image_dark: assets/splash-screen-dark.png - android_fullscreen: true + image: "assets/splash/splash-icon-fg.png" android_gravity: center ios_content_mode: center + android_12: + # The image parameter sets the splash screen icon 
image. If this parameter is not specified, + # the app's launcher icon will be used instead. + # Please note that the splash screen will be clipped to a circle on the center of the screen. + # App icon with an icon background: This should be 960×960 pixels, and fit within a circle + # 640 pixels in diameter. + # App icon without an icon background: This should be 1152×1152 pixels, and fit within a circle + # 768 pixels in diameter. + image: "assets/splash/splash-icon-fg-12.png" + + # Splash screen background color. + color: "#FFFFFF" + + # App icon background color. + #icon_background_color: "#111111" + + # The branding property allows you to specify an image used as branding in the splash screen. + #branding: assets/dart.png + + # The image_dark, color_dark, icon_background_color_dark, and branding_dark set values that + # apply when the device is in dark mode. If they are not specified, the app will use the + # parameters from above. + color_dark: "#000000" + #icon_background_color_dark: "#eeeeee" diff --git a/auth/test/models/code_test.dart b/auth/test/models/code_test.dart index 30ea23a4fa..f51364118e 100644 --- a/auth/test/models/code_test.dart +++ b/auth/test/models/code_test.dart @@ -1,9 +1,12 @@ +import 'dart:convert'; + import 'package:ente_auth/models/code.dart'; +import 'package:ente_auth/models/code_display.dart'; import 'package:flutter_test/flutter_test.dart'; void main() { test("parseCodeFromRawData", () { - final code1 = Code.fromRawData( + final code1 = Code.fromOTPAuthUrl( "otpauth://totp/example%20finance%3Aee%40ff.gg?secret=ASKZNWOU6SVYAMVS", ); expect(code1.issuer, "example finance", reason: "issuerMismatch"); @@ -12,7 +15,7 @@ void main() { }); test("parseDocumentedFormat", () { - final code = Code.fromRawData( + final code = Code.fromOTPAuthUrl( "otpauth://totp/testdata@ente.io?secret=ASKZNWOU6SVYAMVS&issuer=GitHub", ); expect(code.issuer, "GitHub", reason: "issuerMismatch"); @@ -21,7 +24,7 @@ void main() { }); test("validateCount", () { - 
final code = Code.fromRawData( + final code = Code.fromOTPAuthUrl( "otpauth://hotp/testdata@ente.io?secret=ASKZNWOU6SVYAMVS&issuer=GitHub&counter=15", ); expect(code.issuer, "GitHub", reason: "issuerMismatch"); @@ -29,10 +32,29 @@ void main() { expect(code.secret, "ASKZNWOU6SVYAMVS"); expect(code.counter, 15); }); + + test("validateDisplay", () { + Code code = Code.fromOTPAuthUrl( + "otpauth://hotp/testdata@ente.io?secret=ASKZNWOU6SVYAMVS&issuer=GitHub&counter=15", + ); + expect(code.issuer, "GitHub", reason: "issuerMismatch"); + expect(code.account, "testdata@ente.io", reason: "accountMismatch"); + expect(code.secret, "ASKZNWOU6SVYAMVS"); + expect(code.counter, 15); + code = code.copyWith( + display: CodeDisplay(pinned: true, tags: ["tag1", "com,ma", ';;%\$']), + ); + final dataToStore = code.toOTPAuthUrlFormat(); + final restoredCode = Code.fromOTPAuthUrl(jsonDecode(dataToStore)); + expect(restoredCode.display.pinned, true); + expect(restoredCode.display.tags, ["tag1", "com,ma", ';;%\$']); + final secondDataToStore = restoredCode.toOTPAuthUrlFormat(); + expect(dataToStore, secondDataToStore); + }); // test("parseWithFunnyAccountName", () { - final code = Code.fromRawData( + final code = Code.fromOTPAuthUrl( "otpauth://totp/Mongo Atlas:Acc !@#444?algorithm=sha1&digits=6&issuer=Mongo Atlas&period=30&secret=NI4CTTFEV4G2JFE6", ); expect(code.issuer, "Mongo Atlas", reason: "issuerMismatch"); @@ -43,11 +65,11 @@ void main() { test("parseAndUpdateInChinese", () { const String rubberDuckQr = 'otpauth://totp/%E6%A9%A1%E7%9A%AE%E9%B8%AD?secret=2CWDCK4EOIN5DJDRMYUMYBBO4MKSR5AX&issuer=ente.io'; - final code = Code.fromRawData(rubberDuckQr); + final code = Code.fromOTPAuthUrl(rubberDuckQr); expect(code.account, '橡皮鸭'); final String updatedRawCode = code.copyWith(account: '伍迪', issuer: '鸭子').rawData; - final updateCode = Code.fromRawData(updatedRawCode); + final updateCode = Code.fromOTPAuthUrl(updatedRawCode); expect(updateCode.account, '伍迪', reason: 'updated 
accountMismatch'); expect(updateCode.issuer, '鸭子', reason: 'updated issuerMismatch'); }); diff --git a/auth/web/index.html b/auth/web/index.html index ef953df53b..097159f9ed 100644 --- a/auth/web/index.html +++ b/auth/web/index.html @@ -29,9 +29,92 @@ Auth - + - + + + + + + + + + + + + + + + @@ -40,6 +123,13 @@ + + + + + + + diff --git a/auth/web/splash/img/dark-1x.png b/auth/web/splash/img/dark-1x.png index 87f84c70e6..91acb41ae9 100644 Binary files a/auth/web/splash/img/dark-1x.png and b/auth/web/splash/img/dark-1x.png differ diff --git a/auth/web/splash/img/dark-2x.png b/auth/web/splash/img/dark-2x.png index ce01bec05c..9a7c72afa9 100644 Binary files a/auth/web/splash/img/dark-2x.png and b/auth/web/splash/img/dark-2x.png differ diff --git a/auth/web/splash/img/dark-3x.png b/auth/web/splash/img/dark-3x.png index 75f4b1f3c5..5b4d99582b 100644 Binary files a/auth/web/splash/img/dark-3x.png and b/auth/web/splash/img/dark-3x.png differ diff --git a/auth/web/splash/img/dark-4x.png b/auth/web/splash/img/dark-4x.png index 2beb1c8167..1666311d28 100644 Binary files a/auth/web/splash/img/dark-4x.png and b/auth/web/splash/img/dark-4x.png differ diff --git a/auth/web/splash/img/light-1x.png b/auth/web/splash/img/light-1x.png index 899cecf22c..91acb41ae9 100644 Binary files a/auth/web/splash/img/light-1x.png and b/auth/web/splash/img/light-1x.png differ diff --git a/auth/web/splash/img/light-2x.png b/auth/web/splash/img/light-2x.png index 4bb7a5751b..9a7c72afa9 100644 Binary files a/auth/web/splash/img/light-2x.png and b/auth/web/splash/img/light-2x.png differ diff --git a/auth/web/splash/img/light-3x.png b/auth/web/splash/img/light-3x.png index 176f0c723b..5b4d99582b 100644 Binary files a/auth/web/splash/img/light-3x.png and b/auth/web/splash/img/light-3x.png differ diff --git a/auth/web/splash/img/light-4x.png b/auth/web/splash/img/light-4x.png index a0d1a26f75..1666311d28 100644 Binary files a/auth/web/splash/img/light-4x.png and b/auth/web/splash/img/light-4x.png differ 
diff --git a/desktop/.github/workflows/desktop-draft-release.yml b/desktop/.github/workflows/desktop-draft-release.yml deleted file mode 100644 index 8c0652dfcd..0000000000 --- a/desktop/.github/workflows/desktop-draft-release.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: "Draft release" - -# Build the desktop/draft-release branch and update the existing draft release -# with the resultant artifacts. -# -# This is meant for doing tests that require the app to be signed and packaged. -# Such releases should not be published to end users. -# -# Workflow: -# -# 1. Push your changes to the "desktop/draft-release" branch on -# https://github.com/ente-io/ente. -# -# 2. Create a draft release with tag equal to the version in the `package.json`. -# -# 3. Trigger this workflow. You can trigger it multiple times, each time it'll -# just update the artifacts attached to the same draft. -# -# 4. Once testing is done delete the draft. - -on: - # Trigger manually or `gh workflow run desktop-draft-release.yml`. - workflow_dispatch: - -jobs: - release: - runs-on: macos-latest - - defaults: - run: - working-directory: desktop - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - repository: ente-io/ente - ref: desktop/draft-release - submodules: recursive - - - name: Setup node - uses: actions/setup-node@v4 - with: - node-version: 20 - - - name: Install dependencies - run: yarn install - - - name: Build - uses: ente-io/action-electron-builder@v1.0.0 - with: - package_root: desktop - - # GitHub token, automatically provided to the action - # (No need to define this secret in the repo settings) - github_token: ${{ secrets.GITHUB_TOKEN }} - - # If the commit is tagged with a version (e.g. "v1.0.0"), - # release the app after building. 
- release: ${{ startsWith(github.ref, 'refs/tags/v') }} - - mac_certs: ${{ secrets.MAC_CERTS }} - mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }} - env: - # macOS notarization credentials key details - APPLE_ID: ${{ secrets.APPLE_ID }} - APPLE_APP_SPECIFIC_PASSWORD: - ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }} - APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} - USE_HARD_LINKS: false diff --git a/desktop/.github/workflows/desktop-release.yml b/desktop/.github/workflows/desktop-release.yml index 2fa3823767..70eedf3ea6 100644 --- a/desktop/.github/workflows/desktop-release.yml +++ b/desktop/.github/workflows/desktop-release.yml @@ -1,20 +1,12 @@ name: "Release" -# This will create a new draft release with public artifacts. +# Build the ente-io/ente's desktop/rc branch and create/update a draft release. # -# Note that a release will only get created if there is an associated tag -# (GitHub releases need a corresponding tag). -# -# The canonical source for this action is in the repository where we keep the -# source code for the Ente Photos desktop app: https://github.com/ente-io/ente -# -# However, it actually lives and runs in the repository that we use for making -# releases: https://github.com/ente-io/photos-desktop -# -# We need two repositories because Electron updater currently doesn't work well -# with monorepos. For more details, see `docs/release.md`. +# For more details, see `docs/release.md` in ente-io/ente. on: + # Trigger manually or `gh workflow run desktop-release.yml`. + workflow_dispatch: push: # Run when a tag matching the pattern "v*"" is pushed. # @@ -38,11 +30,9 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - # Checkout the tag photosd-v1.x.x from the source code - # repository when we're invoked for tag v1.x.x on the releases - # repository. + # Checkout the desktop/rc branch from the source repository. 
repository: ente-io/ente - ref: photosd-${{ github.ref_name }} + ref: desktop/rc submodules: recursive - name: Setup node @@ -50,6 +40,11 @@ jobs: with: node-version: 20 + - name: Increase yarn timeout + # `yarn install` times out sometimes on the Windows runner, + # resulting in flaky builds. + run: yarn config set network-timeout 900000 -g + - name: Install dependencies run: yarn install @@ -63,13 +58,15 @@ jobs: uses: ente-io/action-electron-builder@v1.0.0 with: package_root: desktop + build_script_name: build:ci # GitHub token, automatically provided to the action # (No need to define this secret in the repo settings) github_token: ${{ secrets.GITHUB_TOKEN }} # If the commit is tagged with a version (e.g. "v1.0.0"), - # release the app after building. + # create a (draft) release after building. Otherwise upload + # assets to the existing draft named after the version. release: ${{ startsWith(github.ref, 'refs/tags/v') }} mac_certs: ${{ secrets.MAC_CERTS }} diff --git a/desktop/CHANGELOG.md b/desktop/CHANGELOG.md index eb118a424d..5fbbefaaa8 100644 --- a/desktop/CHANGELOG.md +++ b/desktop/CHANGELOG.md @@ -2,11 +2,17 @@ ## v1.7.0 (Unreleased) -v1.7 is a major rewrite to improve the security of our app. We have enabled -sandboxing and disabled node integration for the renderer process. All this -required restructuring our IPC mechanisms, which resulted in a lot of under the -hood changes. The outcome is a more secure app that also uses the latest and -greatest Electron recommendations. +v1.7 is a major rewrite to improve the security of our app. In particular, the +UI and the native parts of the app now run isolated from each other and +communicate only using a predefined IPC boundary. + +Other highlights: + +- View your photos on big screens and Chromecast devices by using the "Play + album on TV" option in the album menu. +- Support Brazilian Portuguese, German and Russian. +- Provide a checkbox to select all photos in a day. 
+- Fix a case where the dedup screen would not refresh after removing items. ## v1.6.63 diff --git a/desktop/docs/dependencies.md b/desktop/docs/dependencies.md index 6052357033..a9e92b50d8 100644 --- a/desktop/docs/dependencies.md +++ b/desktop/docs/dependencies.md @@ -83,9 +83,6 @@ are similar to that in the web code. Some extra ones specific to the code here are: -- [concurrently](https://github.com/open-cli-tools/concurrently) for spawning - parallel tasks when we do `yarn dev`. - - [shx](https://github.com/shelljs/shx) for providing a portable way to use Unix commands in our `package.json` scripts. This allows us to use the same commands (like `ln`) across different platforms like Linux and Windows. diff --git a/desktop/docs/release.md b/desktop/docs/release.md index b55c96326d..1cda1c11b1 100644 --- a/desktop/docs/release.md +++ b/desktop/docs/release.md @@ -1,46 +1,64 @@ ## Releases -Conceptually, the release is straightforward: We push a tag, a GitHub workflow -gets triggered that creates a draft release with artifacts built from that tag. -We then publish that release. The download links on our website, and existing -apps already know how to check for the latest GitHub release and update -accordingly. +Conceptually, the release is straightforward: We trigger a GitHub workflow that +creates a draft release with artifacts built. When ready, we publish that +release. The download links on our website, and existing apps already check the +latest GitHub release and update accordingly. -The complication comes by the fact that Electron Updater (the mechanism that we -use for auto updates) doesn't work well with monorepos. So we need to keep a -separate (non-mono) repository just for doing releases. +The complication comes by the fact that electron-builder's auto updaterr (the +mechanism that we use for auto updates) doesn't work with monorepos. So we need +to keep a separate (non-mono) repository just for doing releases. 
- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente). - Releases are done from [ente-io/photos-desktop](https://github.com/ente-io/photos-desktop). -## Workflow +## Workflow - Release Candidates -The workflow is: +Leading up to the release, we can make one or more draft releases that are not +intended to be published, but serve as test release candidates. -1. Finalize the changes in the source repo. +The workflow for making such "rc" builds is: - - Update the CHANGELOG. - - Update the version in `package.json` - - `git commit -m "[photosd] Release v1.2.3"` - - Open PR, merge into main. +1. Update `package.json` in the source repo to use version `1.x.x-rc`. Create a + new draft release in the release repo with title `1.x.x-rc`. In the tag + input enter `v1.x.x-rc` and select the option to "create a new tag on + publish". -2. Tag the merge commit with a tag matching the pattern `photosd-v1.2.3`, where - `1.2.3` is the version in `package.json` +2. Push code to the `desktop/rc` branch in the source repo. + +3. Trigger the GitHub action in the release repo ```sh - git tag photosd-v1.x.x - git push origin photosd-v1.x.x + gh workflow run desktop-release.yml ``` -3. Head over to the releases repository and run the trigger script, passing it - the tag _without_ the `photosd-` prefix. +We can do steps 2 and 3 multiple times: each time it'll just update the +artifacts attached to the same draft. + +## Workflow - Release + +1. Update source repo to set version `1.x.x` in `package.json` and finalize + the CHANGELOG. + +2. Push code to the `desktop/rc` branch in the source repo. + +3. In the release repo ```sh ./.github/trigger-release.sh v1.x.x ``` +4. If the build is successful, tag `desktop/rc` in the source repo. + + ```sh + # Assuming we're on desktop/rc that just got built + + git tag photosd-v1.x.x + git push origin photosd-v1.x.x + ``` + ## Post build The GitHub Action runs on Windows, Linux and macOS. 
It produces the artifacts diff --git a/desktop/electron-builder.yml b/desktop/electron-builder.yml index 298b1c5f36..c2c000ce9f 100644 --- a/desktop/electron-builder.yml +++ b/desktop/electron-builder.yml @@ -29,4 +29,3 @@ mac: arch: [universal] category: public.app-category.photography hardenedRuntime: true -afterSign: electron-builder-notarize diff --git a/desktop/package.json b/desktop/package.json index 462857a8bf..236dd55927 100644 --- a/desktop/package.json +++ b/desktop/package.json @@ -1,6 +1,6 @@ { "name": "ente", - "version": "1.7.0-beta.0", + "version": "1.7.0-rc", "private": true, "description": "Desktop client for Ente Photos", "repository": "github:ente-io/photos-desktop", @@ -11,9 +11,10 @@ "build-main": "tsc && electron-builder", "build-main:quick": "tsc && electron-builder --dir --config.compression=store --config.mac.identity=null", "build-renderer": "cd ../web && yarn install && yarn build:photos && cd ../desktop && shx rm -f out && shx ln -sf ../web/apps/photos/out out", + "build:ci": "yarn build-renderer && tsc", "build:quick": "yarn build-renderer && yarn build-main:quick", "dev": "concurrently --kill-others --success first --names 'main,rndr' \"yarn dev-main\" \"yarn dev-renderer\"", - "dev-main": "tsc && electron app/main.js", + "dev-main": "tsc && electron .", "dev-renderer": "cd ../web && yarn install && yarn dev:photos", "postinstall": "electron-builder install-app-deps", "lint": "yarn prettier --check --log-level warn . 
&& eslint --ext .ts src && yarn tsc", @@ -46,7 +47,6 @@ "concurrently": "^8", "electron": "^30", "electron-builder": "25.0.0-alpha.6", - "electron-builder-notarize": "^1.5", "eslint": "^8", "prettier": "^3", "prettier-plugin-organize-imports": "^3", @@ -54,5 +54,6 @@ "shx": "^0.3", "typescript": "^5" }, + "packageManager": "yarn@1.22.21", "productName": "ente" } diff --git a/desktop/src/main.ts b/desktop/src/main.ts index 9cba9178df..463774dc2b 100644 --- a/desktop/src/main.ts +++ b/desktop/src/main.ts @@ -17,7 +17,11 @@ import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import os from "node:os"; import path from "node:path"; -import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc"; +import { + attachFSWatchIPCHandlers, + attachIPCHandlers, + attachLogoutIPCHandler, +} from "./main/ipc"; import log, { initLogging } from "./main/log"; import { createApplicationMenu, createTrayContextMenu } from "./main/menu"; import { setupAutoUpdater } from "./main/services/app-update"; @@ -237,7 +241,7 @@ const uniqueSavePath = (dirPath: string, fileName: string) => { * * @param webContents The renderer to configure. */ -export const allowExternalLinks = (webContents: WebContents) => { +export const allowExternalLinks = (webContents: WebContents) => // By default, if the user were open a link, say // https://github.com/ente-io/ente/discussions, then it would open a _new_ // BrowserWindow within our app. @@ -249,13 +253,37 @@ export const allowExternalLinks = (webContents: WebContents) => { // Returning `action` "deny" accomplishes this. webContents.setWindowOpenHandler(({ url }) => { if (!url.startsWith(rendererURL)) { + // This does not work in Ubuntu currently: mailto links seem to just + // get ignored, and HTTP links open in the text editor instead of in + // the browser. 
+ // https://github.com/electron/electron/issues/31485 void shell.openExternal(url); return { action: "deny" }; } else { return { action: "allow" }; } }); -}; + +/** + * Allow uploading to arbitrary S3 buckets. + * + * The files in the desktop app are served over the ente:// protocol. During + * testing or self-hosting, we might be using an S3 bucket that does not allow + * whitelisting a custom URI scheme. To avoid requiring the bucket to set an + * "Access-Control-Allow-Origin: *" or do an echo-back of `Origin`, we add a + * workaround here instead, intercepting the ACAO header and allowing `*`. + */ +export const allowAllCORSOrigins = (webContents: WebContents) => + webContents.session.webRequest.onHeadersReceived( + ({ responseHeaders }, callback) => { + const headers: NonNullable = {}; + for (const [key, value] of Object.entries(responseHeaders ?? {})) + if (key.toLowerCase() != "access-control-allow-origin") + headers[key] = value; + headers["Access-Control-Allow-Origin"] = ["*"]; + callback({ responseHeaders: headers }); + }, + ); /** * Add an icon for our app in the system tray. @@ -287,32 +315,18 @@ const setupTrayItem = (mainWindow: BrowserWindow) => { /** * Older versions of our app used to maintain a cache dir using the main - * process. This has been removed in favor of cache on the web layer. + * process. This has been removed in favor of cache on the web layer. Delete the + * old cache dir if it exists. * - * Delete the old cache dir if it exists. - * - * This will happen in two phases. The cache had three subdirectories: - * - * - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024). - * - * - The third one, "face-crops" will be removed once we finish the face search - * changes. See: [Note: Legacy face crops]. - * - * This migration code can be removed after some time once most people have - * upgraded to newer versions. + * Added May 2024, v1.7.0. 
This migration code can be removed after some time + * once most people have upgraded to newer versions. */ const deleteLegacyDiskCacheDirIfExists = async () => { - const removeIfExists = async (dirPath: string) => { - if (existsSync(dirPath)) { - log.info(`Removing legacy disk cache from ${dirPath}`); - await fs.rm(dirPath, { recursive: true }); - } - }; // [Note: Getting the cache path] // // The existing code was passing "cache" as a parameter to getPath. // - // However, "cache" is not a valid parameter to getPath. It works! (for + // However, "cache" is not a valid parameter to getPath. It works (for // example, on macOS I get `~/Library/Caches`), but it is intentionally not // documented as part of the public API: // @@ -325,8 +339,8 @@ const deleteLegacyDiskCacheDirIfExists = async () => { // @ts-expect-error "cache" works but is not part of the public API. const cacheDir = path.join(app.getPath("cache"), "ente"); if (existsSync(cacheDir)) { - await removeIfExists(path.join(cacheDir, "thumbs")); - await removeIfExists(path.join(cacheDir, "files")); + log.info(`Removing legacy disk cache from ${cacheDir}`); + await fs.rm(cacheDir, { recursive: true }); } }; @@ -377,13 +391,19 @@ const main = () => { void (async () => { // Create window and prepare for the renderer. mainWindow = createMainWindow(); + + // Setup IPC and streams. + const watcher = createWatcher(mainWindow); attachIPCHandlers(); - attachFSWatchIPCHandlers(createWatcher(mainWindow)); + attachFSWatchIPCHandlers(watcher); + attachLogoutIPCHandler(watcher); registerStreamProtocol(); // Configure the renderer's environment. - setDownloadPath(mainWindow.webContents); - allowExternalLinks(mainWindow.webContents); + const webContents = mainWindow.webContents; + setDownloadPath(webContents); + allowExternalLinks(webContents); + allowAllCORSOrigins(webContents); // Start loading the renderer. 
void mainWindow.loadURL(rendererURL); diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts index 1393f4bfd3..6e7df7cdea 100644 --- a/desktop/src/main/ipc.ts +++ b/desktop/src/main/ipc.ts @@ -24,7 +24,6 @@ import { updateOnNextRestart, } from "./services/app-update"; import { - legacyFaceCrop, openDirectory, openLogDirectory, selectDirectory, @@ -41,16 +40,13 @@ import { fsWriteFile, } from "./services/fs"; import { convertToJPEG, generateImageThumbnail } from "./services/image"; +import { logout } from "./services/logout"; import { - clipImageEmbedding, - clipTextEmbeddingIfAvailable, + computeCLIPImageEmbedding, + computeCLIPTextEmbeddingIfAvailable, } from "./services/ml-clip"; -import { detectFaces, faceEmbedding } from "./services/ml-face"; -import { - clearStores, - encryptionKey, - saveEncryptionKey, -} from "./services/store"; +import { computeFaceEmbeddings, detectFaces } from "./services/ml-face"; +import { encryptionKey, saveEncryptionKey } from "./services/store"; import { clearPendingUploads, listZipItems, @@ -65,7 +61,6 @@ import { watchFindFiles, watchGet, watchRemove, - watchReset, watchUpdateIgnoredFiles, watchUpdateSyncedFiles, } from "./services/watch"; @@ -106,8 +101,6 @@ export const attachIPCHandlers = () => { ipcMain.handle("selectDirectory", () => selectDirectory()); - ipcMain.on("clearStores", () => clearStores()); - ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) => saveEncryptionKey(encryptionKey), ); @@ -171,36 +164,27 @@ export const attachIPCHandlers = () => { command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, - ) => - ffmpegExec( - command, - dataOrPathOrZipItem, - outputFileExtension, - timeoutMS, - ), + ) => ffmpegExec(command, dataOrPathOrZipItem, outputFileExtension), ); // - ML - ipcMain.handle("clipImageEmbedding", (_, jpegImageData: Uint8Array) => - clipImageEmbedding(jpegImageData), + ipcMain.handle( + "computeCLIPImageEmbedding", + 
(_, jpegImageData: Uint8Array) => + computeCLIPImageEmbedding(jpegImageData), ); - ipcMain.handle("clipTextEmbeddingIfAvailable", (_, text: string) => - clipTextEmbeddingIfAvailable(text), + ipcMain.handle("computeCLIPTextEmbeddingIfAvailable", (_, text: string) => + computeCLIPTextEmbeddingIfAvailable(text), ); ipcMain.handle("detectFaces", (_, input: Float32Array) => detectFaces(input), ); - ipcMain.handle("faceEmbedding", (_, input: Float32Array) => - faceEmbedding(input), - ); - - ipcMain.handle("legacyFaceCrop", (_, faceID: string) => - legacyFaceCrop(faceID), + ipcMain.handle("computeFaceEmbeddings", (_, input: Float32Array) => + computeFaceEmbeddings(input), ); // - Upload @@ -269,6 +253,12 @@ export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => { ipcMain.handle("watchFindFiles", (_, folderPath: string) => watchFindFiles(folderPath), ); - - ipcMain.handle("watchReset", () => watchReset(watcher)); +}; + +/** + * Sibling of {@link attachIPCHandlers} specifically for use with the logout + * event which needs access to the {@link FSWatcher} instance. + */ +export const attachLogoutIPCHandler = (watcher: FSWatcher) => { + ipcMain.handle("logout", () => logout(watcher)); }; diff --git a/desktop/src/main/log.ts b/desktop/src/main/log.ts index cf1404a90a..9718dfea56 100644 --- a/desktop/src/main/log.ts +++ b/desktop/src/main/log.ts @@ -5,11 +5,8 @@ import { isDev } from "./utils/electron"; /** * Initialize logging in the main process. * - * This will set our underlying logger up to log to a file named `ente.log`, - * - * - on Linux at ~/.config/ente/logs/ente.log - * - on macOS at ~/Library/Logs/ente/ente.log - * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log + * This will set our underlying logger up to log to a file named `ente.log`, see + * [Note: App log path]. * * On dev builds, it will also log to the console. 
*/ @@ -41,36 +38,41 @@ export const logToDisk = (message: string) => { log.info(`[rndr] ${message}`); }; -const logError = (message: string, e?: unknown) => { - if (!e) { - logError_(message); - return; - } +const messageWithError = (message: string, e?: unknown) => { + if (!e) return message; let es: string; if (e instanceof Error) { // In practice, we expect ourselves to be called with Error objects, so // this is the happy path so to say. - es = `${e.name}: ${e.message}\n${e.stack}`; + es = [`${e.name}: ${e.message}`, e.stack].filter((x) => x).join("\n"); } else { // For the rest rare cases, use the default string serialization of e. es = String(e); } - logError_(`${message}: ${es}`); + return `${message}: ${es}`; }; -const logError_ = (message: string) => { - log.error(`[main] [error] ${message}`); - if (isDev) console.error(`[error] ${message}`); +const logError = (message: string, e?: unknown) => { + const m = `[error] ${messageWithError(message, e)}`; + console.error(m); + log.error(`[main] ${m}`); +}; + +const logWarn = (message: string, e?: unknown) => { + const m = `[warn] ${messageWithError(message, e)}`; + console.error(m); + log.error(`[main] ${m}`); }; const logInfo = (...params: unknown[]) => { const message = params .map((p) => (typeof p == "string" ? p : util.inspect(p))) .join(" "); - log.info(`[main] ${message}`); - if (isDev) console.log(`[info] ${message}`); + const m = `[info] ${message}`; + if (isDev) console.log(m); + log.info(`[main] ${m}`); }; const logDebug = (param: () => unknown) => { @@ -96,10 +98,15 @@ export default { * any arbitrary object that we obtain, say, when in a try-catch handler (in * JavaScript any arbitrary value can be thrown). * - * The log is written to disk. In development builds, the log is also - * printed to the main (Node.js) process console. + * The log is written to disk and printed to the main (Node.js) process's + * console. 
*/ error: logError, + /** + * Sibling of {@link error}, with the same parameters and behaviour, except + * it gets prefixed with a warning instead of an error tag. + */ + warn: logWarn, /** * Log a message. * @@ -120,7 +127,7 @@ export default { * The function can return an arbitrary value which is serialized before * being logged. * - * This log is NOT written to disk. And it is printed to the main (Node.js) + * This log is NOT written to disk. It is printed to the main (Node.js) * process console, but only on development builds. */ debug: logDebug, diff --git a/desktop/src/main/menu.ts b/desktop/src/main/menu.ts index 45cbd63624..188b195f82 100644 --- a/desktop/src/main/menu.ts +++ b/desktop/src/main/menu.ts @@ -82,12 +82,14 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { checked: isAutoLaunchEnabled, click: toggleAutoLaunch, }, - { - label: "Hide Dock Icon", - type: "checkbox", - checked: shouldHideDockIcon, - click: toggleHideDockIcon, - }, + ...macOSOnly([ + { + label: "Hide Dock Icon", + type: "checkbox", + checked: shouldHideDockIcon, + click: toggleHideDockIcon, + }, + ]), ], }, diff --git a/desktop/src/main/services/app-update.ts b/desktop/src/main/services/app-update.ts index 5788b9b27a..8b2d07a49c 100644 --- a/desktop/src/main/services/app-update.ts +++ b/desktop/src/main/services/app-update.ts @@ -11,15 +11,90 @@ import { isDev } from "../utils/electron"; export const setupAutoUpdater = (mainWindow: BrowserWindow) => { autoUpdater.logger = electronLog; autoUpdater.autoDownload = false; + // This is going to be the default at some point, right now if we don't + // explicitly set this to true then electron-builder prints a (harmless) + // warning when updating on Windows. + // See: https://github.com/electron-userland/electron-builder/pull/6575 + autoUpdater.disableWebInstaller = true; - // Skip checking for updates automatically in dev builds. 
Installing an - update would fail anyway since (at least on macOS), the auto update - process requires signed builds. - // - // Even though this is skipped on app start, we can still use the "Check for - // updates..." menu option to trigger the update if we wish in dev builds. + /** + * [Note: Testing auto updates] + * + * By default, we skip checking for updates automatically in dev builds. + * This is because installing updates would fail (at least on macOS) + * because auto updates only work for signed builds. + * + * So end to end testing for updates requires using a temporary GitHub + * repository and signed builds therein. More on this later. + * + * --------------- + * + * [Note: Testing auto updates - Sanity checks] + * + * However, for partial checks of the UI flow, something like the following + * can be used to do a test of the update process (up until the actual + * installation itself). + * + * Create an `app/dev-app-update.yml` with: + * + * provider: generic + * url: http://127.0.0.1:7777/ + * + * and start a local webserver in some directory: + * + * python3 -m http.server 7777 + * + * In this directory, put `latest-mac.yml` and the DMG file that this YAML + * file refers to. + * + * Alternatively, `dev-app-update.yml` can point to some arbitrary GitHub + * repository too, e.g.: + * + * provider: github + * owner: ente-io + * repo: test-desktop-updates + * + * Now we can use the "Check for updates..." menu option to trigger the + * update flow. + */ + autoUpdater.forceDevUpdateConfig = isDev; if (isDev) return; + /** + * [Note: Testing auto updates - End to end checks] + * + * Since end-to-end update testing can only be done with signed builds, the + * easiest way is to create temporary builds in a test repository. + * + * Let us say we have v2.0.0 about to go out. We have build artifacts for + * v2.0.0 also in some draft release in our normal release repository. 
+ * + * Create a new test repository, say `ente-io/test-desktop-updates`. In this + * repository, create a release v2.0.0, attaching the actual build + * artifacts. Make this release the latest. + * + * Now we need to create an old signed build. + * + * First, modify `package.json` to put in a version number older than the + * new version number that we want to test updating to, e.g. `v1.0.0-test`. + * + * Then uncomment the following block of code. This tells the auto updater + * to use `ente-io/test-desktop-updates` to get updates. + * + * With these two changes (older version and setFeedURL), create a new + * release signed build on CI. Install this build - it will check for + * updates in the temporary feed URL that we set, and we'll be able to check + * the full update flow. + */ + + /* + autoUpdater.setFeedURL({ + provider: "github", + owner: "ente-io", + repo: "test-desktop-updates", + }); + */ + const oneDay = 1 * 24 * 60 * 60 * 1000; setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay); void checkForUpdatesAndNotify(mainWindow); @@ -67,27 +142,28 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { const showUpdateDialog = (update: AppUpdate) => mainWindow.webContents.send("appUpdateAvailable", update); - log.debug(() => "Attempting auto update"); - await autoUpdater.downloadUpdate(); - - let timeoutId: ReturnType; + let timeout: ReturnType; const fiveMinutes = 5 * 60 * 1000; autoUpdater.on("update-downloaded", () => { - timeoutId = setTimeout( + log.info(`Update downloaded ${version}`); + timeout = setTimeout( + () => showUpdateDialog({ autoUpdatable: true, version }), fiveMinutes, ); }); autoUpdater.on("error", (error) => { - clearTimeout(timeoutId); + clearTimeout(timeout); log.error("Auto update failed", error); showUpdateDialog({ autoUpdatable: false, version }); }); + + log.info(`Downloading update ${version}`); + await autoUpdater.downloadUpdate(); }; /** - * Return the version of the desktop app + * Return the 
version of the desktop app. * * The return value is of the form `v1.2.3`. */ diff --git a/desktop/src/main/services/dir.ts b/desktop/src/main/services/dir.ts index 293a720f01..4b1f748fe5 100644 --- a/desktop/src/main/services/dir.ts +++ b/desktop/src/main/services/dir.ts @@ -1,7 +1,5 @@ import { shell } from "electron/common"; import { app, dialog } from "electron/main"; -import { existsSync } from "fs"; -import fs from "node:fs/promises"; import path from "node:path"; import { posixPath } from "../utils/electron"; @@ -53,37 +51,19 @@ export const openLogDirectory = () => openDirectory(logDirectoryPath()); * "userData" directory. This is the **primary** place applications are meant to * store user's data, e.g. various configuration files and saved state. * - * During development, our app name is "Electron", so this'd be, for example, - * `~/Library/Application Support/Electron` if we run using `yarn dev`. For the - * packaged production app, our app name is "ente", so this would be: - * - * - Windows: `%APPDATA%\ente`, e.g. `C:\Users\\AppData\Local\ente` - * - Linux: `~/.config/ente` - * - macOS: `~/Library/Application Support/ente` - * * Note that Chromium also stores the browser state, e.g. localStorage or disk * caches, in userData. * + * https://www.electronjs.org/docs/latest/api/app + * + * [Note: App log path] + * * Finally, there is the "logs" directory. This is not within "appData" but has * a slightly different OS specific path. 
Since our log file is named * "ente.log", it can be found at: * * - macOS: ~/Library/Logs/ente/ente.log (production) - * - macOS: ~/Library/Logs/Electron/ente.log (dev) - * - * https://www.electronjs.org/docs/latest/api/app + * - Linux: ~/.config/ente/logs/ente.log + * - Windows: %USERPROFILE%\AppData\Roaming\ente\logs\ente.log */ const logDirectoryPath = () => app.getPath("logs"); - -/** - * See: [Note: Legacy face crops] - */ -export const legacyFaceCrop = async ( - faceID: string, -): Promise => { - // See: [Note: Getting the cache path] - // @ts-expect-error "cache" works but is not part of the public API. - const cacheDir = path.join(app.getPath("cache"), "ente"); - const filePath = path.join(cacheDir, "face-crops", faceID); - return existsSync(filePath) ? await fs.readFile(filePath) : undefined; -}; diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts index 0a5c4eed2c..4803fd6f0c 100644 --- a/desktop/src/main/services/ffmpeg.ts +++ b/desktop/src/main/services/ffmpeg.ts @@ -1,11 +1,10 @@ import pathToFfmpeg from "ffmpeg-static"; import fs from "node:fs/promises"; import type { ZipItem } from "../../types/ipc"; -import log from "../log"; -import { ensure, withTimeout } from "../utils/common"; +import { ensure } from "../utils/common"; import { execAsync } from "../utils/electron"; import { - deleteTempFile, + deleteTempFileIgnoringErrors, makeFileForDataOrPathOrZipItem, makeTempFilePath, } from "../utils/temp"; @@ -46,13 +45,7 @@ export const ffmpegExec = async ( command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ): Promise => { - // TODO (MR): This currently copies files for both input (when - // dataOrPathOrZipItem is data) and output. This needs to be tested - // extremely large video files when invoked downstream of `convertToMP4` in - // the web code. 
- const { path: inputFilePath, isFileTemporary: isInputFileTemporary, @@ -69,17 +62,13 @@ export const ffmpegExec = async ( outputFilePath, ); - if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000); - else await execAsync(cmd); + await execAsync(cmd); return fs.readFile(outputFilePath); } finally { - try { - if (isInputFileTemporary) await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + if (isInputFileTemporary) + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -112,3 +101,32 @@ const ffmpegBinaryPath = () => { // https://github.com/eugeneware/ffmpeg-static/issues/16 return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked"); }; + +/** + * A variant of {@link ffmpegExec} adapted to work with streams so that it can + * handle the MP4 conversion of large video files. + * + * See: [Note: Convert to MP4] + + * @param inputFilePath The path to a file on the user's local file system. This + * is the video we want to convert. + * @param outputFilePath The path to a file on the user's local file system where + * we should write the converted MP4 video. 
+ */ +export const ffmpegConvertToMP4 = async ( + inputFilePath: string, + outputFilePath: string, +): Promise => { + const command = [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-preset", + "ultrafast", + outputPathPlaceholder, + ]; + + const cmd = substitutePlaceholders(command, inputFilePath, outputFilePath); + + await execAsync(cmd); +}; diff --git a/desktop/src/main/services/image.ts b/desktop/src/main/services/image.ts index 957fe81200..fca4628b63 100644 --- a/desktop/src/main/services/image.ts +++ b/desktop/src/main/services/image.ts @@ -3,10 +3,9 @@ import fs from "node:fs/promises"; import path from "node:path"; import { CustomErrorMessage, type ZipItem } from "../../types/ipc"; -import log from "../log"; import { execAsync, isDev } from "../utils/electron"; import { - deleteTempFile, + deleteTempFileIgnoringErrors, makeFileForDataOrPathOrZipItem, makeTempFilePath, } from "../utils/temp"; @@ -23,12 +22,8 @@ export const convertToJPEG = async (imageData: Uint8Array) => { await execAsync(command); return new Uint8Array(await fs.readFile(outputFilePath)); } finally { - try { - await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -49,6 +44,9 @@ const convertToJPEGCommand = ( ]; case "linux": + // The bundled binary is an ELF x86-64 executable. + if (process.arch != "x64") + throw new Error(CustomErrorMessage.NotAvailable); return [ imageMagickPath(), inputFilePath, @@ -79,7 +77,7 @@ export const generateImageThumbnail = async ( const outputFilePath = await makeTempFilePath("jpeg"); - // Construct the command first, it may throw `NotAvailable` on win32. + // Construct the command first, it may throw `NotAvailable`. 
let quality = 70; let command = generateImageThumbnailCommand( inputFilePath, @@ -105,12 +103,9 @@ export const generateImageThumbnail = async ( } while (thumbnail.length > maxSize && quality > 50); return thumbnail; } finally { - try { - if (isInputFileTemporary) await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + if (isInputFileTemporary) + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -138,14 +133,17 @@ const generateImageThumbnailCommand = ( ]; case "linux": + // The bundled binary is an ELF x86-64 executable. + if (process.arch != "x64") + throw new Error(CustomErrorMessage.NotAvailable); return [ imageMagickPath(), - inputFilePath, - "-auto-orient", "-define", `jpeg:size=${2 * maxDimension}x${2 * maxDimension}`, + inputFilePath, + "-auto-orient", "-thumbnail", - `${maxDimension}x${maxDimension}>`, + `${maxDimension}x${maxDimension}`, "-unsharp", "0x.5", "-quality", diff --git a/desktop/src/main/services/logout.ts b/desktop/src/main/services/logout.ts new file mode 100644 index 0000000000..e6cb7666ca --- /dev/null +++ b/desktop/src/main/services/logout.ts @@ -0,0 +1,30 @@ +import type { FSWatcher } from "chokidar"; +import log from "../log"; +import { clearConvertToMP4Results } from "../stream"; +import { clearStores } from "./store"; +import { watchReset } from "./watch"; + +/** + * Perform the native side logout sequence. + * + * This function is guaranteed not to throw any errors. + * + * See: [Note: Do not throw during logout]. 
+ */ +export const logout = (watcher: FSWatcher) => { + try { + watchReset(watcher); + } catch (e) { + log.error("Ignoring error during logout (FS watch)", e); + } + try { + clearConvertToMP4Results(); + } catch (e) { + log.error("Ignoring error during logout (convert-to-mp4)", e); + } + try { + clearStores(); + } catch (e) { + log.error("Ignoring error during logout (native stores)", e); + } +}; diff --git a/desktop/src/main/services/ml-clip.ts b/desktop/src/main/services/ml-clip.ts index e3dd99204a..cea1574e0d 100644 --- a/desktop/src/main/services/ml-clip.ts +++ b/desktop/src/main/services/ml-clip.ts @@ -11,7 +11,7 @@ import * as ort from "onnxruntime-node"; import Tokenizer from "../../thirdparty/clip-bpe-ts/mod"; import log from "../log"; import { writeStream } from "../stream"; -import { ensure } from "../utils/common"; +import { ensure, wait } from "../utils/common"; import { deleteTempFile, makeTempFilePath } from "../utils/temp"; import { makeCachedInferenceSession } from "./ml"; @@ -20,7 +20,7 @@ const cachedCLIPImageSession = makeCachedInferenceSession( 351468764 /* 335.2 MB */, ); -export const clipImageEmbedding = async (jpegImageData: Uint8Array) => { +export const computeCLIPImageEmbedding = async (jpegImageData: Uint8Array) => { const tempFilePath = await makeTempFilePath(); const imageStream = new Response(jpegImageData.buffer).body; await writeStream(tempFilePath, ensure(imageStream)); @@ -42,7 +42,7 @@ const clipImageEmbedding_ = async (jpegFilePath: string) => { const results = await session.run(feeds); log.debug( () => - `onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, + `ONNX/CLIP image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, ); /* Need these model specific casts to type the result */ const imageEmbedding = ensure(results.output).data as Float32Array; @@ -140,21 +140,23 @@ const getTokenizer = () => { return _tokenizer; }; 
-export const clipTextEmbeddingIfAvailable = async (text: string) => { - const sessionOrStatus = await Promise.race([ +export const computeCLIPTextEmbeddingIfAvailable = async (text: string) => { + const sessionOrSkip = await Promise.race([ cachedCLIPTextSession(), - "downloading-model", + // Wait for a tick to get the session promise to resolved the first time + // this code runs on each app start (and the model has been downloaded). + wait(0).then(() => 1), ]); - // Don't wait for the download to complete - if (typeof sessionOrStatus == "string") { + // Don't wait for the download to complete. + if (typeof sessionOrSkip == "number") { log.info( "Ignoring CLIP text embedding request because model download is pending", ); return undefined; } - const session = sessionOrStatus; + const session = sessionOrSkip; const t1 = Date.now(); const tokenizer = getTokenizer(); const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text)); @@ -165,7 +167,7 @@ export const clipTextEmbeddingIfAvailable = async (text: string) => { const results = await session.run(feeds); log.debug( () => - `onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, + `ONNX/CLIP text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, ); const textEmbedding = ensure(results.output).data as Float32Array; return normalizeEmbedding(textEmbedding); diff --git a/desktop/src/main/services/ml-face.ts b/desktop/src/main/services/ml-face.ts index 9765252555..b6fb5c90f3 100644 --- a/desktop/src/main/services/ml-face.ts +++ b/desktop/src/main/services/ml-face.ts @@ -23,7 +23,7 @@ export const detectFaces = async (input: Float32Array) => { input: new ort.Tensor("float32", input, [1, 3, 640, 640]), }; const results = await session.run(feeds); - log.debug(() => `onnx/yolo face detection took ${Date.now() - t} ms`); + log.debug(() => `ONNX/YOLO face detection took ${Date.now() - t} ms`); return 
ensure(results.output).data; }; @@ -32,7 +32,7 @@ const cachedFaceEmbeddingSession = makeCachedInferenceSession( 5286998 /* 5 MB */, ); -export const faceEmbedding = async (input: Float32Array) => { +export const computeFaceEmbeddings = async (input: Float32Array) => { // Dimension of each face (alias) const mobileFaceNetFaceSize = 112; // Smaller alias @@ -45,7 +45,7 @@ export const faceEmbedding = async (input: Float32Array) => { const t = Date.now(); const feeds = { img_inputs: inputTensor }; const results = await session.run(feeds); - log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`); + log.debug(() => `ONNX/MFNT face embedding took ${Date.now() - t} ms`); /* Need these model specific casts to extract and type the result */ return (results.embeddings as unknown as Record) .cpuData as Float32Array; diff --git a/desktop/src/main/services/store.ts b/desktop/src/main/services/store.ts index 471928d76c..253c2cbf0c 100644 --- a/desktop/src/main/services/store.ts +++ b/desktop/src/main/services/store.ts @@ -18,10 +18,7 @@ export const clearStores = () => { * [Note: Safe storage keys] * * On macOS, `safeStorage` stores our data under a Keychain entry named - * " Safe Storage". Which resolves to: - * - * - Electron Safe Storage (dev) - * - ente Safe Storage (prod) + * " Safe Storage". In our case, "ente Safe Storage". 
*/ export const saveEncryptionKey = (encryptionKey: string) => { const encryptedKey = safeStorage.encryptString(encryptionKey); diff --git a/desktop/src/main/services/upload.ts b/desktop/src/main/services/upload.ts index f7d0436c0b..516fbe6dde 100644 --- a/desktop/src/main/services/upload.ts +++ b/desktop/src/main/services/upload.ts @@ -3,6 +3,7 @@ import fs from "node:fs/promises"; import path from "node:path"; import { existsSync } from "original-fs"; import type { PendingUploads, ZipItem } from "../../types/ipc"; +import log from "../log"; import { uploadStatusStore } from "../stores/upload-status"; export const listZipItems = async (zipPath: string): Promise => { @@ -64,11 +65,16 @@ export const pendingUploads = async (): Promise => { // file, but the dedup logic will kick in at that point so no harm will come // of it. if (allZipItems === undefined) { - const allZipPaths = uploadStatusStore.get("filePaths") ?? []; + const allZipPaths = uploadStatusStore.get("zipPaths") ?? []; const zipPaths = allZipPaths.filter((f) => existsSync(f)); zipItems = []; - for (const zip of zipPaths) - zipItems = zipItems.concat(await listZipItems(zip)); + for (const zip of zipPaths) { + try { + zipItems = zipItems.concat(await listZipItems(zip)); + } catch (e) { + log.error("Ignoring items in malformed zip", e); + } + } } else { zipItems = allZipItems.filter(([z]) => existsSync(z)); } diff --git a/desktop/src/main/services/watch.ts b/desktop/src/main/services/watch.ts index de66dcca1c..e9629ff703 100644 --- a/desktop/src/main/services/watch.ts +++ b/desktop/src/main/services/watch.ts @@ -151,6 +151,15 @@ export const watchFindFiles = async (dirPath: string) => { return paths; }; +/** + * Stop watching all existing folder watches and remove any callbacks. + * + * This function is meant to be called when the user logs out. It stops + * all existing folder watches and forgets about any "on*" callback + * functions that have been registered. 
+ * + * The persisted state itself gets cleared via {@link clearStores}. + */ export const watchReset = (watcher: FSWatcher) => { watcher.unwatch(folderWatches().map((watch) => watch.folderPath)); }; diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts index bae13aa121..c11fb1121c 100644 --- a/desktop/src/main/stream.ts +++ b/desktop/src/main/stream.ts @@ -3,13 +3,20 @@ */ import { net, protocol } from "electron/main"; import StreamZip from "node-stream-zip"; +import { randomUUID } from "node:crypto"; import { createWriteStream, existsSync } from "node:fs"; import fs from "node:fs/promises"; import { Readable } from "node:stream"; import { ReadableStream } from "node:stream/web"; import { pathToFileURL } from "node:url"; import log from "./log"; +import { ffmpegConvertToMP4 } from "./services/ffmpeg"; import { ensure } from "./utils/common"; +import { + deleteTempFile, + deleteTempFileIgnoringErrors, + makeTempFilePath, +} from "./utils/temp"; /** * Register a protocol handler that we use for streaming large files between the @@ -34,119 +41,119 @@ import { ensure } from "./utils/common"; * Depends on {@link registerPrivilegedSchemes}. */ export const registerStreamProtocol = () => { - protocol.handle("stream", async (request: Request) => { - const url = request.url; - // The request URL contains the command to run as the host, and the - // pathname of the file(s) as the search params. 
- const { host, searchParams } = new URL(url); - switch (host) { - case "read": - return handleRead(ensure(searchParams.get("path"))); - case "read-zip": - return handleReadZip( - ensure(searchParams.get("zipPath")), - ensure(searchParams.get("entryName")), - ); - case "write": - return handleWrite(ensure(searchParams.get("path")), request); - default: - return new Response("", { status: 404 }); + protocol.handle("stream", (request: Request) => { + try { + return handleStreamRequest(request); + } catch (e) { + log.error(`Failed to handle stream request for ${request.url}`, e); + return new Response(String(e), { status: 500 }); } }); }; -const handleRead = async (path: string) => { - try { - const res = await net.fetch(pathToFileURL(path).toString()); - if (res.ok) { - // net.fetch already seems to add "Content-Type" and "Last-Modified" - // headers, but I couldn't find documentation for this. In any case, - // since we already are stat-ting the file for the "Content-Length", - // we explicitly add the "X-Last-Modified-Ms" too, - // - // 1. Guaranteeing its presence, - // - // 2. Having it be in the exact format we want (no string <-> date - // conversions), - // - // 3. Retaining milliseconds. +const handleStreamRequest = async (request: Request): Promise => { + const url = request.url; + // The request URL contains the command to run as the host, and the + // pathname of the file(s) as the search params. + const { host, searchParams } = new URL(url); + switch (host) { + case "read": + return handleRead(ensure(searchParams.get("path"))); - const stat = await fs.stat(path); + case "read-zip": + return handleReadZip( + ensure(searchParams.get("zipPath")), + ensure(searchParams.get("entryName")), + ); - // Add the file's size as the Content-Length header. 
- const fileSize = stat.size; - res.headers.set("Content-Length", `${fileSize}`); + case "write": + return handleWrite(ensure(searchParams.get("path")), request); - // Add the file's last modified time (as epoch milliseconds). - const mtimeMs = stat.mtimeMs; - res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`); + case "convert-to-mp4": { + const token = searchParams.get("token"); + const done = searchParams.get("done") !== null; + return token + ? done + ? handleConvertToMP4ReadDone(token) + : handleConvertToMP4Read(token) + : handleConvertToMP4Write(request); } - return res; - } catch (e) { - log.error(`Failed to read stream at ${path}`, e); - return new Response(`Failed to read stream: ${String(e)}`, { - status: 500, - }); + + default: + return new Response("", { status: 404 }); } }; +const handleRead = async (path: string) => { + const res = await net.fetch(pathToFileURL(path).toString()); + if (res.ok) { + // net.fetch already seems to add "Content-Type" and "Last-Modified" + // headers, but I couldn't find documentation for this. In any case, + // since we already are stat-ting the file for the "Content-Length", we + // explicitly add the "X-Last-Modified-Ms" too, + // + // 1. Guaranteeing its presence, + // + // 2. Having it be in the exact format we want (no string <-> date + // conversions), + // + // 3. Retaining milliseconds. + + const stat = await fs.stat(path); + + // Add the file's size as the Content-Length header. + const fileSize = stat.size; + res.headers.set("Content-Length", `${fileSize}`); + + // Add the file's last modified time (as epoch milliseconds). 
+ const mtimeMs = stat.mtime.getTime(); + res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`); + } + return res; +}; + const handleReadZip = async (zipPath: string, entryName: string) => { - try { - const zip = new StreamZip.async({ file: zipPath }); - const entry = await zip.entry(entryName); - if (!entry) return new Response("", { status: 404 }); + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + if (!entry) return new Response("", { status: 404 }); - // This returns an "old style" NodeJS.ReadableStream. - const stream = await zip.stream(entry); - // Convert it into a new style NodeJS.Readable. - const nodeReadable = new Readable().wrap(stream); - // Then convert it into a Web stream. - const webReadableStreamAny = Readable.toWeb(nodeReadable); - // However, we get a ReadableStream now. This doesn't go into the - // `BodyInit` expected by the Response constructor, which wants a - // ReadableStream. Force a cast. - const webReadableStream = - webReadableStreamAny as ReadableStream; + // This returns an "old style" NodeJS.ReadableStream. + const stream = await zip.stream(entry); + // Convert it into a new style NodeJS.Readable. + const nodeReadable = new Readable().wrap(stream); + // Then convert it into a Web stream. + const webReadableStreamAny = Readable.toWeb(nodeReadable); + // However, we get a ReadableStream now. This doesn't go into the + // `BodyInit` expected by the Response constructor, which wants a + // ReadableStream. Force a cast. + const webReadableStream = + webReadableStreamAny as ReadableStream; - // Close the zip handle when the underlying stream closes. - stream.on("end", () => void zip.close()); + // Close the zip handle when the underlying stream closes. 
+ stream.on("end", () => void zip.close()); - return new Response(webReadableStream, { - headers: { - // We don't know the exact type, but it doesn't really matter, - // just set it to a generic binary content-type so that the - // browser doesn't tinker with it thinking of it as text. - "Content-Type": "application/octet-stream", - "Content-Length": `${entry.size}`, - // While it is documented that entry.time is the modification - // time, the units are not mentioned. By seeing the source code, - // we can verify that it is indeed epoch milliseconds. See - // `parseZipTime` in the node-stream-zip source, - // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js - "X-Last-Modified-Ms": `${entry.time}`, - }, - }); - } catch (e) { - log.error( - `Failed to read entry ${entryName} from zip file at ${zipPath}`, - e, - ); - return new Response(`Failed to read stream: ${String(e)}`, { - status: 500, - }); - } + // While it is documented that entry.time is the modification time, + // the units are not mentioned. By seeing the source code, we can + // verify that it is indeed epoch milliseconds. See `parseZipTime` + // in the node-stream-zip source, + // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js + const modifiedMs = entry.time; + + return new Response(webReadableStream, { + headers: { + // We don't know the exact type, but it doesn't really matter, just + // set it to a generic binary content-type so that the browser + // doesn't tinker with it thinking of it as text. 
+ "Content-Type": "application/octet-stream", + "Content-Length": `${entry.size}`, + "X-Last-Modified-Ms": `${modifiedMs}`, + }, + }); }; const handleWrite = async (path: string, request: Request) => { - try { - await writeStream(path, ensure(request.body)); - return new Response("", { status: 200 }); - } catch (e) { - log.error(`Failed to write stream to ${path}`, e); - return new Response(`Failed to write stream: ${String(e)}`, { - status: 500, - }); - } + await writeStream(path, ensure(request.body)); + return new Response("", { status: 200 }); }; /** @@ -154,7 +161,7 @@ const handleWrite = async (path: string, request: Request) => { * * The returned promise resolves when the write completes. * - * @param filePath The local filesystem path where the file should be written. + * @param filePath The local file system path where the file should be written. * * @param readableStream A web * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream). @@ -181,3 +188,84 @@ const writeNodeStream = async (filePath: string, fileStream: Readable) => { }); }); }; + +/** + * A map from token to file paths for convert-to-mp4 requests that we have + * received. + */ +const convertToMP4Results = new Map(); + +/** + * Clear any in-memory state for in-flight convert-to-mp4 requests. Meant to be + * called during logout. + */ +export const clearConvertToMP4Results = () => convertToMP4Results.clear(); + +/** + * [Note: Convert to MP4] + * + * When we want to convert a video to MP4, if we were to send the entire + * contents of the video from the renderer to the main process over IPC, it just + * causes the renderer to run out of memory and restart when the videos are very + * large. So we need to stream the original video renderer → main and then + * stream back the converted video renderer ← main. + * + * Currently Chromium does not support bi-directional streaming ("full" duplex + * mode for the Web fetch API). 
So we need to simulate that using two different + * streaming requests. + * + * renderer → main stream://convert-to-mp4 + * → request.body is the original video + * ← response is a token + * + * renderer → main stream://convert-to-mp4?token= + * ← response.body is the converted video + * + * renderer → main stream://convert-to-mp4?token=&done + * ← 200 OK + * + * Note that the conversion itself is not streaming. The conversion still + * happens in a single shot, we are just streaming the data across the IPC + * boundary to allow us to pass large amounts of data without running out of + * memory. + * + * See also: [Note: IPC streams] + */ +const handleConvertToMP4Write = async (request: Request) => { + const inputTempFilePath = await makeTempFilePath(); + await writeStream(inputTempFilePath, ensure(request.body)); + + const outputTempFilePath = await makeTempFilePath("mp4"); + try { + await ffmpegConvertToMP4(inputTempFilePath, outputTempFilePath); + } catch (e) { + log.error("Conversion to MP4 failed", e); + await deleteTempFileIgnoringErrors(outputTempFilePath); + throw e; + } finally { + await deleteTempFileIgnoringErrors(inputTempFilePath); + } + + const token = randomUUID(); + convertToMP4Results.set(token, outputTempFilePath); + return new Response(token, { status: 200 }); +}; + +const handleConvertToMP4Read = async (token: string) => { + const filePath = convertToMP4Results.get(token); + if (!filePath) + return new Response(`Unknown token ${token}`, { status: 404 }); + + return net.fetch(pathToFileURL(filePath).toString()); +}; + +const handleConvertToMP4ReadDone = async (token: string) => { + const filePath = convertToMP4Results.get(token); + if (!filePath) + return new Response(`Unknown token ${token}`, { status: 404 }); + + await deleteTempFile(filePath); + + convertToMP4Results.delete(token); + return new Response("", { status: 200 }); +}; diff --git a/desktop/src/main/utils/common.ts b/desktop/src/main/utils/common.ts index 1f5016e617..929281d740 100644 
--- a/desktop/src/main/utils/common.ts +++ b/desktop/src/main/utils/common.ts @@ -22,23 +22,3 @@ export const ensure = (v: T | null | undefined): T => { */ export const wait = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); - -/** - * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it - * does not resolve within {@link timeoutMS}, then reject with a timeout error. - */ -export const withTimeout = async (promise: Promise, ms: number) => { - let timeoutId: ReturnType; - const rejectOnTimeout = new Promise((_, reject) => { - timeoutId = setTimeout( - () => reject(new Error("Operation timed out")), - ms, - ); - }); - const promiseAndCancelTimeout = async () => { - const result = await promise; - clearTimeout(timeoutId); - return result; - }; - return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); -}; diff --git a/desktop/src/main/utils/electron.ts b/desktop/src/main/utils/electron.ts index 93e8565ef2..c11391dd65 100644 --- a/desktop/src/main/utils/electron.ts +++ b/desktop/src/main/utils/electron.ts @@ -49,15 +49,13 @@ export const posixPath = (platformPath: string) => * > output, this might not be the best option and it might be better to use the * > underlying functions. */ -export const execAsync = (command: string | string[]) => { +export const execAsync = async (command: string | string[]) => { const escapedCommand = Array.isArray(command) ? 
shellescape(command) : command; const startTime = Date.now(); - const result = execAsync_(escapedCommand); - log.debug( - () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`, - ); + const result = await execAsync_(escapedCommand); + log.debug(() => `${escapedCommand} (${Date.now() - startTime} ms)`); return result; }; diff --git a/desktop/src/main/utils/temp.ts b/desktop/src/main/utils/temp.ts index 11f7a5d845..70dec844d6 100644 --- a/desktop/src/main/utils/temp.ts +++ b/desktop/src/main/utils/temp.ts @@ -4,6 +4,7 @@ import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import path from "node:path"; import type { ZipItem } from "../../types/ipc"; +import log from "../log"; import { ensure } from "./common"; /** @@ -62,6 +63,19 @@ export const deleteTempFile = async (tempFilePath: string) => { await fs.rm(tempFilePath, { force: true }); }; +/** + * A variant of {@link deleteTempFile} that suppresses any errors, making it + * safe to call them in a sequence without needing to handle the scenario where + * one of them failing causes the rest to be skipped. + */ +export const deleteTempFileIgnoringErrors = async (tempFilePath: string) => { + try { + await deleteTempFile(tempFilePath); + } catch (e) { + log.error(`Could not delete temporary file at path ${tempFilePath}`, e); + } +}; + /** The result of {@link makeFileForDataOrPathOrZipItem}. 
*/ interface FileForDataOrPathOrZipItem { /** diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts index f9147e2883..85475031d3 100644 --- a/desktop/src/preload.ts +++ b/desktop/src/preload.ts @@ -63,7 +63,10 @@ const openLogDirectory = () => ipcRenderer.invoke("openLogDirectory"); const selectDirectory = () => ipcRenderer.invoke("selectDirectory"); -const clearStores = () => ipcRenderer.send("clearStores"); +const logout = () => { + watchRemoveListeners(); + return ipcRenderer.invoke("logout"); +}; const encryptionKey = () => ipcRenderer.invoke("encryptionKey"); @@ -140,32 +143,27 @@ const ffmpegExec = ( command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ) => ipcRenderer.invoke( "ffmpegExec", command, dataOrPathOrZipItem, outputFileExtension, - timeoutMS, ); // - ML -const clipImageEmbedding = (jpegImageData: Uint8Array) => - ipcRenderer.invoke("clipImageEmbedding", jpegImageData); +const computeCLIPImageEmbedding = (jpegImageData: Uint8Array) => + ipcRenderer.invoke("computeCLIPImageEmbedding", jpegImageData); -const clipTextEmbeddingIfAvailable = (text: string) => - ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text); +const computeCLIPTextEmbeddingIfAvailable = (text: string) => + ipcRenderer.invoke("computeCLIPTextEmbeddingIfAvailable", text); const detectFaces = (input: Float32Array) => ipcRenderer.invoke("detectFaces", input); -const faceEmbedding = (input: Float32Array) => - ipcRenderer.invoke("faceEmbedding", input); - -const legacyFaceCrop = (faceID: string) => - ipcRenderer.invoke("legacyFaceCrop", faceID); +const computeFaceEmbeddings = (input: Float32Array) => + ipcRenderer.invoke("computeFaceEmbeddings", input); // - Watch @@ -211,11 +209,10 @@ const watchOnRemoveDir = (f: (path: string, watch: FolderWatch) => void) => { const watchFindFiles = (folderPath: string) => ipcRenderer.invoke("watchFindFiles", folderPath); -const watchReset = async () => { +const 
watchRemoveListeners = () => { ipcRenderer.removeAllListeners("watchAddFile"); ipcRenderer.removeAllListeners("watchRemoveFile"); ipcRenderer.removeAllListeners("watchRemoveDir"); - await ipcRenderer.invoke("watchReset"); }; // - Upload @@ -307,7 +304,7 @@ contextBridge.exposeInMainWorld("electron", { openDirectory, openLogDirectory, selectDirectory, - clearStores, + logout, encryptionKey, saveEncryptionKey, onMainWindowFocus, @@ -340,11 +337,10 @@ contextBridge.exposeInMainWorld("electron", { // - ML - clipImageEmbedding, - clipTextEmbeddingIfAvailable, + computeCLIPImageEmbedding, + computeCLIPTextEmbeddingIfAvailable, detectFaces, - faceEmbedding, - legacyFaceCrop, + computeFaceEmbeddings, // - Watch @@ -358,7 +354,6 @@ contextBridge.exposeInMainWorld("electron", { onRemoveFile: watchOnRemoveFile, onRemoveDir: watchOnRemoveDir, findFiles: watchFindFiles, - reset: watchReset, }, // - Upload diff --git a/desktop/yarn.lock b/desktop/yarn.lock index 833b623a7e..2aa060efc0 100644 --- a/desktop/yarn.lock +++ b/desktop/yarn.lock @@ -7,29 +7,6 @@ resolved "https://registry.yarnpkg.com/7zip-bin/-/7zip-bin-5.2.0.tgz#7a03314684dd6572b7dfa89e68ce31d60286854d" integrity sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A== -"@babel/code-frame@^7.0.0": - version "7.24.2" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.2.tgz#718b4b19841809a58b29b68cde80bc5e1aa6d9ae" - integrity sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ== - dependencies: - "@babel/highlight" "^7.24.2" - picocolors "^1.0.0" - -"@babel/helper-validator-identifier@^7.24.5": - version "7.24.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz#918b1a7fa23056603506370089bd990d8720db62" - integrity sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA== - -"@babel/highlight@^7.24.2": - 
version "7.24.5" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.5.tgz#bc0613f98e1dd0720e99b2a9ee3760194a704b6e" - integrity sha512-8lLmua6AVh/8SLJRRVD6V8p73Hir9w5mJrhE+IPpILG31KKlI9iz5zmBYKcWPS59qSfgP9RaSBQSHHE81WKuEw== - dependencies: - "@babel/helper-validator-identifier" "^7.24.5" - chalk "^2.4.2" - js-tokens "^4.0.0" - picocolors "^1.0.0" - "@babel/runtime@^7.21.0": version "7.24.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c" @@ -339,9 +316,9 @@ integrity sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g== "@types/node@*", "@types/node@^20.9.0": - version "20.12.7" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.7.tgz#04080362fa3dd6c5822061aa3124f5c152cff384" - integrity sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg== + version "20.12.12" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.12.tgz#7cbecdf902085cec634fdb362172dfe12b8f2050" + integrity sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw== dependencies: undici-types "~5.26.4" @@ -350,11 +327,6 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b" integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw== -"@types/normalize-package-data@^2.4.0": - version "2.4.4" - resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz#56e2cc26c397c038fab0e3a917a12d5c5909e901" - integrity sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA== - "@types/plist@^3.0.1": version "3.0.5" resolved "https://registry.yarnpkg.com/@types/plist/-/plist-3.0.5.tgz#9a0c49c0f9886c8c8696a7904dd703f6284036e0" @@ -557,13 +529,6 @@ ansi-regex@^5.0.1: resolved 
"https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" @@ -641,13 +606,6 @@ are-we-there-yet@^3.0.0: delegates "^1.0.0" readable-stream "^3.6.0" -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - argparse@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" @@ -875,15 +833,6 @@ caseless@^0.12.0: resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== -chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" @@ -973,13 +922,6 @@ clone@^1.0.2: resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - color-convert@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" @@ -987,11 +929,6 @@ color-convert@^2.0.1: dependencies: color-name "~1.1.4" -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - color-name@~1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" @@ -1259,11 +1196,6 @@ dotenv-expand@^5.1.0: resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== -dotenv@^8.2.0: - version "8.6.0" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" - integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== - dotenv@^9.0.2: version "9.0.2" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-9.0.2.tgz#dacc20160935a37dea6364aa1bef819fb9b6ab05" @@ -1276,16 +1208,6 @@ 
ejs@^3.1.8: dependencies: jake "^10.8.5" -electron-builder-notarize@^1.5: - version "1.5.2" - resolved "https://registry.yarnpkg.com/electron-builder-notarize/-/electron-builder-notarize-1.5.2.tgz#540185b57a336fc6eec01bfe092a3b4764459255" - integrity sha512-vo6RGgIFYxMk2yp59N4NsvmAYfB7ncYi6gV9Fcq2TVKxEn2tPXrSjIKB2e/pu+5iXIY6BHNZNXa75F3DHgOOLA== - dependencies: - dotenv "^8.2.0" - electron-notarize "^1.1.1" - js-yaml "^3.14.0" - read-pkg-up "^7.0.0" - electron-builder@25.0.0-alpha.6: version "25.0.0-alpha.6" resolved "https://registry.yarnpkg.com/electron-builder/-/electron-builder-25.0.0-alpha.6.tgz#a72f96f7029539ac28f92ce5c83f872ba3b6e7c1" @@ -1308,14 +1230,6 @@ electron-log@^5.1: resolved "https://registry.yarnpkg.com/electron-log/-/electron-log-5.1.2.tgz#fb40ad7f4ae694dd0e4c02c662d1a65c03e1243e" integrity sha512-Cpg4hAZ27yM9wzE77c4TvgzxzavZ+dVltCczParXN+Vb3jocojCSAuSMCVOI9fhFuuOR+iuu3tZLX1cu0y0kgQ== -electron-notarize@^1.1.1: - version "1.2.2" - resolved "https://registry.yarnpkg.com/electron-notarize/-/electron-notarize-1.2.2.tgz#ebf2b258e8e08c1c9f8ff61dc53d5b16b439daf4" - integrity sha512-ZStVWYcWI7g87/PgjPJSIIhwQXOaw4/XeXU+pWqMMktSLHaGMLHdyPPN7Cmao7+Cr7fYufA16npdtMndYciHNw== - dependencies: - debug "^4.1.1" - fs-extra "^9.0.1" - electron-publish@25.0.0-alpha.6: version "25.0.0-alpha.6" resolved "https://registry.yarnpkg.com/electron-publish/-/electron-publish-25.0.0-alpha.6.tgz#8af3cb6e2435c00b8c71de43c330483808df5924" @@ -1352,9 +1266,9 @@ electron-updater@^6.1: tiny-typed-emitter "^2.1.0" electron@^30: - version "30.0.2" - resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733" - integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ== + version "30.0.6" + resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.6.tgz#9ddea5f68396ecca88ad7c2c466a30fc9c16144b" + integrity 
sha512-PkhEPFdpYcTzjAO3gMHZ+map7g2+xCrMDedo/L1i0ir2BRXvAB93IkTJX497U6Srb/09r2cFt+k20VPNVCdw3Q== dependencies: "@electron/get" "^2.0.0" "@types/node" "^20.9.0" @@ -1389,13 +1303,6 @@ err-code@^2.0.2: resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA== -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - es-define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" @@ -1418,11 +1325,6 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27" integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA== -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" @@ -1494,11 +1396,6 @@ espree@^9.6.0, espree@^9.6.1: acorn-jsx "^5.3.2" eslint-visitor-keys "^3.4.1" -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - esquery@^1.4.2: 
version "1.5.0" resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" @@ -1622,14 +1519,6 @@ find-up@^3.0.0: dependencies: locate-path "^3.0.0" -find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - find-up@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" @@ -1907,11 +1796,6 @@ graphemer@^1.4.0: resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - has-flag@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" @@ -1946,11 +1830,6 @@ hasown@^2.0.0: dependencies: function-bind "^1.1.2" -hosted-git-info@^2.1.4: - version "2.8.9" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" - integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== - hosted-git-info@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224" @@ -2081,11 +1960,6 @@ ip-address@^9.0.5: jsbn "1.1.0" sprintf-js "^1.1.3" -is-arrayish@^0.2.1: - version "0.2.1" - resolved 
"https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -2198,19 +2072,6 @@ jpeg-js@^0.4: resolved "https://registry.yarnpkg.com/jpeg-js/-/jpeg-js-0.4.4.tgz#a9f1c6f1f9f0fa80cdb3484ed9635054d28936aa" integrity sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg== -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.14.0: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - js-yaml@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" @@ -2228,11 +2089,6 @@ json-buffer@3.0.1: resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== -json-parse-even-better-errors@^2.3.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - json-schema-traverse@^0.4.1: version "0.4.1" resolved 
"https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" @@ -2299,11 +2155,6 @@ levn@^0.4.1: prelude-ls "^1.2.1" type-check "~0.4.0" -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" @@ -2312,13 +2163,6 @@ locate-path@^3.0.0: p-locate "^3.0.0" path-exists "^3.0.0" -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - locate-path@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" @@ -2643,16 +2487,6 @@ nopt@^6.0.0: dependencies: abbrev "^1.0.0" -normalize-package-data@^2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" - integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== - dependencies: - hosted-git-info "^2.1.4" - resolve "^1.10.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" - normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -2737,7 +2571,7 @@ p-cancelable@^2.0.0: resolved 
"https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== -p-limit@^2.0.0, p-limit@^2.2.0: +p-limit@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== @@ -2758,13 +2592,6 @@ p-locate@^3.0.0: dependencies: p-limit "^2.0.0" -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - p-locate@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" @@ -2796,16 +2623,6 @@ parse-cache-control@^1.0.1: resolved "https://registry.yarnpkg.com/parse-cache-control/-/parse-cache-control-1.0.1.tgz#8eeab3e54fa56920fe16ba38f77fa21aacc2d74e" integrity sha512-60zvsJReQPX5/QP0Kzfd/VrpjScIQ7SHBW6bFCYfEP+fp0Eppr1SHhIO5nd1PjZtvclzSzES9D/p5nFJurwfWg== -parse-json@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" @@ -2849,11 +2666,6 @@ pend@~1.2.0: resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" integrity 
sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -2958,25 +2770,6 @@ read-config-file@6.3.2: json5 "^2.2.0" lazy-val "^1.0.4" -read-pkg-up@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" - integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== - dependencies: - find-up "^4.1.0" - read-pkg "^5.2.0" - type-fest "^0.8.1" - -read-pkg@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" - integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== - dependencies: - "@types/normalize-package-data" "^2.4.0" - normalize-package-data "^2.5.0" - parse-json "^5.0.0" - type-fest "^0.6.0" - readable-stream@^3.0.2, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" @@ -3025,7 +2818,7 @@ resolve-from@^4.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== -resolve@^1.1.6, resolve@^1.10.0: +resolve@^1.1.6: version "1.22.8" resolved 
"https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== @@ -3126,17 +2919,17 @@ semver-compare@^1.0.0: resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc" integrity sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow== -"semver@2 || 3 || 4 || 5": - version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" - integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== - semver@^6.2.0: version "6.3.1" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.2, semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0: +semver@^7.3.2: + version "7.6.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13" + integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w== + +semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0: version "7.6.0" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d" integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg== @@ -3284,42 +3077,11 @@ spawn-command@0.0.2: resolved "https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2.tgz#9544e1a43ca045f8531aac1a48cb29bdae62338e" integrity sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ== -spdx-correct@^3.0.0: - version "3.2.0" - resolved 
"https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" - integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA== - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - -spdx-exceptions@^2.1.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz#5d607d27fc806f66d7b64a766650fa890f04ed66" - integrity sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w== - -spdx-expression-parse@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" - integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.17" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz#887da8aa73218e51a1d917502d79863161a93f9c" - integrity sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg== - sprintf-js@^1.1.2, sprintf-js@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - ssri@^9.0.0: version "9.0.1" resolved "https://registry.yarnpkg.com/ssri/-/ssri-9.0.1.tgz#544d4c357a8d7b71a19700074b6883fcb4eae057" @@ -3367,13 +3129,6 @@ sumchecker@^3.0.1: dependencies: debug "^4.1.0" -supports-color@^5.3.0: - 
version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - supports-color@^7.1.0: version "7.2.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" @@ -3501,16 +3256,6 @@ type-fest@^0.20.2: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== -type-fest@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" - integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== - -type-fest@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" - integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== - type-fest@^2.17.0: version "2.19.0" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" @@ -3577,14 +3322,6 @@ util-deprecate@^1.0.1: resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== -validate-npm-package-license@^3.0.1: - version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" - integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== - dependencies: - 
spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - verror@^1.10.0: version "1.10.1" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.1.tgz#4bf09eeccf4563b109ed4b3d458380c972b0cdeb" diff --git a/docs/docs/.vitepress/sidebar.ts b/docs/docs/.vitepress/sidebar.ts index 6af9e3556a..84ae5e0fa8 100644 --- a/docs/docs/.vitepress/sidebar.ts +++ b/docs/docs/.vitepress/sidebar.ts @@ -123,6 +123,10 @@ export const sidebar = [ text: "Troubleshooting", collapsed: true, items: [ + { + text: "Desktop install", + link: "/photos/troubleshooting/desktop-install/", + }, { text: "Files not uploading", link: "/photos/troubleshooting/files-not-uploading", @@ -197,6 +201,10 @@ export const sidebar = [ text: "System requirements", link: "/self-hosting/guides/system-requirements", }, + { + text: "Configuring S3", + link: "/self-hosting/guides/configuring-s3", + }, { text: "Using external S3", link: "/self-hosting/guides/external-s3", diff --git a/docs/docs/photos/features/cast/index.md b/docs/docs/photos/features/cast/index.md index 89dc801f6d..ecd91cb7ce 100644 --- a/docs/docs/photos/features/cast/index.md +++ b/docs/docs/photos/features/cast/index.md @@ -1,19 +1,13 @@ --- -title: Archive -description: | - Archiving photos and albums in Ente Photos to remove them from your home - timeline +title: Cast +description: + Casting your photos on to a large screen or a TV or a Chromecast device --- -> [!CAUTION] -> -> This is preview documentation for an upcoming feature. This feature has not -> yet been released yet, so the steps below will not work currently. - # Cast With Ente Cast, you can play a slideshow of your favourite albums on your Google -Chromecast TVs or other Internet-connected large screen devices. +Chromecast TVs or any other internet-connected large screen devices. 
## Get Started diff --git a/docs/docs/photos/troubleshooting/desktop-install/index.md b/docs/docs/photos/troubleshooting/desktop-install/index.md new file mode 100644 index 0000000000..7410c7818e --- /dev/null +++ b/docs/docs/photos/troubleshooting/desktop-install/index.md @@ -0,0 +1,75 @@ +--- +title: Desktop installation +description: Troubleshooting issues when installing the Ente Photos desktop app +--- + +# Desktop app installation + +The latest version of the Ente Photos desktop app can be downloaded from +[ente.io/download](https://ente.io/download). If you're having trouble, please +see if any of the following cases apply. + +## Windows + +If the app stops with an "A JavaScript error occurred in the main process - The +specified module could not be found" error on your Windows machine when you +start it, then you might need to install the VC++ runtime from Microsoft. + +This is what the error looks like: + +![Error when VC++ runtime is not installed](windows-vc.png){width=500px} + +You can install the Microsoft VC++ redistributable runtime from here:
+https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#latest-microsoft-visual-c-redistributable-version + +## AppImages on ARM64 Linux + +If you're on an ARM64 machine running Linux, and the AppImages doesn't do +anything when you run it, you will need to run the following command on your +machine: + +```sh +sudo ln -s /usr/lib/aarch64-linux-gnu/libz.so{.1,} +``` + +It is possible that the exact path might be different on your machine. Briefly, +what we need to do is create `libz.so` as an alias for `libz.so.1`. For more +details, see the following upstream issues: + +- libz.so cannot open shared object file on ARM64 - + [AppImage/AppImageKit/issues/1092](https://github.com/AppImage/AppImageKit/issues/1092) + +- libz.so: cannot open shared object file with Ubuntu arm64 - + [electron-userland/electron-builder/issues/7835](https://github.com/electron-userland/electron-builder/issues/7835) + +## AppImage says it requires FUSE + +See +[docs.appimage.org](https://docs.appimage.org/user-guide/troubleshooting/fuse.html#the-appimage-tells-me-it-needs-fuse-to-run). + +tl;dr; for example, on Ubuntu, + +```sh +sudo apt install libfuse2 +``` + +## Linux SUID error + +On some Linux distributions, if you run the AppImage from the CLI, it might fail +with the following error: + +> The SUID sandbox helper binary was found, but is not configured correctly. + +This happens when you try to run the AppImage from the command line. If you +instead double click on the AppImage in your Files browser, then it should start +properly. + +If you do want to run it from the command line, you can do so by passing the +`--no-sandbox` flag when executing the AppImage. e.g. + +```sh +./ente.AppImage --no-sandbox +``` + +For more details, see this upstream issue on +[electron](https://github.com/electron/electron/issues/17972). 
diff --git a/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png new file mode 100644 index 0000000000..852c037d57 Binary files /dev/null and b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png differ diff --git a/docs/docs/self-hosting/guides/configuring-s3.md b/docs/docs/self-hosting/guides/configuring-s3.md new file mode 100644 index 0000000000..8e823ed2ae --- /dev/null +++ b/docs/docs/self-hosting/guides/configuring-s3.md @@ -0,0 +1,80 @@ +--- +title: Configuring S3 buckets +description: + Configure S3 endpoints to fix upload errors or use your self hosted ente + from outside localhost +--- + +# Configuring S3 + +There are three components involved in uploading: + +1. The client (e.g. the web app or the mobile app) +2. Ente's server (museum) +3. The S3-compatible object storage (e.g. minio in the default starter) + +For the uploads to work, all three of them need to be able to reach each other. +This is because the client uploads directly to the object storage. The +interaction goes something like this: + +1. Client wants to upload, it asks museum where it should upload to. +2. Museum creates pre-signed URLs for the S3 bucket that was configured. +3. Client directly uploads to the S3 buckets these URLs. + +The upshot of this is that _both_ the client and museum should be able to reach +your S3 bucket. + +The URL for the S3 bucket is configured in +[scripts/compose/credentials.yaml](https://github.com/ente-io/ente/blob/main/server/scripts/compose/credentials.yaml#L10). +You can edit this file directly when testing, though it is just simpler and more +robust to create a `museum.yaml` (in the same folder as the Docker compose file) +and put your custom configuration there (in your case, you can put an entire +`s3` config object in your `museum.yaml`). 
+ +> [!TIP] +> +> For more details about these configuration objects, see the documentaion for +> the `s3` object in +> [configurations/local.yaml](https://github.com/ente-io/ente/blob/main/server/configurations/local.yaml). + +By default, you only need to configure the endpoint for the first bucket. + +> [!NOTE] +> +> If you're wondering why there are 3 buckets - that's because our production +> instance uses these to perform replication. +> +> However, in a self hosted setup replication is off by default (you can turn it +> on if you want). When replication is turned off, only the first bucket is +> used, and you can remove the other two if you wish or just ignore them. + +The `endpoint` for the first bucket in the starter `credentials.yaml` is +`localhost:3200`. The way this works then is that both museum (`2`) and minio +(`3`) are running within the same Docker compose cluster, so are able to reach +each other. If at this point we were to run the web app (`1`) on localhost (say +using `yarn dev:photos`), it would also run on localhost and thus would be able +to reach `3`. + +If you were to try and connect from a mobile app, this would not work since +`localhost:3200` would not resolve on your mobile. So you'll need to modify this +endpoint to a value, say `yourserverip:3200`, so that the mobile app can also +reach it. + +The same principle applies if you're deploying to your custom domain. + +> [!NOTE] +> +> If you need to configure SSL, for example if you're running over the internet, +> you'll need to turn off `s3.are_local_buckets` (which disables SSL in the +> default starter compose template). +> +> Disabling `s3.are_local_buckets` also switches to the subdomain style URLs for +> the buckets. However, not all S3 providers support these, in particular, minio +> does not work with these in default configuration. So in such cases you'll +> also need to then enable `s3.use_path_style_urls`. 
+ +To summarize: + +Set the S3 bucket `endpoint` in `credentials.yaml` to a `yourserverip:3200` or +some such IP/hostname that accessible from both where you are running the Ente +clients (e.g. the mobile app) and also from within the Docker compose cluster. diff --git a/docs/docs/self-hosting/guides/custom-server/index.md b/docs/docs/self-hosting/guides/custom-server/index.md index 8e16004a12..110e3dbb88 100644 --- a/docs/docs/self-hosting/guides/custom-server/index.md +++ b/docs/docs/self-hosting/guides/custom-server/index.md @@ -34,7 +34,8 @@ endpoint: api: "http://localhost:8080" ``` -(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) +(Another +[example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) ## Web appps and Photos desktop app @@ -46,5 +47,5 @@ connect to. For example: NEXT_PUBLIC_ENTE_ENDPOINT=http://localhost:8080 yarn dev:photos ``` -For more details, see [hosting the web -app](https://help.ente.io/self-hosting/guides/web-app). +For more details, see +[hosting the web app](https://help.ente.io/self-hosting/guides/web-app). diff --git a/docs/docs/self-hosting/guides/external-s3.md b/docs/docs/self-hosting/guides/external-s3.md index 505ae6fe92..87a48de277 100644 --- a/docs/docs/self-hosting/guides/external-s3.md +++ b/docs/docs/self-hosting/guides/external-s3.md @@ -164,6 +164,27 @@ EOF RUN chmod +x /docker-entrypoint.d/replace_ente_endpoints.sh ``` +This runs nginx inside to handle both the web & album URLs so we don't have to +make two web images with different port. + +- `DOCKER_RUNTIME_REPLACE_ENDPOINT` this is your public museum API URL. +- `DOCKER_RUNTIME_REPLACE_ALBUMS_ENDPOINT` this is the shared albums URL (for + more details about configuring shared albums, see + [faq/sharing](/self-hosting/faq/sharing)). 
+ +Note how above we had updated the `compose.yaml` file for the server with + +```yaml +web: + build: + context: web + ports: + - 8081:80 + - 8082:80 +``` + +so that web and album both point to the same container and nginx will handle it. + ## 2. Set up the `.credentials.env` file Create a `.credentials.env` file at the root of the project with the following diff --git a/docs/docs/self-hosting/guides/index.md b/docs/docs/self-hosting/guides/index.md index a8a64d9605..b8a73d7eb0 100644 --- a/docs/docs/self-hosting/guides/index.md +++ b/docs/docs/self-hosting/guides/index.md @@ -16,5 +16,8 @@ See the sidebar for existing guides. In particular: - For various admin related tasks, e.g. increasing the storage quota on your self hosted instance, see [administering your custom server](admin). -- For self hosting both the server and web app using external S3 buckets for - object storage, see [using external S3](external-s3). +- For configuring your S3 buckets to get the object storage to work from your + mobile device or for fixing an upload errors, see + [configuring S3](configuring-s3). There is also a longer + [community contributed guide](external-s3) for a more self hosted setup of + both the server and web app using external S3 buckets for object storage. 
diff --git a/docs/docs/self-hosting/guides/web-app.md b/docs/docs/self-hosting/guides/web-app.md index 49dfdd114b..28802c457d 100644 --- a/docs/docs/self-hosting/guides/web-app.md +++ b/docs/docs/self-hosting/guides/web-app.md @@ -1,6 +1,8 @@ --- title: Hosting the web app -description: Building and hosting Ente's web app, connecting it to your self-hosted server +description: + Building and hosting Ente's web app, connecting it to your self-hosted + server --- # Web app diff --git a/docs/docs/self-hosting/troubleshooting/uploads.md b/docs/docs/self-hosting/troubleshooting/uploads.md index 4f7273e943..435a5e93c6 100644 --- a/docs/docs/self-hosting/troubleshooting/uploads.md +++ b/docs/docs/self-hosting/troubleshooting/uploads.md @@ -5,9 +5,9 @@ description: Fixing upload errors when trying to self host Ente # Uploads failing -If uploads to your self-hosted server are failing, make sure that -`credentials.yaml` has `yourserverip:3200` for all three minio locations. +If uploads to your minio are failing, you need to ensure that you've configured +the S3 bucket `endpoint` in `credentials.yaml` (or `museum.yaml`) to, say, +`yourserverip:3200`. This can be any host or port, it just need to be a value +that is reachable from both your client and from museum. -By default it is `localhost:3200`, and it needs to be changed to an IP that is -accessible from both where you are running the Ente clients (e.g. the mobile -app) and also from within the Docker compose cluster. +For more details, see [configuring-s3](/self-hosting/guides/configuring-s3). 
diff --git a/docs/docs/self-hosting/troubleshooting/yarn.md b/docs/docs/self-hosting/troubleshooting/yarn.md index 7d8d13b002..de2b550291 100644 --- a/docs/docs/self-hosting/troubleshooting/yarn.md +++ b/docs/docs/self-hosting/troubleshooting/yarn.md @@ -8,3 +8,6 @@ description: Fixing yarn install errors when trying to self host Ente If your `yarn install` is failing, make sure you are using Yarn Classic - https://classic.yarnpkg.com/lang/en/docs/install + +For more details, see the +[getting started instructions](https://github.com/ente-io/ente/blob/main/web/docs/new.md). diff --git a/docs/package.json b/docs/package.json index 5d4dc3b196..015d79eea2 100644 --- a/docs/package.json +++ b/docs/package.json @@ -10,5 +10,6 @@ "devDependencies": { "prettier": "^3", "vitepress": "^1.0.0-rc.45" - } + }, + "packageManager": "yarn@1.22.21" } diff --git a/mobile/README.md b/mobile/README.md index fc17f6b26e..6d86ad5344 100644 --- a/mobile/README.md +++ b/mobile/README.md @@ -46,7 +46,7 @@ You can alternatively install the build from PlayStore or F-Droid. ## 🧑‍💻 Building from source -1. [Install Flutter v3.19.3](https://flutter.dev/docs/get-started/install). +1. [Install Flutter v3.22.0](https://flutter.dev/docs/get-started/install). 2. 
Pull in all submodules with `git submodule update --init --recursive` diff --git a/mobile/android/app/build.gradle b/mobile/android/app/build.gradle index 01ec11ff8a..b5225db8ef 100644 --- a/mobile/android/app/build.gradle +++ b/mobile/android/app/build.gradle @@ -43,7 +43,7 @@ android { defaultConfig { applicationId "io.ente.photos" - minSdkVersion 21 + minSdkVersion 26 targetSdkVersion 33 versionCode flutterVersionCode.toInteger() versionName flutterVersionName @@ -70,6 +70,10 @@ android { dimension "default" applicationIdSuffix ".dev" } + face { + dimension "default" + applicationIdSuffix ".face" + } playstore { dimension "default" } diff --git a/mobile/android/app/src/face/AndroidManifest.xml b/mobile/android/app/src/face/AndroidManifest.xml new file mode 100644 index 0000000000..cbf1924b23 --- /dev/null +++ b/mobile/android/app/src/face/AndroidManifest.xml @@ -0,0 +1,10 @@ + + + + + + + diff --git a/mobile/android/app/src/face/res/values/strings.xml b/mobile/android/app/src/face/res/values/strings.xml new file mode 100644 index 0000000000..4932deb961 --- /dev/null +++ b/mobile/android/app/src/face/res/values/strings.xml @@ -0,0 +1,4 @@ + + ente face + backup face + diff --git a/mobile/assets/models/cocossd/labels.txt b/mobile/assets/models/cocossd/labels.txt deleted file mode 100644 index fc674c0b99..0000000000 --- a/mobile/assets/models/cocossd/labels.txt +++ /dev/null @@ -1,91 +0,0 @@ -unknown -person -bicycle -car -motorcycle -airplane -bus -train -truck -boat -traffic light -fire hydrant -unknown -stop sign -parking meter -bench -bird -cat -dog -horse -sheep -cow -elephant -bear -zebra -giraffe -unknown -backpack -umbrella -unknown -unknown -handbag -tie -suitcase -frisbee -skis -snowboard -sports ball -kite -baseball bat -baseball glove -skateboard -surfboard -tennis racket -bottle -unknown -wine glass -cup -fork -knife -spoon -bowl -banana -apple -sandwich -orange -broccoli -carrot -hot dog -pizza -donut -cake -chair -couch -potted plant -bed -unknown 
-dining table -unknown -unknown -toilet -unknown -tv -laptop -mouse -remote -keyboard -cell phone -microwave -oven -toaster -sink -refrigerator -unknown -book -clock -vase -scissors -teddy bear -hair drier -toothbrush diff --git a/mobile/assets/models/cocossd/model.tflite b/mobile/assets/models/cocossd/model.tflite deleted file mode 100644 index 8015ee5d8a..0000000000 Binary files a/mobile/assets/models/cocossd/model.tflite and /dev/null differ diff --git a/mobile/assets/models/mobilenet/labels_mobilenet_quant_v1_224.txt b/mobile/assets/models/mobilenet/labels_mobilenet_quant_v1_224.txt deleted file mode 100644 index fe811239d8..0000000000 --- a/mobile/assets/models/mobilenet/labels_mobilenet_quant_v1_224.txt +++ /dev/null @@ -1,1001 +0,0 @@ -background -tench -goldfish -great white shark -tiger shark -hammerhead -electric ray -stingray -cock -hen -ostrich -brambling -goldfinch -house finch -junco -indigo bunting -robin -bulbul -jay -magpie -chickadee -water ouzel -kite -bald eagle -vulture -great grey owl -European fire salamander -common newt -eft -spotted salamander -axolotl -bullfrog -tree frog -tailed frog -loggerhead -leatherback turtle -mud turtle -terrapin -box turtle -banded gecko -common iguana -American chameleon -whiptail -agama -frilled lizard -alligator lizard -Gila monster -green lizard -African chameleon -Komodo dragon -African crocodile -American alligator -triceratops -thunder snake -ringneck snake -hognose snake -green snake -king snake -garter snake -water snake -vine snake -night snake -boa constrictor -rock python -Indian cobra -green mamba -sea snake -horned viper -diamondback -sidewinder -trilobite -harvestman -scorpion -black and gold garden spider -barn spider -garden spider -black widow -tarantula -wolf spider -tick -centipede -black grouse -ptarmigan -ruffed grouse -prairie chicken -peacock -quail -partridge -African grey -macaw -sulphur-crested cockatoo -lorikeet -coucal -bee eater -hornbill -hummingbird -jacamar -toucan -drake 
-red-breasted merganser -goose -black swan -tusker -echidna -platypus -wallaby -koala -wombat -jellyfish -sea anemone -brain coral -flatworm -nematode -conch -snail -slug -sea slug -chiton -chambered nautilus -Dungeness crab -rock crab -fiddler crab -king crab -American lobster -spiny lobster -crayfish -hermit crab -isopod -white stork -black stork -spoonbill -flamingo -little blue heron -American egret -bittern -crane -limpkin -European gallinule -American coot -bustard -ruddy turnstone -red-backed sandpiper -redshank -dowitcher -oystercatcher -pelican -king penguin -albatross -grey whale -killer whale -dugong -sea lion -Chihuahua -Japanese spaniel -Maltese dog -Pekinese -Shih-Tzu -Blenheim spaniel -papillon -toy terrier -Rhodesian ridgeback -Afghan hound -basset -beagle -bloodhound -bluetick -black-and-tan coonhound -Walker hound -English foxhound -redbone -borzoi -Irish wolfhound -Italian greyhound -whippet -Ibizan hound -Norwegian elkhound -otterhound -Saluki -Scottish deerhound -Weimaraner -Staffordshire bullterrier -American Staffordshire terrier -Bedlington terrier -Border terrier -Kerry blue terrier -Irish terrier -Norfolk terrier -Norwich terrier -Yorkshire terrier -wire-haired fox terrier -Lakeland terrier -Sealyham terrier -Airedale -cairn -Australian terrier -Dandie Dinmont -Boston bull -miniature schnauzer -giant schnauzer -standard schnauzer -Scotch terrier -Tibetan terrier -silky terrier -soft-coated wheaten terrier -West Highland white terrier -Lhasa -flat-coated retriever -curly-coated retriever -golden retriever -Labrador retriever -Chesapeake Bay retriever -German short-haired pointer -vizsla -English setter -Irish setter -Gordon setter -Brittany spaniel -clumber -English springer -Welsh springer spaniel -cocker spaniel -Sussex spaniel -Irish water spaniel -kuvasz -schipperke -groenendael -malinois -briard -kelpie -komondor -Old English sheepdog -Shetland sheepdog -collie -Border collie -Bouvier des Flandres -Rottweiler -German shepherd -Doberman 
-miniature pinscher -Greater Swiss Mountain dog -Bernese mountain dog -Appenzeller -EntleBucher -boxer -bull mastiff -Tibetan mastiff -French bulldog -Great Dane -Saint Bernard -Eskimo dog -malamute -Siberian husky -dalmatian -affenpinscher -basenji -pug -Leonberg -Newfoundland -Great Pyrenees -Samoyed -Pomeranian -chow -keeshond -Brabancon griffon -Pembroke -Cardigan -toy poodle -miniature poodle -standard poodle -Mexican hairless -timber wolf -white wolf -red wolf -coyote -dingo -dhole -African hunting dog -hyena -red fox -kit fox -Arctic fox -grey fox -tabby -tiger cat -Persian cat -Siamese cat -Egyptian cat -cougar -lynx -leopard -snow leopard -jaguar -lion -tiger -cheetah -brown bear -American black bear -ice bear -sloth bear -mongoose -meerkat -tiger beetle -ladybug -ground beetle -long-horned beetle -leaf beetle -dung beetle -rhinoceros beetle -weevil -fly -bee -ant -grasshopper -cricket -walking stick -cockroach -mantis -cicada -leafhopper -lacewing -dragonfly -damselfly -admiral -ringlet -monarch -cabbage butterfly -sulphur butterfly -lycaenid -starfish -sea urchin -sea cucumber -wood rabbit -hare -Angora -hamster -porcupine -fox squirrel -marmot -beaver -guinea pig -sorrel -zebra -hog -wild boar -warthog -hippopotamus -ox -water buffalo -bison -ram -bighorn -ibex -hartebeest -impala -gazelle -Arabian camel -llama -weasel -mink -polecat -black-footed ferret -otter -skunk -badger -armadillo -three-toed sloth -orangutan -gorilla -chimpanzee -gibbon -siamang -guenon -patas -baboon -macaque -langur -colobus -proboscis monkey -marmoset -capuchin -howler monkey -titi -spider monkey -squirrel monkey -Madagascar cat -indri -Indian elephant -African elephant -lesser panda -giant panda -barracouta -eel -coho -rock beauty -anemone fish -sturgeon -gar -lionfish -puffer -abacus -abaya -academic gown -accordion -acoustic guitar -aircraft carrier -airliner -airship -altar -ambulance -amphibian -analog clock -apiary -apron -ashcan -assault rifle -backpack -bakery -balance 
beam -balloon -ballpoint -Band Aid -banjo -bannister -barbell -barber chair -barbershop -barn -barometer -barrel -barrow -baseball -basketball -bassinet -bassoon -bathing cap -bath towel -bathtub -beach wagon -beacon -beaker -bearskin -beer bottle -beer glass -bell cote -bib -bicycle-built-for-two -bikini -binder -binoculars -birdhouse -boathouse -bobsled -bolo tie -bonnet -bookcase -bookshop -bottlecap -bow -bow tie -brass -brassiere -breakwater -breastplate -broom -bucket -buckle -bulletproof vest -bullet train -butcher shop -cab -caldron -candle -cannon -canoe -can opener -cardigan -car mirror -carousel -carpenter's kit -carton -car wheel -cash machine -cassette -cassette player -castle -catamaran -CD player -cello -cellular telephone -chain -chainlink fence -chain mail -chain saw -chest -chiffonier -chime -china cabinet -Christmas stocking -church -cinema -cleaver -cliff dwelling -cloak -clog -cocktail shaker -coffee mug -coffeepot -coil -combination lock -computer keyboard -confectionery -container ship -convertible -corkscrew -cornet -cowboy boot -cowboy hat -cradle -crane -crash helmet -crate -crib -Crock Pot -croquet ball -crutch -cuirass -dam -desk -desktop computer -dial telephone -diaper -digital clock -digital watch -dining table -dishrag -dishwasher -disk brake -dock -dogsled -dome -doormat -drilling platform -drum -drumstick -dumbbell -Dutch oven -electric fan -electric guitar -electric locomotive -entertainment center -envelope -espresso maker -face powder -feather boa -file -fireboat -fire engine -fire screen -flagpole -flute -folding chair -football helmet -forklift -fountain -fountain pen -four-poster -freight car -French horn -frying pan -fur coat -garbage truck -gasmask -gas pump -goblet -go-kart -golf ball -golfcart -gondola -gong -gown -grand piano -greenhouse -grille -grocery store -guillotine -hair slide -hair spray -half track -hammer -hamper -hand blower -hand-held computer -handkerchief -hard disc -harmonica -harp -harvester -hatchet 
-holster -home theater -honeycomb -hook -hoopskirt -horizontal bar -horse cart -hourglass -iPod -iron -jack-o'-lantern -jean -jeep -jersey -jigsaw puzzle -jinrikisha -joystick -kimono -knee pad -knot -lab coat -ladle -lampshade -laptop -lawn mower -lens cap -letter opener -library -lifeboat -lighter -limousine -liner -lipstick -Loafer -lotion -loudspeaker -loupe -lumbermill -magnetic compass -mailbag -mailbox -maillot -maillot -manhole cover -maraca -marimba -mask -matchstick -maypole -maze -measuring cup -medicine chest -megalith -microphone -microwave -military uniform -milk can -minibus -miniskirt -minivan -missile -mitten -mixing bowl -mobile home -Model T -modem -monastery -monitor -moped -mortar -mortarboard -mosque -mosquito net -motor scooter -mountain bike -mountain tent -mouse -mousetrap -moving van -muzzle -nail -neck brace -necklace -nipple -notebook -obelisk -oboe -ocarina -odometer -oil filter -organ -oscilloscope -overskirt -oxcart -oxygen mask -packet -paddle -paddlewheel -padlock -paintbrush -pajama -palace -panpipe -paper towel -parachute -parallel bars -park bench -parking meter -passenger car -patio -pay-phone -pedestal -pencil box -pencil sharpener -perfume -Petri dish -photocopier -pick -pickelhaube -picket fence -pickup -pier -piggy bank -pill bottle -pillow -ping-pong ball -pinwheel -pirate -pitcher -plane -planetarium -plastic bag -plate rack -plow -plunger -Polaroid camera -pole -police van -poncho -pool table -pop bottle -pot -potter's wheel -power drill -prayer rug -printer -prison -projectile -projector -puck -punching bag -purse -quill -quilt -racer -racket -radiator -radio -radio telescope -rain barrel -recreational vehicle -reel -reflex camera -refrigerator -remote control -restaurant -revolver -rifle -rocking chair -rotisserie -rubber eraser -rugby ball -rule -running shoe -safe -safety pin -saltshaker -sandal -sarong -sax -scabbard -scale -school bus -schooner -scoreboard -screen -screw -screwdriver -seat belt -sewing machine 
-shield -shoe shop -shoji -shopping basket -shopping cart -shovel -shower cap -shower curtain -ski -ski mask -sleeping bag -slide rule -sliding door -slot -snorkel -snowmobile -snowplow -soap dispenser -soccer ball -sock -solar dish -sombrero -soup bowl -space bar -space heater -space shuttle -spatula -speedboat -spider web -spindle -sports car -spotlight -stage -steam locomotive -steel arch bridge -steel drum -stethoscope -stole -stone wall -stopwatch -stove -strainer -streetcar -stretcher -studio couch -stupa -submarine -suit -sundial -sunglass -sunglasses -sunscreen -suspension bridge -swab -sweatshirt -swimming trunks -swing -switch -syringe -table lamp -tank -tape player -teapot -teddy -television -tennis ball -thatch -theater curtain -thimble -thresher -throne -tile roof -toaster -tobacco shop -toilet seat -torch -totem pole -tow truck -toyshop -tractor -trailer truck -tray -trench coat -tricycle -trimaran -tripod -triumphal arch -trolleybus -trombone -tub -turnstile -typewriter keyboard -umbrella -unicycle -upright -vacuum -vase -vault -velvet -vending machine -vestment -viaduct -violin -volleyball -waffle iron -wall clock -wallet -wardrobe -warplane -washbasin -washer -water bottle -water jug -water tower -whiskey jug -whistle -wig -window screen -window shade -Windsor tie -wine bottle -wing -wok -wooden spoon -wool -worm fence -wreck -yawl -yurt -web site -comic book -crossword puzzle -street sign -traffic light -book jacket -menu -plate -guacamole -consomme -hot pot -trifle -ice cream -ice lolly -French loaf -bagel -pretzel -cheeseburger -hotdog -mashed potato -head cabbage -broccoli -cauliflower -zucchini -spaghetti squash -acorn squash -butternut squash -cucumber -artichoke -bell pepper -cardoon -mushroom -Granny Smith -strawberry -orange -lemon -fig -pineapple -banana -jackfruit -custard apple -pomegranate -hay -carbonara -chocolate sauce -dough -meat loaf -pizza -potpie -burrito -red wine -espresso -cup -eggnog -alp -bubble -cliff -coral reef -geyser 
-lakeside -promontory -sandbar -seashore -valley -volcano -ballplayer -groom -scuba diver -rapeseed -daisy -yellow lady's slipper -corn -acorn -hip -buckeye -coral fungus -agaric -gyromitra -stinkhorn -earthstar -hen-of-the-woods -bolete -ear -toilet tissue diff --git a/mobile/assets/models/mobilenet/mobilenet_v1_1.0_224_quant.tflite b/mobile/assets/models/mobilenet/mobilenet_v1_1.0_224_quant.tflite deleted file mode 100644 index 437640b069..0000000000 Binary files a/mobile/assets/models/mobilenet/mobilenet_v1_1.0_224_quant.tflite and /dev/null differ diff --git a/mobile/assets/models/scenes/labels.txt b/mobile/assets/models/scenes/labels.txt deleted file mode 100644 index e0df140821..0000000000 --- a/mobile/assets/models/scenes/labels.txt +++ /dev/null @@ -1,30 +0,0 @@ -waterfall -snow -landscape -underwater -architecture -sunset / sunrise -blue sky -cloudy sky -greenery -autumn leaves -portrait -flower -night shot -stage concert -fireworks -candle light -neon lights -indoor -backlight -text documents -qr images -group portrait -computer screens -kids -dog -cat -macro -food -beach -mountain diff --git a/mobile/assets/models/scenes/model.tflite b/mobile/assets/models/scenes/model.tflite deleted file mode 100644 index f2c9423548..0000000000 Binary files a/mobile/assets/models/scenes/model.tflite and /dev/null differ diff --git a/mobile/ios/Podfile.lock b/mobile/ios/Podfile.lock index 7315149574..9f74d552a7 100644 --- a/mobile/ios/Podfile.lock +++ b/mobile/ios/Podfile.lock @@ -6,6 +6,8 @@ PODS: - connectivity_plus (0.0.1): - Flutter - FlutterMacOS + - dart_ui_isolate (0.0.1): + - Flutter - device_info_plus (0.0.1): - Flutter - file_saver (0.0.1): @@ -226,6 +228,7 @@ DEPENDENCIES: - background_fetch (from `.symlinks/plugins/background_fetch/ios`) - battery_info (from `.symlinks/plugins/battery_info/ios`) - connectivity_plus (from `.symlinks/plugins/connectivity_plus/darwin`) + - dart_ui_isolate (from `.symlinks/plugins/dart_ui_isolate/ios`) - device_info_plus (from 
`.symlinks/plugins/device_info_plus/ios`) - file_saver (from `.symlinks/plugins/file_saver/ios`) - firebase_core (from `.symlinks/plugins/firebase_core/ios`) @@ -302,6 +305,8 @@ EXTERNAL SOURCES: :path: ".symlinks/plugins/battery_info/ios" connectivity_plus: :path: ".symlinks/plugins/connectivity_plus/darwin" + dart_ui_isolate: + :path: ".symlinks/plugins/dart_ui_isolate/ios" device_info_plus: :path: ".symlinks/plugins/device_info_plus/ios" file_saver: @@ -397,6 +402,7 @@ SPEC CHECKSUMS: background_fetch: 2319bf7e18237b4b269430b7f14d177c0df09c5a battery_info: 09f5c9ee65394f2291c8c6227bedff345b8a730c connectivity_plus: ddd7f30999e1faaef5967c23d5b6d503d10434db + dart_ui_isolate: d5bcda83ca4b04f129d70eb90110b7a567aece14 device_info_plus: c6fb39579d0f423935b0c9ce7ee2f44b71b9fce6 file_saver: 503e386464dbe118f630e17b4c2e1190fa0cf808 Firebase: 91fefd38712feb9186ea8996af6cbdef41473442 @@ -421,7 +427,7 @@ SPEC CHECKSUMS: home_widget: 0434835a4c9a75704264feff6be17ea40e0f0d57 image_editor_common: d6f6644ae4a6de80481e89fe6d0a8c49e30b4b43 in_app_purchase_storekit: 0e4b3c2e43ba1e1281f4f46dd71b0593ce529892 - integration_test: 13825b8a9334a850581300559b8839134b124670 + integration_test: ce0a3ffa1de96d1a89ca0ac26fca7ea18a749ef4 libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009 local_auth_darwin: c7e464000a6a89e952235699e32b329457608d98 local_auth_ios: 5046a18c018dd973247a0564496c8898dbb5adf9 diff --git a/mobile/ios/Runner.xcodeproj/project.pbxproj b/mobile/ios/Runner.xcodeproj/project.pbxproj index c88f9da380..22d5e8e681 100644 --- a/mobile/ios/Runner.xcodeproj/project.pbxproj +++ b/mobile/ios/Runner.xcodeproj/project.pbxproj @@ -293,6 +293,7 @@ "${BUILT_PRODUCTS_DIR}/background_fetch/background_fetch.framework", "${BUILT_PRODUCTS_DIR}/battery_info/battery_info.framework", "${BUILT_PRODUCTS_DIR}/connectivity_plus/connectivity_plus.framework", + "${BUILT_PRODUCTS_DIR}/dart_ui_isolate/dart_ui_isolate.framework", "${BUILT_PRODUCTS_DIR}/device_info_plus/device_info_plus.framework", 
"${BUILT_PRODUCTS_DIR}/file_saver/file_saver.framework", "${BUILT_PRODUCTS_DIR}/fk_user_agent/fk_user_agent.framework", @@ -374,6 +375,7 @@ "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/background_fetch.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/battery_info.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/connectivity_plus.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/dart_ui_isolate.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/device_info_plus.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/file_saver.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/fk_user_agent.framework", diff --git a/mobile/ios/Runner/Info.plist b/mobile/ios/Runner/Info.plist index 9afb874e52..fe571afeb1 100644 --- a/mobile/ios/Runner/Info.plist +++ b/mobile/ios/Runner/Info.plist @@ -65,9 +65,9 @@ ITSAppUsesNonExemptEncryption FLTEnableImpeller - + FLTEnableWideGamut - + NSFaceIDUsageDescription Please allow ente to lock itself with FaceID or TouchID NSCameraUsageDescription diff --git a/mobile/lib/core/configuration.dart b/mobile/lib/core/configuration.dart index cde766b1e0..4809ba8634 100644 --- a/mobile/lib/core/configuration.dart +++ b/mobile/lib/core/configuration.dart @@ -19,6 +19,7 @@ import 'package:photos/db/upload_locks_db.dart'; import "package:photos/events/endpoint_updated_event.dart"; import 'package:photos/events/signed_in_event.dart'; import 'package:photos/events/user_logged_out_event.dart'; +import "package:photos/face/db.dart"; import 'package:photos/models/key_attributes.dart'; import 'package:photos/models/key_gen_result.dart'; import 'package:photos/models/private_key_attributes.dart'; @@ -72,8 +73,6 @@ class Configuration { static const anonymousUserIDKey = "anonymous_user_id"; static const endPointKey = "endpoint"; - final kTempFolderDeletionTimeBuffer = const Duration(hours: 6).inMicroseconds; - static final _logger = Logger("Configuration"); String? 
_cachedToken; @@ -103,20 +102,7 @@ class Configuration { _documentsDirectory = (await getApplicationDocumentsDirectory()).path; _tempDocumentsDirPath = _documentsDirectory + "/temp/"; final tempDocumentsDir = Directory(_tempDocumentsDirPath); - try { - final currentTime = DateTime.now().microsecondsSinceEpoch; - if (tempDocumentsDir.existsSync() && - (_preferences.getInt(lastTempFolderClearTimeKey) ?? 0) < - (currentTime - kTempFolderDeletionTimeBuffer)) { - await tempDocumentsDir.delete(recursive: true); - await _preferences.setInt(lastTempFolderClearTimeKey, currentTime); - _logger.info("Cleared temp folder"); - } else { - _logger.info("Skipping temp folder clear"); - } - } catch (e) { - _logger.warning(e); - } + await _cleanUpStaleFiles(tempDocumentsDir); tempDocumentsDir.createSync(recursive: true); final tempDirectoryPath = (await getTemporaryDirectory()).path; _thumbnailCacheDirectory = tempDirectoryPath + "/thumbnail-cache"; @@ -144,6 +130,42 @@ class Configuration { SuperLogging.setUserID(await _getOrCreateAnonymousUserID()).ignore(); } + // _cleanUpStaleFiles deletes all files in the temp directory that are older + // than kTempFolderDeletionTimeBuffer except the the temp encrypted files for upload. + // Those file are deleted by file uploader after the upload is complete or those + // files are not being used / tracked. + Future _cleanUpStaleFiles(Directory tempDocumentsDir) async { + try { + final currentTime = DateTime.now().microsecondsSinceEpoch; + if (tempDocumentsDir.existsSync() && + (_preferences.getInt(lastTempFolderClearTimeKey) ?? 
0) < + (currentTime - tempDirCleanUpInterval)) { + int skippedTempUploadFiles = 0; + final files = tempDocumentsDir.listSync(); + for (final file in files) { + if (file is File) { + if (file.path.contains(uploadTempFilePrefix)) { + skippedTempUploadFiles++; + continue; + } + _logger.info("Deleting file: ${file.path}"); + await file.delete(); + } else if (file is Directory) { + await file.delete(recursive: true); + } + } + await _preferences.setInt(lastTempFolderClearTimeKey, currentTime); + _logger.info( + "Cleared temp folder except $skippedTempUploadFiles upload files", + ); + } else { + _logger.info("Skipping temp folder clear"); + } + } catch (e) { + _logger.warning(e); + } + } + Future logout({bool autoLogout = false}) async { if (SyncService.instance.isSyncInProgress()) { SyncService.instance.stopSync(); @@ -166,6 +188,7 @@ class Configuration { : null; await CollectionsDB.instance.clearTable(); await MemoriesDB.instance.clearTable(); + await FaceMLDataDB.instance.clearTable(); await UploadLocksDB.instance.clearTable(); await IgnoredFilesService.instance.reset(); diff --git a/mobile/lib/core/constants.dart b/mobile/lib/core/constants.dart index c2d08d903a..02923b6c43 100644 --- a/mobile/lib/core/constants.dart +++ b/mobile/lib/core/constants.dart @@ -1,3 +1,5 @@ +import "package:flutter/foundation.dart"; + const int thumbnailSmallSize = 256; const int thumbnailQuality = 50; const int thumbnailLargeSize = 512; @@ -41,6 +43,7 @@ const supportEmail = 'support@ente.io'; // this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part. 
const multipartPartSize = 20 * 1024 * 1024; +const multipartPartSizeInternal = 8 * 1024 * 1024; const kDefaultProductionEndpoint = 'https://api.ente.io'; @@ -95,3 +98,11 @@ const blackThumbnailBase64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB' 'KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' + 'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' + 'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k='; + +const localFileServer = + String.fromEnvironment("localFileServer", defaultValue: ""); + +const uploadTempFilePrefix = "upload_file_"; +final tempDirCleanUpInterval = kDebugMode + ? const Duration(seconds: 30).inMicroseconds + : const Duration(hours: 6).inMicroseconds; diff --git a/mobile/lib/db/embeddings_db.dart b/mobile/lib/db/embeddings_db.dart index 0eb1d3f6d7..b5a6111f45 100644 --- a/mobile/lib/db/embeddings_db.dart +++ b/mobile/lib/db/embeddings_db.dart @@ -54,7 +54,7 @@ class EmbeddingsDB { Future clearTable() async { final db = await _database; - await db.execute('DELETE * FROM $tableName'); + await db.execute('DELETE FROM $tableName'); } Future> getAll(Model model) async { @@ -63,6 +63,19 @@ class EmbeddingsDB { return _convertToEmbeddings(results); } + // Get FileIDs for a specific model + Future> getFileIDs(Model model) async { + final db = await _database; + final results = await db.getAll( + 'SELECT $columnFileID FROM $tableName WHERE $columnModel = ?', + [modelToInt(model)!], + ); + if (results.isEmpty) { + return {}; + } + return results.map((e) => e[columnFileID] as int).toSet(); + } + Future put(Embedding embedding) async { final db = await _database; await db.execute( diff --git a/mobile/lib/db/entities_db.dart b/mobile/lib/db/entities_db.dart index b8b48fbe4a..cee32641a0 100644 --- a/mobile/lib/db/entities_db.dart +++ b/mobile/lib/db/entities_db.dart @@ -9,7 
+9,7 @@ extension EntitiesDB on FilesDB { List data, { ConflictAlgorithm conflictAlgorithm = ConflictAlgorithm.replace, }) async { - debugPrint("Inserting missing PathIDToLocalIDMapping"); + debugPrint("entitiesDB: upsertEntities ${data.length} entities"); final db = await database; var batch = db.batch(); int batchCounter = 0; @@ -62,4 +62,17 @@ extension EntitiesDB on FilesDB { return LocalEntityData.fromJson(maps[i]); }); } + + Future getEntity(EntityType type, String id) async { + final db = await database; + final List> maps = await db.query( + "entities", + where: "type = ? AND id = ?", + whereArgs: [type.typeToString(), id], + ); + if (maps.isEmpty) { + return null; + } + return LocalEntityData.fromJson(maps.first); + } } diff --git a/mobile/lib/db/files_db.dart b/mobile/lib/db/files_db.dart index 7022100b73..f72ecb32a4 100644 --- a/mobile/lib/db/files_db.dart +++ b/mobile/lib/db/files_db.dart @@ -491,6 +491,18 @@ class FilesDB { return convertToFiles(results)[0]; } + Future getAnyUploadedFile(int uploadedID) async { + final db = await instance.sqliteAsyncDB; + final results = await db.getAll( + 'SELECT * FROM $filesTable WHERE $columnUploadedFileID = ?', + [uploadedID], + ); + if (results.isEmpty) { + return null; + } + return convertToFiles(results)[0]; + } + Future> getUploadedFileIDs(int collectionID) async { final db = await instance.sqliteAsyncDB; final results = await db.getAll( @@ -683,6 +695,17 @@ class FilesDB { return files; } + Future> getAllFilesFromCollections( + Iterable collectionID, + ) async { + final db = await instance.sqliteAsyncDB; + final String sql = + 'SELECT * FROM $filesTable WHERE $columnCollectionID IN (${collectionID.join(',')})'; + final results = await db.getAll(sql); + final files = convertToFiles(results); + return files; + } + Future> getNewFilesInCollection( int collectionID, int addedTime, @@ -1304,6 +1327,23 @@ class FilesDB { return result; } + Future> getFileIDToCreationTime() async { + final db = await 
instance.sqliteAsyncDB; + final rows = await db.getAll( + ''' + SELECT $columnUploadedFileID, $columnCreationTime + FROM $filesTable + WHERE + ($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1); + ''', + ); + final result = {}; + for (final row in rows) { + result[row[columnUploadedFileID] as int] = row[columnCreationTime] as int; + } + return result; + } + // getCollectionFileFirstOrLast returns the first or last uploaded file in // the collection based on the given collectionID and the order. Future getCollectionFileFirstOrLast( @@ -1643,13 +1683,14 @@ class FilesDB { } Future> getOwnedFileIDs(int ownerID) async { - final db = await instance.database; - final results = await db.query( - filesTable, - columns: [columnUploadedFileID], - where: - '($columnOwnerID = $ownerID AND $columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1)', - distinct: true, + final db = await instance.sqliteAsyncDB; + final results = await db.getAll( + ''' + SELECT DISTINCT $columnUploadedFileID FROM $filesTable + WHERE ($columnOwnerID = ? 
AND $columnUploadedFileID IS NOT NULL AND + $columnUploadedFileID IS NOT -1) + ''', + [ownerID], ); final ids = []; for (final result in results) { @@ -1659,16 +1700,17 @@ class FilesDB { } Future> getUploadedFiles(List uploadedIDs) async { - final db = await instance.database; + final db = await instance.sqliteAsyncDB; String inParam = ""; for (final id in uploadedIDs) { inParam += "'" + id.toString() + "',"; } inParam = inParam.substring(0, inParam.length - 1); - final results = await db.query( - filesTable, - where: '$columnUploadedFileID IN ($inParam)', - groupBy: columnUploadedFileID, + final results = await db.getAll( + ''' + SELECT * FROM $filesTable WHERE $columnUploadedFileID IN ($inParam) + GROUP BY $columnUploadedFileID +''', ); if (results.isEmpty) { return []; diff --git a/mobile/lib/db/upload_locks_db.dart b/mobile/lib/db/upload_locks_db.dart index 11112d0cea..b32084b6ff 100644 --- a/mobile/lib/db/upload_locks_db.dart +++ b/mobile/lib/db/upload_locks_db.dart @@ -3,16 +3,60 @@ import 'dart:io'; import 'package:path/path.dart'; import 'package:path_provider/path_provider.dart'; +import "package:photos/module/upload/model/multipart.dart"; import 'package:sqflite/sqflite.dart'; +import "package:sqflite_migration/sqflite_migration.dart"; class UploadLocksDB { static const _databaseName = "ente.upload_locks.db"; - static const _databaseVersion = 1; - static const _table = "upload_locks"; - static const _columnID = "id"; - static const _columnOwner = "owner"; - static const _columnTime = "time"; + static const _uploadLocksTable = ( + table: "upload_locks", + columnID: "id", + columnOwner: "owner", + columnTime: "time", + ); + + static const _trackUploadTable = ( + table: "track_uploads", + columnID: "id", + columnLocalID: "local_id", + columnFileHash: "file_hash", + columnCollectionID: "collection_id", + columnEncryptedFileName: "encrypted_file_name", + columnEncryptedFileSize: "encrypted_file_size", + columnEncryptedFileKey: "encrypted_file_key", + 
columnFileEncryptionNonce: "file_encryption_nonce", + columnKeyEncryptionNonce: "key_encryption_nonce", + columnObjectKey: "object_key", + columnCompleteUrl: "complete_url", + columnStatus: "status", + columnPartSize: "part_size", + columnLastAttemptedAt: "last_attempted_at", + columnCreatedAt: "created_at", + ); + + static const _partsTable = ( + table: "upload_parts", + columnObjectKey: "object_key", + columnPartNumber: "part_number", + columnPartUrl: "part_url", + columnPartETag: "part_etag", + columnPartStatus: "part_status", + ); + + static final initializationScript = [ + ..._createUploadLocksTable(), + ]; + + static final migrationScripts = [ + ..._createTrackUploadsTable(), + ]; + + final dbConfig = MigrationConfig( + initializationScript: initializationScript, + migrationScripts: migrationScripts, + ); UploadLocksDB._privateConstructor(); static final UploadLocksDB instance = UploadLocksDB._privateConstructor(); @@ -27,44 +71,82 @@ class UploadLocksDB { final Directory documentsDirectory = await getApplicationDocumentsDirectory(); final String path = join(documentsDirectory.path, _databaseName); - return await openDatabase( - path, - version: _databaseVersion, - onCreate: _onCreate, - ); + + return await openDatabaseWithMigration(path, dbConfig); } - Future _onCreate(Database db, int version) async { - await db.execute( + static List _createUploadLocksTable() { + return [ ''' - CREATE TABLE $_table ( - $_columnID TEXT PRIMARY KEY NOT NULL, - $_columnOwner TEXT NOT NULL, - $_columnTime TEXT NOT NULL + CREATE TABLE ${_uploadLocksTable.table} ( + ${_uploadLocksTable.columnID} TEXT PRIMARY KEY NOT NULL, + ${_uploadLocksTable.columnOwner} TEXT NOT NULL, + ${_uploadLocksTable.columnTime} TEXT NOT NULL ) ''', - ); + ]; + } + + static List _createTrackUploadsTable() { + return [ + ''' + CREATE TABLE IF NOT EXISTS ${_trackUploadTable.table} ( + ${_trackUploadTable.columnID} INTEGER PRIMARY KEY, + ${_trackUploadTable.columnLocalID} TEXT NOT NULL, + 
${_trackUploadTable.columnFileHash} TEXT NOT NULL, + ${_trackUploadTable.columnCollectionID} INTEGER NOT NULL, + ${_trackUploadTable.columnEncryptedFileName} TEXT NOT NULL, + ${_trackUploadTable.columnEncryptedFileSize} INTEGER NOT NULL, + ${_trackUploadTable.columnEncryptedFileKey} TEXT NOT NULL, + ${_trackUploadTable.columnFileEncryptionNonce} TEXT NOT NULL, + ${_trackUploadTable.columnKeyEncryptionNonce} TEXT NOT NULL, + ${_trackUploadTable.columnObjectKey} TEXT NOT NULL, + ${_trackUploadTable.columnCompleteUrl} TEXT NOT NULL, + ${_trackUploadTable.columnStatus} TEXT DEFAULT '${MultipartStatus.pending.name}' NOT NULL, + ${_trackUploadTable.columnPartSize} INTEGER NOT NULL, + ${_trackUploadTable.columnLastAttemptedAt} INTEGER NOT NULL, + ${_trackUploadTable.columnCreatedAt} INTEGER DEFAULT CURRENT_TIMESTAMP NOT NULL + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS ${_partsTable.table} ( + ${_partsTable.columnObjectKey} TEXT NOT NULL REFERENCES ${_trackUploadTable.table}(${_trackUploadTable.columnObjectKey}) ON DELETE CASCADE, + ${_partsTable.columnPartNumber} INTEGER NOT NULL, + ${_partsTable.columnPartUrl} TEXT NOT NULL, + ${_partsTable.columnPartETag} TEXT, + ${_partsTable.columnPartStatus} TEXT NOT NULL, + PRIMARY KEY (${_partsTable.columnObjectKey}, ${_partsTable.columnPartNumber}) + ) + ''', + ]; } Future clearTable() async { final db = await instance.database; - await db.delete(_table); + await db.delete(_uploadLocksTable.table); + await db.delete(_trackUploadTable.table); + await db.delete(_partsTable.table); } Future acquireLock(String id, String owner, int time) async { final db = await instance.database; final row = {}; - row[_columnID] = id; - row[_columnOwner] = owner; - row[_columnTime] = time; - await db.insert(_table, row, conflictAlgorithm: ConflictAlgorithm.fail); + row[_uploadLocksTable.columnID] = id; + row[_uploadLocksTable.columnOwner] = owner; + row[_uploadLocksTable.columnTime] = time; + await db.insert( + _uploadLocksTable.table, + row, + 
conflictAlgorithm: ConflictAlgorithm.fail, + ); } Future isLocked(String id, String owner) async { final db = await instance.database; final rows = await db.query( - _table, - where: '$_columnID = ? AND $_columnOwner = ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?', whereArgs: [id, owner], ); return rows.length == 1; @@ -73,8 +155,9 @@ class UploadLocksDB { Future releaseLock(String id, String owner) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnID = ? AND $_columnOwner = ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?', whereArgs: [id, owner], ); } @@ -82,8 +165,9 @@ class UploadLocksDB { Future releaseLocksAcquiredByOwnerBefore(String owner, int time) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnOwner = ? AND $_columnTime < ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnOwner} = ? AND ${_uploadLocksTable.columnTime} < ?', whereArgs: [owner, time], ); } @@ -91,9 +175,251 @@ class UploadLocksDB { Future releaseAllLocksAcquiredBefore(int time) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnTime < ?', + _uploadLocksTable.table, + where: '${_uploadLocksTable.columnTime} < ?', whereArgs: [time], ); } + + Future<({String encryptedFileKey, String fileNonce, String keyNonce})> + getFileEncryptionData( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + + if (rows.isEmpty) { + throw Exception("No cached links found for $localId and $fileHash"); + } + final row = rows.first; + + return ( + encryptedFileKey: row[_trackUploadTable.columnEncryptedFileKey] as String, + fileNonce: row[_trackUploadTable.columnFileEncryptionNonce] as String, + keyNonce: row[_trackUploadTable.columnKeyEncryptionNonce] as String, + ); + } + + Future updateLastAttempted( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + await db.update( + _trackUploadTable.table, + { + _trackUploadTable.columnLastAttemptedAt: + DateTime.now().millisecondsSinceEpoch, + }, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' + ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [ + localId, + fileHash, + collectionID, + ], + ); + } + + Future getCachedLinks( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + if (rows.isEmpty) { + throw Exception("No cached links found for $localId and $fileHash"); + } + final row = rows.first; + final objectKey = row[_trackUploadTable.columnObjectKey] as String; + final partsStatus = await db.query( + _partsTable.table, + where: '${_partsTable.columnObjectKey} = ?', + whereArgs: [objectKey], + ); + + final List partUploadStatus = []; + final List partsURLs = List.generate( + partsStatus.length, + (index) => "", + ); + final Map partETags = {}; + + for (final part in partsStatus) { + final partNumber = part[_partsTable.columnPartNumber] as int; + final partUrl = part[_partsTable.columnPartUrl] as String; + final partStatus = part[_partsTable.columnPartStatus] as String; + partsURLs[partNumber] = partUrl; + if (part[_partsTable.columnPartETag] != null) { + partETags[partNumber] = part[_partsTable.columnPartETag] as String; + } + partUploadStatus.add(partStatus == "uploaded"); + } + final urls = MultipartUploadURLs( + objectKey: objectKey, + completeURL: row[_trackUploadTable.columnCompleteUrl] as String, + partsURLs: partsURLs, + ); + + return MultipartInfo( + urls: urls, + status: MultipartStatus.values + .byName(row[_trackUploadTable.columnStatus] as String), + partUploadStatus: partUploadStatus, + partETags: partETags, + partSize: row[_trackUploadTable.columnPartSize] as int, + ); + } + + Future createTrackUploadsEntry( + String localId, + String fileHash, + int collectionID, + MultipartUploadURLs urls, + String encryptedFileName, + int fileSize, + String fileKey, + String fileNonce, + String keyNonce, { + required int partSize, + }) async { + final db = await UploadLocksDB.instance.database; + final objectKey = urls.objectKey; + + await db.insert( + _trackUploadTable.table, + { + _trackUploadTable.columnLocalID: localId, + _trackUploadTable.columnFileHash: fileHash, + _trackUploadTable.columnCollectionID: collectionID, + 
_trackUploadTable.columnObjectKey: objectKey, + _trackUploadTable.columnCompleteUrl: urls.completeURL, + _trackUploadTable.columnEncryptedFileName: encryptedFileName, + _trackUploadTable.columnEncryptedFileSize: fileSize, + _trackUploadTable.columnEncryptedFileKey: fileKey, + _trackUploadTable.columnFileEncryptionNonce: fileNonce, + _trackUploadTable.columnKeyEncryptionNonce: keyNonce, + _trackUploadTable.columnPartSize: partSize, + _trackUploadTable.columnLastAttemptedAt: + DateTime.now().millisecondsSinceEpoch, + }, + ); + + final partsURLs = urls.partsURLs; + final partsLength = partsURLs.length; + + for (int i = 0; i < partsLength; i++) { + await db.insert( + _partsTable.table, + { + _partsTable.columnObjectKey: objectKey, + _partsTable.columnPartNumber: i, + _partsTable.columnPartUrl: partsURLs[i], + _partsTable.columnPartStatus: PartStatus.pending.name, + }, + ); + } + } + + Future updatePartStatus( + String objectKey, + int partNumber, + String etag, + ) async { + final db = await instance.database; + await db.update( + _partsTable.table, + { + _partsTable.columnPartStatus: PartStatus.uploaded.name, + _partsTable.columnPartETag: etag, + }, + where: + '${_partsTable.columnObjectKey} = ? 
AND ${_partsTable.columnPartNumber} = ?', + whereArgs: [objectKey, partNumber], + ); + } + + Future updateTrackUploadStatus( + String objectKey, + MultipartStatus status, + ) async { + final db = await instance.database; + await db.update( + _trackUploadTable.table, + { + _trackUploadTable.columnStatus: status.name, + }, + where: '${_trackUploadTable.columnObjectKey} = ?', + whereArgs: [objectKey], + ); + } + + Future deleteMultipartTrack( + String localId, + ) async { + final db = await instance.database; + return await db.delete( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?', + whereArgs: [localId], + ); + } + + // getFileNameToLastAttemptedAtMap returns a map of encrypted file name to last attempted at time + Future> getFileNameToLastAttemptedAtMap() { + return instance.database.then((db) async { + final rows = await db.query( + _trackUploadTable.table, + columns: [ + _trackUploadTable.columnEncryptedFileName, + _trackUploadTable.columnLastAttemptedAt, + ], + ); + final map = {}; + for (final row in rows) { + map[row[_trackUploadTable.columnEncryptedFileName] as String] = + row[_trackUploadTable.columnLastAttemptedAt] as int; + } + return map; + }); + } + + Future getEncryptedFileName( + String localId, + String fileHash, + int collectionID, + ) { + return instance.database.then((db) async { + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + if (rows.isEmpty) { + return null; + } + final row = rows.first; + return row[_trackUploadTable.columnEncryptedFileName] as String; + }); + } } diff --git a/mobile/lib/events/embedding_updated_event.dart b/mobile/lib/events/embedding_updated_event.dart index 9021b8b50c..736b85c17c 100644 --- a/mobile/lib/events/embedding_updated_event.dart +++ b/mobile/lib/events/embedding_updated_event.dart @@ -1,3 +1,5 @@ import "package:photos/events/event.dart"; class EmbeddingUpdatedEvent extends Event {} + +class EmbeddingCacheUpdatedEvent extends Event {} diff --git a/mobile/lib/events/files_updated_event.dart b/mobile/lib/events/files_updated_event.dart index 18aa8757bc..2fc67d6460 100644 --- a/mobile/lib/events/files_updated_event.dart +++ b/mobile/lib/events/files_updated_event.dart @@ -26,4 +26,6 @@ enum EventType { hide, unhide, coverChanged, + peopleChanged, + peopleClusterChanged, } diff --git a/mobile/lib/events/people_changed_event.dart b/mobile/lib/events/people_changed_event.dart new file mode 100644 index 0000000000..51f4eaeefe --- /dev/null +++ b/mobile/lib/events/people_changed_event.dart @@ -0,0 +1,22 @@ +import "package:photos/events/event.dart"; +import "package:photos/models/file/file.dart"; + +class PeopleChangedEvent extends Event { + final List? 
relevantFiles; + final PeopleEventType type; + final String source; + + PeopleChangedEvent({ + this.relevantFiles, + this.type = PeopleEventType.defaultType, + this.source = "", + }); + + @override + String get reason => '$runtimeType{type: ${type.name}, "via": $source}'; +} + +enum PeopleEventType { + defaultType, + removedFilesFromCluster, +} \ No newline at end of file diff --git a/mobile/lib/extensions/ml_linalg_extensions.dart b/mobile/lib/extensions/ml_linalg_extensions.dart new file mode 100644 index 0000000000..85a980855f --- /dev/null +++ b/mobile/lib/extensions/ml_linalg_extensions.dart @@ -0,0 +1,193 @@ +import 'dart:math' as math show sin, cos, atan2, sqrt, pow; +import 'package:ml_linalg/linalg.dart'; + +extension SetVectorValues on Vector { + Vector setValues(int start, int end, Iterable values) { + if (values.length > length) { + throw Exception('Values cannot be larger than vector'); + } else if (end - start != values.length) { + throw Exception('Values must be same length as range'); + } else if (start < 0 || end > length) { + throw Exception('Range must be within vector'); + } + final tempList = toList(); + tempList.replaceRange(start, end, values); + final newVector = Vector.fromList(tempList); + return newVector; + } +} + +extension SetMatrixValues on Matrix { + Matrix setSubMatrix( + int startRow, + int endRow, + int startColumn, + int endColumn, + Iterable> values, + ) { + if (values.length > rowCount) { + throw Exception('New values cannot have more rows than original matrix'); + } else if (values.elementAt(0).length > columnCount) { + throw Exception( + 'New values cannot have more columns than original matrix', + ); + } else if (endRow - startRow != values.length) { + throw Exception('Values (number of rows) must be same length as range'); + } else if (endColumn - startColumn != values.elementAt(0).length) { + throw Exception( + 'Values (number of columns) must be same length as range', + ); + } else if (startRow < 0 || + endRow > rowCount 
|| + startColumn < 0 || + endColumn > columnCount) { + throw Exception('Range must be within matrix'); + } + final tempList = asFlattenedList + .toList(); // You need `.toList()` here to make sure the list is growable, otherwise `replaceRange` will throw an error + for (var i = startRow; i < endRow; i++) { + tempList.replaceRange( + i * columnCount + startColumn, + i * columnCount + endColumn, + values.elementAt(i).toList(), + ); + } + final newMatrix = Matrix.fromFlattenedList(tempList, rowCount, columnCount); + return newMatrix; + } + + Matrix setValues( + int startRow, + int endRow, + int startColumn, + int endColumn, + Iterable values, + ) { + if ((startRow - endRow) * (startColumn - endColumn) != values.length) { + throw Exception('Values must be same length as range'); + } else if (startRow < 0 || + endRow > rowCount || + startColumn < 0 || + endColumn > columnCount) { + throw Exception('Range must be within matrix'); + } + + final tempList = asFlattenedList + .toList(); // You need `.toList()` here to make sure the list is growable, otherwise `replaceRange` will throw an error + var index = 0; + for (var i = startRow; i < endRow; i++) { + for (var j = startColumn; j < endColumn; j++) { + tempList[i * columnCount + j] = values.elementAt(index); + index++; + } + } + final newMatrix = Matrix.fromFlattenedList(tempList, rowCount, columnCount); + return newMatrix; + } + + Matrix setValue(int row, int column, double value) { + if (row < 0 || row > rowCount || column < 0 || column > columnCount) { + throw Exception('Index must be within range of matrix'); + } + final tempList = asFlattenedList; + tempList[row * columnCount + column] = value; + final newMatrix = Matrix.fromFlattenedList(tempList, rowCount, columnCount); + return newMatrix; + } + + Matrix appendRow(List row) { + final oldNumberOfRows = rowCount; + final oldNumberOfColumns = columnCount; + if (row.length != oldNumberOfColumns) { + throw Exception('Row must have same number of columns as matrix'); + } 
+ final flatListMatrix = asFlattenedList; + flatListMatrix.addAll(row); + return Matrix.fromFlattenedList( + flatListMatrix, + oldNumberOfRows + 1, + oldNumberOfColumns, + ); + } +} + +extension MatrixCalculations on Matrix { + double determinant() { + final int length = rowCount; + if (length != columnCount) { + throw Exception('Matrix must be square'); + } + if (length == 1) { + return this[0][0]; + } else if (length == 2) { + return this[0][0] * this[1][1] - this[0][1] * this[1][0]; + } else { + throw Exception('Determinant for Matrix larger than 2x2 not implemented'); + } + } + + /// Computes the singular value decomposition of a matrix, using https://lucidar.me/en/mathematics/singular-value-decomposition-of-a-2x2-matrix/ as reference, but with slightly different signs for the second columns of U and V + Map svd() { + if (rowCount != 2 || columnCount != 2) { + throw Exception('Matrix must be 2x2'); + } + final a = this[0][0]; + final b = this[0][1]; + final c = this[1][0]; + final d = this[1][1]; + + // Computation of U matrix + final tempCalc = a * a + b * b - c * c - d * d; + final theta = 0.5 * math.atan2(2 * a * c + 2 * b * d, tempCalc); + final U = Matrix.fromList([ + [math.cos(theta), math.sin(theta)], + [math.sin(theta), -math.cos(theta)], + ]); + + // Computation of S matrix + // ignore: non_constant_identifier_names + final S1 = a * a + b * b + c * c + d * d; + // ignore: non_constant_identifier_names + final S2 = + math.sqrt(math.pow(tempCalc, 2) + 4 * math.pow(a * c + b * d, 2)); + final sigma1 = math.sqrt((S1 + S2) / 2); + final sigma2 = math.sqrt((S1 - S2) / 2); + final S = Vector.fromList([sigma1, sigma2]); + + // Computation of V matrix + final tempCalc2 = a * a - b * b + c * c - d * d; + final phi = 0.5 * math.atan2(2 * a * b + 2 * c * d, tempCalc2); + final s11 = (a * math.cos(theta) + c * math.sin(theta)) * math.cos(phi) + + (b * math.cos(theta) + d * math.sin(theta)) * math.sin(phi); + final s22 = (a * math.sin(theta) - c * math.cos(theta)) * 
math.sin(phi) + + (-b * math.sin(theta) + d * math.cos(theta)) * math.cos(phi); + final V = Matrix.fromList([ + [s11.sign * math.cos(phi), s22.sign * math.sin(phi)], + [s11.sign * math.sin(phi), -s22.sign * math.cos(phi)], + ]); + + return { + 'U': U, + 'S': S, + 'V': V, + }; + } + + int matrixRank() { + final svdResult = svd(); + final Vector S = svdResult['S']!; + final rank = S.toList().where((element) => element > 1e-10).length; + return rank; + } +} + +extension TransformMatrix on Matrix { + List> to2DList() { + final List> outerList = []; + for (var i = 0; i < rowCount; i++) { + final innerList = this[i].toList(); + outerList.add(innerList); + } + return outerList; + } +} diff --git a/mobile/lib/extensions/stop_watch.dart b/mobile/lib/extensions/stop_watch.dart index a381fcbc14..708af081be 100644 --- a/mobile/lib/extensions/stop_watch.dart +++ b/mobile/lib/extensions/stop_watch.dart @@ -23,4 +23,9 @@ class EnteWatch extends Stopwatch { reset(); previousElapsed = 0; } + + void stopWithLog(String msg) { + log(msg); + stop(); + } } diff --git a/mobile/lib/face/db.dart b/mobile/lib/face/db.dart new file mode 100644 index 0000000000..c72b197b46 --- /dev/null +++ b/mobile/lib/face/db.dart @@ -0,0 +1,1012 @@ +import 'dart:async'; +import "dart:math"; + +import "package:collection/collection.dart"; +import "package:flutter/foundation.dart"; +import 'package:logging/logging.dart'; +import 'package:path/path.dart' show join; +import 'package:path_provider/path_provider.dart'; +import "package:photos/extensions/stop_watch.dart"; +import 'package:photos/face/db_fields.dart'; +import "package:photos/face/db_model_mappers.dart"; +import "package:photos/face/model/face.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/services/machine_learning/face_ml/face_clustering/face_info_for_clustering.dart"; +import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart'; +import 
'package:sqlite_async/sqlite_async.dart'; + +/// Stores all data for the FacesML-related features. The database can be accessed by `FaceMLDataDB.instance.database`. +/// +/// This includes: +/// [facesTable] - Stores all the detected faces and its embeddings in the images. +/// [createFaceClustersTable] - Stores all the mappings from the faces (faceID) to the clusters (clusterID). +/// [clusterPersonTable] - Stores all the clusters that are mapped to a certain person. +/// [clusterSummaryTable] - Stores a summary of each cluster, containg the mean embedding and the number of faces in the cluster. +/// [notPersonFeedback] - Stores the clusters that are confirmed not to belong to a certain person by the user +class FaceMLDataDB { + static final Logger _logger = Logger("FaceMLDataDB"); + + static const _databaseName = "ente.face_ml_db.db"; + // static const _databaseVersion = 1; + + FaceMLDataDB._privateConstructor(); + + static final FaceMLDataDB instance = FaceMLDataDB._privateConstructor(); + + // only have a single app-wide reference to the database + static Future? 
_sqliteAsyncDBFuture; + + Future get asyncDB async { + _sqliteAsyncDBFuture ??= _initSqliteAsyncDatabase(); + return _sqliteAsyncDBFuture!; + } + + Future _initSqliteAsyncDatabase() async { + final documentsDirectory = await getApplicationDocumentsDirectory(); + final String databaseDirectory = + join(documentsDirectory.path, _databaseName); + _logger.info("Opening sqlite_async access: DB path " + databaseDirectory); + final asyncDBConnection = + SqliteDatabase(path: databaseDirectory, maxReaders: 2); + await _onCreate(asyncDBConnection); + return asyncDBConnection; + } + + Future _onCreate(SqliteDatabase asyncDBConnection) async { + await asyncDBConnection.execute(createFacesTable); + await asyncDBConnection.execute(createFaceClustersTable); + await asyncDBConnection.execute(createClusterPersonTable); + await asyncDBConnection.execute(createClusterSummaryTable); + await asyncDBConnection.execute(createNotPersonFeedbackTable); + await asyncDBConnection.execute(fcClusterIDIndex); + } + + // bulkInsertFaces inserts the faces in the database in batches of 1000. + // This is done to avoid the error "too many SQL variables" when inserting + // a large number of faces. + Future bulkInsertFaces(List faces) async { + final db = await instance.asyncDB; + const batchSize = 500; + final numBatches = (faces.length / batchSize).ceil(); + for (int i = 0; i < numBatches; i++) { + final start = i * batchSize; + final end = min((i + 1) * batchSize, faces.length); + final batch = faces.sublist(start, end); + + const String sql = ''' + INSERT INTO $facesTable ( + $fileIDColumn, $faceIDColumn, $faceDetectionColumn, $faceEmbeddingBlob, $faceScore, $faceBlur, $isSideways, $imageHeight, $imageWidth, $mlVersionColumn + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ON CONFLICT($fileIDColumn, $faceIDColumn) DO UPDATE SET $faceIDColumn = excluded.$faceIDColumn, $faceDetectionColumn = excluded.$faceDetectionColumn, $faceEmbeddingBlob = excluded.$faceEmbeddingBlob, $faceScore = excluded.$faceScore, $faceBlur = excluded.$faceBlur, $isSideways = excluded.$isSideways, $imageHeight = excluded.$imageHeight, $imageWidth = excluded.$imageWidth, $mlVersionColumn = excluded.$mlVersionColumn + '''; + final parameterSets = batch.map((face) { + final map = mapRemoteToFaceDB(face); + return [ + map[fileIDColumn], + map[faceIDColumn], + map[faceDetectionColumn], + map[faceEmbeddingBlob], + map[faceScore], + map[faceBlur], + map[isSideways], + map[imageHeight], + map[imageWidth], + map[mlVersionColumn], + ]; + }).toList(); + + await db.executeBatch(sql, parameterSets); + } + } + + Future updateFaceIdToClusterId( + Map faceIDToClusterID, + ) async { + final db = await instance.asyncDB; + const batchSize = 500; + final numBatches = (faceIDToClusterID.length / batchSize).ceil(); + for (int i = 0; i < numBatches; i++) { + final start = i * batchSize; + final end = min((i + 1) * batchSize, faceIDToClusterID.length); + final batch = faceIDToClusterID.entries.toList().sublist(start, end); + + const String sql = ''' + INSERT INTO $faceClustersTable ($fcFaceId, $fcClusterID) + VALUES (?, ?) + ON CONFLICT($fcFaceId) DO UPDATE SET $fcClusterID = excluded.$fcClusterID + '''; + final parameterSets = batch.map((e) => [e.key, e.value]).toList(); + + await db.executeBatch(sql, parameterSets); + } + } + + /// Returns a map of fileID to the indexed ML version + Future> getIndexedFileIds({int? 
minimumMlVersion}) async { + final db = await instance.asyncDB; + String query = ''' + SELECT $fileIDColumn, $mlVersionColumn + FROM $facesTable + '''; + if (minimumMlVersion != null) { + query += ' WHERE $mlVersionColumn >= $minimumMlVersion'; + } + final List> maps = await db.getAll(query); + final Map result = {}; + for (final map in maps) { + result[map[fileIDColumn] as int] = map[mlVersionColumn] as int; + } + return result; + } + + Future getIndexedFileCount({int? minimumMlVersion}) async { + final db = await instance.asyncDB; + String query = + 'SELECT COUNT(DISTINCT $fileIDColumn) as count FROM $facesTable'; + if (minimumMlVersion != null) { + query += ' WHERE $mlVersionColumn >= $minimumMlVersion'; + } + final List> maps = await db.getAll(query); + return maps.first['count'] as int; + } + + Future> clusterIdToFaceCount() async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $fcClusterID, COUNT(*) as count FROM $faceClustersTable where $fcClusterID IS NOT NULL GROUP BY $fcClusterID ', + ); + final Map result = {}; + for (final map in maps) { + result[map[fcClusterID] as int] = map['count'] as int; + } + return result; + } + + Future> getPersonIgnoredClusters(String personID) async { + final db = await instance.asyncDB; + // find out clusterIds that are assigned to other persons using the clusters table + final List> otherPersonMaps = await db.getAll( + 'SELECT $clusterIDColumn FROM $clusterPersonTable WHERE $personIdColumn != ? 
AND $personIdColumn IS NOT NULL', + [personID], + ); + final Set ignoredClusterIDs = + otherPersonMaps.map((e) => e[clusterIDColumn] as int).toSet(); + final List> rejectMaps = await db.getAll( + 'SELECT $clusterIDColumn FROM $notPersonFeedback WHERE $personIdColumn = ?', + [personID], + ); + final Set rejectClusterIDs = + rejectMaps.map((e) => e[clusterIDColumn] as int).toSet(); + return ignoredClusterIDs.union(rejectClusterIDs); + } + + Future> getPersonClusterIDs(String personID) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $clusterIDColumn FROM $clusterPersonTable WHERE $personIdColumn = ?', + [personID], + ); + return maps.map((e) => e[clusterIDColumn] as int).toSet(); + } + + Future clearTable() async { + final db = await instance.asyncDB; + + await db.execute(deleteFacesTable); + await db.execute(dropClusterPersonTable); + await db.execute(dropClusterSummaryTable); + await db.execute(deletePersonTable); + await db.execute(dropNotPersonFeedbackTable); + } + + Future> getFaceEmbeddingsForCluster( + int clusterID, { + int? limit, + }) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $faceEmbeddingBlob FROM $facesTable WHERE $faceIDColumn in (SELECT $fcFaceId from $faceClustersTable where $fcClusterID = ?) ${limit != null ? 'LIMIT $limit' : ''}', + [clusterID], + ); + return maps.map((e) => e[faceEmbeddingBlob] as Uint8List); + } + + Future>> getFaceEmbeddingsForClusters( + Iterable clusterIDs, { + int? limit, + }) async { + final db = await instance.asyncDB; + final Map> result = {}; + + final selectQuery = ''' + SELECT fc.$fcClusterID, fe.$faceEmbeddingBlob + FROM $faceClustersTable fc + INNER JOIN $facesTable fe ON fc.$fcFaceId = fe.$faceIDColumn + WHERE fc.$fcClusterID IN (${clusterIDs.join(',')}) + ${limit != null ? 
'LIMIT $limit' : ''} + '''; + + final List> maps = await db.getAll(selectQuery); + + for (final map in maps) { + final clusterID = map[fcClusterID] as int; + final faceEmbedding = map[faceEmbeddingBlob] as Uint8List; + result.putIfAbsent(clusterID, () => []).add(faceEmbedding); + } + + return result; + } + + Future getCoverFaceForPerson({ + required int recentFileID, + String? personID, + String? avatarFaceId, + int? clusterID, + }) async { + // read person from db + final db = await instance.asyncDB; + if (personID != null) { + final List fileId = [recentFileID]; + int? avatarFileId; + if (avatarFaceId != null) { + avatarFileId = int.tryParse(avatarFaceId.split('_')[0]); + if (avatarFileId != null) { + fileId.add(avatarFileId); + } + } + const String queryClusterID = ''' + SELECT $clusterIDColumn + FROM $clusterPersonTable + WHERE $personIdColumn = ? + '''; + final clusterRows = await db.getAll( + queryClusterID, + [personID], + ); + final clusterIDs = + clusterRows.map((e) => e[clusterIDColumn] as int).toList(); + final List> faceMaps = await db.getAll( + 'SELECT * FROM $facesTable where ' + '$faceIDColumn in (SELECT $fcFaceId from $faceClustersTable where $fcClusterID IN (${clusterIDs.join(",")}))' + 'AND $fileIDColumn in (${fileId.join(",")}) AND $faceScore > $kMinimumQualityFaceScore ORDER BY $faceScore DESC', + ); + if (faceMaps.isNotEmpty) { + if (avatarFileId != null) { + final row = faceMaps.firstWhereOrNull( + (element) => (element[fileIDColumn] as int) == avatarFileId, + ); + if (row != null) { + return mapRowToFace(row); + } + } + return mapRowToFace(faceMaps.first); + } + } + if (clusterID != null) { + const String queryFaceID = ''' + SELECT $fcFaceId + FROM $faceClustersTable + WHERE $fcClusterID = ? + '''; + final List> faceMaps = await db.getAll( + queryFaceID, + [clusterID], + ); + final List? 
faces = await getFacesForGivenFileID(recentFileID); + if (faces != null) { + for (final face in faces) { + if (faceMaps + .any((element) => (element[fcFaceId] as String) == face.faceID)) { + return face; + } + } + } + } + if (personID == null && clusterID == null) { + throw Exception("personID and clusterID cannot be null"); + } + return null; + } + + Future?> getFacesForGivenFileID(int fileUploadID) async { + final db = await instance.asyncDB; + const String query = ''' + SELECT * FROM $facesTable + WHERE $fileIDColumn = ? + '''; + final List> maps = await db.getAll( + query, + [fileUploadID], + ); + if (maps.isEmpty) { + return null; + } + return maps.map((e) => mapRowToFace(e)).toList(); + } + + Future getFaceForFaceID(String faceID) async { + final db = await instance.asyncDB; + final result = await db.getAll( + 'SELECT * FROM $facesTable where $faceIDColumn = ?', + [faceID], + ); + if (result.isEmpty) { + return null; + } + return mapRowToFace(result.first); + } + + Future>> getClusterToFaceIDs( + Set clusterIDs, + ) async { + final db = await instance.asyncDB; + final Map> result = {}; + final List> maps = await db.getAll( + 'SELECT $fcClusterID, $fcFaceId FROM $faceClustersTable WHERE $fcClusterID IN (${clusterIDs.join(",")})', + ); + for (final map in maps) { + final clusterID = map[fcClusterID] as int; + final faceID = map[fcFaceId] as String; + result.putIfAbsent(clusterID, () => []).add(faceID); + } + return result; + } + + Future getClusterIDForFaceID(String faceID) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $fcClusterID FROM $faceClustersTable WHERE $fcFaceId = ?', + [faceID], + ); + if (maps.isEmpty) { + return null; + } + return maps.first[fcClusterID] as int; + } + + Future>> getAllClusterIdToFaceIDs() async { + final db = await instance.asyncDB; + final Map> result = {}; + final List> maps = await db.getAll( + 'SELECT $fcClusterID, $fcFaceId FROM $faceClustersTable', + ); + for (final map in maps) { 
+ final clusterID = map[fcClusterID] as int; + final faceID = map[fcFaceId] as String; + result.putIfAbsent(clusterID, () => []).add(faceID); + } + return result; + } + + Future> getFaceIDsForCluster(int clusterID) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $fcFaceId FROM $faceClustersTable ' + 'WHERE $faceClustersTable.$fcClusterID = ?', + [clusterID], + ); + return maps.map((e) => e[fcFaceId] as String).toSet(); + } + + // Get Map of personID to Map of clusterID to faceIDs + Future>>> + getPersonToClusterIdToFaceIds() async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $personIdColumn, $faceClustersTable.$fcClusterID, $fcFaceId FROM $clusterPersonTable ' + 'LEFT JOIN $faceClustersTable ON $clusterPersonTable.$clusterIDColumn = $faceClustersTable.$fcClusterID', + ); + final Map>> result = {}; + for (final map in maps) { + final personID = map[personIdColumn] as String; + final clusterID = map[fcClusterID] as int; + final faceID = map[fcFaceId] as String; + result.putIfAbsent(personID, () => {}).putIfAbsent(clusterID, () => {}) + .add(faceID); + } + return result; + } + + Future> getFaceIDsForPerson(String personID) async { + final db = await instance.asyncDB; + final faceIdsResult = await db.getAll( + 'SELECT $fcFaceId FROM $faceClustersTable LEFT JOIN $clusterPersonTable ' + 'ON $faceClustersTable.$fcClusterID = $clusterPersonTable.$clusterIDColumn ' + 'WHERE $clusterPersonTable.$personIdColumn = ?', + [personID], + ); + return faceIdsResult.map((e) => e[fcFaceId] as String).toSet(); + } + + Future> getBlurValuesForCluster(int clusterID) async { + final db = await instance.asyncDB; + const String query = ''' + SELECT $facesTable.$faceBlur + FROM $facesTable + JOIN $faceClustersTable ON $facesTable.$faceIDColumn = $faceClustersTable.$fcFaceId + WHERE $faceClustersTable.$fcClusterID = ? 
+ '''; + // const String query2 = ''' + // SELECT $faceBlur + // FROM $facesTable + // WHERE $faceIDColumn IN (SELECT $fcFaceId FROM $faceClustersTable WHERE $fcClusterID = ?) + // '''; + final List> maps = await db.getAll( + query, + [clusterID], + ); + return maps.map((e) => e[faceBlur] as double).toSet(); + } + + Future> getFaceIDsToBlurValues( + int maxBlurValue, + ) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $faceIDColumn, $faceBlur FROM $facesTable WHERE $faceBlur < $maxBlurValue AND $faceBlur > 1 ORDER BY $faceBlur ASC', + ); + final Map result = {}; + for (final map in maps) { + result[map[faceIDColumn] as String] = map[faceBlur] as double; + } + return result; + } + + Future> getFaceIdsToClusterIds( + Iterable faceIds, + ) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $fcFaceId, $fcClusterID FROM $faceClustersTable where $fcFaceId IN (${faceIds.map((id) => "'$id'").join(",")})', + ); + final Map result = {}; + for (final map in maps) { + result[map[fcFaceId] as String] = map[fcClusterID] as int?; + } + return result; + } + + Future>> getFileIdToClusterIds() async { + final Map> result = {}; + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $fcClusterID, $fcFaceId FROM $faceClustersTable', + ); + + for (final map in maps) { + final clusterID = map[fcClusterID] as int; + final faceID = map[fcFaceId] as String; + final x = faceID.split('_').first; + final fileID = int.parse(x); + result[fileID] = (result[fileID] ?? {})..add(clusterID); + } + return result; + } + + Future forceUpdateClusterIds( + Map faceIDToClusterID, + ) async { + final db = await instance.asyncDB; + + const String sql = ''' + INSERT INTO $faceClustersTable ($fcFaceId, $fcClusterID) + VALUES (?, ?) 
+ ON CONFLICT($fcFaceId) DO UPDATE SET $fcClusterID = excluded.$fcClusterID + '''; + final parameterSets = + faceIDToClusterID.entries.map((e) => [e.key, e.value]).toList(); + await db.executeBatch(sql, parameterSets); + } + + Future removePerson(String personID) async { + final db = await instance.asyncDB; + + await db.writeTransaction((tx) async { + await tx.execute( + 'DELETE FROM $clusterPersonTable WHERE $personIdColumn = ?', + [personID], + ); + await tx.execute( + 'DELETE FROM $notPersonFeedback WHERE $personIdColumn = ?', + [personID], + ); + }); + } + + Future> getFaceInfoForClustering({ + double minScore = kMinimumQualityFaceScore, + int minClarity = kLaplacianHardThreshold, + int maxFaces = 20000, + int offset = 0, + int batchSize = 10000, + }) async { + try { + final EnteWatch w = EnteWatch("getFaceEmbeddingMap")..start(); + w.logAndReset( + 'reading as float offset: $offset, maxFaces: $maxFaces, batchSize: $batchSize', + ); + final db = await instance.asyncDB; + + final List result = []; + while (true) { + // Query a batch of rows + final List> maps = await db.getAll( + 'SELECT $faceIDColumn, $faceEmbeddingBlob, $faceScore, $faceBlur, $isSideways FROM $facesTable' + ' WHERE $faceScore > $minScore AND $faceBlur > $minClarity' + ' ORDER BY $faceIDColumn' + ' DESC LIMIT $batchSize OFFSET $offset', + ); + // Break the loop if no more rows + if (maps.isEmpty) { + break; + } + final List faceIds = []; + for (final map in maps) { + faceIds.add(map[faceIDColumn] as String); + } + final faceIdToClusterId = await getFaceIdsToClusterIds(faceIds); + for (final map in maps) { + final faceID = map[faceIDColumn] as String; + final faceInfo = FaceInfoForClustering( + faceID: faceID, + clusterId: faceIdToClusterId[faceID], + embeddingBytes: map[faceEmbeddingBlob] as Uint8List, + faceScore: map[faceScore] as double, + blurValue: map[faceBlur] as double, + isSideways: (map[isSideways] as int) == 1, + ); + result.add(faceInfo); + } + if (result.length >= maxFaces) { + 
break; + } + offset += batchSize; + } + w.stopWithLog('done reading face embeddings ${result.length}'); + return result; + } catch (e) { + _logger.severe('err in getFaceInfoForClustering', e); + rethrow; + } + } + + Future> getFaceEmbeddingMapForFile( + List fileIDs, + ) async { + _logger.info('reading face embeddings for ${fileIDs.length} files'); + final db = await instance.asyncDB; + + // Define the batch size + const batchSize = 10000; + int offset = 0; + + final Map result = {}; + while (true) { + // Query a batch of rows + + final List> maps = await db.getAll(''' + SELECT $faceIDColumn, $faceEmbeddingBlob + FROM $facesTable + WHERE $faceScore > $kMinimumQualityFaceScore AND $faceBlur > $kLaplacianHardThreshold AND $fileIDColumn IN (${fileIDs.join(",")}) + ORDER BY $faceIDColumn DESC + LIMIT $batchSize OFFSET $offset + '''); + // final List> maps = await db.query( + // facesTable, + // columns: [faceIDColumn, faceEmbeddingBlob], + // where: + // '$faceScore > $kMinimumQualityFaceScore AND $faceBlur > $kLaplacianHardThreshold AND $fileIDColumn IN (${fileIDs.join(",")})', + // limit: batchSize, + // offset: offset, + // orderBy: '$faceIDColumn DESC', + // ); + // Break the loop if no more rows + if (maps.isEmpty) { + break; + } + for (final map in maps) { + final faceID = map[faceIDColumn] as String; + result[faceID] = map[faceEmbeddingBlob] as Uint8List; + } + if (result.length > 10000) { + break; + } + offset += batchSize; + } + _logger.info('done reading face embeddings for ${fileIDs.length} files'); + return result; + } + + Future> getFaceEmbeddingMapForFaces( + Iterable faceIDs, + ) async { + _logger.info('reading face embeddings for ${faceIDs.length} faces'); + final db = await instance.asyncDB; + + // Define the batch size + const batchSize = 10000; + int offset = 0; + + final Map result = {}; + while (true) { + // Query a batch of rows + final String query = ''' + SELECT $faceIDColumn, $faceEmbeddingBlob + FROM $facesTable + WHERE $faceIDColumn IN 
(${faceIDs.map((id) => "'$id'").join(",")}) + ORDER BY $faceIDColumn DESC + LIMIT $batchSize OFFSET $offset + '''; + final List> maps = await db.getAll(query); + // Break the loop if no more rows + if (maps.isEmpty) { + break; + } + for (final map in maps) { + final faceID = map[faceIDColumn] as String; + result[faceID] = map[faceEmbeddingBlob] as Uint8List; + } + if (result.length > 10000) { + break; + } + offset += batchSize; + } + _logger.info('done reading face embeddings for ${faceIDs.length} faces'); + return result; + } + + Future getTotalFaceCount({ + double minFaceScore = kMinimumQualityFaceScore, + }) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT COUNT(*) as count FROM $facesTable WHERE $faceScore > $minFaceScore AND $faceBlur > $kLaplacianHardThreshold', + ); + return maps.first['count'] as int; + } + + Future getClusteredFaceCount() async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT COUNT(DISTINCT $fcFaceId) as count FROM $faceClustersTable', + ); + return maps.first['count'] as int; + } + + Future getClusteredToTotalFacesRatio() async { + final int totalFaces = await getTotalFaceCount(); + final int clusteredFaces = await getClusteredFaceCount(); + + return clusteredFaces / totalFaces; + } + + Future getBlurryFaceCount([ + int blurThreshold = kLaplacianHardThreshold, + ]) async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT COUNT(*) as count FROM $facesTable WHERE $faceBlur <= $blurThreshold AND $faceScore > $kMinimumQualityFaceScore', + ); + return maps.first['count'] as int; + } + + /// WARNING: This method does not drop the persons and other feedback. Consider using [dropClustersAndPersonTable] instead. 
+ Future resetClusterIDs() async { + try { + final db = await instance.asyncDB; + + await db.execute(dropFaceClustersTable); + await db.execute(createFaceClustersTable); + await db.execute(fcClusterIDIndex); + } catch (e, s) { + _logger.severe('Error resetting clusterIDs', e, s); + } + } + + Future assignClusterToPerson({ + required String personID, + required int clusterID, + }) async { + final db = await instance.asyncDB; + + const String sql = ''' + INSERT INTO $clusterPersonTable ($personIdColumn, $clusterIDColumn) VALUES (?, ?) ON CONFLICT($personIdColumn, $clusterIDColumn) DO NOTHING + '''; + await db.execute(sql, [personID, clusterID]); + } + + Future bulkAssignClusterToPersonID( + Map clusterToPersonID, + ) async { + final db = await instance.asyncDB; + + const String sql = ''' + INSERT INTO $clusterPersonTable ($personIdColumn, $clusterIDColumn) VALUES (?, ?) ON CONFLICT($personIdColumn, $clusterIDColumn) DO NOTHING + '''; + final parameterSets = + clusterToPersonID.entries.map((e) => [e.value, e.key]).toList(); + await db.executeBatch(sql, parameterSets); + // final batch = db.batch(); + // for (final entry in clusterToPersonID.entries) { + // final clusterID = entry.key; + // final personID = entry.value; + // batch.insert( + // clusterPersonTable, + // { + // personIdColumn: personID, + // clusterIDColumn: clusterID, + // }, + // conflictAlgorithm: ConflictAlgorithm.replace, + // ); + // } + // await batch.commit(noResult: true); + } + + Future captureNotPersonFeedback({ + required String personID, + required int clusterID, + }) async { + final db = await instance.asyncDB; + + const String sql = ''' + INSERT INTO $notPersonFeedback ($personIdColumn, $clusterIDColumn) VALUES (?, ?) 
ON CONFLICT DO NOTHING + '''; + await db.execute(sql, [personID, clusterID]); + } + + Future bulkCaptureNotPersonFeedback( + Map clusterToPersonID, + ) async { + final db = await instance.asyncDB; + + const String sql = ''' + INSERT INTO $notPersonFeedback ($personIdColumn, $clusterIDColumn) VALUES (?, ?) ON CONFLICT DO NOTHING + '''; + final parameterSets = + clusterToPersonID.entries.map((e) => [e.value, e.key]).toList(); + + await db.executeBatch(sql, parameterSets); + } + + Future removeClusterToPerson({ + required String personID, + required int clusterID, + }) async { + final db = await instance.asyncDB; + + const String sql = ''' + DELETE FROM $clusterPersonTable WHERE $personIdColumn = ? AND $clusterIDColumn = ? + '''; + await db.execute(sql, [personID, clusterID]); + } + + // for a given personID, return a map of clusterID to fileIDs using join query + Future>> getFileIdToClusterIDSet(String personID) { + final db = instance.asyncDB; + return db.then((db) async { + final List> maps = await db.getAll( + 'SELECT $faceClustersTable.$fcClusterID, $fcFaceId FROM $faceClustersTable ' + 'INNER JOIN $clusterPersonTable ' + 'ON $faceClustersTable.$fcClusterID = $clusterPersonTable.$clusterIDColumn ' + 'WHERE $clusterPersonTable.$personIdColumn = ?', + [personID], + ); + final Map> result = {}; + for (final map in maps) { + final clusterID = map[clusterIDColumn] as int; + final String faceID = map[fcFaceId] as String; + final fileID = int.parse(faceID.split('_').first); + result[fileID] = (result[fileID] ?? 
{})..add(clusterID); + } + return result; + }); + } + + Future>> getFileIdToClusterIDSetForCluster( + Set clusterIDs, + ) { + final db = instance.asyncDB; + return db.then((db) async { + final List> maps = await db.getAll( + 'SELECT $fcClusterID, $fcFaceId FROM $faceClustersTable ' + 'WHERE $fcClusterID IN (${clusterIDs.join(",")})', + ); + final Map> result = {}; + for (final map in maps) { + final clusterID = map[fcClusterID] as int; + final faceId = map[fcFaceId] as String; + final fileID = int.parse(faceId.split("_").first); + result[fileID] = (result[fileID] ?? {})..add(clusterID); + } + return result; + }); + } + + Future clusterSummaryUpdate(Map summary) async { + final db = await instance.asyncDB; + + const String sql = ''' + INSERT INTO $clusterSummaryTable ($clusterIDColumn, $avgColumn, $countColumn) VALUES (?, ?, ?) ON CONFLICT($clusterIDColumn) DO UPDATE SET $avgColumn = excluded.$avgColumn, $countColumn = excluded.$countColumn + '''; + final List> parameterSets = []; + int batchCounter = 0; + for (final entry in summary.entries) { + if (batchCounter == 400) { + await db.executeBatch(sql, parameterSets); + batchCounter = 0; + parameterSets.clear(); + } + final int clusterID = entry.key; + final int count = entry.value.$2; + final Uint8List avg = entry.value.$1; + parameterSets.add([clusterID, avg, count]); + batchCounter++; + } + await db.executeBatch(sql, parameterSets); + } + + Future deleteClusterSummary(int clusterID) async { + final db = await instance.asyncDB; + const String sqlDelete = + 'DELETE FROM $clusterSummaryTable WHERE $clusterIDColumn = ?'; + await db.execute(sqlDelete, [clusterID]); + } + + /// Returns a map of clusterID to (avg embedding, count) + Future> getAllClusterSummary([ + int? minClusterSize, + ]) async { + final db = await instance.asyncDB; + final Map result = {}; + final rows = await db.getAll( + 'SELECT * FROM $clusterSummaryTable${minClusterSize != null ? 
' WHERE $countColumn >= $minClusterSize' : ''}', + ); + for (final r in rows) { + final id = r[clusterIDColumn] as int; + final avg = r[avgColumn] as Uint8List; + final count = r[countColumn] as int; + result[id] = (avg, count); + } + return result; + } + + Future> getClusterToClusterSummary( + Iterable clusterIDs, + ) async { + final db = await instance.asyncDB; + final Map result = {}; + final rows = await db.getAll( + 'SELECT * FROM $clusterSummaryTable WHERE $clusterIDColumn IN (${clusterIDs.join(",")})', + ); + for (final r in rows) { + final id = r[clusterIDColumn] as int; + final avg = r[avgColumn] as Uint8List; + final count = r[countColumn] as int; + result[id] = (avg, count); + } + return result; + } + + Future> getClusterIDToPersonID() async { + final db = await instance.asyncDB; + final List> maps = await db.getAll( + 'SELECT $personIdColumn, $clusterIDColumn FROM $clusterPersonTable', + ); + final Map result = {}; + for (final map in maps) { + result[map[clusterIDColumn] as int] = map[personIdColumn] as String; + } + return result; + } + + /// WARNING: This will delete ALL data in the database! Only use this for debug/testing purposes! 
+ Future dropClustersAndPersonTable({bool faces = false}) async { + try { + final db = await instance.asyncDB; + if (faces) { + await db.execute(deleteFacesTable); + await db.execute(createFacesTable); + await db.execute(dropFaceClustersTable); + await db.execute(createFaceClustersTable); + await db.execute(fcClusterIDIndex); + } + + await db.execute(deletePersonTable); + await db.execute(dropClusterPersonTable); + await db.execute(dropNotPersonFeedbackTable); + await db.execute(dropClusterSummaryTable); + await db.execute(dropFaceClustersTable); + + await db.execute(createClusterPersonTable); + await db.execute(createNotPersonFeedbackTable); + await db.execute(createClusterSummaryTable); + await db.execute(createFaceClustersTable); + await db.execute(fcClusterIDIndex); + } catch (e, s) { + _logger.severe('Error dropping clusters and person table', e, s); + } + } + + /// WARNING: This will delete ALL data in the tables! Only use this for debug/testing purposes! + Future dropFeedbackTables() async { + try { + final db = await instance.asyncDB; + + // Drop the tables + await db.execute(deletePersonTable); + await db.execute(dropClusterPersonTable); + await db.execute(dropNotPersonFeedbackTable); + + // Recreate the tables + await db.execute(createClusterPersonTable); + await db.execute(createNotPersonFeedbackTable); + } catch (e) { + _logger.severe('Error dropping feedback tables', e); + } + } + + Future removeFilesFromPerson( + List files, + String personID, + ) async { + final db = await instance.asyncDB; + final faceIdsResult = await db.getAll( + 'SELECT $fcFaceId FROM $faceClustersTable LEFT JOIN $clusterPersonTable ' + 'ON $faceClustersTable.$fcClusterID = $clusterPersonTable.$clusterIDColumn ' + 'WHERE $clusterPersonTable.$personIdColumn = ?', + [personID], + ); + final Set fileIds = {}; + for (final enteFile in files) { + fileIds.add(enteFile.uploadedFileID.toString()); + } + int maxClusterID = DateTime.now().microsecondsSinceEpoch; + final Map 
faceIDToClusterID = {}; + for (final row in faceIdsResult) { + final faceID = row[fcFaceId] as String; + if (fileIds.contains(faceID.split('_').first)) { + maxClusterID += 1; + faceIDToClusterID[faceID] = maxClusterID; + } + } + await forceUpdateClusterIds(faceIDToClusterID); + } + + Future removeFilesFromCluster( + List files, + int clusterID, + ) async { + final db = await instance.asyncDB; + final faceIdsResult = await db.getAll( + 'SELECT $fcFaceId FROM $faceClustersTable ' + 'WHERE $faceClustersTable.$fcClusterID = ?', + [clusterID], + ); + final Set fileIds = {}; + for (final enteFile in files) { + fileIds.add(enteFile.uploadedFileID.toString()); + } + int maxClusterID = DateTime.now().microsecondsSinceEpoch; + final Map faceIDToClusterID = {}; + for (final row in faceIdsResult) { + final faceID = row[fcFaceId] as String; + if (fileIds.contains(faceID.split('_').first)) { + maxClusterID += 1; + faceIDToClusterID[faceID] = maxClusterID; + } + } + await forceUpdateClusterIds(faceIDToClusterID); + } + + Future addFacesToCluster( + List faceIDs, + int clusterID, + ) async { + final faceIDToClusterID = {}; + for (final faceID in faceIDs) { + faceIDToClusterID[faceID] = clusterID; + } + + await forceUpdateClusterIds(faceIDToClusterID); + } +} diff --git a/mobile/lib/face/db_fields.dart b/mobile/lib/face/db_fields.dart new file mode 100644 index 0000000000..e6a70a7d4e --- /dev/null +++ b/mobile/lib/face/db_fields.dart @@ -0,0 +1,103 @@ +// Faces Table Fields & Schema Queries +import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart'; + +const facesTable = 'faces'; +const fileIDColumn = 'file_id'; +const faceIDColumn = 'face_id'; +const faceDetectionColumn = 'detection'; +const faceEmbeddingBlob = 'eBlob'; +const faceScore = 'score'; +const faceBlur = 'blur'; +const isSideways = 'is_sideways'; +const imageWidth = 'width'; +const imageHeight = 'height'; +const faceClusterId = 'cluster_id'; +const mlVersionColumn = 
'ml_version'; + +const createFacesTable = '''CREATE TABLE IF NOT EXISTS $facesTable ( + $fileIDColumn INTEGER NOT NULL, + $faceIDColumn TEXT NOT NULL UNIQUE, + $faceDetectionColumn TEXT NOT NULL, + $faceEmbeddingBlob BLOB NOT NULL, + $faceScore REAL NOT NULL, + $faceBlur REAL NOT NULL DEFAULT $kLapacianDefault, + $isSideways INTEGER NOT NULL DEFAULT 0, + $imageHeight INTEGER NOT NULL DEFAULT 0, + $imageWidth INTEGER NOT NULL DEFAULT 0, + $mlVersionColumn INTEGER NOT NULL DEFAULT -1, + PRIMARY KEY($fileIDColumn, $faceIDColumn) + ); + '''; + +const deleteFacesTable = 'DROP TABLE IF EXISTS $facesTable'; +// End of Faces Table Fields & Schema Queries + +//##region Face Clusters Table Fields & Schema Queries +const faceClustersTable = 'face_clusters'; +const fcClusterID = 'cluster_id'; +const fcFaceId = 'face_id'; + +// fcClusterId & fcFaceId are the primary keys and fcClusterId is a foreign key to faces table +const createFaceClustersTable = ''' +CREATE TABLE IF NOT EXISTS $faceClustersTable ( + $fcFaceId TEXT NOT NULL, + $fcClusterID INTEGER NOT NULL, + PRIMARY KEY($fcFaceId) +); +'''; +// -- Creating a non-unique index on clusterID for query optimization +const fcClusterIDIndex = + '''CREATE INDEX IF NOT EXISTS idx_fcClusterID ON $faceClustersTable($fcClusterID);'''; +const dropFaceClustersTable = 'DROP TABLE IF EXISTS $faceClustersTable'; +//##endregion + +// People Table Fields & Schema Queries +const personTable = 'person'; + +const deletePersonTable = 'DROP TABLE IF EXISTS $personTable'; +//End People Table Fields & Schema Queries + +// Clusters Table Fields & Schema Queries +const clusterPersonTable = 'cluster_person'; +const personIdColumn = 'person_id'; +const clusterIDColumn = 'cluster_id'; + +const createClusterPersonTable = ''' +CREATE TABLE IF NOT EXISTS $clusterPersonTable ( + $personIdColumn TEXT NOT NULL, + $clusterIDColumn INTEGER NOT NULL, + PRIMARY KEY($personIdColumn, $clusterIDColumn) +); +'''; +const dropClusterPersonTable = 'DROP TABLE IF EXISTS 
$clusterPersonTable'; +// End Clusters Table Fields & Schema Queries + +/// Cluster Summary Table Fields & Schema Queries +const clusterSummaryTable = 'cluster_summary'; +const avgColumn = 'avg'; +const countColumn = 'count'; +const createClusterSummaryTable = ''' +CREATE TABLE IF NOT EXISTS $clusterSummaryTable ( + $clusterIDColumn INTEGER NOT NULL, + $avgColumn BLOB NOT NULL, + $countColumn INTEGER NOT NULL, + PRIMARY KEY($clusterIDColumn) +); +'''; + +const dropClusterSummaryTable = 'DROP TABLE IF EXISTS $clusterSummaryTable'; + +/// End Cluster Summary Table Fields & Schema Queries + +/// notPersonFeedback Table Fields & Schema Queries +const notPersonFeedback = 'not_person_feedback'; + +const createNotPersonFeedbackTable = ''' +CREATE TABLE IF NOT EXISTS $notPersonFeedback ( + $personIdColumn TEXT NOT NULL, + $clusterIDColumn INTEGER NOT NULL, + PRIMARY KEY($personIdColumn, $clusterIDColumn) +); +'''; +const dropNotPersonFeedbackTable = 'DROP TABLE IF EXISTS $notPersonFeedback'; +// End Clusters Table Fields & Schema Queries diff --git a/mobile/lib/face/db_model_mappers.dart b/mobile/lib/face/db_model_mappers.dart new file mode 100644 index 0000000000..70dc779157 --- /dev/null +++ b/mobile/lib/face/db_model_mappers.dart @@ -0,0 +1,57 @@ +import "dart:convert"; + +import 'package:photos/face/db_fields.dart'; +import "package:photos/face/model/detection.dart"; +import "package:photos/face/model/face.dart"; +import "package:photos/generated/protos/ente/common/vector.pb.dart"; +import "package:photos/models/ml/ml_versions.dart"; + +int boolToSQLInt(bool? value, {bool defaultValue = false}) { + final bool v = value ?? defaultValue; + if (v == false) { + return 0; + } else { + return 1; + } +} + +bool sqlIntToBool(int? value, {bool defaultValue = false}) { + final int v = value ?? (defaultValue ? 
1 : 0); + if (v == 0) { + return false; + } else { + return true; + } +} + +Map mapRemoteToFaceDB(Face face) { + return { + faceIDColumn: face.faceID, + fileIDColumn: face.fileID, + faceDetectionColumn: json.encode(face.detection.toJson()), + faceEmbeddingBlob: EVector( + values: face.embedding, + ).writeToBuffer(), + faceScore: face.score, + faceBlur: face.blur, + isSideways: face.detection.faceIsSideways() ? 1 : 0, + mlVersionColumn: faceMlVersion, + imageWidth: face.fileInfo?.imageWidth ?? 0, + imageHeight: face.fileInfo?.imageHeight ?? 0, + }; +} + +Face mapRowToFace(Map row) { + return Face( + row[faceIDColumn] as String, + row[fileIDColumn] as int, + EVector.fromBuffer(row[faceEmbeddingBlob] as List).values, + row[faceScore] as double, + Detection.fromJson(json.decode(row[faceDetectionColumn] as String)), + row[faceBlur] as double, + fileInfo: FileInfo( + imageWidth: row[imageWidth] as int, + imageHeight: row[imageHeight] as int, + ), + ); +} diff --git a/mobile/lib/face/model/box.dart b/mobile/lib/face/model/box.dart new file mode 100644 index 0000000000..3c5be3f9f3 --- /dev/null +++ b/mobile/lib/face/model/box.dart @@ -0,0 +1,35 @@ +/// Bounding box of a face. +/// +/// [ x] and [y] are the minimum coordinates, so the top left corner of the box. +/// [width] and [height] are the width and height of the box. +/// +/// WARNING: All values are relative to the original image size, so in the range [0, 1]. +class FaceBox { + final double x; + final double y; + final double width; + final double height; + + FaceBox({ + required this.x, + required this.y, + required this.width, + required this.height, + }); + + factory FaceBox.fromJson(Map json) { + return FaceBox( + x: (json['x'] as double?) ?? (json['xMin'] as double), + y: (json['y'] as double?) ?? 
(json['yMin'] as double), + width: json['width'] as double, + height: json['height'] as double, + ); + } + + Map toJson() => { + 'x': x, + 'y': y, + 'width': width, + 'height': height, + }; +} diff --git a/mobile/lib/face/model/detection.dart b/mobile/lib/face/model/detection.dart new file mode 100644 index 0000000000..44329196a4 --- /dev/null +++ b/mobile/lib/face/model/detection.dart @@ -0,0 +1,120 @@ +import "dart:math" show min, max; + +import "package:photos/face/model/box.dart"; +import "package:photos/face/model/landmark.dart"; +import "package:photos/services/machine_learning/face_ml/face_detection/detection.dart"; + +/// Stores the face detection data, notably the bounding box and landmarks. +/// +/// - Bounding box: [FaceBox] with x, y (minimum, so top left corner), width, height +/// - Landmarks: list of [Landmark]s, namely leftEye, rightEye, nose, leftMouth, rightMouth +/// +/// WARNING: All coordinates are relative to the image size, so in the range [0, 1]! +class Detection { + FaceBox box; + List landmarks; + + Detection({ + required this.box, + required this.landmarks, + }); + + bool get isEmpty => box.width == 0 && box.height == 0 && landmarks.isEmpty; + + // empty box + Detection.empty() + : box = FaceBox( + x: 0, + y: 0, + width: 0, + height: 0, + ), + landmarks = []; + + Map toJson() => { + 'box': box.toJson(), + 'landmarks': landmarks.map((x) => x.toJson()).toList(), + }; + + factory Detection.fromJson(Map json) { + return Detection( + box: FaceBox.fromJson(json['box'] as Map), + landmarks: List.from( + json['landmarks'] + .map((x) => Landmark.fromJson(x as Map)), + ), + ); + } + + int getFaceArea(int imageWidth, int imageHeight) { + return (box.width * imageWidth * box.height * imageHeight).toInt(); + } + + FaceDirection getFaceDirection() { + if (isEmpty) { + return FaceDirection.straight; + } + final leftEye = [landmarks[0].x, landmarks[0].y]; + final rightEye = [landmarks[1].x, landmarks[1].y]; + final nose = [landmarks[2].x, 
landmarks[2].y]; + final leftMouth = [landmarks[3].x, landmarks[3].y]; + final rightMouth = [landmarks[4].x, landmarks[4].y]; + + final double eyeDistanceX = (rightEye[0] - leftEye[0]).abs(); + final double eyeDistanceY = (rightEye[1] - leftEye[1]).abs(); + final double mouthDistanceY = (rightMouth[1] - leftMouth[1]).abs(); + + final bool faceIsUpright = + (max(leftEye[1], rightEye[1]) + 0.5 * eyeDistanceY < nose[1]) && + (nose[1] + 0.5 * mouthDistanceY < min(leftMouth[1], rightMouth[1])); + + final bool noseStickingOutLeft = (nose[0] < min(leftEye[0], rightEye[0])) && + (nose[0] < min(leftMouth[0], rightMouth[0])); + final bool noseStickingOutRight = + (nose[0] > max(leftEye[0], rightEye[0])) && + (nose[0] > max(leftMouth[0], rightMouth[0])); + + final bool noseCloseToLeftEye = + (nose[0] - leftEye[0]).abs() < 0.2 * eyeDistanceX; + final bool noseCloseToRightEye = + (nose[0] - rightEye[0]).abs() < 0.2 * eyeDistanceX; + + // if (faceIsUpright && (noseStickingOutLeft || noseCloseToLeftEye)) { + if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) { + return FaceDirection.left; + // } else if (faceIsUpright && (noseStickingOutRight || noseCloseToRightEye)) { + } else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) { + return FaceDirection.right; + } + + return FaceDirection.straight; + } + + bool faceIsSideways() { + if (isEmpty) { + return false; + } + final leftEye = [landmarks[0].x, landmarks[0].y]; + final rightEye = [landmarks[1].x, landmarks[1].y]; + final nose = [landmarks[2].x, landmarks[2].y]; + final leftMouth = [landmarks[3].x, landmarks[3].y]; + final rightMouth = [landmarks[4].x, landmarks[4].y]; + + final double eyeDistanceX = (rightEye[0] - leftEye[0]).abs(); + final double eyeDistanceY = (rightEye[1] - leftEye[1]).abs(); + final double mouthDistanceY = (rightMouth[1] - leftMouth[1]).abs(); + + final bool faceIsUpright = + (max(leftEye[1], rightEye[1]) + 0.5 * eyeDistanceY < nose[1]) && + (nose[1] + 0.5 * mouthDistanceY 
< min(leftMouth[1], rightMouth[1])); + + final bool noseStickingOutLeft = + (nose[0] < min(leftEye[0], rightEye[0]) - 0.5 * eyeDistanceX) && + (nose[0] < min(leftMouth[0], rightMouth[0])); + final bool noseStickingOutRight = + (nose[0] > max(leftEye[0], rightEye[0]) + 0.5 * eyeDistanceX) && + (nose[0] > max(leftMouth[0], rightMouth[0])); + + return faceIsUpright && (noseStickingOutLeft || noseStickingOutRight); + } +} diff --git a/mobile/lib/face/model/dimension.dart b/mobile/lib/face/model/dimension.dart new file mode 100644 index 0000000000..d4ae7a3bc0 --- /dev/null +++ b/mobile/lib/face/model/dimension.dart @@ -0,0 +1,25 @@ +class Dimensions { + final int width; + final int height; + + const Dimensions({required this.width, required this.height}); + + @override + String toString() { + return 'Dimensions(width: $width, height: $height})'; + } + + Map toJson() { + return { + 'width': width, + 'height': height, + }; + } + + factory Dimensions.fromJson(Map json) { + return Dimensions( + width: json['width'] as int, + height: json['height'] as int, + ); + } +} diff --git a/mobile/lib/face/model/face.dart b/mobile/lib/face/model/face.dart new file mode 100644 index 0000000000..c215389491 --- /dev/null +++ b/mobile/lib/face/model/face.dart @@ -0,0 +1,85 @@ +import "package:photos/face/model/detection.dart"; +import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart'; +import "package:photos/services/machine_learning/face_ml/face_ml_result.dart"; + +// FileInfo contains the image width and height of the image the face was detected in. +class FileInfo { + int? imageWidth; + int? imageHeight; + FileInfo({ + this.imageWidth, + this.imageHeight, + }); +} + +class Face { + final String faceID; + final List embedding; + Detection detection; + final double score; + final double blur; + + ///#region Local DB fields + // This is not stored on the server, using it for local DB row + FileInfo? 
fileInfo; + final int fileID; + ///#endregion + + bool get isBlurry => blur < kLaplacianHardThreshold; + + bool get hasHighScore => score > kMinimumQualityFaceScore; + + bool get isHighQuality => (!isBlurry) && hasHighScore; + + int area({int? w, int? h}) { + return detection.getFaceArea( + fileInfo?.imageWidth ?? w ?? 0, + fileInfo?.imageHeight ?? h ?? 0, + ); + } + + Face( + this.faceID, + this.fileID, + this.embedding, + this.score, + this.detection, + this.blur, { + this.fileInfo, + }); + + factory Face.empty(int fileID, {bool error = false}) { + return Face( + "$fileID-0", + fileID, + [], + error ? -1.0 : 0.0, + Detection.empty(), + 0.0, + ); + } + + factory Face.fromJson(Map json) { + final String faceID = json['faceID'] as String; + final int fileID = getFileIdFromFaceId(faceID); + return Face( + faceID, + fileID, + List.from((json['embedding'] ?? json['embeddings']) as List), + json['score'] as double, + Detection.fromJson(json['detection'] as Map), + // high value means t + (json['blur'] ?? kLapacianDefault) as double, + ); + } + + // Note: Keep the information in toJson minimum. Keep in sync with desktop. + // Derive fields like fileID from other values whenever possible + Map toJson() => { + 'faceID': faceID, + 'embedding': embedding, + 'detection': detection.toJson(), + 'score': score, + 'blur': blur, + }; +} diff --git a/mobile/lib/face/model/landmark.dart b/mobile/lib/face/model/landmark.dart new file mode 100644 index 0000000000..320afbabd1 --- /dev/null +++ b/mobile/lib/face/model/landmark.dart @@ -0,0 +1,33 @@ +/// Landmark coordinate data. +/// +/// WARNING: All coordinates are relative to the image size, so in the range [0, 1]! +class Landmark { + double x; + double y; + + Landmark({ + required this.x, + required this.y, + }); + + Map toJson() => { + 'x': x, + 'y': y, + }; + + factory Landmark.fromJson(Map json) { + return Landmark( + x: (json['x'] is int + ? (json['x'] as int).toDouble() + : json['x'] as double), + y: (json['y'] is int + ? 
(json['y'] as int).toDouble() + : json['y'] as double), + ); + } + + @override + toString() { + return '(x: ${x.toStringAsFixed(4)}, y: ${y.toStringAsFixed(4)})'; + } +} diff --git a/mobile/lib/face/model/person.dart b/mobile/lib/face/model/person.dart new file mode 100644 index 0000000000..cedec7a0dc --- /dev/null +++ b/mobile/lib/face/model/person.dart @@ -0,0 +1,139 @@ +// PersonEntity represents information about a Person in the context of FaceClustering that is stored. +// On the remote server, the PersonEntity is stored as {Entity} with type person. +// On the device, this information is stored as [LocalEntityData] with type person. +import "package:flutter/foundation.dart"; + +class PersonEntity { + final String remoteID; + final PersonData data; + PersonEntity( + this.remoteID, + this.data, + ); + + // copyWith + PersonEntity copyWith({ + String? remoteID, + PersonData? data, + }) { + return PersonEntity( + remoteID ?? this.remoteID, + data ?? this.data, + ); + } +} + +class ClusterInfo { + final int id; + final Set faces; + ClusterInfo({ + required this.id, + required this.faces, + }); + + // toJson + Map toJson() => { + 'id': id, + 'faces': faces.toList(), + }; + + // from Json + factory ClusterInfo.fromJson(Map json) { + return ClusterInfo( + id: json['id'] as int, + faces: (json['faces'] as List).map((e) => e as String).toSet(), + ); + } +} + +class PersonData { + final String name; + final bool isHidden; + String? avatarFaceId; + List? assigned = List.empty(); + List? rejected = List.empty(); + final String? birthDate; + + bool hasAvatar() => avatarFaceId != null; + + bool get isIgnored => + (name.isEmpty || name == '(hidden)' || name == '(ignored)'); + + PersonData({ + required this.name, + this.assigned, + this.rejected, + this.avatarFaceId, + this.isHidden = false, + this.birthDate, + }); + // copyWith + PersonData copyWith({ + String? name, + List? assigned, + String? avatarFaceId, + bool? isHidden, + int? version, + String? 
birthDate, + }) { + return PersonData( + name: name ?? this.name, + assigned: assigned ?? this.assigned, + avatarFaceId: avatarFaceId ?? this.avatarFaceId, + isHidden: isHidden ?? this.isHidden, + birthDate: birthDate ?? this.birthDate, + ); + } + + void logStats() { + if (kDebugMode == false) return; + // log number of assigned and rejected clusters and total number of faces in each cluster + final StringBuffer sb = StringBuffer(); + sb.writeln('Person: $name'); + int assignedCount = 0; + for (final a in (assigned ?? [])) { + assignedCount += a.faces.length; + } + sb.writeln('Assigned: ${assigned?.length} withFaces $assignedCount'); + sb.writeln('Rejected: ${rejected?.length}'); + if (assigned != null) { + for (var cluster in assigned!) { + sb.writeln('Cluster: ${cluster.id} - ${cluster.faces.length}'); + } + } + debugPrint(sb.toString()); + } + + // toJson + Map toJson() => { + 'name': name, + 'assigned': assigned?.map((e) => e.toJson()).toList(), + 'rejected': rejected?.map((e) => e.toJson()).toList(), + 'avatarFaceId': avatarFaceId, + 'isHidden': isHidden, + 'birthDate': birthDate, + }; + + // fromJson + factory PersonData.fromJson(Map json) { + final assigned = (json['assigned'] == null || json['assigned'].length == 0) + ? [] + : List.from( + json['assigned'].map((x) => ClusterInfo.fromJson(x)), + ); + + final rejected = (json['rejected'] == null || json['rejected'].length == 0) + ? [] + : List.from( + json['rejected'].map((x) => ClusterInfo.fromJson(x)), + ); + return PersonData( + name: json['name'] as String, + assigned: assigned, + rejected: rejected, + avatarFaceId: json['avatarFaceId'] as String?, + isHidden: json['isHidden'] as bool? ?? 
false, + birthDate: json['birthDate'] as String?, + ); + } +} diff --git a/mobile/lib/generated/intl/messages_cs.dart b/mobile/lib/generated/intl/messages_cs.dart index 8db8489d33..4506011b15 100644 --- a/mobile/lib/generated/intl/messages_cs.dart +++ b/mobile/lib/generated/intl/messages_cs.dart @@ -34,6 +34,8 @@ class MessageLookup extends MessageLookupByLibrary { "addViewers": m1, "changeLocationOfSelectedItems": MessageLookupByLibrary.simpleMessage( "Change location of selected items?"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "contacts": MessageLookupByLibrary.simpleMessage("Contacts"), "createCollaborativeLink": MessageLookupByLibrary.simpleMessage("Create collaborative link"), @@ -44,7 +46,14 @@ class MessageLookup extends MessageLookupByLibrary { "editsToLocationWillOnlyBeSeenWithinEnte": MessageLookupByLibrary.simpleMessage( "Edits to location will only be seen within Ente"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "fileTypes": MessageLookupByLibrary.simpleMessage("File types"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "joinDiscord": MessageLookupByLibrary.simpleMessage("Join Discord"), "locations": MessageLookupByLibrary.simpleMessage("Locations"), "longPressAnEmailToVerifyEndToEndEncryption": @@ -55,6 +64,8 @@ class MessageLookup extends MessageLookupByLibrary { "Modify your query, or try searching for"), "moveToHiddenAlbum": MessageLookupByLibrary.simpleMessage("Move to hidden album"), + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "search": MessageLookupByLibrary.simpleMessage("Search"), "selectALocation": 
MessageLookupByLibrary.simpleMessage("Select a location"), diff --git a/mobile/lib/generated/intl/messages_de.dart b/mobile/lib/generated/intl/messages_de.dart index 442cae9198..0ff50cfa45 100644 --- a/mobile/lib/generated/intl/messages_de.dart +++ b/mobile/lib/generated/intl/messages_de.dart @@ -227,6 +227,7 @@ class MessageLookup extends MessageLookupByLibrary { "Ich verstehe, dass ich meine Daten verlieren kann, wenn ich mein Passwort vergesse, da meine Daten Ende-zu-Ende-verschlüsselt sind."), "activeSessions": MessageLookupByLibrary.simpleMessage("Aktive Sitzungen"), + "addAName": MessageLookupByLibrary.simpleMessage("Add a name"), "addANewEmail": MessageLookupByLibrary.simpleMessage( "Neue E-Mail-Adresse hinzufügen"), "addCollaborator": @@ -435,6 +436,8 @@ class MessageLookup extends MessageLookupByLibrary { "Nach Aufnahmezeit gruppieren"), "clubByFileName": MessageLookupByLibrary.simpleMessage("Nach Dateiname gruppieren"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("Code eingelöst"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage( @@ -675,6 +678,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Passwort eingeben"), "enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( "Gib ein Passwort ein, mit dem wir deine Daten verschlüsseln können"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), "enterReferralCode": MessageLookupByLibrary.simpleMessage( "Gib den Weiterempfehlungs-Code ein"), "enterThe6digitCodeFromnyourAuthenticatorApp": @@ -699,6 +704,10 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Protokolle exportieren"), "exportYourData": MessageLookupByLibrary.simpleMessage("Daten exportieren"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + 
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "faces": MessageLookupByLibrary.simpleMessage("Gesichter"), "failedToApplyCode": MessageLookupByLibrary.simpleMessage( "Der Code konnte nicht aktiviert werden"), @@ -738,11 +747,14 @@ class MessageLookup extends MessageLookupByLibrary { "filesBackedUpInAlbum": m23, "filesDeleted": MessageLookupByLibrary.simpleMessage("Dateien gelöscht"), + "findPeopleByName": MessageLookupByLibrary.simpleMessage( + "Find people quickly by searching by name"), "flip": MessageLookupByLibrary.simpleMessage("Spiegeln"), "forYourMemories": MessageLookupByLibrary.simpleMessage("Als Erinnerung"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Passwort vergessen"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage( "Kostenlos hinzugefügter Speicherplatz"), "freeStorageOnReferralSuccess": m24, @@ -1164,6 +1176,8 @@ class MessageLookup extends MessageLookupByLibrary { "removeParticipant": MessageLookupByLibrary.simpleMessage("Teilnehmer entfernen"), "removeParticipantBody": m43, + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "removePublicLink": MessageLookupByLibrary.simpleMessage("Öffentlichen Link entfernen"), "removeShareItemsWarning": MessageLookupByLibrary.simpleMessage( diff --git a/mobile/lib/generated/intl/messages_en.dart b/mobile/lib/generated/intl/messages_en.dart index aab7f47bd8..ee799aeb94 100644 --- a/mobile/lib/generated/intl/messages_en.dart +++ b/mobile/lib/generated/intl/messages_en.dart @@ -132,7 +132,7 @@ class MessageLookup extends MessageLookupByLibrary { "Please talk to ${providerName} support if you were charged"; static String m38(endDate) => - "Free trial valid till ${endDate}.\nYou can purchase a paid plan afterwards."; + "Free trial valid till 
${endDate}.\nYou can choose a paid plan afterwards."; static String m39(toEmail) => "Please email us at ${toEmail}"; @@ -225,6 +225,7 @@ class MessageLookup extends MessageLookupByLibrary { "I understand that if I lose my password, I may lose my data since my data is end-to-end encrypted."), "activeSessions": MessageLookupByLibrary.simpleMessage("Active sessions"), + "addAName": MessageLookupByLibrary.simpleMessage("Add a name"), "addANewEmail": MessageLookupByLibrary.simpleMessage("Add a new email"), "addCollaborator": MessageLookupByLibrary.simpleMessage("Add collaborator"), @@ -434,6 +435,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Club by capture time"), "clubByFileName": MessageLookupByLibrary.simpleMessage("Club by file name"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("Code applied"), "codeCopiedToClipboard": @@ -675,6 +678,8 @@ class MessageLookup extends MessageLookupByLibrary { "enterPassword": MessageLookupByLibrary.simpleMessage("Enter password"), "enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( "Enter a password we can use to encrypt your data"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), "enterReferralCode": MessageLookupByLibrary.simpleMessage("Enter referral code"), "enterThe6digitCodeFromnyourAuthenticatorApp": @@ -697,6 +702,10 @@ class MessageLookup extends MessageLookupByLibrary { "exportLogs": MessageLookupByLibrary.simpleMessage("Export logs"), "exportYourData": MessageLookupByLibrary.simpleMessage("Export your data"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "faces": 
MessageLookupByLibrary.simpleMessage("Faces"), "failedToApplyCode": MessageLookupByLibrary.simpleMessage("Failed to apply code"), @@ -736,11 +745,14 @@ class MessageLookup extends MessageLookupByLibrary { "filesDeleted": MessageLookupByLibrary.simpleMessage("Files deleted"), "filesSavedToGallery": MessageLookupByLibrary.simpleMessage("Files saved to gallery"), + "findPeopleByName": + MessageLookupByLibrary.simpleMessage("Find people quickly by name"), "flip": MessageLookupByLibrary.simpleMessage("Flip"), "forYourMemories": MessageLookupByLibrary.simpleMessage("for your memories"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Forgot password"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage("Free storage claimed"), "freeStorageOnReferralSuccess": m24, @@ -1022,6 +1034,7 @@ class MessageLookup extends MessageLookupByLibrary { "paymentFailedTalkToProvider": m37, "pendingItems": MessageLookupByLibrary.simpleMessage("Pending items"), "pendingSync": MessageLookupByLibrary.simpleMessage("Pending sync"), + "people": MessageLookupByLibrary.simpleMessage("People"), "peopleUsingYourCode": MessageLookupByLibrary.simpleMessage("People using your code"), "permDeleteWarning": MessageLookupByLibrary.simpleMessage( @@ -1151,6 +1164,8 @@ class MessageLookup extends MessageLookupByLibrary { "removeParticipant": MessageLookupByLibrary.simpleMessage("Remove participant"), "removeParticipantBody": m43, + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "removePublicLink": MessageLookupByLibrary.simpleMessage("Remove public link"), "removeShareItemsWarning": MessageLookupByLibrary.simpleMessage( @@ -1208,8 +1223,8 @@ class MessageLookup extends MessageLookupByLibrary { "Add descriptions like \"#trip\" in photo info to quickly find them here"), "searchDatesEmptySection": MessageLookupByLibrary.simpleMessage( "Search by a date, month or year"), - 
"searchFaceEmptySection": - MessageLookupByLibrary.simpleMessage("Find all photos of a person"), + "searchFaceEmptySection": MessageLookupByLibrary.simpleMessage( + "Persons will be shown here once indexing is done"), "searchFileTypesAndNamesEmptySection": MessageLookupByLibrary.simpleMessage("File types and names"), "searchHint1": diff --git a/mobile/lib/generated/intl/messages_es.dart b/mobile/lib/generated/intl/messages_es.dart index a6294d4a43..879f0f8c16 100644 --- a/mobile/lib/generated/intl/messages_es.dart +++ b/mobile/lib/generated/intl/messages_es.dart @@ -367,6 +367,8 @@ class MessageLookup extends MessageLookupByLibrary { "close": MessageLookupByLibrary.simpleMessage("Cerrar"), "clubByCaptureTime": MessageLookupByLibrary.simpleMessage( "Agrupar por tiempo de captura"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("Código aplicado"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage( @@ -585,6 +587,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Introduzca contraseña"), "enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( "Introduzca una contraseña que podamos usar para cifrar sus datos"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), "enterReferralCode": MessageLookupByLibrary.simpleMessage( "Ingresar código de referencia"), "enterThe6digitCodeFromnyourAuthenticatorApp": @@ -609,6 +613,10 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Exportar registros"), "exportYourData": MessageLookupByLibrary.simpleMessage("Exportar tus datos"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), 
"failedToApplyCode": MessageLookupByLibrary.simpleMessage("Error al aplicar el código"), "failedToCancel": @@ -647,6 +655,7 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("para tus recuerdos"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Olvidé mi contraseña"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage( "Almacenamiento gratuito reclamado"), "freeStorageOnReferralSuccess": m24, @@ -997,6 +1006,8 @@ class MessageLookup extends MessageLookupByLibrary { "removeParticipant": MessageLookupByLibrary.simpleMessage("Quitar participante"), "removeParticipantBody": m43, + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "removePublicLink": MessageLookupByLibrary.simpleMessage("Quitar enlace público"), "removeShareItemsWarning": MessageLookupByLibrary.simpleMessage( diff --git a/mobile/lib/generated/intl/messages_fr.dart b/mobile/lib/generated/intl/messages_fr.dart index 82125afcca..47817371ed 100644 --- a/mobile/lib/generated/intl/messages_fr.dart +++ b/mobile/lib/generated/intl/messages_fr.dart @@ -425,6 +425,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Grouper par durée"), "clubByFileName": MessageLookupByLibrary.simpleMessage("Grouper par nom de fichier"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("Code appliqué"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage( @@ -665,6 +667,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Saisissez le mot de passe"), "enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( "Entrez un mot de passe que nous pouvons utiliser pour chiffrer vos données"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), 
"enterReferralCode": MessageLookupByLibrary.simpleMessage( "Entrez le code de parrainage"), "enterThe6digitCodeFromnyourAuthenticatorApp": @@ -688,6 +692,10 @@ class MessageLookup extends MessageLookupByLibrary { "exportLogs": MessageLookupByLibrary.simpleMessage("Exporter les logs"), "exportYourData": MessageLookupByLibrary.simpleMessage("Exportez vos données"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "faces": MessageLookupByLibrary.simpleMessage("Visages"), "failedToApplyCode": MessageLookupByLibrary.simpleMessage( "Impossible d\'appliquer le code"), @@ -732,6 +740,7 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("pour vos souvenirs"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Mot de passe oublié"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage("Stockage gratuit réclamé"), "freeStorageOnReferralSuccess": m24, @@ -1129,6 +1138,8 @@ class MessageLookup extends MessageLookupByLibrary { "removeParticipant": MessageLookupByLibrary.simpleMessage("Supprimer le participant"), "removeParticipantBody": m43, + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "removePublicLink": MessageLookupByLibrary.simpleMessage("Supprimer le lien public"), "removeShareItemsWarning": MessageLookupByLibrary.simpleMessage( diff --git a/mobile/lib/generated/intl/messages_it.dart b/mobile/lib/generated/intl/messages_it.dart index e6db5b380f..6dbae342cd 100644 --- a/mobile/lib/generated/intl/messages_it.dart +++ b/mobile/lib/generated/intl/messages_it.dart @@ -411,6 +411,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Club per tempo di cattura"), 
"clubByFileName": MessageLookupByLibrary.simpleMessage("Unisci per nome file"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("Codice applicato"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage( @@ -644,6 +646,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Inserisci password"), "enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( "Inserisci una password per criptare i tuoi dati"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), "enterReferralCode": MessageLookupByLibrary.simpleMessage( "Inserisci il codice di invito"), "enterThe6digitCodeFromnyourAuthenticatorApp": @@ -665,6 +669,10 @@ class MessageLookup extends MessageLookupByLibrary { "Questo link è scaduto. Si prega di selezionare un nuovo orario di scadenza o disabilitare la scadenza del link."), "exportLogs": MessageLookupByLibrary.simpleMessage("Esporta log"), "exportYourData": MessageLookupByLibrary.simpleMessage("Esporta dati"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "failedToApplyCode": MessageLookupByLibrary.simpleMessage( "Impossibile applicare il codice"), "failedToCancel": @@ -704,6 +712,7 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("per i tuoi ricordi"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Password dimenticata"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage("Spazio gratuito richiesto"), "freeStorageOnReferralSuccess": m24, @@ -1090,6 +1099,8 @@ class MessageLookup extends MessageLookupByLibrary { "removeParticipant": 
MessageLookupByLibrary.simpleMessage("Rimuovi partecipante"), "removeParticipantBody": m43, + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "removePublicLink": MessageLookupByLibrary.simpleMessage("Rimuovi link pubblico"), "removeShareItemsWarning": MessageLookupByLibrary.simpleMessage( diff --git a/mobile/lib/generated/intl/messages_ko.dart b/mobile/lib/generated/intl/messages_ko.dart index c91d849f6e..65e26e6312 100644 --- a/mobile/lib/generated/intl/messages_ko.dart +++ b/mobile/lib/generated/intl/messages_ko.dart @@ -34,6 +34,8 @@ class MessageLookup extends MessageLookupByLibrary { "addViewers": m1, "changeLocationOfSelectedItems": MessageLookupByLibrary.simpleMessage( "Change location of selected items?"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "contacts": MessageLookupByLibrary.simpleMessage("Contacts"), "createCollaborativeLink": MessageLookupByLibrary.simpleMessage("Create collaborative link"), @@ -44,7 +46,14 @@ class MessageLookup extends MessageLookupByLibrary { "editsToLocationWillOnlyBeSeenWithinEnte": MessageLookupByLibrary.simpleMessage( "Edits to location will only be seen within Ente"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "fileTypes": MessageLookupByLibrary.simpleMessage("File types"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "joinDiscord": MessageLookupByLibrary.simpleMessage("Join Discord"), "locations": MessageLookupByLibrary.simpleMessage("Locations"), "longPressAnEmailToVerifyEndToEndEncryption": @@ -55,6 +64,8 @@ class MessageLookup extends MessageLookupByLibrary { "Modify your query, or try searching for"), 
"moveToHiddenAlbum": MessageLookupByLibrary.simpleMessage("Move to hidden album"), + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "search": MessageLookupByLibrary.simpleMessage("Search"), "selectALocation": MessageLookupByLibrary.simpleMessage("Select a location"), diff --git a/mobile/lib/generated/intl/messages_nl.dart b/mobile/lib/generated/intl/messages_nl.dart index f6987973c3..b0f7b601fa 100644 --- a/mobile/lib/generated/intl/messages_nl.dart +++ b/mobile/lib/generated/intl/messages_nl.dart @@ -447,6 +447,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Samenvoegen op tijd"), "clubByFileName": MessageLookupByLibrary.simpleMessage("Samenvoegen op bestandsnaam"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("Code toegepast"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage( @@ -723,6 +725,10 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Logboek exporteren"), "exportYourData": MessageLookupByLibrary.simpleMessage("Exporteer je gegevens"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "faces": MessageLookupByLibrary.simpleMessage("Gezichten"), "failedToApplyCode": MessageLookupByLibrary.simpleMessage("Code toepassen mislukt"), @@ -771,6 +777,7 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("voor uw herinneringen"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Wachtwoord vergeten"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage("Gratis opslag geclaimd"), 
"freeStorageOnReferralSuccess": m24, diff --git a/mobile/lib/generated/intl/messages_no.dart b/mobile/lib/generated/intl/messages_no.dart index 0e5bd97b21..88d2b16328 100644 --- a/mobile/lib/generated/intl/messages_no.dart +++ b/mobile/lib/generated/intl/messages_no.dart @@ -39,6 +39,8 @@ class MessageLookup extends MessageLookupByLibrary { "cancel": MessageLookupByLibrary.simpleMessage("Avbryt"), "changeLocationOfSelectedItems": MessageLookupByLibrary.simpleMessage( "Change location of selected items?"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "confirmAccountDeletion": MessageLookupByLibrary.simpleMessage("Bekreft sletting av konto"), "confirmDeletePrompt": MessageLookupByLibrary.simpleMessage( @@ -57,12 +59,19 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage( "Edits to location will only be seen within Ente"), "email": MessageLookupByLibrary.simpleMessage("E-post"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), "enterValidEmail": MessageLookupByLibrary.simpleMessage( "Vennligst skriv inn en gyldig e-postadresse."), "enterYourEmailAddress": MessageLookupByLibrary.simpleMessage( "Skriv inn e-postadressen din"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "feedback": MessageLookupByLibrary.simpleMessage("Tilbakemelding"), "fileTypes": MessageLookupByLibrary.simpleMessage("File types"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "invalidEmailAddress": MessageLookupByLibrary.simpleMessage("Ugyldig e-postadresse"), "joinDiscord": MessageLookupByLibrary.simpleMessage("Join Discord"), @@ -77,6 +86,8 @@ class MessageLookup extends MessageLookupByLibrary { "Modify your query, or try searching for"), 
"moveToHiddenAlbum": MessageLookupByLibrary.simpleMessage("Move to hidden album"), + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "search": MessageLookupByLibrary.simpleMessage("Search"), "selectALocation": MessageLookupByLibrary.simpleMessage("Select a location"), diff --git a/mobile/lib/generated/intl/messages_pl.dart b/mobile/lib/generated/intl/messages_pl.dart index b3a922b0ae..096a0eb659 100644 --- a/mobile/lib/generated/intl/messages_pl.dart +++ b/mobile/lib/generated/intl/messages_pl.dart @@ -49,6 +49,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Zmień hasło"), "checkInboxAndSpamFolder": MessageLookupByLibrary.simpleMessage( "Sprawdź swoją skrzynkę odbiorczą (i spam), aby zakończyć weryfikację"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage( "Kod został skopiowany do schowka"), "confirm": MessageLookupByLibrary.simpleMessage("Potwierdź"), @@ -101,6 +103,8 @@ class MessageLookup extends MessageLookupByLibrary { "Wprowadź nowe hasło, którego możemy użyć do zaszyfrowania Twoich danych"), "enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( "Wprowadź hasło, którego możemy użyć do zaszyfrowania Twoich danych"), + "enterPersonName": + MessageLookupByLibrary.simpleMessage("Enter person name"), "enterValidEmail": MessageLookupByLibrary.simpleMessage( "Podaj poprawny adres e-mail."), "enterYourEmailAddress": @@ -109,10 +113,15 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Wprowadź hasło"), "enterYourRecoveryKey": MessageLookupByLibrary.simpleMessage( "Wprowadź swój klucz odzyskiwania"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage 
until all items are indexed."), "feedback": MessageLookupByLibrary.simpleMessage("Informacja zwrotna"), "fileTypes": MessageLookupByLibrary.simpleMessage("File types"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Nie pamiętam hasła"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "generatingEncryptionKeys": MessageLookupByLibrary.simpleMessage( "Generowanie kluczy szyfrujących..."), "howItWorks": MessageLookupByLibrary.simpleMessage("Jak to działa"), @@ -166,6 +175,8 @@ class MessageLookup extends MessageLookupByLibrary { "Jeśli zapomnisz hasła, jedynym sposobem odzyskania danych jest ten klucz."), "recoverySuccessful": MessageLookupByLibrary.simpleMessage("Odzyskano pomyślnie!"), + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remove person label"), "resendEmail": MessageLookupByLibrary.simpleMessage("Wyślij e-mail ponownie"), "resetPasswordTitle": diff --git a/mobile/lib/generated/intl/messages_pt.dart b/mobile/lib/generated/intl/messages_pt.dart index ef6dc5e54e..f4eaae9b20 100644 --- a/mobile/lib/generated/intl/messages_pt.dart +++ b/mobile/lib/generated/intl/messages_pt.dart @@ -98,7 +98,7 @@ class MessageLookup extends MessageLookupByLibrary { "${storageAmountInGB} GB cada vez que alguém se inscrever para um plano pago e aplica o seu código"; static String m25(freeAmount, storageUnit) => - "${freeAmount} ${storageUnit} grátis"; + "${freeAmount} ${storageUnit} livre"; static String m26(endDate) => "Teste gratuito acaba em ${endDate}"; @@ -225,6 +225,7 @@ class MessageLookup extends MessageLookupByLibrary { "Eu entendo que se eu perder minha senha, posso perder meus dados, já que meus dados são criptografados de ponta a ponta."), "activeSessions": MessageLookupByLibrary.simpleMessage("Sessões ativas"), + "addAName": MessageLookupByLibrary.simpleMessage("Adicione um nome"), "addANewEmail": MessageLookupByLibrary.simpleMessage("Adicionar um novo email"), "addCollaborator": @@ -445,6 +446,8 @@ class MessageLookup 
extends MessageLookupByLibrary { "Agrupar por tempo de captura"), "clubByFileName": MessageLookupByLibrary.simpleMessage( "Agrupar pelo nome de arquivo"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Progresso de agrupamento"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("Código aplicado"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage( @@ -587,7 +590,7 @@ class MessageLookup extends MessageLookupByLibrary { "descriptions": MessageLookupByLibrary.simpleMessage("Descrições"), "deselectAll": MessageLookupByLibrary.simpleMessage("Desmarcar todos"), "designedToOutlive": - MessageLookupByLibrary.simpleMessage("Feito para ter logenvidade"), + MessageLookupByLibrary.simpleMessage("Feito para ter longevidade"), "details": MessageLookupByLibrary.simpleMessage("Detalhes"), "devAccountChanged": MessageLookupByLibrary.simpleMessage( "A conta de desenvolvedor que usamos para publicar o Ente na App Store foi alterada. Por esse motivo, você precisará fazer entrar novamente.\n\nPedimos desculpas pelo inconveniente, mas isso era inevitável."), @@ -631,8 +634,9 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Dobre seu armazenamento"), "download": MessageLookupByLibrary.simpleMessage("Baixar"), "downloadFailed": - MessageLookupByLibrary.simpleMessage("Falha ao baixar"), - "downloading": MessageLookupByLibrary.simpleMessage("Baixando..."), + MessageLookupByLibrary.simpleMessage("Falha no download"), + "downloading": + MessageLookupByLibrary.simpleMessage("Fazendo download..."), "dropSupportEmail": m17, "duplicateFileCountWithStorageSaved": m18, "duplicateItemsGroup": m19, @@ -689,6 +693,8 @@ class MessageLookup extends MessageLookupByLibrary { "enterPassword": MessageLookupByLibrary.simpleMessage("Digite a senha"), "enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( "Insira a senha para criptografar seus dados"), + "enterPersonName": + 
MessageLookupByLibrary.simpleMessage("Inserir nome da pessoa"), "enterReferralCode": MessageLookupByLibrary.simpleMessage( "Insira o código de referência"), "enterThe6digitCodeFromnyourAuthenticatorApp": @@ -713,13 +719,17 @@ class MessageLookup extends MessageLookupByLibrary { "exportLogs": MessageLookupByLibrary.simpleMessage("Exportar logs"), "exportYourData": MessageLookupByLibrary.simpleMessage("Exportar seus dados"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Reconhecimento facial"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Por favor, note que isso resultará em uma largura de banda maior e uso de bateria até que todos os itens sejam indexados."), "faces": MessageLookupByLibrary.simpleMessage("Rostos"), "failedToApplyCode": MessageLookupByLibrary.simpleMessage("Falha ao aplicar o código"), "failedToCancel": MessageLookupByLibrary.simpleMessage("Falha ao cancelar"), - "failedToDownloadVideo": - MessageLookupByLibrary.simpleMessage("Falha ao baixar vídeo"), + "failedToDownloadVideo": MessageLookupByLibrary.simpleMessage( + "Falha ao fazer download do vídeo"), "failedToFetchOriginalForEdit": MessageLookupByLibrary.simpleMessage( "Falha ao obter original para edição"), "failedToFetchReferralDetails": MessageLookupByLibrary.simpleMessage( @@ -737,7 +747,7 @@ class MessageLookup extends MessageLookupByLibrary { "familyPlans": MessageLookupByLibrary.simpleMessage("Plano familiar"), "faq": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"), "faqs": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"), - "favorite": MessageLookupByLibrary.simpleMessage("Favoritar"), + "favorite": MessageLookupByLibrary.simpleMessage("Favorito"), "feedback": MessageLookupByLibrary.simpleMessage("Comentários"), "fileFailedToSaveToGallery": MessageLookupByLibrary.simpleMessage( "Falha ao salvar o arquivo na galeria"), @@ -754,11 +764,15 @@ class MessageLookup extends MessageLookupByLibrary { 
MessageLookupByLibrary.simpleMessage("Arquivos excluídos"), "filesSavedToGallery": MessageLookupByLibrary.simpleMessage("Arquivos salvos na galeria"), + "findPeopleByName": MessageLookupByLibrary.simpleMessage( + "Encontre pessoas rapidamente por nome"), "flip": MessageLookupByLibrary.simpleMessage("Inverter"), "forYourMemories": MessageLookupByLibrary.simpleMessage("para suas memórias"), "forgotPassword": MessageLookupByLibrary.simpleMessage("Esqueceu sua senha"), + "foundFaces": + MessageLookupByLibrary.simpleMessage("Rostos encontrados"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage( "Armazenamento gratuito reivindicado"), "freeStorageOnReferralSuccess": m24, @@ -904,8 +918,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Carregando galeria..."), "loadingMessage": MessageLookupByLibrary.simpleMessage("Carregando suas fotos..."), - "loadingModel": - MessageLookupByLibrary.simpleMessage("Baixando modelos..."), + "loadingModel": MessageLookupByLibrary.simpleMessage( + "Fazendo download de modelos..."), "localGallery": MessageLookupByLibrary.simpleMessage("Galeria local"), "location": MessageLookupByLibrary.simpleMessage("Local"), "locationName": MessageLookupByLibrary.simpleMessage("Nome do Local"), @@ -1056,6 +1070,7 @@ class MessageLookup extends MessageLookupByLibrary { "pendingItems": MessageLookupByLibrary.simpleMessage("Itens pendentes"), "pendingSync": MessageLookupByLibrary.simpleMessage("Sincronização pendente"), + "people": MessageLookupByLibrary.simpleMessage("Pessoas"), "peopleUsingYourCode": MessageLookupByLibrary.simpleMessage("Pessoas que usam seu código"), "permDeleteWarning": MessageLookupByLibrary.simpleMessage( @@ -1189,6 +1204,8 @@ class MessageLookup extends MessageLookupByLibrary { "removeParticipant": MessageLookupByLibrary.simpleMessage("Remover participante"), "removeParticipantBody": m43, + "removePersonLabel": + MessageLookupByLibrary.simpleMessage("Remover etiqueta da pessoa"), 
"removePublicLink": MessageLookupByLibrary.simpleMessage("Remover link público"), "removeShareItemsWarning": MessageLookupByLibrary.simpleMessage( @@ -1252,7 +1269,7 @@ class MessageLookup extends MessageLookupByLibrary { "searchDatesEmptySection": MessageLookupByLibrary.simpleMessage( "Pesquisar por data, mês ou ano"), "searchFaceEmptySection": MessageLookupByLibrary.simpleMessage( - "Encontre todas as fotos de uma pessoa"), + "Pessoas serão exibidas aqui uma vez que a indexação é feita"), "searchFileTypesAndNamesEmptySection": MessageLookupByLibrary.simpleMessage("Tipos de arquivo e nomes"), "searchHint1": MessageLookupByLibrary.simpleMessage( diff --git a/mobile/lib/generated/intl/messages_zh.dart b/mobile/lib/generated/intl/messages_zh.dart index 80cc135697..63b8668b50 100644 --- a/mobile/lib/generated/intl/messages_zh.dart +++ b/mobile/lib/generated/intl/messages_zh.dart @@ -124,7 +124,7 @@ class MessageLookup extends MessageLookupByLibrary { static String m37(providerName) => "如果您被收取费用,请用英语与 ${providerName} 的客服聊天"; - static String m38(endDate) => "免费试用有效期至 ${endDate}。\n之后您可以选择付费计划。"; + static String m38(endDate) => "免费试用有效期至 ${endDate}。\n您可以随后购买付费计划。"; static String m39(toEmail) => "请给我们发送电子邮件至 ${toEmail}"; @@ -382,6 +382,8 @@ class MessageLookup extends MessageLookupByLibrary { "close": MessageLookupByLibrary.simpleMessage("关闭"), "clubByCaptureTime": MessageLookupByLibrary.simpleMessage("按拍摄时间分组"), "clubByFileName": MessageLookupByLibrary.simpleMessage("按文件名排序"), + "clusteringProgress": + MessageLookupByLibrary.simpleMessage("Clustering progress"), "codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("代码已应用"), "codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage("代码已复制到剪贴板"), @@ -543,7 +545,7 @@ class MessageLookup extends MessageLookupByLibrary { "emailVerificationToggle": MessageLookupByLibrary.simpleMessage("电子邮件验证"), "emailYourLogs": MessageLookupByLibrary.simpleMessage("通过电子邮件发送您的日志"), - "empty": 
MessageLookupByLibrary.simpleMessage("空的"), + "empty": MessageLookupByLibrary.simpleMessage("清空"), "emptyTrash": MessageLookupByLibrary.simpleMessage("要清空回收站吗?"), "enableMaps": MessageLookupByLibrary.simpleMessage("启用地图"), "enableMapsDesc": MessageLookupByLibrary.simpleMessage( @@ -592,6 +594,10 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("此链接已过期。请选择新的过期时间或禁用链接有效期。"), "exportLogs": MessageLookupByLibrary.simpleMessage("导出日志"), "exportYourData": MessageLookupByLibrary.simpleMessage("导出您的数据"), + "faceRecognition": + MessageLookupByLibrary.simpleMessage("Face recognition"), + "faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage( + "Please note that this will result in a higher bandwidth and battery usage until all items are indexed."), "faces": MessageLookupByLibrary.simpleMessage("人脸"), "failedToApplyCode": MessageLookupByLibrary.simpleMessage("无法使用此代码"), "failedToCancel": MessageLookupByLibrary.simpleMessage("取消失败"), @@ -626,6 +632,7 @@ class MessageLookup extends MessageLookupByLibrary { "flip": MessageLookupByLibrary.simpleMessage("上下翻转"), "forYourMemories": MessageLookupByLibrary.simpleMessage("为您的回忆"), "forgotPassword": MessageLookupByLibrary.simpleMessage("忘记密码"), + "foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"), "freeStorageClaimed": MessageLookupByLibrary.simpleMessage("已领取的免费存储"), "freeStorageOnReferralSuccess": m24, "freeStorageSpace": m25, diff --git a/mobile/lib/generated/l10n.dart b/mobile/lib/generated/l10n.dart index 4c7679154f..4e2c53e29b 100644 --- a/mobile/lib/generated/l10n.dart +++ b/mobile/lib/generated/l10n.dart @@ -4034,10 +4034,10 @@ class S { ); } - /// `Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.` + /// `Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.` String playStoreFreeTrialValidTill(Object endDate) { return Intl.message( - 'Free trial valid till $endDate.\nYou can purchase a paid plan 
afterwards.', + 'Free trial valid till $endDate.\nYou can choose a paid plan afterwards.', name: 'playStoreFreeTrialValidTill', desc: '', args: [endDate], @@ -6969,10 +6969,10 @@ class S { ); } - /// `Find all photos of a person` + /// `Persons will be shown here once indexing is done` String get searchFaceEmptySection { return Intl.message( - 'Find all photos of a person', + 'Persons will be shown here once indexing is done', name: 'searchFaceEmptySection', desc: '', args: [], @@ -8168,6 +8168,16 @@ class S { ); } + /// `People` + String get people { + return Intl.message( + 'People', + name: 'people', + desc: '', + args: [], + ); + } + /// `Contents` String get contents { return Intl.message( @@ -8388,26 +8398,6 @@ class S { ); } - /// `Auto pair` - String get autoPair { - return Intl.message( - 'Auto pair', - name: 'autoPair', - desc: '', - args: [], - ); - } - - /// `Pair with PIN` - String get pairWithPin { - return Intl.message( - 'Pair with PIN', - name: 'pairWithPin', - desc: '', - args: [], - ); - } - /// `Device not found` String get deviceNotFound { return Intl.message( @@ -8468,6 +8458,26 @@ class S { ); } + /// `Add a name` + String get addAName { + return Intl.message( + 'Add a name', + name: 'addAName', + desc: '', + args: [], + ); + } + + /// `Find people quickly by name` + String get findPeopleByName { + return Intl.message( + 'Find people quickly by name', + name: 'findPeopleByName', + desc: '', + args: [], + ); + } + /// `{count, plural, zero {Add viewer} one {Add viewer} other {Add viewers}}` String addViewers(num count) { return Intl.plural( @@ -8594,6 +8604,26 @@ class S { ); } + /// `Enter person name` + String get enterPersonName { + return Intl.message( + 'Enter person name', + name: 'enterPersonName', + desc: '', + args: [], + ); + } + + /// `Remove person label` + String get removePersonLabel { + return Intl.message( + 'Remove person label', + name: 'removePersonLabel', + desc: '', + args: [], + ); + } + /// `Auto pair works only with 
devices that support Chromecast.` String get autoPairDesc { return Intl.message( @@ -8703,6 +8733,66 @@ class S { args: [], ); } + + /// `Auto pair` + String get autoPair { + return Intl.message( + 'Auto pair', + name: 'autoPair', + desc: '', + args: [], + ); + } + + /// `Pair with PIN` + String get pairWithPin { + return Intl.message( + 'Pair with PIN', + name: 'pairWithPin', + desc: '', + args: [], + ); + } + + /// `Face recognition` + String get faceRecognition { + return Intl.message( + 'Face recognition', + name: 'faceRecognition', + desc: '', + args: [], + ); + } + + /// `Please note that this will result in a higher bandwidth and battery usage until all items are indexed.` + String get faceRecognitionIndexingDescription { + return Intl.message( + 'Please note that this will result in a higher bandwidth and battery usage until all items are indexed.', + name: 'faceRecognitionIndexingDescription', + desc: '', + args: [], + ); + } + + /// `Found faces` + String get foundFaces { + return Intl.message( + 'Found faces', + name: 'foundFaces', + desc: '', + args: [], + ); + } + + /// `Clustering progress` + String get clusteringProgress { + return Intl.message( + 'Clustering progress', + name: 'clusteringProgress', + desc: '', + args: [], + ); + } } class AppLocalizationDelegate extends LocalizationsDelegate { diff --git a/mobile/lib/generated/protos/ente/common/box.pb.dart b/mobile/lib/generated/protos/ente/common/box.pb.dart new file mode 100644 index 0000000000..41518e9ae0 --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/box.pb.dart @@ -0,0 +1,111 @@ +// +// Generated code. Do not modify. 
+// source: ente/common/box.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:core' as $core; + +import 'package:protobuf/protobuf.dart' as $pb; + +/// CenterBox is a box where x,y is the center of the box +class CenterBox extends $pb.GeneratedMessage { + factory CenterBox({ + $core.double? x, + $core.double? y, + $core.double? height, + $core.double? width, + }) { + final $result = create(); + if (x != null) { + $result.x = x; + } + if (y != null) { + $result.y = y; + } + if (height != null) { + $result.height = height; + } + if (width != null) { + $result.width = width; + } + return $result; + } + CenterBox._() : super(); + factory CenterBox.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory CenterBox.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'CenterBox', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.common'), createEmptyInstance: create) + ..a<$core.double>(1, _omitFieldNames ? '' : 'x', $pb.PbFieldType.OF) + ..a<$core.double>(2, _omitFieldNames ? '' : 'y', $pb.PbFieldType.OF) + ..a<$core.double>(3, _omitFieldNames ? '' : 'height', $pb.PbFieldType.OF) + ..a<$core.double>(4, _omitFieldNames ? '' : 'width', $pb.PbFieldType.OF) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') + CenterBox clone() => CenterBox()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + CenterBox copyWith(void Function(CenterBox) updates) => super.copyWith((message) => updates(message as CenterBox)) as CenterBox; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static CenterBox create() => CenterBox._(); + CenterBox createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static CenterBox getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static CenterBox? _defaultInstance; + + @$pb.TagNumber(1) + $core.double get x => $_getN(0); + @$pb.TagNumber(1) + set x($core.double v) { $_setFloat(0, v); } + @$pb.TagNumber(1) + $core.bool hasX() => $_has(0); + @$pb.TagNumber(1) + void clearX() => clearField(1); + + @$pb.TagNumber(2) + $core.double get y => $_getN(1); + @$pb.TagNumber(2) + set y($core.double v) { $_setFloat(1, v); } + @$pb.TagNumber(2) + $core.bool hasY() => $_has(1); + @$pb.TagNumber(2) + void clearY() => clearField(2); + + @$pb.TagNumber(3) + $core.double get height => $_getN(2); + @$pb.TagNumber(3) + set height($core.double v) { $_setFloat(2, v); } + @$pb.TagNumber(3) + $core.bool hasHeight() => $_has(2); + @$pb.TagNumber(3) + void clearHeight() => clearField(3); + + @$pb.TagNumber(4) + $core.double get width => $_getN(3); + @$pb.TagNumber(4) + set width($core.double v) { $_setFloat(3, v); } + @$pb.TagNumber(4) + $core.bool hasWidth() => $_has(3); + @$pb.TagNumber(4) + void clearWidth() => clearField(4); +} + + +const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names'); +const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names'); diff --git 
a/mobile/lib/generated/protos/ente/common/box.pbenum.dart b/mobile/lib/generated/protos/ente/common/box.pbenum.dart new file mode 100644 index 0000000000..7310e57a03 --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/box.pbenum.dart @@ -0,0 +1,11 @@ +// +// Generated code. Do not modify. +// source: ente/common/box.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + diff --git a/mobile/lib/generated/protos/ente/common/box.pbjson.dart b/mobile/lib/generated/protos/ente/common/box.pbjson.dart new file mode 100644 index 0000000000..6c9ab3cb27 --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/box.pbjson.dart @@ -0,0 +1,38 @@ +// +// Generated code. Do not modify. +// source: ente/common/box.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:convert' as $convert; +import 'dart:core' as $core; +import 'dart:typed_data' as $typed_data; + +@$core.Deprecated('Use centerBoxDescriptor instead') +const CenterBox$json = { + '1': 'CenterBox', + '2': [ + {'1': 'x', '3': 1, '4': 1, '5': 2, '9': 0, '10': 'x', '17': true}, + {'1': 'y', '3': 2, '4': 1, '5': 2, '9': 1, '10': 'y', '17': true}, + {'1': 'height', '3': 3, '4': 1, '5': 2, '9': 2, '10': 'height', '17': true}, + {'1': 'width', '3': 4, '4': 1, '5': 2, '9': 3, '10': 'width', '17': true}, + ], + '8': [ + {'1': '_x'}, + {'1': '_y'}, + {'1': '_height'}, + {'1': '_width'}, + ], +}; + +/// Descriptor for `CenterBox`. Decode as a `google.protobuf.DescriptorProto`. 
+final $typed_data.Uint8List centerBoxDescriptor = $convert.base64Decode( + 'CglDZW50ZXJCb3gSEQoBeBgBIAEoAkgAUgF4iAEBEhEKAXkYAiABKAJIAVIBeYgBARIbCgZoZW' + 'lnaHQYAyABKAJIAlIGaGVpZ2h0iAEBEhkKBXdpZHRoGAQgASgCSANSBXdpZHRoiAEBQgQKAl94' + 'QgQKAl95QgkKB19oZWlnaHRCCAoGX3dpZHRo'); + diff --git a/mobile/lib/generated/protos/ente/common/box.pbserver.dart b/mobile/lib/generated/protos/ente/common/box.pbserver.dart new file mode 100644 index 0000000000..1e8625388d --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/box.pbserver.dart @@ -0,0 +1,14 @@ +// +// Generated code. Do not modify. +// source: ente/common/box.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names +// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +export 'box.pb.dart'; + diff --git a/mobile/lib/generated/protos/ente/common/point.pb.dart b/mobile/lib/generated/protos/ente/common/point.pb.dart new file mode 100644 index 0000000000..47f9b87ce3 --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/point.pb.dart @@ -0,0 +1,83 @@ +// +// Generated code. Do not modify. +// source: ente/common/point.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:core' as $core; + +import 'package:protobuf/protobuf.dart' as $pb; + +/// EPoint is a point in 2D space +class EPoint extends $pb.GeneratedMessage { + factory EPoint({ + $core.double? x, + $core.double? 
y, + }) { + final $result = create(); + if (x != null) { + $result.x = x; + } + if (y != null) { + $result.y = y; + } + return $result; + } + EPoint._() : super(); + factory EPoint.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory EPoint.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'EPoint', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.common'), createEmptyInstance: create) + ..a<$core.double>(1, _omitFieldNames ? '' : 'x', $pb.PbFieldType.OF) + ..a<$core.double>(2, _omitFieldNames ? '' : 'y', $pb.PbFieldType.OF) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + EPoint clone() => EPoint()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + EPoint copyWith(void Function(EPoint) updates) => super.copyWith((message) => updates(message as EPoint)) as EPoint; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static EPoint create() => EPoint._(); + EPoint createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static EPoint getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static EPoint? 
_defaultInstance; + + @$pb.TagNumber(1) + $core.double get x => $_getN(0); + @$pb.TagNumber(1) + set x($core.double v) { $_setFloat(0, v); } + @$pb.TagNumber(1) + $core.bool hasX() => $_has(0); + @$pb.TagNumber(1) + void clearX() => clearField(1); + + @$pb.TagNumber(2) + $core.double get y => $_getN(1); + @$pb.TagNumber(2) + set y($core.double v) { $_setFloat(1, v); } + @$pb.TagNumber(2) + $core.bool hasY() => $_has(1); + @$pb.TagNumber(2) + void clearY() => clearField(2); +} + + +const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names'); +const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names'); diff --git a/mobile/lib/generated/protos/ente/common/point.pbenum.dart b/mobile/lib/generated/protos/ente/common/point.pbenum.dart new file mode 100644 index 0000000000..3c242a2fcd --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/point.pbenum.dart @@ -0,0 +1,11 @@ +// +// Generated code. Do not modify. +// source: ente/common/point.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + diff --git a/mobile/lib/generated/protos/ente/common/point.pbjson.dart b/mobile/lib/generated/protos/ente/common/point.pbjson.dart new file mode 100644 index 0000000000..44d2d0712a --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/point.pbjson.dart @@ -0,0 +1,33 @@ +// +// Generated code. Do not modify. 
+// source: ente/common/point.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:convert' as $convert; +import 'dart:core' as $core; +import 'dart:typed_data' as $typed_data; + +@$core.Deprecated('Use ePointDescriptor instead') +const EPoint$json = { + '1': 'EPoint', + '2': [ + {'1': 'x', '3': 1, '4': 1, '5': 2, '9': 0, '10': 'x', '17': true}, + {'1': 'y', '3': 2, '4': 1, '5': 2, '9': 1, '10': 'y', '17': true}, + ], + '8': [ + {'1': '_x'}, + {'1': '_y'}, + ], +}; + +/// Descriptor for `EPoint`. Decode as a `google.protobuf.DescriptorProto`. +final $typed_data.Uint8List ePointDescriptor = $convert.base64Decode( + 'CgZFUG9pbnQSEQoBeBgBIAEoAkgAUgF4iAEBEhEKAXkYAiABKAJIAVIBeYgBAUIECgJfeEIECg' + 'JfeQ=='); + diff --git a/mobile/lib/generated/protos/ente/common/point.pbserver.dart b/mobile/lib/generated/protos/ente/common/point.pbserver.dart new file mode 100644 index 0000000000..66728e123a --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/point.pbserver.dart @@ -0,0 +1,14 @@ +// +// Generated code. Do not modify. 
+// source: ente/common/point.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names +// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +export 'point.pb.dart'; + diff --git a/mobile/lib/generated/protos/ente/common/vector.pb.dart b/mobile/lib/generated/protos/ente/common/vector.pb.dart new file mode 100644 index 0000000000..44aa7d7485 --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/vector.pb.dart @@ -0,0 +1,64 @@ +// +// Generated code. Do not modify. +// source: ente/common/vector.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:core' as $core; + +import 'package:protobuf/protobuf.dart' as $pb; + +/// Vector is generic message for dealing with lists of doubles +/// It should ideally be used independently and not as a submessage +class EVector extends $pb.GeneratedMessage { + factory EVector({ + $core.Iterable<$core.double>? values, + }) { + final $result = create(); + if (values != null) { + $result.values.addAll(values); + } + return $result; + } + EVector._() : super(); + factory EVector.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory EVector.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'EVector', package: const $pb.PackageName(_omitMessageNames ? 
'' : 'ente.common'), createEmptyInstance: create) + ..p<$core.double>(1, _omitFieldNames ? '' : 'values', $pb.PbFieldType.KD) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + EVector clone() => EVector()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + EVector copyWith(void Function(EVector) updates) => super.copyWith((message) => updates(message as EVector)) as EVector; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static EVector create() => EVector._(); + EVector createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static EVector getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static EVector? _defaultInstance; + + @$pb.TagNumber(1) + $core.List<$core.double> get values => $_getList(0); +} + + +const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names'); +const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names'); diff --git a/mobile/lib/generated/protos/ente/common/vector.pbenum.dart b/mobile/lib/generated/protos/ente/common/vector.pbenum.dart new file mode 100644 index 0000000000..c88d2648a1 --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/vector.pbenum.dart @@ -0,0 +1,11 @@ +// +// Generated code. Do not modify. 
+// source: ente/common/vector.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + diff --git a/mobile/lib/generated/protos/ente/common/vector.pbjson.dart b/mobile/lib/generated/protos/ente/common/vector.pbjson.dart new file mode 100644 index 0000000000..1aff5cb290 --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/vector.pbjson.dart @@ -0,0 +1,27 @@ +// +// Generated code. Do not modify. +// source: ente/common/vector.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:convert' as $convert; +import 'dart:core' as $core; +import 'dart:typed_data' as $typed_data; + +@$core.Deprecated('Use eVectorDescriptor instead') +const EVector$json = { + '1': 'EVector', + '2': [ + {'1': 'values', '3': 1, '4': 3, '5': 1, '10': 'values'}, + ], +}; + +/// Descriptor for `EVector`. Decode as a `google.protobuf.DescriptorProto`. +final $typed_data.Uint8List eVectorDescriptor = $convert.base64Decode( + 'CgdFVmVjdG9yEhYKBnZhbHVlcxgBIAMoAVIGdmFsdWVz'); + diff --git a/mobile/lib/generated/protos/ente/common/vector.pbserver.dart b/mobile/lib/generated/protos/ente/common/vector.pbserver.dart new file mode 100644 index 0000000000..dbf5ac36fa --- /dev/null +++ b/mobile/lib/generated/protos/ente/common/vector.pbserver.dart @@ -0,0 +1,14 @@ +// +// Generated code. Do not modify. 
+// source: ente/common/vector.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names +// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +export 'vector.pb.dart'; + diff --git a/mobile/lib/generated/protos/ente/ml/face.pb.dart b/mobile/lib/generated/protos/ente/ml/face.pb.dart new file mode 100644 index 0000000000..55d512b664 --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/face.pb.dart @@ -0,0 +1,169 @@ +// +// Generated code. Do not modify. +// source: ente/ml/face.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:core' as $core; + +import 'package:protobuf/protobuf.dart' as $pb; + +import '../common/box.pb.dart' as $0; +import '../common/point.pb.dart' as $1; + +class Detection extends $pb.GeneratedMessage { + factory Detection({ + $0.CenterBox? box, + $1.EPoint? landmarks, + }) { + final $result = create(); + if (box != null) { + $result.box = box; + } + if (landmarks != null) { + $result.landmarks = landmarks; + } + return $result; + } + Detection._() : super(); + factory Detection.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory Detection.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Detection', package: const $pb.PackageName(_omitMessageNames ? 
'' : 'ente.ml'), createEmptyInstance: create) + ..aOM<$0.CenterBox>(1, _omitFieldNames ? '' : 'box', subBuilder: $0.CenterBox.create) + ..aOM<$1.EPoint>(2, _omitFieldNames ? '' : 'landmarks', subBuilder: $1.EPoint.create) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + Detection clone() => Detection()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Detection copyWith(void Function(Detection) updates) => super.copyWith((message) => updates(message as Detection)) as Detection; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static Detection create() => Detection._(); + Detection createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static Detection getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static Detection? _defaultInstance; + + @$pb.TagNumber(1) + $0.CenterBox get box => $_getN(0); + @$pb.TagNumber(1) + set box($0.CenterBox v) { setField(1, v); } + @$pb.TagNumber(1) + $core.bool hasBox() => $_has(0); + @$pb.TagNumber(1) + void clearBox() => clearField(1); + @$pb.TagNumber(1) + $0.CenterBox ensureBox() => $_ensure(0); + + @$pb.TagNumber(2) + $1.EPoint get landmarks => $_getN(1); + @$pb.TagNumber(2) + set landmarks($1.EPoint v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasLandmarks() => $_has(1); + @$pb.TagNumber(2) + void clearLandmarks() => clearField(2); + @$pb.TagNumber(2) + $1.EPoint ensureLandmarks() => $_ensure(1); +} + +class Face extends $pb.GeneratedMessage { + factory Face({ + $core.String? id, + Detection? detection, + $core.double? 
confidence, + }) { + final $result = create(); + if (id != null) { + $result.id = id; + } + if (detection != null) { + $result.detection = detection; + } + if (confidence != null) { + $result.confidence = confidence; + } + return $result; + } + Face._() : super(); + factory Face.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory Face.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Face', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.ml'), createEmptyInstance: create) + ..aOS(1, _omitFieldNames ? '' : 'id') + ..aOM(2, _omitFieldNames ? '' : 'detection', subBuilder: Detection.create) + ..a<$core.double>(3, _omitFieldNames ? '' : 'confidence', $pb.PbFieldType.OF) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + Face clone() => Face()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Face copyWith(void Function(Face) updates) => super.copyWith((message) => updates(message as Face)) as Face; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static Face create() => Face._(); + Face createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static Face getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static Face? 
_defaultInstance; + + @$pb.TagNumber(1) + $core.String get id => $_getSZ(0); + @$pb.TagNumber(1) + set id($core.String v) { $_setString(0, v); } + @$pb.TagNumber(1) + $core.bool hasId() => $_has(0); + @$pb.TagNumber(1) + void clearId() => clearField(1); + + @$pb.TagNumber(2) + Detection get detection => $_getN(1); + @$pb.TagNumber(2) + set detection(Detection v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasDetection() => $_has(1); + @$pb.TagNumber(2) + void clearDetection() => clearField(2); + @$pb.TagNumber(2) + Detection ensureDetection() => $_ensure(1); + + @$pb.TagNumber(3) + $core.double get confidence => $_getN(2); + @$pb.TagNumber(3) + set confidence($core.double v) { $_setFloat(2, v); } + @$pb.TagNumber(3) + $core.bool hasConfidence() => $_has(2); + @$pb.TagNumber(3) + void clearConfidence() => clearField(3); +} + + +const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names'); +const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names'); diff --git a/mobile/lib/generated/protos/ente/ml/face.pbenum.dart b/mobile/lib/generated/protos/ente/ml/face.pbenum.dart new file mode 100644 index 0000000000..2eefe1f447 --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/face.pbenum.dart @@ -0,0 +1,11 @@ +// +// Generated code. Do not modify. +// source: ente/ml/face.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + diff --git a/mobile/lib/generated/protos/ente/ml/face.pbjson.dart b/mobile/lib/generated/protos/ente/ml/face.pbjson.dart new file mode 100644 index 0000000000..5aa614a8b8 --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/face.pbjson.dart @@ -0,0 +1,55 @@ +// +// Generated code. Do not modify. 
+// source: ente/ml/face.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:convert' as $convert; +import 'dart:core' as $core; +import 'dart:typed_data' as $typed_data; + +@$core.Deprecated('Use detectionDescriptor instead') +const Detection$json = { + '1': 'Detection', + '2': [ + {'1': 'box', '3': 1, '4': 1, '5': 11, '6': '.ente.common.CenterBox', '9': 0, '10': 'box', '17': true}, + {'1': 'landmarks', '3': 2, '4': 1, '5': 11, '6': '.ente.common.EPoint', '9': 1, '10': 'landmarks', '17': true}, + ], + '8': [ + {'1': '_box'}, + {'1': '_landmarks'}, + ], +}; + +/// Descriptor for `Detection`. Decode as a `google.protobuf.DescriptorProto`. +final $typed_data.Uint8List detectionDescriptor = $convert.base64Decode( + 'CglEZXRlY3Rpb24SLQoDYm94GAEgASgLMhYuZW50ZS5jb21tb24uQ2VudGVyQm94SABSA2JveI' + 'gBARI2CglsYW5kbWFya3MYAiABKAsyEy5lbnRlLmNvbW1vbi5FUG9pbnRIAVIJbGFuZG1hcmtz' + 'iAEBQgYKBF9ib3hCDAoKX2xhbmRtYXJrcw=='); + +@$core.Deprecated('Use faceDescriptor instead') +const Face$json = { + '1': 'Face', + '2': [ + {'1': 'id', '3': 1, '4': 1, '5': 9, '9': 0, '10': 'id', '17': true}, + {'1': 'detection', '3': 2, '4': 1, '5': 11, '6': '.ente.ml.Detection', '9': 1, '10': 'detection', '17': true}, + {'1': 'confidence', '3': 3, '4': 1, '5': 2, '9': 2, '10': 'confidence', '17': true}, + ], + '8': [ + {'1': '_id'}, + {'1': '_detection'}, + {'1': '_confidence'}, + ], +}; + +/// Descriptor for `Face`. Decode as a `google.protobuf.DescriptorProto`. 
+final $typed_data.Uint8List faceDescriptor = $convert.base64Decode( + 'CgRGYWNlEhMKAmlkGAEgASgJSABSAmlkiAEBEjUKCWRldGVjdGlvbhgCIAEoCzISLmVudGUubW' + 'wuRGV0ZWN0aW9uSAFSCWRldGVjdGlvbogBARIjCgpjb25maWRlbmNlGAMgASgCSAJSCmNvbmZp' + 'ZGVuY2WIAQFCBQoDX2lkQgwKCl9kZXRlY3Rpb25CDQoLX2NvbmZpZGVuY2U='); + diff --git a/mobile/lib/generated/protos/ente/ml/face.pbserver.dart b/mobile/lib/generated/protos/ente/ml/face.pbserver.dart new file mode 100644 index 0000000000..a2cd6ff853 --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/face.pbserver.dart @@ -0,0 +1,14 @@ +// +// Generated code. Do not modify. +// source: ente/ml/face.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names +// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +export 'face.pb.dart'; + diff --git a/mobile/lib/generated/protos/ente/ml/fileml.pb.dart b/mobile/lib/generated/protos/ente/ml/fileml.pb.dart new file mode 100644 index 0000000000..853f89bac4 --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/fileml.pb.dart @@ -0,0 +1,179 @@ +// +// Generated code. Do not modify. +// source: ente/ml/fileml.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:core' as $core; + +import 'package:fixnum/fixnum.dart' as $fixnum; +import 'package:protobuf/protobuf.dart' as $pb; + +import 'face.pb.dart' as $2; + +class FileML extends $pb.GeneratedMessage { + factory FileML({ + $fixnum.Int64? id, + $core.Iterable<$core.double>? 
clip, + }) { + final $result = create(); + if (id != null) { + $result.id = id; + } + if (clip != null) { + $result.clip.addAll(clip); + } + return $result; + } + FileML._() : super(); + factory FileML.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory FileML.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'FileML', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.ml'), createEmptyInstance: create) + ..aInt64(1, _omitFieldNames ? '' : 'id') + ..p<$core.double>(2, _omitFieldNames ? '' : 'clip', $pb.PbFieldType.KD) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + FileML clone() => FileML()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + FileML copyWith(void Function(FileML) updates) => super.copyWith((message) => updates(message as FileML)) as FileML; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static FileML create() => FileML._(); + FileML createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static FileML getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static FileML? 
_defaultInstance; + + @$pb.TagNumber(1) + $fixnum.Int64 get id => $_getI64(0); + @$pb.TagNumber(1) + set id($fixnum.Int64 v) { $_setInt64(0, v); } + @$pb.TagNumber(1) + $core.bool hasId() => $_has(0); + @$pb.TagNumber(1) + void clearId() => clearField(1); + + @$pb.TagNumber(2) + $core.List<$core.double> get clip => $_getList(1); +} + +class FileFaces extends $pb.GeneratedMessage { + factory FileFaces({ + $core.Iterable<$2.Face>? faces, + $core.int? height, + $core.int? width, + $core.int? version, + $core.String? error, + }) { + final $result = create(); + if (faces != null) { + $result.faces.addAll(faces); + } + if (height != null) { + $result.height = height; + } + if (width != null) { + $result.width = width; + } + if (version != null) { + $result.version = version; + } + if (error != null) { + $result.error = error; + } + return $result; + } + FileFaces._() : super(); + factory FileFaces.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory FileFaces.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'FileFaces', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.ml'), createEmptyInstance: create) + ..pc<$2.Face>(1, _omitFieldNames ? '' : 'faces', $pb.PbFieldType.PM, subBuilder: $2.Face.create) + ..a<$core.int>(2, _omitFieldNames ? '' : 'height', $pb.PbFieldType.O3) + ..a<$core.int>(3, _omitFieldNames ? '' : 'width', $pb.PbFieldType.O3) + ..a<$core.int>(4, _omitFieldNames ? '' : 'version', $pb.PbFieldType.O3) + ..aOS(5, _omitFieldNames ? '' : 'error') + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. 
' + 'Will be removed in next major version') + FileFaces clone() => FileFaces()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + FileFaces copyWith(void Function(FileFaces) updates) => super.copyWith((message) => updates(message as FileFaces)) as FileFaces; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static FileFaces create() => FileFaces._(); + FileFaces createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static FileFaces getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static FileFaces? _defaultInstance; + + @$pb.TagNumber(1) + $core.List<$2.Face> get faces => $_getList(0); + + @$pb.TagNumber(2) + $core.int get height => $_getIZ(1); + @$pb.TagNumber(2) + set height($core.int v) { $_setSignedInt32(1, v); } + @$pb.TagNumber(2) + $core.bool hasHeight() => $_has(1); + @$pb.TagNumber(2) + void clearHeight() => clearField(2); + + @$pb.TagNumber(3) + $core.int get width => $_getIZ(2); + @$pb.TagNumber(3) + set width($core.int v) { $_setSignedInt32(2, v); } + @$pb.TagNumber(3) + $core.bool hasWidth() => $_has(2); + @$pb.TagNumber(3) + void clearWidth() => clearField(3); + + @$pb.TagNumber(4) + $core.int get version => $_getIZ(3); + @$pb.TagNumber(4) + set version($core.int v) { $_setSignedInt32(3, v); } + @$pb.TagNumber(4) + $core.bool hasVersion() => $_has(3); + @$pb.TagNumber(4) + void clearVersion() => clearField(4); + + @$pb.TagNumber(5) + $core.String get error => $_getSZ(4); + @$pb.TagNumber(5) + set error($core.String v) { $_setString(4, v); } + @$pb.TagNumber(5) + $core.bool hasError() => $_has(4); + @$pb.TagNumber(5) + void clearError() => clearField(5); +} + + +const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names'); +const 
_omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names'); diff --git a/mobile/lib/generated/protos/ente/ml/fileml.pbenum.dart b/mobile/lib/generated/protos/ente/ml/fileml.pbenum.dart new file mode 100644 index 0000000000..71d796efe9 --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/fileml.pbenum.dart @@ -0,0 +1,11 @@ +// +// Generated code. Do not modify. +// source: ente/ml/fileml.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + diff --git a/mobile/lib/generated/protos/ente/ml/fileml.pbjson.dart b/mobile/lib/generated/protos/ente/ml/fileml.pbjson.dart new file mode 100644 index 0000000000..824741733e --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/fileml.pbjson.dart @@ -0,0 +1,57 @@ +// +// Generated code. Do not modify. +// source: ente/ml/fileml.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +import 'dart:convert' as $convert; +import 'dart:core' as $core; +import 'dart:typed_data' as $typed_data; + +@$core.Deprecated('Use fileMLDescriptor instead') +const FileML$json = { + '1': 'FileML', + '2': [ + {'1': 'id', '3': 1, '4': 1, '5': 3, '9': 0, '10': 'id', '17': true}, + {'1': 'clip', '3': 2, '4': 3, '5': 1, '10': 'clip'}, + ], + '8': [ + {'1': '_id'}, + ], +}; + +/// Descriptor for `FileML`. Decode as a `google.protobuf.DescriptorProto`. 
+final $typed_data.Uint8List fileMLDescriptor = $convert.base64Decode( + 'CgZGaWxlTUwSEwoCaWQYASABKANIAFICaWSIAQESEgoEY2xpcBgCIAMoAVIEY2xpcEIFCgNfaW' + 'Q='); + +@$core.Deprecated('Use fileFacesDescriptor instead') +const FileFaces$json = { + '1': 'FileFaces', + '2': [ + {'1': 'faces', '3': 1, '4': 3, '5': 11, '6': '.ente.ml.Face', '10': 'faces'}, + {'1': 'height', '3': 2, '4': 1, '5': 5, '9': 0, '10': 'height', '17': true}, + {'1': 'width', '3': 3, '4': 1, '5': 5, '9': 1, '10': 'width', '17': true}, + {'1': 'version', '3': 4, '4': 1, '5': 5, '9': 2, '10': 'version', '17': true}, + {'1': 'error', '3': 5, '4': 1, '5': 9, '9': 3, '10': 'error', '17': true}, + ], + '8': [ + {'1': '_height'}, + {'1': '_width'}, + {'1': '_version'}, + {'1': '_error'}, + ], +}; + +/// Descriptor for `FileFaces`. Decode as a `google.protobuf.DescriptorProto`. +final $typed_data.Uint8List fileFacesDescriptor = $convert.base64Decode( + 'CglGaWxlRmFjZXMSIwoFZmFjZXMYASADKAsyDS5lbnRlLm1sLkZhY2VSBWZhY2VzEhsKBmhlaW' + 'dodBgCIAEoBUgAUgZoZWlnaHSIAQESGQoFd2lkdGgYAyABKAVIAVIFd2lkdGiIAQESHQoHdmVy' + 'c2lvbhgEIAEoBUgCUgd2ZXJzaW9uiAEBEhkKBWVycm9yGAUgASgJSANSBWVycm9yiAEBQgkKB1' + '9oZWlnaHRCCAoGX3dpZHRoQgoKCF92ZXJzaW9uQggKBl9lcnJvcg=='); + diff --git a/mobile/lib/generated/protos/ente/ml/fileml.pbserver.dart b/mobile/lib/generated/protos/ente/ml/fileml.pbserver.dart new file mode 100644 index 0000000000..4cb208d271 --- /dev/null +++ b/mobile/lib/generated/protos/ente/ml/fileml.pbserver.dart @@ -0,0 +1,14 @@ +// +// Generated code. Do not modify. 
+// source: ente/ml/fileml.proto +// +// @dart = 2.12 + +// ignore_for_file: annotate_overrides, camel_case_types, comment_references +// ignore_for_file: constant_identifier_names +// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes +// ignore_for_file: non_constant_identifier_names, prefer_final_fields +// ignore_for_file: unnecessary_import, unnecessary_this, unused_import + +export 'fileml.pb.dart'; + diff --git a/mobile/lib/l10n/intl_cs.arb b/mobile/lib/l10n/intl_cs.arb index e7d3747252..449bdb7609 100644 --- a/mobile/lib/l10n/intl_cs.arb +++ b/mobile/lib/l10n/intl_cs.arb @@ -18,5 +18,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_de.arb b/mobile/lib/l10n/intl_de.arb index 0e5807e1e8..acee623ab5 100644 --- a/mobile/lib/l10n/intl_de.arb +++ b/mobile/lib/l10n/intl_de.arb @@ -1187,6 +1187,8 @@ "changeLocationOfSelectedItems": "Standort der gewählten Elemente ändern?", "editsToLocationWillOnlyBeSeenWithinEnte": "Änderungen des Standorts werden nur in ente sichtbar sein", "cleanUncategorized": "Unkategorisiert leeren", + "addAName": "Add a name", + "findPeopleByName": "Find people quickly by searching by name", "cleanUncategorizedDescription": "Entferne alle Dateien von \"Unkategorisiert\" die in anderen Alben vorhanden sind", "waitingForVerification": "Warte auf 
Bestätigung...", "passkey": "Passkey", @@ -1204,5 +1206,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_en.arb b/mobile/lib/l10n/intl_en.arb index 6bc8b59269..0fe06c95a6 100644 --- a/mobile/lib/l10n/intl_en.arb +++ b/mobile/lib/l10n/intl_en.arb @@ -569,7 +569,7 @@ "freeTrialValidTill": "Free trial valid till {endDate}", "validTill": "Valid till {endDate}", "addOnValidTill": "Your {storageAmount} add-on is valid till {endDate}", - "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.", + "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.", "subWillBeCancelledOn": "Your subscription will be cancelled on {endDate}", "subscription": "Subscription", "paymentDetails": "Payment details", @@ -987,7 +987,7 @@ "fileTypesAndNames": "File types and names", "location": "Location", "moments": "Moments", - "searchFaceEmptySection": "Find all photos of a person", + "searchFaceEmptySection": "Persons will be shown here once indexing is done", "searchDatesEmptySection": "Search by a date, month or year", "searchLocationEmptySection": "Group photos that are taken within some radius of a photo", "searchPeopleEmptySection": "Invite people, and you'll see all photos shared by them here", @@ -1171,6 +1171,7 
@@ } }, "faces": "Faces", + "people": "People", "contents": "Contents", "addNew": "Add new", "@addNew": { @@ -1196,14 +1197,14 @@ "verifyPasskey": "Verify passkey", "playOnTv": "Play album on TV", "pair": "Pair", - "autoPair": "Auto pair", - "pairWithPin": "Pair with PIN", "deviceNotFound": "Device not found", "castInstruction": "Visit cast.ente.io on the device you want to pair.\n\nEnter the code below to play the album on your TV.", "deviceCodeHint": "Enter the code", "joinDiscord": "Join Discord", "locations": "Locations", "descriptions": "Descriptions", + "addAName": "Add a name", + "findPeopleByName": "Find people quickly by name", "addViewers": "{count, plural, zero {Add viewer} one {Add viewer} other {Add viewers}}", "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", @@ -1216,6 +1217,8 @@ "customEndpoint": "Connected to {endpoint}", "createCollaborativeLink": "Create collaborative link", "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", "autoPairDesc": "Auto pair works only with devices that support Chromecast.", "manualPairDesc": "Pair with PIN works with any screen you wish to view your album on.", "connectToDevice": "Connect to device", @@ -1226,5 +1229,11 @@ "stopCastingBody": "Do you want to stop casting?", "castIPMismatchTitle": "Failed to cast album", "castIPMismatchBody": "Please make sure you are on the same network as the TV.", - "pairingComplete": "Pairing complete" + "pairingComplete": "Pairing complete", + "autoPair": "Auto pair", + "pairWithPin": "Pair with PIN", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ 
No newline at end of file diff --git a/mobile/lib/l10n/intl_es.arb b/mobile/lib/l10n/intl_es.arb index 6515371fa5..a472aaf8eb 100644 --- a/mobile/lib/l10n/intl_es.arb +++ b/mobile/lib/l10n/intl_es.arb @@ -980,5 +980,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_fr.arb b/mobile/lib/l10n/intl_fr.arb index 1d8e5f6d3a..a5b2f2fd07 100644 --- a/mobile/lib/l10n/intl_fr.arb +++ b/mobile/lib/l10n/intl_fr.arb @@ -1161,5 +1161,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_it.arb b/mobile/lib/l10n/intl_it.arb index c9655dd065..e81ac63777 100644 --- a/mobile/lib/l10n/intl_it.arb +++ b/mobile/lib/l10n/intl_it.arb @@ -1123,5 
+1123,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_ko.arb b/mobile/lib/l10n/intl_ko.arb index e7d3747252..449bdb7609 100644 --- a/mobile/lib/l10n/intl_ko.arb +++ b/mobile/lib/l10n/intl_ko.arb @@ -18,5 +18,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_nl.arb b/mobile/lib/l10n/intl_nl.arb index a8f854a430..682aee259a 100644 --- a/mobile/lib/l10n/intl_nl.arb +++ b/mobile/lib/l10n/intl_nl.arb @@ -1226,5 +1226,9 @@ "stopCastingBody": "Wil je stoppen met casten?", "castIPMismatchTitle": "Album casten mislukt", "castIPMismatchBody": "Zorg ervoor dat je op hetzelfde netwerk zit als de tv.", - "pairingComplete": 
"Koppeling voltooid" + "pairingComplete": "Koppeling voltooid", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_no.arb b/mobile/lib/l10n/intl_no.arb index 8908eadb01..697a9f3c46 100644 --- a/mobile/lib/l10n/intl_no.arb +++ b/mobile/lib/l10n/intl_no.arb @@ -32,5 +32,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_pl.arb b/mobile/lib/l10n/intl_pl.arb index 13d740614d..f9b66901e2 100644 --- a/mobile/lib/l10n/intl_pl.arb +++ b/mobile/lib/l10n/intl_pl.arb @@ -119,5 +119,11 @@ "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "enterPersonName": "Enter person name", + "removePersonLabel": "Remove person label", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery 
usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_pt.arb b/mobile/lib/l10n/intl_pt.arb index 08d932cdaa..10117b4263 100644 --- a/mobile/lib/l10n/intl_pt.arb +++ b/mobile/lib/l10n/intl_pt.arb @@ -410,7 +410,7 @@ "machineLearning": "Aprendizagem de máquina", "magicSearch": "Busca mágica", "magicSearchDescription": "Por favor, note que isso resultará em uma largura de banda maior e uso de bateria até que todos os itens sejam indexados.", - "loadingModel": "Baixando modelos...", + "loadingModel": "Fazendo download de modelos...", "waitingForWifi": "Esperando por Wi-Fi...", "status": "Estado", "indexedItems": "Itens indexados", @@ -471,7 +471,7 @@ "criticalUpdateAvailable": "Atualização crítica disponível", "updateAvailable": "Atualização disponível", "ignoreUpdate": "Ignorar", - "downloading": "Baixando...", + "downloading": "Fazendo download...", "cannotDeleteSharedFiles": "Não é possível excluir arquivos compartilhados", "theDownloadCouldNotBeCompleted": "Não foi possível concluir o download", "retry": "Tentar novamente", @@ -660,7 +660,7 @@ "endtoendEncryptedByDefault": "Criptografia de ponta a ponta por padrão", "safelyStored": "Armazenado com segurança", "atAFalloutShelter": "em um abrigo avançado", - "designedToOutlive": "Feito para ter logenvidade", + "designedToOutlive": "Feito para ter longevidade", "available": "Disponível", "everywhere": "em todos os lugares", "androidIosWebDesktop": "Android, iOS, Web, Desktop", @@ -734,7 +734,7 @@ "moveToAlbum": "Mover para álbum", "unhide": "Desocultar", "unarchive": "Desarquivar", - "favorite": "Favoritar", + "favorite": "Favorito", "removeFromFavorite": "Remover dos favoritos", "shareLink": "Compartilhar link", "createCollage": "Criar colagem", @@ -840,7 +840,7 @@ "download": "Baixar", "pressAndHoldToPlayVideo": "Pressione e segure para reproduzir o vídeo", "pressAndHoldToPlayVideoDetailed": 
"Pressione e segure na imagem para reproduzir o vídeo", - "downloadFailed": "Falha ao baixar", + "downloadFailed": "Falha no download", "deduplicateFiles": "Arquivos duplicados", "deselectAll": "Desmarcar todos", "reviewDeduplicateItems": "Por favor, reveja e exclua os itens que você acredita serem duplicados.", @@ -987,7 +987,7 @@ "fileTypesAndNames": "Tipos de arquivo e nomes", "location": "Local", "moments": "Momentos", - "searchFaceEmptySection": "Encontre todas as fotos de uma pessoa", + "searchFaceEmptySection": "Pessoas serão exibidas aqui uma vez que a indexação é feita", "searchDatesEmptySection": "Pesquisar por data, mês ou ano", "searchLocationEmptySection": "Fotos de grupo que estão sendo tiradas em algum raio da foto", "searchPeopleEmptySection": "Convide pessoas e você verá todas as fotos compartilhadas por elas aqui", @@ -1042,7 +1042,7 @@ "@storageUsageInfo": { "description": "Example: 1.2 GB of 2 GB used or 100 GB or 2TB used" }, - "freeStorageSpace": "{freeAmount} {storageUnit} grátis", + "freeStorageSpace": "{freeAmount} {storageUnit} livre", "appVersion": "Versão: {versionValue}", "verifyIDLabel": "Verificar", "fileInfoAddDescHint": "Adicionar descrição...", @@ -1132,7 +1132,7 @@ "sharedWithYou": "Compartilhado com você", "sharedByYou": "Compartilhado por você", "inviteYourFriendsToEnte": "Convide seus amigos ao Ente", - "failedToDownloadVideo": "Falha ao baixar vídeo", + "failedToDownloadVideo": "Falha ao fazer download do vídeo", "hiding": "Ocultando...", "unhiding": "Desocultando...", "successfullyHid": "Ocultado com sucesso", @@ -1171,6 +1171,7 @@ } }, "faces": "Rostos", + "people": "Pessoas", "contents": "Conteúdos", "addNew": "Adicionar novo", "@addNew": { @@ -1196,14 +1197,14 @@ "verifyPasskey": "Verificar chave de acesso", "playOnTv": "Reproduzir álbum na TV", "pair": "Parear", - "autoPair": "Pareamento automático", - "pairWithPin": "Parear com PIN", "deviceNotFound": "Dispositivo não encontrado", "castInstruction": "Visite cast.ente.io 
no dispositivo que você deseja parear.\n\ndigite o código abaixo para reproduzir o álbum em sua TV.", "deviceCodeHint": "Insira o código", "joinDiscord": "Junte-se ao Discord", "locations": "Locais", "descriptions": "Descrições", + "addAName": "Adicione um nome", + "findPeopleByName": "Encontre pessoas rapidamente por nome", "addViewers": "{count, plural, zero {Adicionar visualizador} one {Adicionar visualizador} other {Adicionar Visualizadores}}", "addCollaborators": "{count, plural, zero {Adicionar colaborador} one {Adicionar coloborador} other {Adicionar colaboradores}}", "longPressAnEmailToVerifyEndToEndEncryption": "Pressione e segure um e-mail para verificar a criptografia de ponta a ponta.", @@ -1216,6 +1217,8 @@ "customEndpoint": "Conectado a {endpoint}", "createCollaborativeLink": "Criar link colaborativo", "search": "Pesquisar", + "enterPersonName": "Inserir nome da pessoa", + "removePersonLabel": "Remover etiqueta da pessoa", "autoPairDesc": "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast.", "manualPairDesc": "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado.", "connectToDevice": "Conectar ao dispositivo", @@ -1226,5 +1229,11 @@ "stopCastingBody": "Você quer parar a transmissão?", "castIPMismatchTitle": "Falha ao transmitir álbum", "castIPMismatchBody": "Certifique-se de estar na mesma rede que a TV.", - "pairingComplete": "Pareamento concluído" + "pairingComplete": "Pareamento concluído", + "autoPair": "Pareamento automático", + "pairWithPin": "Parear com PIN", + "faceRecognition": "Reconhecimento facial", + "faceRecognitionIndexingDescription": "Por favor, note que isso resultará em uma largura de banda maior e uso de bateria até que todos os itens sejam indexados.", + "foundFaces": "Rostos encontrados", + "clusteringProgress": "Progresso de agrupamento" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_zh.arb b/mobile/lib/l10n/intl_zh.arb index 9a854a4f0f..933eea126f 
100644 --- a/mobile/lib/l10n/intl_zh.arb +++ b/mobile/lib/l10n/intl_zh.arb @@ -569,7 +569,7 @@ "freeTrialValidTill": "免费试用有效期至 {endDate}", "validTill": "有效期至 {endDate}", "addOnValidTill": "您的 {storageAmount} 插件有效期至 {endDate}", - "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n之后您可以选择付费计划。", + "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n您可以随后购买付费计划。", "subWillBeCancelledOn": "您的订阅将于 {endDate} 取消", "subscription": "订阅", "paymentDetails": "付款明细", @@ -942,7 +942,7 @@ "thisActionCannotBeUndone": "此操作无法撤销", "emptyTrash": "要清空回收站吗?", "permDeleteWarning": "回收站中的所有项目将被永久删除\n\n此操作无法撤消", - "empty": "空的", + "empty": "清空", "couldNotFreeUpSpace": "无法释放空间", "permanentlyDeleteFromDevice": "要从设备中永久删除吗?", "someOfTheFilesYouAreTryingToDeleteAre": "您要删除的部分文件仅在您的设备上可用,且删除后无法恢复", @@ -1226,5 +1226,9 @@ "stopCastingBody": "您想停止投放吗?", "castIPMismatchTitle": "投放相册失败", "castIPMismatchBody": "请确保您的设备与电视处于同一网络。", - "pairingComplete": "配对完成" + "pairingComplete": "配对完成", + "faceRecognition": "Face recognition", + "faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.", + "foundFaces": "Found faces", + "clusteringProgress": "Clustering progress" } \ No newline at end of file diff --git a/mobile/lib/main.dart b/mobile/lib/main.dart index d53a1e363a..fcde8d15b4 100644 --- a/mobile/lib/main.dart +++ b/mobile/lib/main.dart @@ -20,6 +20,7 @@ import 'package:photos/core/errors.dart'; import 'package:photos/core/network/network.dart'; import 'package:photos/db/upload_locks_db.dart'; import 'package:photos/ente_theme_data.dart'; +import "package:photos/face/db.dart"; import "package:photos/l10n/l10n.dart"; import "package:photos/service_locator.dart"; import 'package:photos/services/app_lifecycle_service.dart'; @@ -31,6 +32,9 @@ import 'package:photos/services/home_widget_service.dart'; import 'package:photos/services/local_file_update_service.dart'; import 
'package:photos/services/local_sync_service.dart'; import "package:photos/services/location_service.dart"; +import 'package:photos/services/machine_learning/face_ml/face_ml_service.dart'; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import 'package:photos/services/machine_learning/file_ml/remote_fileml_service.dart'; import "package:photos/services/machine_learning/machine_learning_controller.dart"; import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart'; import 'package:photos/services/memories_service.dart'; @@ -211,6 +215,7 @@ Future _init(bool isBackground, {String via = ''}) async { LocalFileUpdateService.instance.init(preferences); SearchService.instance.init(); StorageBonusService.instance.init(preferences); + RemoteFileMLService.instance.init(preferences); if (!isBackground && Platform.isAndroid && await HomeWidgetService.instance.countHomeWidgets() == 0) { @@ -221,9 +226,21 @@ Future _init(bool isBackground, {String via = ''}) async { // Can not including existing tf/ml binaries as they are not being built // from source. 
// See https://gitlab.com/fdroid/fdroiddata/-/merge_requests/12671#note_1294346819 - // if (!UpdateService.instance.isFdroidFlavor()) { - // unawaited(ObjectDetectionService.instance.init()); - // } + if (!UpdateService.instance.isFdroidFlavor()) { + // unawaited(ObjectDetectionService.instance.init()); + if (flagService.faceSearchEnabled) { + unawaited(FaceMlService.instance.init()); + } else { + if (LocalSettings.instance.isFaceIndexingEnabled) { + unawaited(LocalSettings.instance.toggleFaceIndexing()); + } + } + } + PersonService.init( + EntityService.instance, + FaceMLDataDB.instance, + preferences, + ); _logger.info("Initialization done"); } diff --git a/mobile/lib/models/api/entity/type.dart b/mobile/lib/models/api/entity/type.dart index 3631792ded..88e60d62f3 100644 --- a/mobile/lib/models/api/entity/type.dart +++ b/mobile/lib/models/api/entity/type.dart @@ -2,6 +2,7 @@ import "package:flutter/foundation.dart"; enum EntityType { location, + person, unknown, } @@ -9,6 +10,8 @@ EntityType typeFromString(String type) { switch (type) { case "location": return EntityType.location; + case "person": + return EntityType.location; } debugPrint("unexpected collection type $type"); return EntityType.unknown; @@ -19,6 +22,8 @@ extension EntityTypeExtn on EntityType { switch (this) { case EntityType.location: return "location"; + case EntityType.person: + return "person"; case EntityType.unknown: return "unknown"; } diff --git a/mobile/lib/models/file/file.dart b/mobile/lib/models/file/file.dart index d96a81e1ce..9df25bb051 100644 --- a/mobile/lib/models/file/file.dart +++ b/mobile/lib/models/file/file.dart @@ -243,6 +243,9 @@ class EnteFile { } String get downloadUrl { + if (localFileServer.isNotEmpty) { + return "$localFileServer/$uploadedFileID"; + } final endpoint = Configuration.instance.getHttpEndpoint(); if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) { return endpoint + "/files/download/" + uploadedFileID.toString(); @@ -256,6 +259,9 @@ 
class EnteFile { } String get thumbnailUrl { + if (localFileServer.isNotEmpty) { + return "$localFileServer/thumb/$uploadedFileID"; + } final endpoint = Configuration.instance.getHttpEndpoint(); if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) { return endpoint + "/files/preview/" + uploadedFileID.toString(); diff --git a/mobile/lib/models/gallery_type.dart b/mobile/lib/models/gallery_type.dart index 40426f7015..bb02f1bbca 100644 --- a/mobile/lib/models/gallery_type.dart +++ b/mobile/lib/models/gallery_type.dart @@ -18,6 +18,8 @@ enum GalleryType { searchResults, locationTag, quickLink, + peopleTag, + cluster, } extension GalleyTypeExtension on GalleryType { @@ -32,12 +34,14 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.locationTag: case GalleryType.quickLink: case GalleryType.uncategorized: + case GalleryType.peopleTag: case GalleryType.sharedCollection: return true; case GalleryType.hiddenSection: case GalleryType.hiddenOwnedCollection: case GalleryType.trash: + case GalleryType.cluster: return false; } } @@ -50,6 +54,7 @@ extension GalleyTypeExtension on GalleryType { return true; case GalleryType.hiddenSection: + case GalleryType.peopleTag: case GalleryType.hiddenOwnedCollection: case GalleryType.favorite: case GalleryType.searchResults: @@ -59,6 +64,7 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.trash: case GalleryType.sharedCollection: case GalleryType.locationTag: + case GalleryType.cluster: return false; } } @@ -75,12 +81,14 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.uncategorized: case GalleryType.locationTag: case GalleryType.quickLink: + case GalleryType.peopleTag: return true; case GalleryType.trash: case GalleryType.archive: case GalleryType.hiddenSection: case GalleryType.hiddenOwnedCollection: case GalleryType.sharedCollection: + case GalleryType.cluster: return false; } } @@ -98,8 +106,10 @@ extension GalleyTypeExtension on GalleryType { case 
GalleryType.localFolder: case GalleryType.locationTag: case GalleryType.quickLink: + case GalleryType.peopleTag: return true; case GalleryType.trash: + case GalleryType.cluster: case GalleryType.sharedCollection: return false; } @@ -114,8 +124,10 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.archive: case GalleryType.uncategorized: case GalleryType.locationTag: + case GalleryType.peopleTag: return true; case GalleryType.hiddenSection: + case GalleryType.cluster: case GalleryType.hiddenOwnedCollection: case GalleryType.localFolder: case GalleryType.trash: @@ -132,6 +144,7 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.quickLink: return true; case GalleryType.hiddenSection: + case GalleryType.peopleTag: case GalleryType.hiddenOwnedCollection: case GalleryType.uncategorized: case GalleryType.favorite: @@ -139,6 +152,7 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.homepage: case GalleryType.archive: case GalleryType.localFolder: + case GalleryType.cluster: case GalleryType.trash: case GalleryType.locationTag: return false; @@ -154,6 +168,7 @@ extension GalleyTypeExtension on GalleryType { return true; case GalleryType.hiddenSection: + case GalleryType.peopleTag: case GalleryType.hiddenOwnedCollection: case GalleryType.favorite: case GalleryType.searchResults: @@ -162,6 +177,7 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.trash: case GalleryType.sharedCollection: case GalleryType.locationTag: + case GalleryType.cluster: return false; } } @@ -182,10 +198,12 @@ extension GalleyTypeExtension on GalleryType { return true; case GalleryType.hiddenSection: + case GalleryType.peopleTag: case GalleryType.hiddenOwnedCollection: case GalleryType.localFolder: case GalleryType.trash: case GalleryType.favorite: + case GalleryType.cluster: case GalleryType.sharedCollection: return false; } @@ -203,12 +221,14 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.searchResults: case 
GalleryType.uncategorized: case GalleryType.locationTag: + case GalleryType.peopleTag: return true; case GalleryType.hiddenSection: case GalleryType.hiddenOwnedCollection: case GalleryType.quickLink: case GalleryType.favorite: + case GalleryType.cluster: case GalleryType.archive: case GalleryType.localFolder: case GalleryType.trash: @@ -244,7 +264,7 @@ extension GalleyTypeExtension on GalleryType { } bool showEditLocation() { - return this != GalleryType.sharedCollection; + return this != GalleryType.sharedCollection && this != GalleryType.cluster; } } @@ -334,7 +354,9 @@ extension GalleryAppBarExtn on GalleryType { case GalleryType.locationTag: case GalleryType.searchResults: return false; + case GalleryType.cluster: case GalleryType.uncategorized: + case GalleryType.peopleTag: case GalleryType.ownedCollection: case GalleryType.sharedCollection: case GalleryType.quickLink: diff --git a/mobile/lib/models/local_entity_data.dart b/mobile/lib/models/local_entity_data.dart index 9066e16fd9..910167b13e 100644 --- a/mobile/lib/models/local_entity_data.dart +++ b/mobile/lib/models/local_entity_data.dart @@ -1,6 +1,7 @@ import "package:equatable/equatable.dart"; import "package:photos/models/api/entity/type.dart"; +// LocalEntityData is a class that represents the data of an entity stored locally. 
class LocalEntityData { final String id; final EntityType type; diff --git a/mobile/lib/models/ml/ml_typedefs.dart b/mobile/lib/models/ml/ml_typedefs.dart new file mode 100644 index 0000000000..bcb23251ec --- /dev/null +++ b/mobile/lib/models/ml/ml_typedefs.dart @@ -0,0 +1,7 @@ +typedef Embedding = List; + +typedef Num3DInputMatrix = List>>; + +typedef Int3DInputMatrix = List>>; + +typedef Double3DInputMatrix = List>>; diff --git a/mobile/lib/models/ml/ml_versions.dart b/mobile/lib/models/ml/ml_versions.dart new file mode 100644 index 0000000000..857bef33c5 --- /dev/null +++ b/mobile/lib/models/ml/ml_versions.dart @@ -0,0 +1,3 @@ +const faceMlVersion = 1; +const clusterMlVersion = 1; +const minimumClusterSize = 2; \ No newline at end of file diff --git a/mobile/lib/models/search/generic_search_result.dart b/mobile/lib/models/search/generic_search_result.dart index 352886a509..a40f71fd32 100644 --- a/mobile/lib/models/search/generic_search_result.dart +++ b/mobile/lib/models/search/generic_search_result.dart @@ -8,8 +8,15 @@ class GenericSearchResult extends SearchResult { final List _files; final ResultType _type; final Function(BuildContext context)? 
onResultTap; + final Map params; - GenericSearchResult(this._type, this._name, this._files, {this.onResultTap}); + GenericSearchResult( + this._type, + this._name, + this._files, { + this.onResultTap, + this.params = const {}, + }); @override String name() { diff --git a/mobile/lib/models/search/search_constants.dart b/mobile/lib/models/search/search_constants.dart new file mode 100644 index 0000000000..6a0bcb8866 --- /dev/null +++ b/mobile/lib/models/search/search_constants.dart @@ -0,0 +1,3 @@ +const kPersonParamID = 'person_id'; +const kClusterParamId = 'cluster_id'; +const kFileID = 'file_id'; diff --git a/mobile/lib/models/search/search_types.dart b/mobile/lib/models/search/search_types.dart index 1ec197c7e7..a13fd57dcb 100644 --- a/mobile/lib/models/search/search_types.dart +++ b/mobile/lib/models/search/search_types.dart @@ -6,6 +6,7 @@ import "package:photos/core/event_bus.dart"; import "package:photos/events/collection_updated_event.dart"; import "package:photos/events/event.dart"; import "package:photos/events/location_tag_updated_event.dart"; +import "package:photos/events/people_changed_event.dart"; import "package:photos/generated/l10n.dart"; import "package:photos/models/collection/collection.dart"; import "package:photos/models/collection/collection_items.dart"; @@ -33,6 +34,7 @@ enum ResultType { fileCaption, event, shared, + faces, magic, } @@ -55,7 +57,7 @@ extension SectionTypeExtensions on SectionType { String sectionTitle(BuildContext context) { switch (this) { case SectionType.face: - return S.of(context).faces; + return S.of(context).people; case SectionType.content: return S.of(context).contents; case SectionType.moment: @@ -117,10 +119,12 @@ extension SectionTypeExtensions on SectionType { } } + bool get sortByName => this != SectionType.face; + bool get isEmptyCTAVisible { switch (this) { case SectionType.face: - return true; + return false; case SectionType.content: return false; case SectionType.moment: @@ -245,8 +249,7 @@ extension 
SectionTypeExtensions on SectionType { }) { switch (this) { case SectionType.face: - return Future.value(List.empty()); - + return SearchService.instance.getAllFace(limit); case SectionType.content: return Future.value(List.empty()); @@ -277,6 +280,8 @@ extension SectionTypeExtensions on SectionType { return [Bus.instance.on()]; case SectionType.album: return [Bus.instance.on()]; + case SectionType.face: + return [Bus.instance.on()]; default: return []; } diff --git a/mobile/lib/module/upload/model/multipart.dart b/mobile/lib/module/upload/model/multipart.dart new file mode 100644 index 0000000000..cda72d141c --- /dev/null +++ b/mobile/lib/module/upload/model/multipart.dart @@ -0,0 +1,66 @@ +import "package:photos/module/upload/model/xml.dart"; + +class PartETag extends XmlParsableObject { + final int partNumber; + final String eTag; + + PartETag(this.partNumber, this.eTag); + + @override + String get elementName => "Part"; + + @override + Map toMap() { + return { + "PartNumber": partNumber, + "ETag": eTag, + }; + } +} + +enum MultipartStatus { + pending, + uploaded, + completed, +} + +enum PartStatus { + pending, + uploaded, +} + +class MultipartInfo { + final List? partUploadStatus; + final Map? partETags; + final int? 
partSize; + final MultipartUploadURLs urls; + final MultipartStatus status; + + MultipartInfo({ + this.partUploadStatus, + this.partETags, + this.partSize, + this.status = MultipartStatus.pending, + required this.urls, + }); +} + +class MultipartUploadURLs { + final String objectKey; + final List partsURLs; + final String completeURL; + + MultipartUploadURLs({ + required this.objectKey, + required this.partsURLs, + required this.completeURL, + }); + + factory MultipartUploadURLs.fromMap(Map map) { + return MultipartUploadURLs( + objectKey: map["urls"]["objectKey"], + partsURLs: (map["urls"]["partURLs"] as List).cast(), + completeURL: map["urls"]["completeURL"], + ); + } +} diff --git a/mobile/lib/module/upload/model/xml.dart b/mobile/lib/module/upload/model/xml.dart new file mode 100644 index 0000000000..9490fc40cb --- /dev/null +++ b/mobile/lib/module/upload/model/xml.dart @@ -0,0 +1,41 @@ +// ignore_for_file: implementation_imports + +import "package:xml/xml.dart"; + +// used for classes that can be converted to xml +abstract class XmlParsableObject { + Map toMap(); + String get elementName; +} + +// for converting the response to xml +String convertJs2Xml(Map json) { + final builder = XmlBuilder(); + buildXml(builder, json); + return builder.buildDocument().toXmlString( + pretty: true, + indent: ' ', + ); +} + +// for building the xml node tree recursively +void buildXml(XmlBuilder builder, dynamic node) { + if (node is Map) { + node.forEach((key, value) { + builder.element(key, nest: () => buildXml(builder, value)); + }); + } else if (node is List) { + for (var item in node) { + buildXml(builder, item); + } + } else if (node is XmlParsableObject) { + builder.element( + node.elementName, + nest: () { + buildXml(builder, node.toMap()); + }, + ); + } else { + builder.text(node.toString()); + } +} diff --git a/mobile/lib/module/upload/service/multipart.dart b/mobile/lib/module/upload/service/multipart.dart new file mode 100644 index 0000000000..ad0d19703a --- 
/dev/null +++ b/mobile/lib/module/upload/service/multipart.dart @@ -0,0 +1,266 @@ +import "dart:io"; + +import "package:dio/dio.dart"; +import "package:ente_feature_flag/ente_feature_flag.dart"; +import "package:flutter/foundation.dart"; +import "package:logging/logging.dart"; +import "package:photos/core/constants.dart"; +import "package:photos/db/upload_locks_db.dart"; +import "package:photos/models/encryption_result.dart"; +import "package:photos/module/upload/model/multipart.dart"; +import "package:photos/module/upload/model/xml.dart"; +import "package:photos/services/collections_service.dart"; +import "package:photos/utils/crypto_util.dart"; + +class MultiPartUploader { + final Dio _enteDio; + final Dio _s3Dio; + final UploadLocksDB _db; + final FlagService _featureFlagService; + late final Logger _logger = Logger("MultiPartUploader"); + + MultiPartUploader( + this._enteDio, + this._s3Dio, + this._db, + this._featureFlagService, + ); + + Future getEncryptionResult( + String localId, + String fileHash, + int collectionID, + ) async { + final collectionKey = + CollectionsService.instance.getCollectionKey(collectionID); + final result = + await _db.getFileEncryptionData(localId, fileHash, collectionID); + final encryptedFileKey = CryptoUtil.base642bin(result.encryptedFileKey); + final fileNonce = CryptoUtil.base642bin(result.fileNonce); + + final encryptKeyNonce = CryptoUtil.base642bin(result.keyNonce); + + return EncryptionResult( + key: CryptoUtil.decryptSync( + encryptedFileKey, + collectionKey, + encryptKeyNonce, + ), + header: fileNonce, + ); + } + + int get multipartPartSizeForUpload { + if (_featureFlagService.internalUser) { + return multipartPartSizeInternal; + } + return multipartPartSize; + } + + Future calculatePartCount(int fileSize) async { + // Multipart upload is only enabled for internal users + // and debug builds till it's battle tested. 
+ if (!_featureFlagService.internalUser) return 1; + + final partCount = (fileSize / multipartPartSizeForUpload).ceil(); + return partCount; + } + + Future getMultipartUploadURLs(int count) async { + try { + assert( + _featureFlagService.internalUser, + "Multipart upload should not be enabled for external users.", + ); + final response = await _enteDio.get( + "/files/multipart-upload-urls", + queryParameters: { + "count": count, + }, + ); + + return MultipartUploadURLs.fromMap(response.data); + } on Exception catch (e) { + _logger.severe('failed to get multipart url', e); + rethrow; + } + } + + Future createTableEntry( + String localId, + String fileHash, + int collectionID, + MultipartUploadURLs urls, + String encryptedFileName, + int fileSize, + Uint8List fileKey, + Uint8List fileNonce, + ) async { + final collectionKey = + CollectionsService.instance.getCollectionKey(collectionID); + + final encryptedResult = CryptoUtil.encryptSync( + fileKey, + collectionKey, + ); + + await _db.createTrackUploadsEntry( + localId, + fileHash, + collectionID, + urls, + encryptedFileName, + fileSize, + CryptoUtil.bin2base64(encryptedResult.encryptedData!), + CryptoUtil.bin2base64(fileNonce), + CryptoUtil.bin2base64(encryptedResult.nonce!), + partSize: multipartPartSizeForUpload, + ); + } + + Future putExistingMultipartFile( + File encryptedFile, + String localId, + String fileHash, + int collectionID, + ) async { + final multipartInfo = + await _db.getCachedLinks(localId, fileHash, collectionID); + await _db.updateLastAttempted(localId, fileHash, collectionID); + + Map etags = multipartInfo.partETags ?? 
{}; + + if (multipartInfo.status == MultipartStatus.pending) { + // upload individual parts and get their etags + etags = await _uploadParts(multipartInfo, encryptedFile); + } + + if (multipartInfo.status != MultipartStatus.completed) { + // complete the multipart upload + await _completeMultipartUpload( + multipartInfo.urls.objectKey, + etags, + multipartInfo.urls.completeURL, + ); + } + + return multipartInfo.urls.objectKey; + } + + Future putMultipartFile( + MultipartUploadURLs urls, + File encryptedFile, + ) async { + // upload individual parts and get their etags + final etags = await _uploadParts( + MultipartInfo(urls: urls), + encryptedFile, + ); + + // complete the multipart upload + await _completeMultipartUpload(urls.objectKey, etags, urls.completeURL); + + return urls.objectKey; + } + + Future> _uploadParts( + MultipartInfo partInfo, + File encryptedFile, + ) async { + final partsURLs = partInfo.urls.partsURLs; + final partUploadStatus = partInfo.partUploadStatus; + final partsLength = partsURLs.length; + final etags = partInfo.partETags ?? {}; + + int i = 0; + final partSize = partInfo.partSize ?? multipartPartSizeForUpload; + + // Go to the first part that is not uploaded + while (i < (partUploadStatus?.length ?? 0) && + (partUploadStatus?[i] ?? false)) { + i++; + } + + final int encFileLength = encryptedFile.lengthSync(); + // Start parts upload + int count = 0; + while (i < partsLength) { + count++; + final partURL = partsURLs[i]; + final isLastPart = i == partsLength - 1; + final fileSize = isLastPart ? encFileLength % partSize : partSize; + _logger.info( + "Uploading part ${i + 1} / $partsLength of size $fileSize bytes (total size $encFileLength).", + ); + if (kDebugMode && count > 3) { + throw Exception( + 'Forced exception to test multipart upload retry mechanism.', + ); + } + final response = await _s3Dio.put( + partURL, + data: encryptedFile.openRead( + i * partSize, + isLastPart ? 
null : (i + 1) * partSize, + ), + options: Options( + headers: { + Headers.contentLengthHeader: fileSize, + }, + ), + ); + + final eTag = response.headers.value("etag"); + + if (eTag?.isEmpty ?? true) { + throw Exception('ETAG_MISSING'); + } + + etags[i] = eTag!; + + await _db.updatePartStatus(partInfo.urls.objectKey, i, eTag); + i++; + } + + await _db.updateTrackUploadStatus( + partInfo.urls.objectKey, + MultipartStatus.uploaded, + ); + + return etags; + } + + Future _completeMultipartUpload( + String objectKey, + Map partEtags, + String completeURL, + ) async { + final body = convertJs2Xml({ + 'CompleteMultipartUpload': partEtags.entries + .map( + (e) => PartETag( + e.key + 1, + e.value, + ), + ) + .toList(), + }).replaceAll('"', '').replaceAll('"', ''); + + try { + await _s3Dio.post( + completeURL, + data: body, + options: Options( + contentType: "text/xml", + ), + ); + await _db.updateTrackUploadStatus( + objectKey, + MultipartStatus.completed, + ); + } catch (e) { + Logger("MultipartUpload").severe(e); + rethrow; + } + } +} diff --git a/mobile/lib/services/entity_service.dart b/mobile/lib/services/entity_service.dart index e681f37b71..6ffe87358b 100644 --- a/mobile/lib/services/entity_service.dart +++ b/mobile/lib/services/entity_service.dart @@ -50,6 +50,10 @@ class EntityService { return await _db.getEntities(type); } + Future getEntity(EntityType type, String id) async { + return await _db.getEntity(type, id); + } + Future addOrUpdate( EntityType type, String plainText, { @@ -57,13 +61,16 @@ class EntityService { }) async { final key = await getOrCreateEntityKey(type); final encryptedKeyData = await CryptoUtil.encryptChaCha( - utf8.encode(plainText) as Uint8List, + utf8.encode(plainText), key, ); final String encryptedData = CryptoUtil.bin2base64(encryptedKeyData.encryptedData!); final String header = CryptoUtil.bin2base64(encryptedKeyData.header!); - debugPrint("Adding entity of type: " + type.typeToString()); + debugPrint( + " ${id == null ? 
'Adding' : 'Updating'} entity of type: " + + type.typeToString(), + ); final EntityData data = id == null ? await _gateway.createEntity(type, encryptedData, header) : await _gateway.updateEntity(type, id, encryptedData, header); @@ -87,6 +94,7 @@ class EntityService { Future syncEntities() async { try { await _remoteToLocalSync(EntityType.location); + await _remoteToLocalSync(EntityType.person); } catch (e) { _logger.severe("Failed to sync entities", e); } diff --git a/mobile/lib/services/machine_learning/face_ml/face_alignment/alignment_result.dart b/mobile/lib/services/machine_learning/face_ml/face_alignment/alignment_result.dart new file mode 100644 index 0000000000..41fd88b61c --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_alignment/alignment_result.dart @@ -0,0 +1,36 @@ +class AlignmentResult { + final List> affineMatrix; // 3x3 + final List center; // [x, y] + final double size; // 1 / scale + final double rotation; // atan2(simRotation[1][0], simRotation[0][0]); + + AlignmentResult({required this.affineMatrix, required this.center, required this.size, required this.rotation}); + + AlignmentResult.empty() + : affineMatrix = >[ + [1, 0, 0], + [0, 1, 0], + [0, 0, 1], + ], + center = [0, 0], + size = 1, + rotation = 0; + + factory AlignmentResult.fromJson(Map json) { + return AlignmentResult( + affineMatrix: (json['affineMatrix'] as List) + .map((item) => List.from(item)) + .toList(), + center: List.from(json['center'] as List), + size: json['size'] as double, + rotation: json['rotation'] as double, + ); + } + + Map toJson() => { + 'affineMatrix': affineMatrix, + 'center': center, + 'size': size, + 'rotation': rotation, + }; +} \ No newline at end of file diff --git a/mobile/lib/services/machine_learning/face_ml/face_alignment/similarity_transform.dart b/mobile/lib/services/machine_learning/face_ml/face_alignment/similarity_transform.dart new file mode 100644 index 0000000000..0d8e7ab3ae --- /dev/null +++ 
b/mobile/lib/services/machine_learning/face_ml/face_alignment/similarity_transform.dart @@ -0,0 +1,171 @@ +import 'dart:math' show atan2; +import 'package:ml_linalg/linalg.dart'; +import 'package:photos/extensions/ml_linalg_extensions.dart'; +import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart'; + +/// Class to compute the similarity transform between two sets of points. +/// +/// The class estimates the parameters of the similarity transformation via the `estimate` function. +/// After estimation, the transformation can be applied to an image using the `warpAffine` function. +class SimilarityTransform { + Matrix _params = Matrix.fromList([ + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0, 0, 1], + ]); + List _center = [0, 0]; // [x, y] + double _size = 1; // 1 / scale + double _rotation = 0; // atan2(simRotation[1][0], simRotation[0][0]); + + final arcface4Landmarks = [ + [38.2946, 51.6963], + [73.5318, 51.5014], + [56.0252, 71.7366], + [56.1396, 92.2848], + ]; + final arcface5Landmarks = [ + [38.2946, 51.6963], + [73.5318, 51.5014], + [56.0252, 71.7366], + [41.5493, 92.3655], + [70.7299, 92.2041], + ]; + get arcfaceNormalized4 => arcface4Landmarks + .map((list) => list.map((value) => value / 112.0).toList()) + .toList(); + get arcfaceNormalized5 => arcface5Landmarks + .map((list) => list.map((value) => value / 112.0).toList()) + .toList(); + + List> get paramsList => _params.to2DList(); + + // singleton pattern + SimilarityTransform._privateConstructor(); + static final instance = SimilarityTransform._privateConstructor(); + factory SimilarityTransform() => instance; + + void _cleanParams() { + _params = Matrix.fromList([ + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0, 0, 1], + ]); + _center = [0, 0]; + _size = 1; + _rotation = 0; + } + + /// Function to estimate the parameters of the affine transformation. These parameters are stored in the class variable params. + /// + /// Returns a tuple of (AlignmentResult, bool). 
The bool indicates whether the parameters are valid or not. + /// + /// Runs efficiently in about 1-3 ms after initial warm-up. + /// + /// It takes the source and destination points as input and returns the + /// parameters of the affine transformation as output. The function + /// returns false if the parameters cannot be estimated. The function + /// estimates the parameters by solving a least-squares problem using + /// the Umeyama algorithm, via [_umeyama]. + (AlignmentResult, bool) estimate(List> src) { + _cleanParams(); + final (params, center, size, rotation) = + _umeyama(src, arcfaceNormalized5, true); + _params = params; + _center = center; + _size = size; + _rotation = rotation; + final alignmentResult = AlignmentResult( + affineMatrix: paramsList, + center: _center, + size: _size, + rotation: _rotation, + ); + // We check for NaN in the transformation matrix params. + final isNoNanInParam = + !_params.asFlattenedList.any((element) => element.isNaN); + return (alignmentResult, isNoNanInParam); + } + + static (Matrix, List, double, double) _umeyama( + List> src, + List> dst, [ + bool estimateScale = true, + ]) { + final srcMat = Matrix.fromList( + src, + // .map((list) => list.map((value) => value.toDouble()).toList()) + // .toList(), + ); + final dstMat = Matrix.fromList(dst); + final num = srcMat.rowCount; + final dim = srcMat.columnCount; + + // Compute mean of src and dst. + final srcMean = srcMat.mean(Axis.columns); + final dstMean = dstMat.mean(Axis.columns); + + // Subtract mean from src and dst. + final srcDemean = srcMat.mapRows((vector) => vector - srcMean); + final dstDemean = dstMat.mapRows((vector) => vector - dstMean); + + // Eq. (38). + final A = (dstDemean.transpose() * srcDemean) / num; + + // Eq. (39). 
+ var d = Vector.filled(dim, 1.0); + if (A.determinant() < 0) { + d = d.set(dim - 1, -1); + } + + var T = Matrix.identity(dim + 1); + + final svdResult = A.svd(); + final Matrix U = svdResult['U']!; + final Vector S = svdResult['S']!; + final Matrix V = svdResult['V']!; + + // Eq. (40) and (43). + final rank = A.matrixRank(); + if (rank == 0) { + return (T * double.nan, [0, 0], 1, 0); + } else if (rank == dim - 1) { + if (U.determinant() * V.determinant() > 0) { + T = T.setSubMatrix(0, dim, 0, dim, U * V); + } else { + final s = d[dim - 1]; + d = d.set(dim - 1, -1); + final replacement = U * Matrix.diagonal(d.toList()) * V; + T = T.setSubMatrix(0, dim, 0, dim, replacement); + d = d.set(dim - 1, s); + } + } else { + final replacement = U * Matrix.diagonal(d.toList()) * V; + T = T.setSubMatrix(0, dim, 0, dim, replacement); + } + final Matrix simRotation = U * Matrix.diagonal(d.toList()) * V; + + var scale = 1.0; + if (estimateScale) { + // Eq. (41) and (42). + scale = 1.0 / srcDemean.variance(Axis.columns).sum() * (S * d).sum(); + } + + final subTIndices = Iterable.generate(dim, (index) => index); + final subT = T.sample(rowIndices: subTIndices, columnIndices: subTIndices); + final newSubT = dstMean - (subT * srcMean) * scale; + T = T.setValues(0, dim, dim, dim + 1, newSubT); + final newNewSubT = + T.sample(rowIndices: subTIndices, columnIndices: subTIndices) * scale; + T = T.setSubMatrix(0, dim, 0, dim, newNewSubT); + + // final List translation = [T[0][2], T[1][2]]; + // final simRotation = replacement?; + final size = 1 / scale; + final rotation = atan2(simRotation[1][0], simRotation[0][0]); + final meanTranslation = (dstMean - 0.5) * size; + final centerMat = srcMean - meanTranslation; + final List center = [centerMat[0], centerMat[1]]; + + return (T, center, size, rotation); + } +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_clustering/clusters_mapping.dart b/mobile/lib/services/machine_learning/face_ml/face_clustering/clusters_mapping.dart 
new file mode 100644 index 0000000000..77be47e2b2 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_clustering/clusters_mapping.dart @@ -0,0 +1,22 @@ +import "package:photos/face/model/person.dart"; + +enum MappingSource { + local, + remote, +} + +class ClustersMapping { + final Map> fileIDToClusterIDs; + final Map clusterToPersonID; + // personIDToPerson is a map of personID to PersonEntity, and it's same for + // both local and remote sources + final Map personIDToPerson; + final MappingSource source; + + ClustersMapping({ + required this.fileIDToClusterIDs, + required this.clusterToPersonID, + required this.personIDToPerson, + required this.source, + }); +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_clustering/cosine_distance.dart b/mobile/lib/services/machine_learning/face_ml/face_clustering/cosine_distance.dart new file mode 100644 index 0000000000..0611a1d838 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_clustering/cosine_distance.dart @@ -0,0 +1,79 @@ +import 'dart:math' show sqrt; + +import "package:ml_linalg/linalg.dart"; + +/// Calculates the cosine distance between two embeddings/vectors using SIMD from ml_linalg +/// +/// WARNING: This assumes both vectors are already normalized! +double cosineDistanceSIMD(Vector vector1, Vector vector2) { + if (vector1.length != vector2.length) { + throw ArgumentError('Vectors must be the same length'); + } + + return 1 - vector1.dot(vector2); +} + +/// Calculates the cosine distance between two embeddings/vectors using SIMD from ml_linalg +/// +/// WARNING: Only use when you're not sure if vectors are normalized. If you're sure they are, use [cosineDistanceSIMD] instead for better performance. 
+double cosineDistanceSIMDSafe(Vector vector1, Vector vector2) { + if (vector1.length != vector2.length) { + throw ArgumentError('Vectors must be the same length'); + } + + return vector1.distanceTo(vector2, distance: Distance.cosine); +} + +/// Calculates the cosine distance between two embeddings/vectors. +/// +/// Throws an ArgumentError if the vectors are of different lengths or +/// if either of the vectors has a magnitude of zero. +double cosineDistance(List vector1, List vector2) { + if (vector1.length != vector2.length) { + throw ArgumentError('Vectors must be the same length'); + } + + double dotProduct = 0.0; + double magnitude1 = 0.0; + double magnitude2 = 0.0; + + for (int i = 0; i < vector1.length; i++) { + dotProduct += vector1[i] * vector2[i]; + magnitude1 += vector1[i] * vector1[i]; + magnitude2 += vector2[i] * vector2[i]; + } + + magnitude1 = sqrt(magnitude1); + magnitude2 = sqrt(magnitude2); + + // Avoid division by zero. This should never happen. If it does, then one of the vectors contains only zeros. + if (magnitude1 == 0 || magnitude2 == 0) { + throw ArgumentError('Vectors must not have a magnitude of zero'); + } + + final double similarity = dotProduct / (magnitude1 * magnitude2); + + // Cosine distance is the complement of cosine similarity + return 1.0 - similarity; +} + +// cosineDistForNormVectors calculates the cosine distance between two normalized embeddings/vectors. 
+@pragma('vm:entry-point') +double cosineDistForNormVectors(List vector1, List vector2) { + if (vector1.length != vector2.length) { + throw ArgumentError('Vectors must be the same length'); + } + double dotProduct = 0.0; + for (int i = 0; i < vector1.length; i++) { + dotProduct += vector1[i] * vector2[i]; + } + return 1.0 - dotProduct; +} + +double calculateSqrDistance(List v1, List v2) { + double sum = 0; + for (int i = 0; i < v1.length; i++) { + sum += (v1[i] - v2[i]) * (v1[i] - v2[i]); + } + return sqrt(sum); +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_clustering/face_clustering_service.dart b/mobile/lib/services/machine_learning/face_ml/face_clustering/face_clustering_service.dart new file mode 100644 index 0000000000..1b8d9c3bd5 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_clustering/face_clustering_service.dart @@ -0,0 +1,1029 @@ +import "dart:async"; +import "dart:developer"; +import "dart:isolate"; +import "dart:math" show max; +import "dart:typed_data" show Uint8List; + +import "package:computer/computer.dart"; +import "package:flutter/foundation.dart" show kDebugMode; +import "package:logging/logging.dart"; +import "package:ml_linalg/dtype.dart"; +import "package:ml_linalg/vector.dart"; +import "package:photos/generated/protos/ente/common/vector.pb.dart"; +import 'package:photos/services/machine_learning/face_ml/face_clustering/cosine_distance.dart'; +import "package:photos/services/machine_learning/face_ml/face_clustering/face_info_for_clustering.dart"; +import "package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart"; +import "package:photos/services/machine_learning/face_ml/face_ml_result.dart"; +import "package:simple_cluster/simple_cluster.dart"; +import "package:synchronized/synchronized.dart"; + +class FaceInfo { + final String faceID; + final double? faceScore; + final double? blurValue; + final bool? badFace; + final List? embedding; + final Vector? 
vEmbedding; + int? clusterId; + String? closestFaceId; + int? closestDist; + int? fileCreationTime; + FaceInfo({ + required this.faceID, + this.faceScore, + this.blurValue, + this.badFace, + this.embedding, + this.vEmbedding, + this.clusterId, + this.fileCreationTime, + }); +} + +enum ClusterOperation { linearIncrementalClustering, dbscanClustering } + +class ClusteringResult { + final Map newFaceIdToCluster; + final Map>? newClusterIdToFaceIds; + final Map? newClusterSummaries; + + bool get isEmpty => newFaceIdToCluster.isEmpty; + + ClusteringResult({ + required this.newFaceIdToCluster, + this.newClusterSummaries, + this.newClusterIdToFaceIds, + }); +} + +class FaceClusteringService { + final _logger = Logger("FaceLinearClustering"); + final _computer = Computer.shared(); + + Timer? _inactivityTimer; + final Duration _inactivityDuration = const Duration(minutes: 3); + int _activeTasks = 0; + + final _initLock = Lock(); + + late Isolate _isolate; + late ReceivePort _receivePort = ReceivePort(); + late SendPort _mainSendPort; + + bool isSpawned = false; + bool isRunning = false; + + static const kRecommendedDistanceThreshold = 0.24; + static const kConservativeDistanceThreshold = 0.16; + + // singleton pattern + FaceClusteringService._privateConstructor(); + + /// Use this instance to access the FaceClustering service. + /// e.g. 
`FaceLinearClustering.instance.predict(dataset)` + static final instance = FaceClusteringService._privateConstructor(); + factory FaceClusteringService() => instance; + + Future init() async { + return _initLock.synchronized(() async { + if (isSpawned) return; + + _receivePort = ReceivePort(); + + try { + _isolate = await Isolate.spawn( + _isolateMain, + _receivePort.sendPort, + ); + _mainSendPort = await _receivePort.first as SendPort; + isSpawned = true; + + _resetInactivityTimer(); + } catch (e) { + _logger.severe('Could not spawn isolate', e); + isSpawned = false; + } + }); + } + + Future ensureSpawned() async { + if (!isSpawned) { + await init(); + } + } + + /// The main execution function of the isolate. + static void _isolateMain(SendPort mainSendPort) async { + final receivePort = ReceivePort(); + mainSendPort.send(receivePort.sendPort); + + receivePort.listen((message) async { + final functionIndex = message[0] as int; + final function = ClusterOperation.values[functionIndex]; + final args = message[1] as Map; + final sendPort = message[2] as SendPort; + + try { + switch (function) { + case ClusterOperation.linearIncrementalClustering: + final result = FaceClusteringService.runLinearClustering(args); + sendPort.send(result); + break; + case ClusterOperation.dbscanClustering: + final result = FaceClusteringService._runDbscanClustering(args); + sendPort.send(result); + break; + } + } catch (e, stackTrace) { + sendPort + .send({'error': e.toString(), 'stackTrace': stackTrace.toString()}); + } + }); + } + + /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result. 
+ Future _runInIsolate( + (ClusterOperation, Map) message, + ) async { + await ensureSpawned(); + _resetInactivityTimer(); + final completer = Completer(); + final answerPort = ReceivePort(); + + _activeTasks++; + _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]); + + answerPort.listen((receivedMessage) { + if (receivedMessage is Map && receivedMessage.containsKey('error')) { + // Handle the error + final errorMessage = receivedMessage['error']; + final errorStackTrace = receivedMessage['stackTrace']; + final exception = Exception(errorMessage); + final stackTrace = StackTrace.fromString(errorStackTrace); + _activeTasks--; + completer.completeError(exception, stackTrace); + } else { + _activeTasks--; + completer.complete(receivedMessage); + } + }); + + return completer.future; + } + + /// Resets a timer that kills the isolate after a certain amount of inactivity. + /// + /// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`) + void _resetInactivityTimer() { + _inactivityTimer?.cancel(); + _inactivityTimer = Timer(_inactivityDuration, () { + if (_activeTasks > 0) { + _logger.info('Tasks are still running. Delaying isolate disposal.'); + // Optionally, reschedule the timer to check again later. + _resetInactivityTimer(); + } else { + _logger.info( + 'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.', + ); + dispose(); + } + }); + } + + /// Disposes the isolate worker. + void dispose() { + if (!isSpawned) return; + + isSpawned = false; + _isolate.kill(); + _receivePort.close(); + _inactivityTimer?.cancel(); + } + + /// Runs the clustering algorithm [runLinearClustering] on the given [input], in an isolate. + /// + /// Returns the clustering result, which is a list of clusters, where each cluster is a list of indices of the dataset. 
+ /// + /// WARNING: Make sure to always input data in the same ordering, otherwise the clustering can less less deterministic. + Future predictLinear( + Set input, { + Map? fileIDToCreationTime, + double distanceThreshold = kRecommendedDistanceThreshold, + double conservativeDistanceThreshold = kConservativeDistanceThreshold, + bool useDynamicThreshold = true, + int? offset, + required Map oldClusterSummaries, + }) async { + if (input.isEmpty) { + _logger.warning( + "Clustering dataset of embeddings is empty, returning empty list.", + ); + return null; + } + if (isRunning) { + _logger.warning("Clustering is already running, returning empty list."); + return null; + } + + isRunning = true; + try { + // Clustering inside the isolate + _logger.info( + "Start clustering on ${input.length} embeddings inside computer isolate", + ); + final stopwatchClustering = Stopwatch()..start(); + // final Map faceIdToCluster = + // await _runLinearClusteringInComputer(input); + final ClusteringResult? faceIdToCluster = await _runInIsolate( + ( + ClusterOperation.linearIncrementalClustering, + { + 'input': input, + 'fileIDToCreationTime': fileIDToCreationTime, + 'distanceThreshold': distanceThreshold, + 'conservativeDistanceThreshold': conservativeDistanceThreshold, + 'useDynamicThreshold': useDynamicThreshold, + 'offset': offset, + 'oldClusterSummaries': oldClusterSummaries, + } + ), + ); + // return _runLinearClusteringInComputer(input); + _logger.info( + 'predictLinear Clustering executed in ${stopwatchClustering.elapsed.inSeconds} seconds', + ); + + isRunning = false; + return faceIdToCluster; + } catch (e, stackTrace) { + _logger.severe('Error while running clustering', e, stackTrace); + isRunning = false; + rethrow; + } + } + + /// Runs the clustering algorithm [runLinearClustering] on the given [input], in computer, without any dynamic thresholding + Future predictLinearComputer( + Map input, { + Map? 
fileIDToCreationTime, + double distanceThreshold = kRecommendedDistanceThreshold, + }) async { + if (input.isEmpty) { + _logger.warning( + "Linear Clustering dataset of embeddings is empty, returning empty list.", + ); + return null; + } + + // Clustering inside the isolate + _logger.info( + "Start Linear clustering on ${input.length} embeddings inside computer isolate", + ); + + try { + final clusteringInput = input + .map((key, value) { + return MapEntry( + key, + FaceInfoForClustering( + faceID: key, + embeddingBytes: value, + faceScore: kMinimumQualityFaceScore + 0.01, + blurValue: kLapacianDefault, + ), + ); + }) + .values + .toSet(); + final startTime = DateTime.now(); + final faceIdToCluster = await _computer.compute( + runLinearClustering, + param: { + "input": clusteringInput, + "fileIDToCreationTime": fileIDToCreationTime, + "distanceThreshold": distanceThreshold, + "conservativeDistanceThreshold": distanceThreshold - 0.08, + "useDynamicThreshold": false, + }, + taskName: "createImageEmbedding", + ) as ClusteringResult; + final endTime = DateTime.now(); + _logger.info( + "Linear Clustering took: ${endTime.difference(startTime).inMilliseconds}ms", + ); + return faceIdToCluster; + } catch (e, s) { + _logger.severe(e, s); + rethrow; + } + } + + /// Runs the clustering algorithm [runCompleteClustering] on the given [input], in computer. + /// + /// WARNING: Only use on small datasets, as it is not optimized for large datasets. + Future predictCompleteComputer( + Map input, { + Map? 
fileIDToCreationTime, + double distanceThreshold = kRecommendedDistanceThreshold, + double mergeThreshold = 0.30, + }) async { + if (input.isEmpty) { + _logger.warning( + "Complete Clustering dataset of embeddings is empty, returning empty list.", + ); + return ClusteringResult(newFaceIdToCluster: {}); + } + + // Clustering inside the isolate + _logger.info( + "Start Complete clustering on ${input.length} embeddings inside computer isolate", + ); + + try { + final startTime = DateTime.now(); + final clusteringResult = await _computer.compute( + runCompleteClustering, + param: { + "input": input, + "fileIDToCreationTime": fileIDToCreationTime, + "distanceThreshold": distanceThreshold, + "mergeThreshold": mergeThreshold, + }, + taskName: "createImageEmbedding", + ) as ClusteringResult; + final endTime = DateTime.now(); + _logger.info( + "Complete Clustering took: ${endTime.difference(startTime).inMilliseconds}ms", + ); + return clusteringResult; + } catch (e, s) { + _logger.severe(e, s); + rethrow; + } + } + + Future predictWithinClusterComputer( + Map input, { + Map? 
fileIDToCreationTime, + double distanceThreshold = kRecommendedDistanceThreshold, + }) async { + _logger.info( + '`predictWithinClusterComputer` called with ${input.length} faces and distance threshold $distanceThreshold', + ); + try { + if (input.length < 500) { + final mergeThreshold = distanceThreshold; + _logger.info( + 'Running complete clustering on ${input.length} faces with distance threshold $mergeThreshold', + ); + final result = await predictCompleteComputer( + input, + fileIDToCreationTime: fileIDToCreationTime, + distanceThreshold: distanceThreshold - 0.08, + mergeThreshold: mergeThreshold, + ); + if (result.newFaceIdToCluster.isEmpty) return null; + return result; + } else { + _logger.info( + 'Running linear clustering on ${input.length} faces with distance threshold $distanceThreshold', + ); + final clusterResult = await predictLinearComputer( + input, + fileIDToCreationTime: fileIDToCreationTime, + distanceThreshold: distanceThreshold, + ); + return clusterResult; + } + } catch (e, s) { + _logger.severe(e, s); + rethrow; + } + } + + Future>> predictDbscan( + Map input, { + Map? 
fileIDToCreationTime, + double eps = 0.3, + int minPts = 5, + }) async { + if (input.isEmpty) { + _logger.warning( + "DBSCAN Clustering dataset of embeddings is empty, returning empty list.", + ); + return []; + } + if (isRunning) { + _logger.warning( + "DBSCAN Clustering is already running, returning empty list.", + ); + return []; + } + + isRunning = true; + + // Clustering inside the isolate + _logger.info( + "Start DBSCAN clustering on ${input.length} embeddings inside computer isolate", + ); + final stopwatchClustering = Stopwatch()..start(); + // final Map faceIdToCluster = + // await _runLinearClusteringInComputer(input); + final List> clusterFaceIDs = await _runInIsolate( + ( + ClusterOperation.dbscanClustering, + { + 'input': input, + 'fileIDToCreationTime': fileIDToCreationTime, + 'eps': eps, + 'minPts': minPts, + } + ), + ); + // return _runLinearClusteringInComputer(input); + _logger.info( + 'DBSCAN Clustering executed in ${stopwatchClustering.elapsed.inSeconds} seconds', + ); + + isRunning = false; + + return clusterFaceIDs; + } + + static ClusteringResult? 
runLinearClustering(Map args) { + // final input = args['input'] as Map; + final input = args['input'] as Set; + final fileIDToCreationTime = args['fileIDToCreationTime'] as Map?; + final distanceThreshold = args['distanceThreshold'] as double; + final conservativeDistanceThreshold = + args['conservativeDistanceThreshold'] as double; + final useDynamicThreshold = args['useDynamicThreshold'] as bool; + final offset = args['offset'] as int?; + final oldClusterSummaries = + args['oldClusterSummaries'] as Map?; + + log( + "[ClusterIsolate] ${DateTime.now()} Copied to isolate ${input.length} faces", + ); + + // Organize everything into a list of FaceInfo objects + final List faceInfos = []; + for (final face in input) { + faceInfos.add( + FaceInfo( + faceID: face.faceID, + faceScore: face.faceScore, + blurValue: face.blurValue, + badFace: face.faceScore < kMinimumQualityFaceScore || + face.blurValue < kLaplacianSoftThreshold || + (face.blurValue < kLaplacianVerySoftThreshold && + face.faceScore < kMediumQualityFaceScore) || + face.isSideways, + vEmbedding: Vector.fromList( + EVector.fromBuffer(face.embeddingBytes).values, + dtype: DType.float32, + ), + clusterId: face.clusterId, + fileCreationTime: + fileIDToCreationTime?[getFileIdFromFaceId(face.faceID)], + ), + ); + } + + // Assert that the embeddings are normalized + for (final faceInfo in faceInfos) { + if (faceInfo.vEmbedding != null) { + final norm = faceInfo.vEmbedding!.norm(); + assert((norm - 1.0).abs() < 1e-5); + } + } + + // Sort the faceInfos based on fileCreationTime, in ascending order, so oldest faces are first + if (fileIDToCreationTime != null) { + faceInfos.sort((a, b) { + if (a.fileCreationTime == null && b.fileCreationTime == null) { + return 0; + } else if (a.fileCreationTime == null) { + return 1; + } else if (b.fileCreationTime == null) { + return -1; + } else { + return a.fileCreationTime!.compareTo(b.fileCreationTime!); + } + }); + } + + // Sort the faceInfos such that the ones with null 
clusterId are at the end + final List facesWithClusterID = []; + final List facesWithoutClusterID = []; + for (final FaceInfo faceInfo in faceInfos) { + if (faceInfo.clusterId == null) { + facesWithoutClusterID.add(faceInfo); + } else { + facesWithClusterID.add(faceInfo); + } + } + final alreadyClusteredCount = facesWithClusterID.length; + final sortedFaceInfos = []; + sortedFaceInfos.addAll(facesWithClusterID); + sortedFaceInfos.addAll(facesWithoutClusterID); + + log( + "[ClusterIsolate] ${DateTime.now()} Clustering ${facesWithoutClusterID.length} new faces without clusterId, and $alreadyClusteredCount faces with clusterId", + ); + + // Make sure the first face has a clusterId + final int totalFaces = sortedFaceInfos.length; + int dynamicThresholdCount = 0; + + if (sortedFaceInfos.isEmpty) { + return null; + } + + // Start actual clustering + log( + "[ClusterIsolate] ${DateTime.now()} Processing $totalFaces faces in total in this round ${offset != null ? "on top of ${offset + facesWithClusterID.length} earlier processed faces" : ""}", + ); + // set current epoch time as clusterID + int clusterID = DateTime.now().microsecondsSinceEpoch; + if (facesWithClusterID.isEmpty) { + // assign a clusterID to the first face + sortedFaceInfos[0].clusterId = clusterID; + clusterID++; + } + final stopwatchClustering = Stopwatch()..start(); + for (int i = 1; i < totalFaces; i++) { + // Incremental clustering, so we can skip faces that already have a clusterId + if (sortedFaceInfos[i].clusterId != null) { + clusterID = max(clusterID, sortedFaceInfos[i].clusterId!); + continue; + } + + int closestIdx = -1; + double closestDistance = double.infinity; + late double thresholdValue; + if (useDynamicThreshold) { + thresholdValue = sortedFaceInfos[i].badFace! + ? conservativeDistanceThreshold + : distanceThreshold; + if (sortedFaceInfos[i].badFace!) 
dynamicThresholdCount++; + } else { + thresholdValue = distanceThreshold; + } + if (i % 250 == 0) { + log("[ClusterIsolate] ${DateTime.now()} Processed ${offset != null ? i + offset : i} faces"); + } + for (int j = i - 1; j >= 0; j--) { + late double distance; + if (sortedFaceInfos[i].vEmbedding != null) { + distance = cosineDistanceSIMD( + sortedFaceInfos[i].vEmbedding!, + sortedFaceInfos[j].vEmbedding!, + ); + } else { + distance = cosineDistForNormVectors( + sortedFaceInfos[i].embedding!, + sortedFaceInfos[j].embedding!, + ); + } + if (distance < closestDistance) { + if (sortedFaceInfos[j].badFace! && + distance > conservativeDistanceThreshold) { + continue; + } + closestDistance = distance; + closestIdx = j; + } + } + + if (closestDistance < thresholdValue) { + if (sortedFaceInfos[closestIdx].clusterId == null) { + // Ideally this should never happen, but just in case log it + log( + " [ClusterIsolate] [WARNING] ${DateTime.now()} Found new cluster $clusterID", + ); + clusterID++; + sortedFaceInfos[closestIdx].clusterId = clusterID; + } + sortedFaceInfos[i].clusterId = sortedFaceInfos[closestIdx].clusterId; + } else { + clusterID++; + sortedFaceInfos[i].clusterId = clusterID; + } + } + + // Finally, assign the new clusterId to the faces + final Map newFaceIdToCluster = {}; + final newClusteredFaceInfos = + sortedFaceInfos.sublist(alreadyClusteredCount); + for (final faceInfo in newClusteredFaceInfos) { + newFaceIdToCluster[faceInfo.faceID] = faceInfo.clusterId!; + } + + // Create a map of clusterId to faceIds + final Map> clusterIdToFaceIds = {}; + for (final entry in newFaceIdToCluster.entries) { + final clusterID = entry.value; + if (clusterIdToFaceIds.containsKey(clusterID)) { + clusterIdToFaceIds[clusterID]!.add(entry.key); + } else { + clusterIdToFaceIds[clusterID] = [entry.key]; + } + } + + stopwatchClustering.stop(); + log( + ' [ClusterIsolate] ${DateTime.now()} Clustering for ${sortedFaceInfos.length} embeddings executed in 
${stopwatchClustering.elapsedMilliseconds}ms', + ); + if (useDynamicThreshold) { + log( + "[ClusterIsolate] ${DateTime.now()} Dynamic thresholding: $dynamicThresholdCount faces had a low face score or low blur clarity", + ); + } + + // Now calculate the mean of the embeddings for each cluster and update the cluster summaries + Map? newClusterSummaries; + if (oldClusterSummaries != null) { + newClusterSummaries = FaceClusteringService.updateClusterSummaries( + oldSummary: oldClusterSummaries, + newFaceInfos: newClusteredFaceInfos, + ); + } + + // analyze the results + // FaceClusteringService._analyzeClusterResults(sortedFaceInfos); + + return ClusteringResult( + newFaceIdToCluster: newFaceIdToCluster, + newClusterSummaries: newClusterSummaries, + newClusterIdToFaceIds: clusterIdToFaceIds, + ); + } + + static Map updateClusterSummaries({ + required Map oldSummary, + required List newFaceInfos, + }) { + final calcSummariesStart = DateTime.now(); + final Map> newClusterIdToFaceInfos = {}; + for (final faceInfo in newFaceInfos) { + if (newClusterIdToFaceInfos.containsKey(faceInfo.clusterId!)) { + newClusterIdToFaceInfos[faceInfo.clusterId!]!.add(faceInfo); + } else { + newClusterIdToFaceInfos[faceInfo.clusterId!] = [faceInfo]; + } + } + + final Map newClusterSummaries = {}; + for (final clusterId in newClusterIdToFaceInfos.keys) { + final List newEmbeddings = newClusterIdToFaceInfos[clusterId]! + .map((faceInfo) => faceInfo.vEmbedding!) 
+ .toList(); + final newCount = newEmbeddings.length; + if (oldSummary.containsKey(clusterId)) { + final oldMean = Vector.fromList( + EVector.fromBuffer(oldSummary[clusterId]!.$1).values, + dtype: DType.float32, + ); + final oldCount = oldSummary[clusterId]!.$2; + final oldEmbeddings = oldMean * oldCount; + newEmbeddings.add(oldEmbeddings); + final newMeanVector = + newEmbeddings.reduce((a, b) => a + b) / (oldCount + newCount); + final newMeanVectorNormalized = newMeanVector / newMeanVector.norm(); + newClusterSummaries[clusterId] = ( + EVector(values: newMeanVectorNormalized.toList()).writeToBuffer(), + oldCount + newCount + ); + } else { + final newMeanVector = newEmbeddings.reduce((a, b) => a + b); + final newMeanVectorNormalized = newMeanVector / newMeanVector.norm(); + newClusterSummaries[clusterId] = ( + EVector(values: newMeanVectorNormalized.toList()).writeToBuffer(), + newCount + ); + } + } + log( + "[ClusterIsolate] ${DateTime.now()} Calculated cluster summaries in ${DateTime.now().difference(calcSummariesStart).inMilliseconds}ms", + ); + + return newClusterSummaries; + } + + static void _analyzeClusterResults(List sortedFaceInfos) { + if (!kDebugMode) return; + final stopwatch = Stopwatch()..start(); + + final Map faceIdToCluster = {}; + for (final faceInfo in sortedFaceInfos) { + faceIdToCluster[faceInfo.faceID] = faceInfo.clusterId!; + } + + // Find faceIDs that are part of a cluster which is larger than 5 and are new faceIDs + final Map clusterIdToSize = {}; + faceIdToCluster.forEach((key, value) { + if (clusterIdToSize.containsKey(value)) { + clusterIdToSize[value] = clusterIdToSize[value]! 
+ 1; + } else { + clusterIdToSize[value] = 1; + } + }); + + // print top 10 cluster ids and their sizes based on the internal cluster id + final clusterIds = faceIdToCluster.values.toSet(); + final clusterSizes = clusterIds.map((clusterId) { + return faceIdToCluster.values.where((id) => id == clusterId).length; + }).toList(); + clusterSizes.sort(); + // find clusters whose size is greater than 1 + int oneClusterCount = 0; + int moreThan5Count = 0; + int moreThan10Count = 0; + int moreThan20Count = 0; + int moreThan50Count = 0; + int moreThan100Count = 0; + + for (int i = 0; i < clusterSizes.length; i++) { + if (clusterSizes[i] > 100) { + moreThan100Count++; + } else if (clusterSizes[i] > 50) { + moreThan50Count++; + } else if (clusterSizes[i] > 20) { + moreThan20Count++; + } else if (clusterSizes[i] > 10) { + moreThan10Count++; + } else if (clusterSizes[i] > 5) { + moreThan5Count++; + } else if (clusterSizes[i] == 1) { + oneClusterCount++; + } + } + + // print the metrics + log( + "[ClusterIsolate] Total clusters ${clusterIds.length}: \n oneClusterCount $oneClusterCount \n moreThan5Count $moreThan5Count \n moreThan10Count $moreThan10Count \n moreThan20Count $moreThan20Count \n moreThan50Count $moreThan50Count \n moreThan100Count $moreThan100Count", + ); + stopwatch.stop(); + log( + "[ClusterIsolate] Clustering additional analysis took ${stopwatch.elapsedMilliseconds} ms", + ); + } + + static ClusteringResult runCompleteClustering(Map args) { + final input = args['input'] as Map; + final fileIDToCreationTime = args['fileIDToCreationTime'] as Map?; + final distanceThreshold = args['distanceThreshold'] as double; + final mergeThreshold = args['mergeThreshold'] as double; + + log( + "[CompleteClustering] ${DateTime.now()} Copied to isolate ${input.length} faces for clustering", + ); + + // Organize everything into a list of FaceInfo objects + final List faceInfos = []; + for (final entry in input.entries) { + faceInfos.add( + FaceInfo( + faceID: entry.key, + 
vEmbedding: Vector.fromList( + EVector.fromBuffer(entry.value).values, + dtype: DType.float32, + ), + fileCreationTime: + fileIDToCreationTime?[getFileIdFromFaceId(entry.key)], + ), + ); + } + + // Sort the faceInfos based on fileCreationTime, in ascending order, so oldest faces are first + if (fileIDToCreationTime != null) { + faceInfos.sort((a, b) { + if (a.fileCreationTime == null && b.fileCreationTime == null) { + return 0; + } else if (a.fileCreationTime == null) { + return 1; + } else if (b.fileCreationTime == null) { + return -1; + } else { + return a.fileCreationTime!.compareTo(b.fileCreationTime!); + } + }); + } + + if (faceInfos.isEmpty) { + ClusteringResult(newFaceIdToCluster: {}); + } + final int totalFaces = faceInfos.length; + + // Start actual clustering + log( + "[CompleteClustering] ${DateTime.now()} Processing $totalFaces faces in one single round of complete clustering", + ); + + // set current epoch time as clusterID + int clusterID = DateTime.now().microsecondsSinceEpoch; + + // Start actual clustering + final Map newFaceIdToCluster = {}; + final stopwatchClustering = Stopwatch()..start(); + for (int i = 0; i < totalFaces; i++) { + if ((i + 1) % 250 == 0) { + log("[CompleteClustering] ${DateTime.now()} Processed ${i + 1} faces"); + } + if (faceInfos[i].clusterId != null) continue; + int closestIdx = -1; + double closestDistance = double.infinity; + for (int j = 0; j < totalFaces; j++) { + if (i == j) continue; + final double distance = cosineDistanceSIMD( + faceInfos[i].vEmbedding!, + faceInfos[j].vEmbedding!, + ); + if (distance < closestDistance) { + closestDistance = distance; + closestIdx = j; + } + } + + if (closestDistance < distanceThreshold) { + if (faceInfos[closestIdx].clusterId == null) { + clusterID++; + faceInfos[closestIdx].clusterId = clusterID; + } + faceInfos[i].clusterId = faceInfos[closestIdx].clusterId!; + } else { + clusterID++; + faceInfos[i].clusterId = clusterID; + } + } + + // Now calculate the mean of the embeddings 
for each cluster + final Map> clusterIdToFaceInfos = {}; + for (final faceInfo in faceInfos) { + if (clusterIdToFaceInfos.containsKey(faceInfo.clusterId)) { + clusterIdToFaceInfos[faceInfo.clusterId]!.add(faceInfo); + } else { + clusterIdToFaceInfos[faceInfo.clusterId!] = [faceInfo]; + } + } + final Map clusterIdToMeanEmbeddingAndWeight = {}; + for (final clusterId in clusterIdToFaceInfos.keys) { + final List embeddings = clusterIdToFaceInfos[clusterId]! + .map((faceInfo) => faceInfo.vEmbedding!) + .toList(); + final count = clusterIdToFaceInfos[clusterId]!.length; + final Vector meanEmbedding = embeddings.reduce((a, b) => a + b) / count; + final Vector meanEmbeddingNormalized = + meanEmbedding / meanEmbedding.norm(); + clusterIdToMeanEmbeddingAndWeight[clusterId] = + (meanEmbeddingNormalized, count); + } + + // Now merge the clusters that are close to each other, based on mean embedding + final List<(int, int)> mergedClustersList = []; + final List clusterIds = + clusterIdToMeanEmbeddingAndWeight.keys.toList(); + log(' [CompleteClustering] ${DateTime.now()} ${clusterIds.length} clusters found, now checking for merges'); + while (true) { + if (clusterIds.length < 2) break; + double distance = double.infinity; + (int, int) clusterIDsToMerge = (-1, -1); + for (int i = 0; i < clusterIds.length; i++) { + for (int j = 0; j < clusterIds.length; j++) { + if (i == j) continue; + final double newDistance = cosineDistanceSIMD( + clusterIdToMeanEmbeddingAndWeight[clusterIds[i]]!.$1, + clusterIdToMeanEmbeddingAndWeight[clusterIds[j]]!.$1, + ); + if (newDistance < distance) { + distance = newDistance; + clusterIDsToMerge = (clusterIds[i], clusterIds[j]); + } + } + } + if (distance < mergeThreshold) { + mergedClustersList.add(clusterIDsToMerge); + final clusterID1 = clusterIDsToMerge.$1; + final clusterID2 = clusterIDsToMerge.$2; + final mean1 = clusterIdToMeanEmbeddingAndWeight[clusterID1]!.$1; + final mean2 = clusterIdToMeanEmbeddingAndWeight[clusterID2]!.$1; + final count1 = 
clusterIdToMeanEmbeddingAndWeight[clusterID1]!.$2; + final count2 = clusterIdToMeanEmbeddingAndWeight[clusterID2]!.$2; + final weight1 = count1 / (count1 + count2); + final weight2 = count2 / (count1 + count2); + final weightedMean = mean1 * weight1 + mean2 * weight2; + final weightedMeanNormalized = weightedMean / weightedMean.norm(); + clusterIdToMeanEmbeddingAndWeight[clusterID1] = ( + weightedMeanNormalized, + count1 + count2, + ); + clusterIdToMeanEmbeddingAndWeight.remove(clusterID2); + clusterIds.remove(clusterID2); + } else { + break; + } + } + log(' [CompleteClustering] ${DateTime.now()} ${mergedClustersList.length} clusters merged'); + + // Now assign the new clusterId to the faces + for (final faceInfo in faceInfos) { + for (final mergedClusters in mergedClustersList) { + if (faceInfo.clusterId == mergedClusters.$2) { + faceInfo.clusterId = mergedClusters.$1; + } + } + } + + // Finally, assign the new clusterId to the faces + for (final faceInfo in faceInfos) { + newFaceIdToCluster[faceInfo.faceID] = faceInfo.clusterId!; + } + + final Map> clusterIdToFaceIds = {}; + for (final entry in newFaceIdToCluster.entries) { + final clusterID = entry.value; + if (clusterIdToFaceIds.containsKey(clusterID)) { + clusterIdToFaceIds[clusterID]!.add(entry.key); + } else { + clusterIdToFaceIds[clusterID] = [entry.key]; + } + } + + final newClusterSummaries = FaceClusteringService.updateClusterSummaries( + oldSummary: {}, + newFaceInfos: faceInfos, + ); + + stopwatchClustering.stop(); + log( + ' [CompleteClustering] ${DateTime.now()} Clustering for ${faceInfos.length} embeddings executed in ${stopwatchClustering.elapsedMilliseconds}ms', + ); + + return ClusteringResult( + newFaceIdToCluster: newFaceIdToCluster, + newClusterSummaries: newClusterSummaries, + newClusterIdToFaceIds: clusterIdToFaceIds, + ); + } + + static List> _runDbscanClustering(Map args) { + final input = args['input'] as Map; + final fileIDToCreationTime = args['fileIDToCreationTime'] as Map?; + final 
eps = args['eps'] as double; + final minPts = args['minPts'] as int; + + log( + "[ClusterIsolate] ${DateTime.now()} Copied to isolate ${input.length} faces", + ); + + final DBSCAN dbscan = DBSCAN( + epsilon: eps, + minPoints: minPts, + distanceMeasure: cosineDistForNormVectors, + ); + + // Organize everything into a list of FaceInfo objects + final List faceInfos = []; + for (final entry in input.entries) { + faceInfos.add( + FaceInfo( + faceID: entry.key, + embedding: EVector.fromBuffer(entry.value).values, + fileCreationTime: + fileIDToCreationTime?[getFileIdFromFaceId(entry.key)], + ), + ); + } + + // Sort the faceInfos based on fileCreationTime, in ascending order, so oldest faces are first + if (fileIDToCreationTime != null) { + faceInfos.sort((a, b) { + if (a.fileCreationTime == null && b.fileCreationTime == null) { + return 0; + } else if (a.fileCreationTime == null) { + return 1; + } else if (b.fileCreationTime == null) { + return -1; + } else { + return a.fileCreationTime!.compareTo(b.fileCreationTime!); + } + }); + } + + // Get the embeddings + final List> embeddings = + faceInfos.map((faceInfo) => faceInfo.embedding!).toList(); + + // Run the DBSCAN clustering + final List> clusterOutput = dbscan.run(embeddings); + // final List> clusteredFaceInfos = clusterOutput + // .map((cluster) => cluster.map((idx) => faceInfos[idx]).toList()) + // .toList(); + final List> clusteredFaceIDs = clusterOutput + .map((cluster) => cluster.map((idx) => faceInfos[idx].faceID).toList()) + .toList(); + + return clusteredFaceIDs; + } +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_clustering/face_info_for_clustering.dart b/mobile/lib/services/machine_learning/face_ml/face_clustering/face_info_for_clustering.dart new file mode 100644 index 0000000000..b2f5c2e9e7 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_clustering/face_info_for_clustering.dart @@ -0,0 +1,25 @@ +import "dart:typed_data" show Uint8List; + +class FaceInfoForClustering 
{ + final String faceID; + int? clusterId; + final Uint8List embeddingBytes; + final double faceScore; + final double blurValue; + final bool isSideways; + int? _fileID; + + int get fileID { + _fileID ??= int.parse(faceID.split('_').first); + return _fileID!; + } + + FaceInfoForClustering({ + required this.faceID, + this.clusterId, + required this.embeddingBytes, + required this.faceScore, + required this.blurValue, + this.isSideways = false, + }); +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_detection/detection.dart b/mobile/lib/services/machine_learning/face_ml/face_detection/detection.dart new file mode 100644 index 0000000000..de8535c871 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_detection/detection.dart @@ -0,0 +1,516 @@ +import 'dart:math' show max, min, pow, sqrt; + +import "package:photos/face/model/dimension.dart"; + +enum FaceDirection { left, right, straight } + +extension FaceDirectionExtension on FaceDirection { + String toDirectionString() { + switch (this) { + case FaceDirection.left: + return 'Left'; + case FaceDirection.right: + return 'Right'; + case FaceDirection.straight: + return 'Straight'; + default: + throw Exception('Unknown FaceDirection'); + } + } +} + +abstract class Detection { + final double score; + + Detection({required this.score}); + + const Detection.empty() : score = 0; + + get width; + get height; + + @override + String toString(); +} + +@Deprecated('Old method only used in other deprecated methods') +extension BBoxExtension on List { + void roundBoxToDouble() { + final widthRounded = (this[2] - this[0]).roundToDouble(); + final heightRounded = (this[3] - this[1]).roundToDouble(); + this[0] = this[0].roundToDouble(); + this[1] = this[1].roundToDouble(); + this[2] = this[0] + widthRounded; + this[3] = this[1] + heightRounded; + } + + // double get xMinBox => + // isNotEmpty ? this[0] : throw IndexError.withLength(0, length); + // double get yMinBox => + // length >= 2 ? 
this[1] : throw IndexError.withLength(1, length); + // double get xMaxBox => + // length >= 3 ? this[2] : throw IndexError.withLength(2, length); + // double get yMaxBox => + // length >= 4 ? this[3] : throw IndexError.withLength(3, length); +} + +/// This class represents a face detection with relative coordinates in the range [0, 1]. +/// The coordinates are relative to the image size. The pattern for the coordinates is always [x, y], where x is the horizontal coordinate and y is the vertical coordinate. +/// +/// The [score] attribute is a double representing the confidence of the face detection. +/// +/// The [box] attribute is a list of 4 doubles, representing the coordinates of the bounding box of the face detection. +/// The four values of the box in order are: [xMinBox, yMinBox, xMaxBox, yMaxBox]. +/// +/// The [allKeypoints] attribute is a list of 6 lists of 2 doubles, representing the coordinates of the keypoints of the face detection. +/// The six lists of two values in order are: [leftEye, rightEye, nose, mouth, leftEar, rightEar]. Again, all in [x, y] order. 
+class FaceDetectionRelative extends Detection { + final List box; + final List> allKeypoints; + + double get xMinBox => box[0]; + double get yMinBox => box[1]; + double get xMaxBox => box[2]; + double get yMaxBox => box[3]; + + List get leftEye => allKeypoints[0]; + List get rightEye => allKeypoints[1]; + List get nose => allKeypoints[2]; + List get leftMouth => allKeypoints[3]; + List get rightMouth => allKeypoints[4]; + + FaceDetectionRelative({ + required double score, + required List box, + required List> allKeypoints, + }) : assert( + box.every((e) => e >= -0.1 && e <= 1.1), + "Bounding box values must be in the range [0, 1], with only a small margin of error allowed.", + ), + assert( + allKeypoints + .every((sublist) => sublist.every((e) => e >= -0.1 && e <= 1.1)), + "All keypoints must be in the range [0, 1], with only a small margin of error allowed.", + ), + box = List.from(box.map((e) => e.clamp(0.0, 1.0))), + allKeypoints = allKeypoints + .map( + (sublist) => + List.from(sublist.map((e) => e.clamp(0.0, 1.0))), + ) + .toList(), + super(score: score); + + factory FaceDetectionRelative.zero() { + return FaceDetectionRelative( + score: 0, + box: [0, 0, 0, 0], + allKeypoints: >[ + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + ], + ); + } + + /// This is used to initialize the FaceDetectionRelative object with default values. + /// This constructor is useful because it can be used to initialize a FaceDetectionRelative object as a constant. + /// Contrary to the `FaceDetectionRelative.zero()` constructor, this one gives immutable attributes [box] and [allKeypoints]. 
+ FaceDetectionRelative.defaultInitialization() + : box = const [0, 0, 0, 0], + allKeypoints = const >[ + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + ], + super.empty(); + + FaceDetectionRelative getNearestDetection( + List detections, + ) { + if (detections.isEmpty) { + throw ArgumentError("The detection list cannot be empty."); + } + + var nearestDetection = detections[0]; + var minDistance = double.infinity; + + // Calculate the center of the current instance + final centerX1 = (xMinBox + xMaxBox) / 2; + final centerY1 = (yMinBox + yMaxBox) / 2; + + for (var detection in detections) { + final centerX2 = (detection.xMinBox + detection.xMaxBox) / 2; + final centerY2 = (detection.yMinBox + detection.yMaxBox) / 2; + final distance = + sqrt(pow(centerX2 - centerX1, 2) + pow(centerY2 - centerY1, 2)); + if (distance < minDistance) { + minDistance = distance; + nearestDetection = detection; + } + } + return nearestDetection; + } + + void transformRelativeToOriginalImage( + List fromBox, // [xMin, yMin, xMax, yMax] + List toBox, // [xMin, yMin, xMax, yMax] + ) { + // Return if all elements of fromBox and toBox are equal + for (int i = 0; i < fromBox.length; i++) { + if (fromBox[i] != toBox[i]) { + break; + } + if (i == fromBox.length - 1) { + return; + } + } + + // Account for padding + final double paddingXRatio = + (fromBox[0] - toBox[0]) / (toBox[2] - toBox[0]); + final double paddingYRatio = + (fromBox[1] - toBox[1]) / (toBox[3] - toBox[1]); + + // Calculate the scaling and translation + final double scaleX = (fromBox[2] - fromBox[0]) / (1 - 2 * paddingXRatio); + final double scaleY = (fromBox[3] - fromBox[1]) / (1 - 2 * paddingYRatio); + final double translateX = fromBox[0] - paddingXRatio * scaleX; + final double translateY = fromBox[1] - paddingYRatio * scaleY; + + // Transform Box + _transformBox(box, scaleX, scaleY, translateX, translateY); + + // Transform All Keypoints + for (int i = 0; i < allKeypoints.length; i++) { + allKeypoints[i] = _transformPoint( 
+ allKeypoints[i], + scaleX, + scaleY, + translateX, + translateY, + ); + } + } + + void correctForMaintainedAspectRatio( + Dimensions originalSize, + Dimensions newSize, + ) { + // Return if both are the same size, meaning no scaling was done on both width and height + if (originalSize == newSize) { + return; + } + + // Calculate the scaling + final double scaleX = originalSize.width / newSize.width; + final double scaleY = originalSize.height / newSize.height; + const double translateX = 0; + const double translateY = 0; + + // Transform Box + _transformBox(box, scaleX, scaleY, translateX, translateY); + + // Transform All Keypoints + for (int i = 0; i < allKeypoints.length; i++) { + allKeypoints[i] = _transformPoint( + allKeypoints[i], + scaleX, + scaleY, + translateX, + translateY, + ); + } + } + + void _transformBox( + List box, + double scaleX, + double scaleY, + double translateX, + double translateY, + ) { + box[0] = (box[0] * scaleX + translateX).clamp(0.0, 1.0); + box[1] = (box[1] * scaleY + translateY).clamp(0.0, 1.0); + box[2] = (box[2] * scaleX + translateX).clamp(0.0, 1.0); + box[3] = (box[3] * scaleY + translateY).clamp(0.0, 1.0); + } + + List _transformPoint( + List point, + double scaleX, + double scaleY, + double translateX, + double translateY, + ) { + return [ + (point[0] * scaleX + translateX).clamp(0.0, 1.0), + (point[1] * scaleY + translateY).clamp(0.0, 1.0), + ]; + } + + FaceDetectionAbsolute toAbsolute({ + required int imageWidth, + required int imageHeight, + }) { + final scoreCopy = score; + final boxCopy = List.from(box, growable: false); + final allKeypointsCopy = allKeypoints + .map((sublist) => List.from(sublist, growable: false)) + .toList(); + + boxCopy[0] *= imageWidth; + boxCopy[1] *= imageHeight; + boxCopy[2] *= imageWidth; + boxCopy[3] *= imageHeight; + // final intbox = boxCopy.map((e) => e.toInt()).toList(); + + for (List keypoint in allKeypointsCopy) { + keypoint[0] *= imageWidth; + keypoint[1] *= imageHeight; + } + // final 
intKeypoints = + // allKeypointsCopy.map((e) => e.map((e) => e.toInt()).toList()).toList(); + return FaceDetectionAbsolute( + score: scoreCopy, + box: boxCopy, + allKeypoints: allKeypointsCopy, + ); + } + + String toFaceID({required int fileID}) { + // Assert that the values are within the expected range + assert( + (xMinBox >= 0 && xMinBox <= 1) && + (yMinBox >= 0 && yMinBox <= 1) && + (xMaxBox >= 0 && xMaxBox <= 1) && + (yMaxBox >= 0 && yMaxBox <= 1), + "Bounding box values must be in the range [0, 1]", + ); + + // Extract bounding box values + final String xMin = + xMinBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2); + final String yMin = + yMinBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2); + final String xMax = + xMaxBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2); + final String yMax = + yMaxBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2); + + // Convert the bounding box values to string and concatenate + final String rawID = "${xMin}_${yMin}_${xMax}_$yMax"; + + final faceID = fileID.toString() + '_' + rawID.toString(); + + // Return the hexadecimal representation of the hash + return faceID; + } + + /// This method is used to generate a faceID for a face detection that was manually added by the user. + static String toFaceIDEmpty({required int fileID}) { + return fileID.toString() + '_0'; + } + + /// This method is used to check if a faceID corresponds to a manually added face detection and not an actual face detection. 
+ static bool isFaceIDEmpty(String faceID) { + return faceID.split('_')[1] == '0'; + } + + @override + String toString() { + return 'FaceDetectionRelative( with relative coordinates: \n score: $score \n Box: xMinBox: $xMinBox, yMinBox: $yMinBox, xMaxBox: $xMaxBox, yMaxBox: $yMaxBox, \n Keypoints: leftEye: $leftEye, rightEye: $rightEye, nose: $nose, leftMouth: $leftMouth, rightMouth: $rightMouth \n )'; + } + + Map toJson() { + return { + 'score': score, + 'box': box, + 'allKeypoints': allKeypoints, + }; + } + + factory FaceDetectionRelative.fromJson(Map json) { + return FaceDetectionRelative( + score: (json['score'] as num).toDouble(), + box: List.from(json['box']), + allKeypoints: (json['allKeypoints'] as List) + .map((item) => List.from(item)) + .toList(), + ); + } + + @override + + /// The width of the bounding box of the face detection, in relative range [0, 1]. + double get width => xMaxBox - xMinBox; + @override + + /// The height of the bounding box of the face detection, in relative range [0, 1]. + double get height => yMaxBox - yMinBox; +} + +/// This class represents a face detection with absolute coordinates in pixels, in the range [0, imageWidth] for the horizontal coordinates and [0, imageHeight] for the vertical coordinates. +/// The pattern for the coordinates is always [x, y], where x is the horizontal coordinate and y is the vertical coordinate. +/// +/// The [score] attribute is a double representing the confidence of the face detection. +/// +/// The [box] attribute is a list of 4 integers, representing the coordinates of the bounding box of the face detection. +/// The four values of the box in order are: [xMinBox, yMinBox, xMaxBox, yMaxBox]. +/// +/// The [allKeypoints] attribute is a list of 6 lists of 2 integers, representing the coordinates of the keypoints of the face detection. +/// The six lists of two values in order are: [leftEye, rightEye, nose, mouth, leftEar, rightEar]. Again, all in [x, y] order. 
+class FaceDetectionAbsolute extends Detection { + final List box; + final List> allKeypoints; + + double get xMinBox => box[0]; + double get yMinBox => box[1]; + double get xMaxBox => box[2]; + double get yMaxBox => box[3]; + + List get leftEye => allKeypoints[0]; + List get rightEye => allKeypoints[1]; + List get nose => allKeypoints[2]; + List get leftMouth => allKeypoints[3]; + List get rightMouth => allKeypoints[4]; + + FaceDetectionAbsolute({ + required double score, + required this.box, + required this.allKeypoints, + }) : super(score: score); + + factory FaceDetectionAbsolute._zero() { + return FaceDetectionAbsolute( + score: 0, + box: [0, 0, 0, 0], + allKeypoints: >[ + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + ], + ); + } + + FaceDetectionAbsolute.defaultInitialization() + : box = const [0, 0, 0, 0], + allKeypoints = const >[ + [0, 0], + [0, 0], + [0, 0], + [0, 0], + [0, 0], + ], + super.empty(); + + @override + String toString() { + return 'FaceDetectionAbsolute( with absolute coordinates: \n score: $score \n Box: xMinBox: $xMinBox, yMinBox: $yMinBox, xMaxBox: $xMaxBox, yMaxBox: $yMaxBox, \n Keypoints: leftEye: $leftEye, rightEye: $rightEye, nose: $nose, leftMouth: $leftMouth, rightMouth: $rightMouth \n )'; + } + + Map toJson() { + return { + 'score': score, + 'box': box, + 'allKeypoints': allKeypoints, + }; + } + + factory FaceDetectionAbsolute.fromJson(Map json) { + return FaceDetectionAbsolute( + score: (json['score'] as num).toDouble(), + box: List.from(json['box']), + allKeypoints: (json['allKeypoints'] as List) + .map((item) => List.from(item)) + .toList(), + ); + } + + static FaceDetectionAbsolute empty = FaceDetectionAbsolute._zero(); + + @override + + /// The width of the bounding box of the face detection, in number of pixels, range [0, imageWidth]. + double get width => xMaxBox - xMinBox; + @override + + /// The height of the bounding box of the face detection, in number of pixels, range [0, imageHeight]. 
+ double get height => yMaxBox - yMinBox; + + FaceDirection getFaceDirection() { + final double eyeDistanceX = (rightEye[0] - leftEye[0]).abs(); + final double eyeDistanceY = (rightEye[1] - leftEye[1]).abs(); + final double mouthDistanceY = (rightMouth[1] - leftMouth[1]).abs(); + + final bool faceIsUpright = + (max(leftEye[1], rightEye[1]) + 0.5 * eyeDistanceY < nose[1]) && + (nose[1] + 0.5 * mouthDistanceY < min(leftMouth[1], rightMouth[1])); + + final bool noseStickingOutLeft = (nose[0] < min(leftEye[0], rightEye[0])) && + (nose[0] < min(leftMouth[0], rightMouth[0])); + final bool noseStickingOutRight = + (nose[0] > max(leftEye[0], rightEye[0])) && + (nose[0] > max(leftMouth[0], rightMouth[0])); + + final bool noseCloseToLeftEye = + (nose[0] - leftEye[0]).abs() < 0.2 * eyeDistanceX; + final bool noseCloseToRightEye = + (nose[0] - rightEye[0]).abs() < 0.2 * eyeDistanceX; + + // if (faceIsUpright && (noseStickingOutLeft || noseCloseToLeftEye)) { + if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) { + return FaceDirection.left; + // } else if (faceIsUpright && (noseStickingOutRight || noseCloseToRightEye)) { + } else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) { + return FaceDirection.right; + } + + return FaceDirection.straight; + } +} + +List relativeToAbsoluteDetections({ + required List relativeDetections, + required int imageWidth, + required int imageHeight, +}) { + final numberOfDetections = relativeDetections.length; + final absoluteDetections = List.filled( + numberOfDetections, + FaceDetectionAbsolute._zero(), + ); + for (var i = 0; i < relativeDetections.length; i++) { + final relativeDetection = relativeDetections[i]; + final absoluteDetection = relativeDetection.toAbsolute( + imageWidth: imageWidth, + imageHeight: imageHeight, + ); + + absoluteDetections[i] = absoluteDetection; + } + + return absoluteDetections; +} + +/// Returns an enlarged version of the [box] by a factor of [factor]. 
+List getEnlargedRelativeBox(List box, [double factor = 2]) { + final boxCopy = List.from(box, growable: false); + // The four values of the box in order are: [xMinBox, yMinBox, xMaxBox, yMaxBox]. + + final width = boxCopy[2] - boxCopy[0]; + final height = boxCopy[3] - boxCopy[1]; + + boxCopy[0] -= width * (factor - 1) / 2; + boxCopy[1] -= height * (factor - 1) / 2; + boxCopy[2] += width * (factor - 1) / 2; + boxCopy[3] += height * (factor - 1) / 2; + + return boxCopy; +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart b/mobile/lib/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart new file mode 100644 index 0000000000..ed2f977913 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart @@ -0,0 +1,3 @@ +class YOLOFaceInterpreterInitializationException implements Exception {} + +class YOLOFaceInterpreterRunException implements Exception {} diff --git a/mobile/lib/services/machine_learning/face_ml/face_detection/face_detection_service.dart b/mobile/lib/services/machine_learning/face_ml/face_detection/face_detection_service.dart new file mode 100644 index 0000000000..443df50f2e --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_detection/face_detection_service.dart @@ -0,0 +1,788 @@ +import "dart:async"; +import "dart:developer" as dev show log; +import "dart:io" show File; +import "dart:isolate"; +import 'dart:typed_data' show ByteData, Float32List, Uint8List; +import 'dart:ui' as ui show Image; + +import "package:computer/computer.dart"; +import 'package:flutter/material.dart'; +import 'package:logging/logging.dart'; +import 'package:onnxruntime/onnxruntime.dart'; +import "package:photos/face/model/dimension.dart"; +import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart'; 
+import 'package:photos/services/machine_learning/face_ml/face_detection/naive_non_max_suppression.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/yolo_filter_extract_detections.dart'; +import "package:photos/services/remote_assets_service.dart"; +import "package:photos/utils/image_ml_isolate.dart"; +import "package:photos/utils/image_ml_util.dart"; +import "package:synchronized/synchronized.dart"; + +enum FaceDetectionOperation { yoloInferenceAndPostProcessing } + +/// This class is responsible for running the face detection model (YOLOv5Face) on ONNX runtime, and can be accessed through the singleton instance [FaceDetectionService.instance]. +class FaceDetectionService { + static final _logger = Logger('YOLOFaceDetectionService'); + + final _computer = Computer.shared(); + + int sessionAddress = 0; + + static const String kModelBucketEndpoint = "https://models.ente.io/"; + static const String kRemoteBucketModelPath = + "yolov5s_face_640_640_dynamic.onnx"; + // static const kRemoteBucketModelPath = "yolov5n_face_640_640.onnx"; + static const String modelRemotePath = + kModelBucketEndpoint + kRemoteBucketModelPath; + + static const int kInputWidth = 640; + static const int kInputHeight = 640; + static const double kIouThreshold = 0.4; + static const double kMinScoreSigmoidThreshold = 0.7; + static const int kNumKeypoints = 5; + + bool isInitialized = false; + + // Isolate things + Timer? _inactivityTimer; + final Duration _inactivityDuration = const Duration(seconds: 30); + + final _initLock = Lock(); + final _computerLock = Lock(); + + late Isolate _isolate; + late ReceivePort _receivePort = ReceivePort(); + late SendPort _mainSendPort; + + bool isSpawned = false; + bool isRunning = false; + + // singleton pattern + FaceDetectionService._privateConstructor(); + + /// Use this instance to access the FaceDetection service. Make sure to call `init()` before using it. + /// e.g. 
`await FaceDetection.instance.init();` + /// + /// Then you can use `predict()` to get the bounding boxes of the faces, so `FaceDetection.instance.predict(imageData)` + /// + /// config options: yoloV5FaceN // + static final instance = FaceDetectionService._privateConstructor(); + + factory FaceDetectionService() => instance; + + /// Check if the interpreter is initialized, if not initialize it with `loadModel()` + Future init() async { + if (!isInitialized) { + _logger.info('init is called'); + final model = + await RemoteAssetsService.instance.getAsset(modelRemotePath); + final startTime = DateTime.now(); + // Doing this from main isolate since `rootBundle` cannot be accessed outside it + sessionAddress = await _computer.compute( + _loadModel, + param: { + "modelPath": model.path, + }, + ); + final endTime = DateTime.now(); + _logger.info( + "Face detection model loaded, took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch).toString()}ms", + ); + if (sessionAddress != -1) { + isInitialized = true; + } + } + } + + Future release() async { + if (isInitialized) { + await _computer + .compute(_releaseModel, param: {'address': sessionAddress}); + isInitialized = false; + sessionAddress = 0; + } + } + + Future initIsolate() async { + return _initLock.synchronized(() async { + if (isSpawned) return; + + _receivePort = ReceivePort(); + + try { + _isolate = await Isolate.spawn( + _isolateMain, + _receivePort.sendPort, + ); + _mainSendPort = await _receivePort.first as SendPort; + isSpawned = true; + + _resetInactivityTimer(); + } catch (e) { + _logger.severe('Could not spawn isolate', e); + isSpawned = false; + } + }); + } + + Future ensureSpawnedIsolate() async { + if (!isSpawned) { + await initIsolate(); + } + } + + /// The main execution function of the isolate. 
+ static void _isolateMain(SendPort mainSendPort) async { + final receivePort = ReceivePort(); + mainSendPort.send(receivePort.sendPort); + + receivePort.listen((message) async { + final functionIndex = message[0] as int; + final function = FaceDetectionOperation.values[functionIndex]; + final args = message[1] as Map; + final sendPort = message[2] as SendPort; + + try { + switch (function) { + case FaceDetectionOperation.yoloInferenceAndPostProcessing: + final inputImageList = args['inputImageList'] as Float32List; + final inputShape = args['inputShape'] as List; + final newSize = args['newSize'] as Dimensions; + final sessionAddress = args['sessionAddress'] as int; + final timeSentToIsolate = args['timeNow'] as DateTime; + final delaySentToIsolate = + DateTime.now().difference(timeSentToIsolate).inMilliseconds; + + final Stopwatch stopwatchPrepare = Stopwatch()..start(); + final inputOrt = OrtValueTensor.createTensorWithDataList( + inputImageList, + inputShape, + ); + final inputs = {'input': inputOrt}; + stopwatchPrepare.stop(); + dev.log( + '[YOLOFaceDetectionService] data preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms', + ); + + stopwatchPrepare.reset(); + stopwatchPrepare.start(); + final runOptions = OrtRunOptions(); + final session = OrtSession.fromAddress(sessionAddress); + stopwatchPrepare.stop(); + dev.log( + '[YOLOFaceDetectionService] session preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms', + ); + + final stopwatchInterpreter = Stopwatch()..start(); + late final List outputs; + try { + outputs = session.run(runOptions, inputs); + } catch (e, s) { + dev.log( + '[YOLOFaceDetectionService] Error while running inference: $e \n $s', + ); + throw YOLOFaceInterpreterRunException(); + } + stopwatchInterpreter.stop(); + dev.log( + '[YOLOFaceDetectionService] interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms', + ); + + final relativeDetections = + _yoloPostProcessOutputs(outputs, 
newSize); + + sendPort + .send((relativeDetections, delaySentToIsolate, DateTime.now())); + break; + } + } catch (e, stackTrace) { + sendPort + .send({'error': e.toString(), 'stackTrace': stackTrace.toString()}); + } + }); + } + + /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result. + Future _runInIsolate( + (FaceDetectionOperation, Map) message, + ) async { + await ensureSpawnedIsolate(); + _resetInactivityTimer(); + final completer = Completer(); + final answerPort = ReceivePort(); + + _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]); + + answerPort.listen((receivedMessage) { + if (receivedMessage is Map && receivedMessage.containsKey('error')) { + // Handle the error + final errorMessage = receivedMessage['error']; + final errorStackTrace = receivedMessage['stackTrace']; + final exception = Exception(errorMessage); + final stackTrace = StackTrace.fromString(errorStackTrace); + completer.completeError(exception, stackTrace); + } else { + completer.complete(receivedMessage); + } + }); + + return completer.future; + } + + /// Resets a timer that kills the isolate after a certain amount of inactivity. + /// + /// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`) + void _resetInactivityTimer() { + _inactivityTimer?.cancel(); + _inactivityTimer = Timer(_inactivityDuration, () { + _logger.info( + 'Face detection (YOLO ONNX) Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds. Killing isolate.', + ); + disposeIsolate(); + }); + } + + /// Disposes the isolate worker. + void disposeIsolate() { + if (!isSpawned) return; + + isSpawned = false; + _isolate.kill(); + _receivePort.close(); + _inactivityTimer?.cancel(); + } + + /// Detects faces in the given image data. 
+ Future<(List, Dimensions)> predict( + Uint8List imageData, + ) async { + assert(isInitialized); + + final stopwatch = Stopwatch()..start(); + + final stopwatchDecoding = Stopwatch()..start(); + final (inputImageList, originalSize, newSize) = + await ImageMlIsolate.instance.preprocessImageYoloOnnx( + imageData, + normalize: true, + requiredWidth: kInputWidth, + requiredHeight: kInputHeight, + maintainAspectRatio: true, + quality: FilterQuality.medium, + ); + + // final input = [inputImageList]; + final inputShape = [ + 1, + 3, + kInputHeight, + kInputWidth, + ]; + final inputOrt = OrtValueTensor.createTensorWithDataList( + inputImageList, + inputShape, + ); + final inputs = {'input': inputOrt}; + stopwatchDecoding.stop(); + _logger.info( + 'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms', + ); + _logger.info('original size: $originalSize \n new size: $newSize'); + + // Run inference + final stopwatchInterpreter = Stopwatch()..start(); + List? outputs; + try { + final runOptions = OrtRunOptions(); + final session = OrtSession.fromAddress(sessionAddress); + outputs = session.run(runOptions, inputs); + // inputOrt.release(); + // runOptions.release(); + } catch (e, s) { + _logger.severe('Error while running inference: $e \n $s'); + throw YOLOFaceInterpreterRunException(); + } + stopwatchInterpreter.stop(); + _logger.info( + 'interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms', + ); + + final relativeDetections = _yoloPostProcessOutputs(outputs, newSize); + + stopwatch.stop(); + _logger.info( + 'predict() face detection executed in ${stopwatch.elapsedMilliseconds}ms', + ); + + return (relativeDetections, originalSize); + } + + /// Detects faces in the given image data. 
+ static Future<(List, Dimensions)> predictSync( + ui.Image image, + ByteData imageByteData, + int sessionAddress, + ) async { + assert(sessionAddress != 0 && sessionAddress != -1); + + final stopwatch = Stopwatch()..start(); + + final stopwatchPreprocessing = Stopwatch()..start(); + final (inputImageList, originalSize, newSize) = + await preprocessImageToFloat32ChannelsFirst( + image, + imageByteData, + normalization: 1, + requiredWidth: kInputWidth, + requiredHeight: kInputHeight, + maintainAspectRatio: true, + ); + + // final input = [inputImageList]; + final inputShape = [ + 1, + 3, + kInputHeight, + kInputWidth, + ]; + final inputOrt = OrtValueTensor.createTensorWithDataList( + inputImageList, + inputShape, + ); + final inputs = {'input': inputOrt}; + stopwatchPreprocessing.stop(); + dev.log( + 'Face detection image preprocessing is finished, in ${stopwatchPreprocessing.elapsedMilliseconds}ms', + ); + _logger.info( + 'Image decoding and preprocessing is finished, in ${stopwatchPreprocessing.elapsedMilliseconds}ms', + ); + _logger.info('original size: $originalSize \n new size: $newSize'); + + // Run inference + final stopwatchInterpreter = Stopwatch()..start(); + List? outputs; + try { + final runOptions = OrtRunOptions(); + final session = OrtSession.fromAddress(sessionAddress); + outputs = session.run(runOptions, inputs); + // inputOrt.release(); + // runOptions.release(); + } catch (e, s) { + _logger.severe('Error while running inference: $e \n $s'); + throw YOLOFaceInterpreterRunException(); + } + stopwatchInterpreter.stop(); + _logger.info( + 'interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms', + ); + + final relativeDetections = _yoloPostProcessOutputs(outputs, newSize); + + stopwatch.stop(); + _logger.info( + 'predict() face detection executed in ${stopwatch.elapsedMilliseconds}ms', + ); + + return (relativeDetections, originalSize); + } + + /// Detects faces in the given image data. 
+ Future<(List, Dimensions)> predictInIsolate( + Uint8List imageData, + ) async { + await ensureSpawnedIsolate(); + assert(isInitialized); + + _logger.info('predictInIsolate() is called'); + + final stopwatch = Stopwatch()..start(); + + final stopwatchDecoding = Stopwatch()..start(); + final (inputImageList, originalSize, newSize) = + await ImageMlIsolate.instance.preprocessImageYoloOnnx( + imageData, + normalize: true, + requiredWidth: kInputWidth, + requiredHeight: kInputHeight, + maintainAspectRatio: true, + quality: FilterQuality.medium, + ); + // final input = [inputImageList]; + final inputShape = [ + 1, + 3, + kInputHeight, + kInputWidth, + ]; + + stopwatchDecoding.stop(); + _logger.info( + 'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms', + ); + _logger.info('original size: $originalSize \n new size: $newSize'); + + final ( + List relativeDetections, + delaySentToIsolate, + timeSentToMain + ) = await _runInIsolate( + ( + FaceDetectionOperation.yoloInferenceAndPostProcessing, + { + 'inputImageList': inputImageList, + 'inputShape': inputShape, + 'newSize': newSize, + 'sessionAddress': sessionAddress, + 'timeNow': DateTime.now(), + } + ), + ) as (List, int, DateTime); + + final delaySentToMain = + DateTime.now().difference(timeSentToMain).inMilliseconds; + + stopwatch.stop(); + _logger.info( + 'predictInIsolate() face detection executed in ${stopwatch.elapsedMilliseconds}ms, with ${delaySentToIsolate}ms delay sent to isolate, and ${delaySentToMain}ms delay sent to main, for a total of ${delaySentToIsolate + delaySentToMain}ms delay due to isolate', + ); + + return (relativeDetections, originalSize); + } + + Future<(List, Dimensions)> predictInComputer( + String imagePath, + ) async { + assert(isInitialized); + + _logger.info('predictInComputer() is called'); + + final stopwatch = Stopwatch()..start(); + + final stopwatchDecoding = Stopwatch()..start(); + final imageData = await File(imagePath).readAsBytes(); + final 
(inputImageList, originalSize, newSize) = + await ImageMlIsolate.instance.preprocessImageYoloOnnx( + imageData, + normalize: true, + requiredWidth: kInputWidth, + requiredHeight: kInputHeight, + maintainAspectRatio: true, + quality: FilterQuality.medium, + ); + // final input = [inputImageList]; + return await _computerLock.synchronized(() async { + final inputShape = [ + 1, + 3, + kInputHeight, + kInputWidth, + ]; + + stopwatchDecoding.stop(); + _logger.info( + 'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms', + ); + _logger.info('original size: $originalSize \n new size: $newSize'); + + final ( + List relativeDetections, + delaySentToIsolate, + timeSentToMain + ) = await _computer.compute( + inferenceAndPostProcess, + param: { + 'inputImageList': inputImageList, + 'inputShape': inputShape, + 'newSize': newSize, + 'sessionAddress': sessionAddress, + 'timeNow': DateTime.now(), + }, + ) as (List, int, DateTime); + + final delaySentToMain = + DateTime.now().difference(timeSentToMain).inMilliseconds; + + stopwatch.stop(); + _logger.info( + 'predictInIsolate() face detection executed in ${stopwatch.elapsedMilliseconds}ms, with ${delaySentToIsolate}ms delay sent to isolate, and ${delaySentToMain}ms delay sent to main, for a total of ${delaySentToIsolate + delaySentToMain}ms delay due to isolate', + ); + + return (relativeDetections, originalSize); + }); + } + + /// Detects faces in the given image data. + /// This method is optimized for batch processing. + /// + /// `imageDataList`: The image data to analyze. + /// + /// WARNING: Currently this method only returns the detections for the first image in the batch. + /// Change the function to output all detection before actually using it in production. 
+ Future> predictBatch( + List imageDataList, + ) async { + assert(isInitialized); + + final stopwatch = Stopwatch()..start(); + + final stopwatchDecoding = Stopwatch()..start(); + final List inputImageDataLists = []; + final List<(Dimensions, Dimensions)> originalAndNewSizeList = []; + int concatenatedImageInputsLength = 0; + for (final imageData in imageDataList) { + final (inputImageList, originalSize, newSize) = + await ImageMlIsolate.instance.preprocessImageYoloOnnx( + imageData, + normalize: true, + requiredWidth: kInputWidth, + requiredHeight: kInputHeight, + maintainAspectRatio: true, + quality: FilterQuality.medium, + ); + inputImageDataLists.add(inputImageList); + originalAndNewSizeList.add((originalSize, newSize)); + concatenatedImageInputsLength += inputImageList.length; + } + + final inputImageList = Float32List(concatenatedImageInputsLength); + + int offset = 0; + for (int i = 0; i < inputImageDataLists.length; i++) { + final inputImageData = inputImageDataLists[i]; + inputImageList.setRange( + offset, + offset + inputImageData.length, + inputImageData, + ); + offset += inputImageData.length; + } + + // final input = [inputImageList]; + final inputShape = [ + inputImageDataLists.length, + 3, + kInputHeight, + kInputWidth, + ]; + final inputOrt = OrtValueTensor.createTensorWithDataList( + inputImageList, + inputShape, + ); + final inputs = {'input': inputOrt}; + stopwatchDecoding.stop(); + _logger.info( + 'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms', + ); + // _logger.info('original size: $originalSize \n new size: $newSize'); + + _logger.info('interpreter.run is called'); + // Run inference + final stopwatchInterpreter = Stopwatch()..start(); + List? 
outputs; + try { + final runOptions = OrtRunOptions(); + final session = OrtSession.fromAddress(sessionAddress); + outputs = session.run(runOptions, inputs); + inputOrt.release(); + runOptions.release(); + } catch (e, s) { + _logger.severe('Error while running inference: $e \n $s'); + throw YOLOFaceInterpreterRunException(); + } + stopwatchInterpreter.stop(); + _logger.info( + 'interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms, or ${stopwatchInterpreter.elapsedMilliseconds / inputImageDataLists.length} ms per image', + ); + + _logger.info('outputs: $outputs'); + + const int imageOutputToUse = 0; + + // // Get output tensors + final nestedResults = + outputs[0]?.value as List>>; // [b, 25200, 16] + final selectedResults = nestedResults[imageOutputToUse]; // [25200, 16] + + // final rawScores = []; + // for (final result in firstResults) { + // rawScores.add(result[4]); + // } + // final rawScoresCopy = List.from(rawScores); + // rawScoresCopy.sort(); + // _logger.info('rawScores minimum: ${rawScoresCopy.first}'); + // _logger.info('rawScores maximum: ${rawScoresCopy.last}'); + + var relativeDetections = yoloOnnxFilterExtractDetections( + kMinScoreSigmoidThreshold, + kInputWidth, + kInputHeight, + results: selectedResults, + ); + + // Release outputs + for (var element in outputs) { + element?.release(); + } + + // Account for the fact that the aspect ratio was maintained + for (final faceDetection in relativeDetections) { + faceDetection.correctForMaintainedAspectRatio( + const Dimensions( + width: kInputWidth, + height: kInputHeight, + ), + originalAndNewSizeList[imageOutputToUse].$2, + ); + } + + // Non-maximum suppression to remove duplicate detections + relativeDetections = naiveNonMaxSuppression( + detections: relativeDetections, + iouThreshold: kIouThreshold, + ); + + if (relativeDetections.isEmpty) { + _logger.info('No face detected'); + return []; + } + + stopwatch.stop(); + _logger.info( + 'predict() face detection executed in 
${stopwatch.elapsedMilliseconds}ms', + ); + + return relativeDetections; + } + + static List _yoloPostProcessOutputs( + List? outputs, + Dimensions newSize, + ) { + // // Get output tensors + final nestedResults = + outputs?[0]?.value as List>>; // [1, 25200, 16] + final firstResults = nestedResults[0]; // [25200, 16] + + // final rawScores = []; + // for (final result in firstResults) { + // rawScores.add(result[4]); + // } + // final rawScoresCopy = List.from(rawScores); + // rawScoresCopy.sort(); + // _logger.info('rawScores minimum: ${rawScoresCopy.first}'); + // _logger.info('rawScores maximum: ${rawScoresCopy.last}'); + + var relativeDetections = yoloOnnxFilterExtractDetections( + kMinScoreSigmoidThreshold, + kInputWidth, + kInputHeight, + results: firstResults, + ); + + // Release outputs + // outputs?.forEach((element) { + // element?.release(); + // }); + + // Account for the fact that the aspect ratio was maintained + for (final faceDetection in relativeDetections) { + faceDetection.correctForMaintainedAspectRatio( + const Dimensions( + width: kInputWidth, + height: kInputHeight, + ), + newSize, + ); + } + + // Non-maximum suppression to remove duplicate detections + relativeDetections = naiveNonMaxSuppression( + detections: relativeDetections, + iouThreshold: kIouThreshold, + ); + + return relativeDetections; + } + + /// Initialize the interpreter by loading the model file. 
+ static Future _loadModel(Map args) async { + final sessionOptions = OrtSessionOptions() + ..setInterOpNumThreads(1) + ..setIntraOpNumThreads(1) + ..setSessionGraphOptimizationLevel(GraphOptimizationLevel.ortEnableAll); + try { + // _logger.info('Loading face embedding model'); + final session = + OrtSession.fromFile(File(args["modelPath"]), sessionOptions); + // _logger.info('Face embedding model loaded'); + return session.address; + } catch (e, _) { + // _logger.severe('Face embedding model not loaded', e, s); + } + return -1; + } + + static Future _releaseModel(Map args) async { + final address = args['address'] as int; + if (address == 0) { + return; + } + final session = OrtSession.fromAddress(address); + session.release(); + return; + } + + static Future<(List, int, DateTime)> + inferenceAndPostProcess( + Map args, + ) async { + final inputImageList = args['inputImageList'] as Float32List; + final inputShape = args['inputShape'] as List; + final newSize = args['newSize'] as Dimensions; + final sessionAddress = args['sessionAddress'] as int; + final timeSentToIsolate = args['timeNow'] as DateTime; + final delaySentToIsolate = + DateTime.now().difference(timeSentToIsolate).inMilliseconds; + + final Stopwatch stopwatchPrepare = Stopwatch()..start(); + final inputOrt = OrtValueTensor.createTensorWithDataList( + inputImageList, + inputShape, + ); + final inputs = {'input': inputOrt}; + stopwatchPrepare.stop(); + dev.log( + '[YOLOFaceDetectionService] data preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms', + ); + + stopwatchPrepare.reset(); + stopwatchPrepare.start(); + final runOptions = OrtRunOptions(); + final session = OrtSession.fromAddress(sessionAddress); + stopwatchPrepare.stop(); + dev.log( + '[YOLOFaceDetectionService] session preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms', + ); + + final stopwatchInterpreter = Stopwatch()..start(); + late final List outputs; + try { + outputs = session.run(runOptions, 
inputs); + } catch (e, s) { + dev.log( + '[YOLOFaceDetectionService] Error while running inference: $e \n $s', + ); + throw YOLOFaceInterpreterRunException(); + } + stopwatchInterpreter.stop(); + dev.log( + '[YOLOFaceDetectionService] interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms', + ); + + final relativeDetections = _yoloPostProcessOutputs(outputs, newSize); + + return (relativeDetections, delaySentToIsolate, DateTime.now()); + } +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_detection/naive_non_max_suppression.dart b/mobile/lib/services/machine_learning/face_ml/face_detection/naive_non_max_suppression.dart new file mode 100644 index 0000000000..624181a669 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_detection/naive_non_max_suppression.dart @@ -0,0 +1,49 @@ +import 'dart:math' as math show max, min; + +import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; + +List naiveNonMaxSuppression({ + required List detections, + required double iouThreshold, +}) { + // Sort the detections by score, the highest first + detections.sort((a, b) => b.score.compareTo(a.score)); + + // Loop through the detections and calculate the IOU + for (var i = 0; i < detections.length - 1; i++) { + for (var j = i + 1; j < detections.length; j++) { + final iou = _calculateIOU(detections[i], detections[j]); + if (iou >= iouThreshold) { + detections.removeAt(j); + j--; + } + } + } + return detections; +} + +double _calculateIOU( + FaceDetectionRelative detectionA, + FaceDetectionRelative detectionB, +) { + final areaA = detectionA.width * detectionA.height; + final areaB = detectionB.width * detectionB.height; + + final intersectionMinX = math.max(detectionA.xMinBox, detectionB.xMinBox); + final intersectionMinY = math.max(detectionA.yMinBox, detectionB.yMinBox); + final intersectionMaxX = math.min(detectionA.xMaxBox, detectionB.xMaxBox); + final intersectionMaxY = 
math.min(detectionA.yMaxBox, detectionB.yMaxBox); + + final intersectionWidth = intersectionMaxX - intersectionMinX; + final intersectionHeight = intersectionMaxY - intersectionMinY; + + if (intersectionWidth < 0 || intersectionHeight < 0) { + return 0.0; // If boxes do not overlap, IoU is 0 + } + + final intersectionArea = intersectionWidth * intersectionHeight; + + final unionArea = areaA + areaB - intersectionArea; + + return intersectionArea / unionArea; +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_detection/yolo_filter_extract_detections.dart b/mobile/lib/services/machine_learning/face_ml/face_detection/yolo_filter_extract_detections.dart new file mode 100644 index 0000000000..ec546533ab --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_detection/yolo_filter_extract_detections.dart @@ -0,0 +1,95 @@ +import 'dart:developer' as dev show log; + +import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; + +List yoloOnnxFilterExtractDetections( + double minScoreSigmoidThreshold, + int inputWidth, + int inputHeight, { + required List> results, // // [25200, 16] +}) { + final outputDetections = []; + final output = >[]; + + // Go through the raw output and check the scores + for (final result in results) { + // Filter out raw detections with low scores + if (result[4] < minScoreSigmoidThreshold) { + continue; + } + + // Get the raw detection + final rawDetection = List.from(result); + + // Append the processed raw detection to the output + output.add(rawDetection); + } + + if (output.isEmpty) { + double maxScore = 0; + for (final result in results) { + if (result[4] > maxScore) { + maxScore = result[4]; + } + } + dev.log( + 'No face detections found above the minScoreSigmoidThreshold of $minScoreSigmoidThreshold. 
The max score was $maxScore.', + ); + } + + for (final List rawDetection in output) { + // Get absolute bounding box coordinates in format [xMin, yMin, xMax, yMax] https://github.com/deepcam-cn/yolov5-face/blob/eb23d18defe4a76cc06449a61cd51004c59d2697/utils/general.py#L216 + final xMinAbs = rawDetection[0] - rawDetection[2] / 2; + final yMinAbs = rawDetection[1] - rawDetection[3] / 2; + final xMaxAbs = rawDetection[0] + rawDetection[2] / 2; + final yMaxAbs = rawDetection[1] + rawDetection[3] / 2; + + // Get the relative bounding box coordinates in format [xMin, yMin, xMax, yMax] + final box = [ + xMinAbs / inputWidth, + yMinAbs / inputHeight, + xMaxAbs / inputWidth, + yMaxAbs / inputHeight, + ]; + + // Get the keypoints coordinates in format [x, y] + final allKeypoints = >[ + [ + rawDetection[5] / inputWidth, + rawDetection[6] / inputHeight, + ], + [ + rawDetection[7] / inputWidth, + rawDetection[8] / inputHeight, + ], + [ + rawDetection[9] / inputWidth, + rawDetection[10] / inputHeight, + ], + [ + rawDetection[11] / inputWidth, + rawDetection[12] / inputHeight, + ], + [ + rawDetection[13] / inputWidth, + rawDetection[14] / inputHeight, + ], + ]; + + // Get the score + final score = + rawDetection[4]; // Or should it be rawDetection[4]*rawDetection[15]? 
+ + // Create the relative detection + final detection = FaceDetectionRelative( + score: score, + box: box, + allKeypoints: allKeypoints, + ); + + // Append the relative detection to the output + outputDetections.add(detection); + } + + return outputDetections; +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_embedding/face_embedding_exceptions.dart b/mobile/lib/services/machine_learning/face_ml/face_embedding/face_embedding_exceptions.dart new file mode 100644 index 0000000000..548b80a957 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_embedding/face_embedding_exceptions.dart @@ -0,0 +1,11 @@ +class MobileFaceNetInterpreterInitializationException implements Exception {} + +class MobileFaceNetImagePreprocessingException implements Exception {} + +class MobileFaceNetEmptyInput implements Exception {} + +class MobileFaceNetWrongInputSize implements Exception {} + +class MobileFaceNetWrongInputRange implements Exception {} + +class MobileFaceNetInterpreterRunException implements Exception {} \ No newline at end of file diff --git a/mobile/lib/services/machine_learning/face_ml/face_embedding/face_embedding_service.dart b/mobile/lib/services/machine_learning/face_ml/face_embedding/face_embedding_service.dart new file mode 100644 index 0000000000..777e793769 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_embedding/face_embedding_service.dart @@ -0,0 +1,249 @@ +import "dart:io" show File; +import 'dart:math' as math show max, min, sqrt; +import 'dart:typed_data' show Float32List; + +import 'package:computer/computer.dart'; +import 'package:logging/logging.dart'; +import 'package:onnxruntime/onnxruntime.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; +import "package:photos/services/remote_assets_service.dart"; +import "package:photos/utils/image_ml_isolate.dart"; +import "package:synchronized/synchronized.dart"; + +/// This class is responsible for running the 
face embedding model (MobileFaceNet) on ONNX runtime, and can be accessed through the singleton instance [FaceEmbeddingService.instance]. +class FaceEmbeddingService { + static const kModelBucketEndpoint = "https://models.ente.io/"; + static const kRemoteBucketModelPath = "mobilefacenet_opset15.onnx"; + static const modelRemotePath = kModelBucketEndpoint + kRemoteBucketModelPath; + + static const int kInputSize = 112; + static const int kEmbeddingSize = 192; + static const int kNumChannels = 3; + static const bool kPreWhiten = false; + + static final _logger = Logger('FaceEmbeddingOnnx'); + + bool isInitialized = false; + int sessionAddress = 0; + + final _computer = Computer.shared(); + + final _computerLock = Lock(); + + // singleton pattern + FaceEmbeddingService._privateConstructor(); + + /// Use this instance to access the FaceEmbedding service. Make sure to call `init()` before using it. + /// e.g. `await FaceEmbedding.instance.init();` + /// + /// Then you can use `predict()` to get the embedding of a face, so `FaceEmbedding.instance.predict(imageData)` + /// + /// config options: faceEmbeddingEnte + static final instance = FaceEmbeddingService._privateConstructor(); + factory FaceEmbeddingService() => instance; + + /// Check if the interpreter is initialized, if not initialize it with `loadModel()` + Future init() async { + if (!isInitialized) { + _logger.info('init is called'); + final model = + await RemoteAssetsService.instance.getAsset(modelRemotePath); + final startTime = DateTime.now(); + // Doing this from main isolate since `rootBundle` cannot be accessed outside it + sessionAddress = await _computer.compute( + _loadModel, + param: { + "modelPath": model.path, + }, + ); + final endTime = DateTime.now(); + _logger.info( + "Face embedding model loaded, took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch).toString()}ms", + ); + if (sessionAddress != -1) { + isInitialized = true; + } + } + } + + Future release() async { + if 
(isInitialized) { + await _computer + .compute(_releaseModel, param: {'address': sessionAddress}); + isInitialized = false; + sessionAddress = 0; + } + } + + static Future _loadModel(Map args) async { + final sessionOptions = OrtSessionOptions() + ..setInterOpNumThreads(1) + ..setIntraOpNumThreads(1) + ..setSessionGraphOptimizationLevel(GraphOptimizationLevel.ortEnableAll); + try { + // _logger.info('Loading face embedding model'); + final session = + OrtSession.fromFile(File(args["modelPath"]), sessionOptions); + // _logger.info('Face embedding model loaded'); + return session.address; + } catch (e, _) { + // _logger.severe('Face embedding model not loaded', e, s); + } + return -1; + } + + static Future _releaseModel(Map args) async { + final address = args['address'] as int; + if (address == 0) { + return; + } + final session = OrtSession.fromAddress(address); + session.release(); + return; + } + + Future<(List, bool, double)> predictFromImageDataInComputer( + String imagePath, + FaceDetectionRelative face, + ) async { + assert(sessionAddress != 0 && sessionAddress != -1 && isInitialized); + + try { + final stopwatchDecoding = Stopwatch()..start(); + final (inputImageList, _, isBlur, blurValue, _) = + await ImageMlIsolate.instance.preprocessMobileFaceNetOnnx( + imagePath, + [face], + ); + stopwatchDecoding.stop(); + _logger.info( + 'MobileFaceNet image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms', + ); + + final stopwatch = Stopwatch()..start(); + _logger.info('MobileFaceNet interpreter.run is called'); + final embedding = await _computer.compute( + inferFromMap, + param: { + 'input': inputImageList, + 'address': sessionAddress, + 'inputSize': kInputSize, + }, + taskName: 'createFaceEmbedding', + ) as List; + stopwatch.stop(); + _logger.info( + 'MobileFaceNet interpreter.run is finished, in ${stopwatch.elapsedMilliseconds}ms', + ); + + _logger.info( + 'MobileFaceNet results (only first few numbers): embedding 
${embedding.sublist(0, 5)}', + ); + _logger.info( + 'Mean of embedding: ${embedding.reduce((a, b) => a + b) / embedding.length}', + ); + _logger.info( + 'Max of embedding: ${embedding.reduce(math.max)}', + ); + _logger.info( + 'Min of embedding: ${embedding.reduce(math.min)}', + ); + + return (embedding, isBlur[0], blurValue[0]); + } catch (e) { + _logger.info('MobileFaceNet Error while running inference: $e'); + rethrow; + } + } + + Future>> predictInComputer(Float32List input) async { + assert(sessionAddress != 0 && sessionAddress != -1 && isInitialized); + return await _computerLock.synchronized(() async { + try { + final stopwatch = Stopwatch()..start(); + _logger.info('MobileFaceNet interpreter.run is called'); + final embeddings = await _computer.compute( + inferFromMap, + param: { + 'input': input, + 'address': sessionAddress, + 'inputSize': kInputSize, + }, + taskName: 'createFaceEmbedding', + ) as List>; + stopwatch.stop(); + _logger.info( + 'MobileFaceNet interpreter.run is finished, in ${stopwatch.elapsedMilliseconds}ms', + ); + + return embeddings; + } catch (e) { + _logger.info('MobileFaceNet Error while running inference: $e'); + rethrow; + } + }); + } + + static Future>> predictSync( + Float32List input, + int sessionAddress, + ) async { + assert(sessionAddress != 0 && sessionAddress != -1); + try { + final stopwatch = Stopwatch()..start(); + _logger.info('MobileFaceNet interpreter.run is called'); + final embeddings = await infer( + input, + sessionAddress, + kInputSize, + ); + stopwatch.stop(); + _logger.info( + 'MobileFaceNet interpreter.run is finished, in ${stopwatch.elapsedMilliseconds}ms', + ); + + return embeddings; + } catch (e) { + _logger.info('MobileFaceNet Error while running inference: $e'); + rethrow; + } + } + + static Future>> inferFromMap(Map args) async { + final inputImageList = args['input'] as Float32List; + final address = args['address'] as int; + final inputSize = args['inputSize'] as int; + return await infer(inputImageList, 
address, inputSize); + } + + static Future>> infer( + Float32List inputImageList, + int address, + int inputSize, + ) async { + final runOptions = OrtRunOptions(); + final int numberOfFaces = + inputImageList.length ~/ (inputSize * inputSize * 3); + final inputOrt = OrtValueTensor.createTensorWithDataList( + inputImageList, + [numberOfFaces, inputSize, inputSize, 3], + ); + final inputs = {'img_inputs': inputOrt}; + final session = OrtSession.fromAddress(address); + final List outputs = session.run(runOptions, inputs); + final embeddings = outputs[0]?.value as List>; + + for (final embedding in embeddings) { + double normalization = 0; + for (int i = 0; i < kEmbeddingSize; i++) { + normalization += embedding[i] * embedding[i]; + } + final double sqrtNormalization = math.sqrt(normalization); + for (int i = 0; i < kEmbeddingSize; i++) { + embedding[i] = embedding[i] / sqrtNormalization; + } + } + + return embeddings; + } +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_filtering/blur_detection_service.dart b/mobile/lib/services/machine_learning/face_ml/face_filtering/blur_detection_service.dart new file mode 100644 index 0000000000..9c8d2d8c80 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_filtering/blur_detection_service.dart @@ -0,0 +1,155 @@ +import 'package:logging/logging.dart'; +import "package:photos/services/machine_learning/face_ml/face_detection/detection.dart"; +import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart'; + +class BlurDetectionService { + final _logger = Logger('BlurDetectionService'); + + // singleton pattern + BlurDetectionService._privateConstructor(); + static final instance = BlurDetectionService._privateConstructor(); + factory BlurDetectionService() => instance; + + Future<(bool, double)> predictIsBlurGrayLaplacian( + List> grayImage, { + int threshold = kLaplacianHardThreshold, + FaceDirection faceDirection = FaceDirection.straight, + }) async { + 
final List> laplacian = + _applyLaplacian(grayImage, faceDirection: faceDirection); + final double variance = _calculateVariance(laplacian); + _logger.info('Variance: $variance'); + return (variance < threshold, variance); + } + + double _calculateVariance(List> matrix) { + final int numRows = matrix.length; + final int numCols = matrix[0].length; + final int totalElements = numRows * numCols; + + // Calculate the mean + double mean = 0; + for (var row in matrix) { + for (var value in row) { + mean += value; + } + } + mean /= totalElements; + + // Calculate the variance + double variance = 0; + for (var row in matrix) { + for (var value in row) { + final double diff = value - mean; + variance += diff * diff; + } + } + variance /= totalElements; + + return variance; + } + + List> _padImage( + List> image, { + int removeSideColumns = 56, + FaceDirection faceDirection = FaceDirection.straight, + }) { + // Exception is removeSideColumns is not even + if (removeSideColumns % 2 != 0) { + throw Exception('removeSideColumns must be even'); + } + + final int numRows = image.length; + final int numCols = image[0].length; + final int paddedNumCols = numCols + 2 - removeSideColumns; + final int paddedNumRows = numRows + 2; + + // Create a new matrix with extra padding + final List> paddedImage = List.generate( + paddedNumRows, + (i) => List.generate( + paddedNumCols, + (j) => 0, + growable: false, + ), + growable: false, + ); + + // Copy original image into the center of the padded image, taking into account the face direction + if (faceDirection == FaceDirection.straight) { + for (int i = 0; i < numRows; i++) { + for (int j = 0; j < (paddedNumCols - 2); j++) { + paddedImage[i + 1][j + 1] = + image[i][j + (removeSideColumns / 2).round()]; + } + } + // If the face is facing left, we only take the right side of the face image + } else if (faceDirection == FaceDirection.left) { + for (int i = 0; i < numRows; i++) { + for (int j = 0; j < (paddedNumCols - 2); j++) { + paddedImage[i 
+ 1][j + 1] = image[i][j + removeSideColumns]; + } + } + // If the face is facing right, we only take the left side of the face image + } else if (faceDirection == FaceDirection.right) { + for (int i = 0; i < numRows; i++) { + for (int j = 0; j < (paddedNumCols - 2); j++) { + paddedImage[i + 1][j + 1] = image[i][j]; + } + } + } + + // Reflect padding + // Top and bottom rows + for (int j = 1; j <= (paddedNumCols - 2); j++) { + paddedImage[0][j] = paddedImage[2][j]; // Top row + paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; // Bottom row + } + // Left and right columns + for (int i = 0; i < numRows + 2; i++) { + paddedImage[i][0] = paddedImage[i][2]; // Left column + paddedImage[i][paddedNumCols - 1] = + paddedImage[i][paddedNumCols - 3]; // Right column + } + + return paddedImage; + } + + List> _applyLaplacian( + List> image, { + FaceDirection faceDirection = FaceDirection.straight, + }) { + final List> paddedImage = + _padImage(image, faceDirection: faceDirection); + final int numRows = paddedImage.length - 2; + final int numCols = paddedImage[0].length - 2; + final List> outputImage = List.generate( + numRows, + (i) => List.generate(numCols, (j) => 0, growable: false), + growable: false, + ); + + // Define the Laplacian kernel + final List> kernel = [ + [0, 1, 0], + [1, -4, 1], + [0, 1, 0], + ]; + + // Apply the kernel to each pixel + for (int i = 0; i < numRows; i++) { + for (int j = 0; j < numCols; j++) { + int sum = 0; + for (int ki = 0; ki < 3; ki++) { + for (int kj = 0; kj < 3; kj++) { + sum += paddedImage[i + ki][j + kj] * kernel[ki][kj]; + } + } + // Adjust the output value if necessary (e.g., clipping) + outputImage[i][j] = sum; //.clamp(0, 255); + } + } + + return outputImage; + } +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart b/mobile/lib/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart new file mode 100644 index 0000000000..b0f954f8f9 --- /dev/null +++ 
b/mobile/lib/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart @@ -0,0 +1,20 @@ +import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_service.dart'; + +/// Blur detection threshold +const kLaplacianHardThreshold = 10; +const kLaplacianSoftThreshold = 50; +const kLaplacianVerySoftThreshold = 200; + +/// Default blur value +const kLapacianDefault = 10000.0; + +/// The minimum score for a face to be considered a high quality face for clustering and person detection +const kMinimumQualityFaceScore = 0.80; +const kMediumQualityFaceScore = 0.85; +const kHighQualityFaceScore = 0.90; + +/// The minimum score for a face to be detected, regardless of quality. Use [kMinimumQualityFaceScore] for high quality faces. +const kMinFaceDetectionScore = FaceDetectionService.kMinScoreSigmoidThreshold; + +/// The minimum cluster size for displaying a cluster in the UI +const kMinimumClusterSizeSearchResult = 20; diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_exceptions.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_exceptions.dart new file mode 100644 index 0000000000..78a4bcb1f8 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_exceptions.dart @@ -0,0 +1,30 @@ + +class GeneralFaceMlException implements Exception { + final String message; + + GeneralFaceMlException(this.message); + + @override + String toString() => 'GeneralFaceMlException: $message'; +} + +class CouldNotRetrieveAnyFileData implements Exception {} + +class CouldNotInitializeFaceDetector implements Exception {} + +class CouldNotRunFaceDetector implements Exception {} + +class CouldNotWarpAffine implements Exception {} + +class CouldNotInitializeFaceEmbeddor implements Exception {} + +class InputProblemFaceEmbeddor implements Exception { + final String message; + + InputProblemFaceEmbeddor(this.message); + + @override + String toString() => 'InputProblemFaceEmbeddor: $message'; +} + +class 
CouldNotRunFaceEmbeddor implements Exception {} \ No newline at end of file diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_methods.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_methods.dart new file mode 100644 index 0000000000..5745234b58 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_methods.dart @@ -0,0 +1,90 @@ +import 'package:photos/services/machine_learning/face_ml/face_ml_version.dart'; + +/// Represents a face detection method with a specific version. +class FaceDetectionMethod extends VersionedMethod { + /// Creates a [FaceDetectionMethod] instance with a specific `method` and `version` (default `1`) + FaceDetectionMethod(String method, {int version = 1}) + : super(method, version); + + /// Creates a [FaceDetectionMethod] instance with 'Empty method' as the method, and a specific `version` (default `1`) + const FaceDetectionMethod.empty() : super.empty(); + + /// Creates a [FaceDetectionMethod] instance with 'BlazeFace' as the method, and a specific `version` (default `1`) + FaceDetectionMethod.blazeFace({int version = 1}) + : super('BlazeFace', version); + + static FaceDetectionMethod fromMlVersion(int version) { + switch (version) { + case 1: + return FaceDetectionMethod.blazeFace(version: version); + default: + return const FaceDetectionMethod.empty(); + } + } + + static FaceDetectionMethod fromJson(Map json) { + return FaceDetectionMethod( + json['method'], + version: json['version'], + ); + } +} + +/// Represents a face alignment method with a specific version. 
+class FaceAlignmentMethod extends VersionedMethod { + /// Creates a [FaceAlignmentMethod] instance with a specific `method` and `version` (default `1`) + FaceAlignmentMethod(String method, {int version = 1}) + : super(method, version); + + /// Creates a [FaceAlignmentMethod] instance with 'Empty method' as the method, and a specific `version` (default `1`) + const FaceAlignmentMethod.empty() : super.empty(); + + /// Creates a [FaceAlignmentMethod] instance with 'ArcFace' as the method, and a specific `version` (default `1`) + FaceAlignmentMethod.arcFace({int version = 1}) : super('ArcFace', version); + + static FaceAlignmentMethod fromMlVersion(int version) { + switch (version) { + case 1: + return FaceAlignmentMethod.arcFace(version: version); + default: + return const FaceAlignmentMethod.empty(); + } + } + + static FaceAlignmentMethod fromJson(Map json) { + return FaceAlignmentMethod( + json['method'], + version: json['version'], + ); + } +} + +/// Represents a face embedding method with a specific version. 
+class FaceEmbeddingMethod extends VersionedMethod { + /// Creates a [FaceEmbeddingMethod] instance with a specific `method` and `version` (default `1`) + FaceEmbeddingMethod(String method, {int version = 1}) + : super(method, version); + + /// Creates a [FaceEmbeddingMethod] instance with 'Empty method' as the method, and a specific `version` (default `1`) + const FaceEmbeddingMethod.empty() : super.empty(); + + /// Creates a [FaceEmbeddingMethod] instance with 'MobileFaceNet' as the method, and a specific `version` (default `1`) + FaceEmbeddingMethod.mobileFaceNet({int version = 1}) + : super('MobileFaceNet', version); + + static FaceEmbeddingMethod fromMlVersion(int version) { + switch (version) { + case 1: + return FaceEmbeddingMethod.mobileFaceNet(version: version); + default: + return const FaceEmbeddingMethod.empty(); + } + } + + static FaceEmbeddingMethod fromJson(Map json) { + return FaceEmbeddingMethod( + json['method'], + version: json['version'], + ); + } +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_result.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_result.dart new file mode 100644 index 0000000000..19f954013e --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_result.dart @@ -0,0 +1,314 @@ +import "dart:convert" show jsonEncode, jsonDecode; + +import "package:flutter/material.dart" show immutable; +import "package:logging/logging.dart"; +import "package:photos/face/model/dimension.dart"; +import "package:photos/models/file/file.dart"; +import 'package:photos/models/ml/ml_typedefs.dart'; +import "package:photos/models/ml/ml_versions.dart"; +import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; +import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart'; +import 
'package:photos/services/machine_learning/face_ml/face_ml_methods.dart'; + +final _logger = Logger('ClusterResult_FaceMlResult'); + +@immutable +class FaceMlResult { + final int fileId; + + final List faces; + + final Dimensions decodedImageSize; + + final int mlVersion; + final bool errorOccured; + final bool onlyThumbnailUsed; + + bool get hasFaces => faces.isNotEmpty; + int get numberOfFaces => faces.length; + + List get allFaceEmbeddings { + return faces.map((face) => face.embedding).toList(); + } + + List get allFaceIds { + return faces.map((face) => face.faceId).toList(); + } + + List get fileIdForEveryFace { + return List.filled(faces.length, fileId); + } + + FaceDetectionMethod get faceDetectionMethod => + FaceDetectionMethod.fromMlVersion(mlVersion); + FaceAlignmentMethod get faceAlignmentMethod => + FaceAlignmentMethod.fromMlVersion(mlVersion); + FaceEmbeddingMethod get faceEmbeddingMethod => + FaceEmbeddingMethod.fromMlVersion(mlVersion); + + const FaceMlResult({ + required this.fileId, + required this.faces, + required this.mlVersion, + required this.errorOccured, + required this.onlyThumbnailUsed, + required this.decodedImageSize, + }); + + Map _toJson() => { + 'fileId': fileId, + 'faces': faces.map((face) => face.toJson()).toList(), + 'mlVersion': mlVersion, + 'errorOccured': errorOccured, + 'onlyThumbnailUsed': onlyThumbnailUsed, + 'decodedImageSize': { + 'width': decodedImageSize.width, + 'height': decodedImageSize.height, + }, + }; + + String toJsonString() => jsonEncode(_toJson()); + + static FaceMlResult _fromJson(Map json) { + return FaceMlResult( + fileId: json['fileId'], + faces: (json['faces'] as List) + .map((item) => FaceResult.fromJson(item as Map)) + .toList(), + mlVersion: json['mlVersion'], + errorOccured: json['errorOccured'] ?? false, + onlyThumbnailUsed: json['onlyThumbnailUsed'] ?? false, + decodedImageSize: json['decodedImageSize'] != null + ? 
Dimensions( + width: json['decodedImageSize']['width'], + height: json['decodedImageSize']['height'], + ) + : json['faceDetectionImageSize'] == null + ? const Dimensions(width: -1, height: -1) + : Dimensions( + width: (json['faceDetectionImageSize']['width'] as double) + .truncate(), + height: (json['faceDetectionImageSize']['height'] as double) + .truncate(), + ), + ); + } + + static FaceMlResult fromJsonString(String jsonString) { + return _fromJson(jsonDecode(jsonString)); + } + + /// Sets the embeddings of the faces with the given faceIds to [10, 10,..., 10]. + /// + /// Throws an exception if a faceId is not found in the FaceMlResult. + void setEmbeddingsToTen(List faceIds) { + for (final faceId in faceIds) { + final faceIndex = faces.indexWhere((face) => face.faceId == faceId); + if (faceIndex == -1) { + throw Exception("No face found with faceId $faceId"); + } + for (var i = 0; i < faces[faceIndex].embedding.length; i++) { + faces[faceIndex].embedding[i] = 10; + } + } + } + + FaceDetectionRelative getDetectionForFaceId(String faceId) { + final faceIndex = faces.indexWhere((face) => face.faceId == faceId); + if (faceIndex == -1) { + throw Exception("No face found with faceId $faceId"); + } + return faces[faceIndex].detection; + } +} + +class FaceMlResultBuilder { + int fileId; + + List faces = []; + + Dimensions decodedImageSize; + + int mlVersion; + bool errorOccured; + bool onlyThumbnailUsed; + + FaceMlResultBuilder({ + this.fileId = -1, + this.mlVersion = faceMlVersion, + this.errorOccured = false, + this.onlyThumbnailUsed = false, + this.decodedImageSize = const Dimensions(width: -1, height: -1), + }); + + FaceMlResultBuilder.fromEnteFile( + EnteFile file, { + this.mlVersion = faceMlVersion, + this.errorOccured = false, + this.onlyThumbnailUsed = false, + this.decodedImageSize = const Dimensions(width: -1, height: -1), + }) : fileId = file.uploadedFileID ?? 
-1; + + FaceMlResultBuilder.fromEnteFileID( + int fileID, { + this.mlVersion = faceMlVersion, + this.errorOccured = false, + this.onlyThumbnailUsed = false, + this.decodedImageSize = const Dimensions(width: -1, height: -1), + }) : fileId = fileID; + + void addNewlyDetectedFaces( + List faceDetections, + Dimensions originalSize, + ) { + decodedImageSize = originalSize; + for (var i = 0; i < faceDetections.length; i++) { + faces.add( + FaceResultBuilder.fromFaceDetection( + faceDetections[i], + resultBuilder: this, + ), + ); + } + } + + void addAlignmentResults( + List alignmentResults, + List blurValues, + ) { + if (alignmentResults.length != faces.length) { + throw Exception( + "The amount of alignment results (${alignmentResults.length}) does not match the number of faces (${faces.length})", + ); + } + + for (var i = 0; i < alignmentResults.length; i++) { + faces[i].alignment = alignmentResults[i]; + faces[i].blurValue = blurValues[i]; + } + } + + void addEmbeddingsToExistingFaces( + List embeddings, + ) { + if (embeddings.length != faces.length) { + throw Exception( + "The amount of embeddings (${embeddings.length}) does not match the number of faces (${faces.length})", + ); + } + for (var faceIndex = 0; faceIndex < faces.length; faceIndex++) { + faces[faceIndex].embedding = embeddings[faceIndex]; + } + } + + FaceMlResult build() { + final faceResults = []; + for (var i = 0; i < faces.length; i++) { + faceResults.add(faces[i].build()); + } + return FaceMlResult( + fileId: fileId, + faces: faceResults, + mlVersion: mlVersion, + errorOccured: errorOccured, + onlyThumbnailUsed: onlyThumbnailUsed, + decodedImageSize: decodedImageSize, + ); + } + + FaceMlResult buildNoFaceDetected() { + faces = []; + return build(); + } + + FaceMlResult buildErrorOccurred() { + faces = []; + errorOccured = true; + return build(); + } +} + +@immutable +class FaceResult { + final FaceDetectionRelative detection; + final double blurValue; + final AlignmentResult alignment; + final 
Embedding embedding; + final int fileId; + final String faceId; + + bool get isBlurry => blurValue < kLaplacianHardThreshold; + + const FaceResult({ + required this.detection, + required this.blurValue, + required this.alignment, + required this.embedding, + required this.fileId, + required this.faceId, + }); + + Map toJson() => { + 'detection': detection.toJson(), + 'blurValue': blurValue, + 'alignment': alignment.toJson(), + 'embedding': embedding, + 'fileId': fileId, + 'faceId': faceId, + }; + + static FaceResult fromJson(Map json) { + return FaceResult( + detection: FaceDetectionRelative.fromJson(json['detection']), + blurValue: json['blurValue'], + alignment: AlignmentResult.fromJson(json['alignment']), + embedding: Embedding.from(json['embedding']), + fileId: json['fileId'], + faceId: json['faceId'], + ); + } +} + +class FaceResultBuilder { + FaceDetectionRelative detection = + FaceDetectionRelative.defaultInitialization(); + double blurValue = 1000; + AlignmentResult alignment = AlignmentResult.empty(); + Embedding embedding = []; + int fileId = -1; + String faceId = ''; + + bool get isBlurry => blurValue < kLaplacianHardThreshold; + + FaceResultBuilder({ + required this.fileId, + required this.faceId, + }); + + FaceResultBuilder.fromFaceDetection( + FaceDetectionRelative faceDetection, { + required FaceMlResultBuilder resultBuilder, + }) { + fileId = resultBuilder.fileId; + faceId = faceDetection.toFaceID(fileID: resultBuilder.fileId); + detection = faceDetection; + } + + FaceResult build() { + assert(detection.allKeypoints[0][0] <= 1); + assert(detection.box[0] <= 1); + return FaceResult( + detection: detection, + blurValue: blurValue, + alignment: alignment, + embedding: embedding, + fileId: fileId, + faceId: faceId, + ); + } +} + +int getFileIdFromFaceId(String faceId) { + return int.parse(faceId.split("_")[0]); +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart 
b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart new file mode 100644 index 0000000000..38079753c2 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart @@ -0,0 +1,1257 @@ +import "dart:async"; +import "dart:developer" as dev show log; +import "dart:io" show File, Platform; +import "dart:isolate"; +import "dart:math" show min; +import "dart:typed_data" show Uint8List, Float32List, ByteData; +import "dart:ui" show Image; + +import "package:computer/computer.dart"; +import "package:dart_ui_isolate/dart_ui_isolate.dart"; +import "package:flutter/foundation.dart" show debugPrint, kDebugMode; +import "package:logging/logging.dart"; +import "package:onnxruntime/onnxruntime.dart"; +import "package:package_info_plus/package_info_plus.dart"; +import "package:photos/core/configuration.dart"; +import "package:photos/core/event_bus.dart"; +import "package:photos/db/files_db.dart"; +import "package:photos/events/diff_sync_complete_event.dart"; +import "package:photos/events/machine_learning_control_event.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/extensions/list.dart"; +import "package:photos/extensions/stop_watch.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/box.dart"; +import "package:photos/face/model/detection.dart" as face_detection; +import "package:photos/face/model/face.dart"; +import "package:photos/face/model/landmark.dart"; +import "package:photos/models/file/extensions/file_props.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/models/file/file_type.dart"; +import "package:photos/models/ml/ml_versions.dart"; +import "package:photos/service_locator.dart"; +import 'package:photos/services/machine_learning/face_ml/face_clustering/face_clustering_service.dart'; +import "package:photos/services/machine_learning/face_ml/face_clustering/face_info_for_clustering.dart"; +import 
'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_service.dart'; +import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_exceptions.dart'; +import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_service.dart'; +import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart'; +import 'package:photos/services/machine_learning/face_ml/face_ml_exceptions.dart'; +import 'package:photos/services/machine_learning/face_ml/face_ml_result.dart'; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import 'package:photos/services/machine_learning/file_ml/file_ml.dart'; +import 'package:photos/services/machine_learning/file_ml/remote_fileml_service.dart'; +import "package:photos/services/search_service.dart"; +import "package:photos/utils/file_util.dart"; +import 'package:photos/utils/image_ml_isolate.dart'; +import "package:photos/utils/image_ml_util.dart"; +import "package:photos/utils/local_settings.dart"; +import "package:photos/utils/network_util.dart"; +import "package:photos/utils/thumbnail_util.dart"; +import "package:synchronized/synchronized.dart"; + +enum FileDataForML { thumbnailData, fileData, compressedFileData } + +enum FaceMlOperation { analyzeImage } + +/// This class is responsible for running the full face ml pipeline on images. +/// +/// WARNING: For getting the ML results needed for the UI, you should use `FaceSearchService` instead of this class! +/// +/// The pipeline consists of face detection, face alignment and face embedding. +class FaceMlService { + final _logger = Logger("FaceMlService"); + + // Flutter isolate things for running the image ml pipeline + Timer? 
_inactivityTimer; + final Duration _inactivityDuration = const Duration(seconds: 120); + int _activeTasks = 0; + final _initLockIsolate = Lock(); + late DartUiIsolate _isolate; + late ReceivePort _receivePort = ReceivePort(); + late SendPort _mainSendPort; + + bool _isIsolateSpawned = false; + + // singleton pattern + FaceMlService._privateConstructor(); + + static final instance = FaceMlService._privateConstructor(); + + factory FaceMlService() => instance; + + final _initLock = Lock(); + final _functionLock = Lock(); + + final _computer = Computer.shared(); + + bool isInitialized = false; + late String client; + + bool debugIndexingDisabled = false; + bool _mlControllerStatus = false; + bool _isIndexingOrClusteringRunning = false; + bool _shouldPauseIndexingAndClustering = false; + bool _shouldSyncPeople = false; + bool _isSyncing = false; + + final int _fileDownloadLimit = 5; + final int _embeddingFetchLimit = 200; + + Future init({bool initializeImageMlIsolate = false}) async { + if (LocalSettings.instance.isFaceIndexingEnabled == false) { + return; + } + return _initLock.synchronized(() async { + if (isInitialized) { + return; + } + _logger.info("init called"); + _logStatus(); + await _computer.compute(initOrtEnv); + try { + await FaceDetectionService.instance.init(); + } catch (e, s) { + _logger.severe("Could not initialize yolo onnx", e, s); + } + if (initializeImageMlIsolate) { + try { + await ImageMlIsolate.instance.init(); + } catch (e, s) { + _logger.severe("Could not initialize image ml isolate", e, s); + } + } + try { + await FaceEmbeddingService.instance.init(); + } catch (e, s) { + _logger.severe("Could not initialize mobilefacenet", e, s); + } + + // Get client name + final packageInfo = await PackageInfo.fromPlatform(); + client = "${packageInfo.packageName}/${packageInfo.version}"; + _logger.info("client: $client"); + + isInitialized = true; + _mlControllerStatus = !Platform.isAndroid; + + /// hooking FaceML into [MachineLearningController] + 
Bus.instance.on().listen((event) { + if (LocalSettings.instance.isFaceIndexingEnabled == false) { + return; + } + _mlControllerStatus = event.shouldRun; + if (_mlControllerStatus) { + if (_shouldPauseIndexingAndClustering) { + _shouldPauseIndexingAndClustering = false; + _logger.info( + "MLController allowed running ML, faces indexing undoing previous pause", + ); + } else { + _logger.info( + "MLController allowed running ML, faces indexing starting", + ); + } + unawaited(indexAndClusterAll()); + } else { + _logger.info( + "MLController stopped running ML, faces indexing will be paused (unless it's fetching embeddings)", + ); + pauseIndexingAndClustering(); + } + }); + + _listenIndexOnDiffSync(); + _listenOnPeopleChangedSync(); + }); + } + + static void initOrtEnv() async { + OrtEnv.instance.init(); + } + + void _listenIndexOnDiffSync() { + Bus.instance.on().listen((event) async { + unawaited(sync()); + }); + } + + void _listenOnPeopleChangedSync() { + Bus.instance.on().listen((event) { + _shouldSyncPeople = true; + }); + } + + Future ensureInitialized() async { + if (!isInitialized) { + await init(); + } + } + + Future release() async { + return _initLock.synchronized(() async { + _logger.info("dispose called"); + if (!isInitialized) { + return; + } + try { + await FaceDetectionService.instance.release(); + } catch (e, s) { + _logger.severe("Could not dispose yolo onnx", e, s); + } + try { + ImageMlIsolate.instance.dispose(); + } catch (e, s) { + _logger.severe("Could not dispose image ml isolate", e, s); + } + try { + await FaceEmbeddingService.instance.release(); + } catch (e, s) { + _logger.severe("Could not dispose mobilefacenet", e, s); + } + OrtEnv.instance.release(); + isInitialized = false; + }); + } + + Future _initIsolate() async { + return _initLockIsolate.synchronized(() async { + if (_isIsolateSpawned) return; + _logger.info("initIsolate called"); + + _receivePort = ReceivePort(); + + try { + _isolate = await DartUiIsolate.spawn( + _isolateMain, + 
_receivePort.sendPort, + ); + _mainSendPort = await _receivePort.first as SendPort; + _isIsolateSpawned = true; + + _resetInactivityTimer(); + } catch (e) { + _logger.severe('Could not spawn isolate', e); + _isIsolateSpawned = false; + } + }); + } + + Future _ensureSpawnedIsolate() async { + if (!_isIsolateSpawned) { + await _initIsolate(); + } + } + + /// The main execution function of the isolate. + @pragma('vm:entry-point') + static void _isolateMain(SendPort mainSendPort) async { + final receivePort = ReceivePort(); + mainSendPort.send(receivePort.sendPort); + + receivePort.listen((message) async { + final functionIndex = message[0] as int; + final function = FaceMlOperation.values[functionIndex]; + final args = message[1] as Map; + final sendPort = message[2] as SendPort; + + try { + switch (function) { + case FaceMlOperation.analyzeImage: + final time = DateTime.now(); + final FaceMlResult result = + await FaceMlService.analyzeImageSync(args); + dev.log( + "`analyzeImageSync` function executed in ${DateTime.now().difference(time).inMilliseconds} ms", + ); + sendPort.send(result.toJsonString()); + break; + } + } catch (e, stackTrace) { + dev.log( + "[SEVERE] Error in FaceML isolate: $e", + error: e, + stackTrace: stackTrace, + ); + sendPort + .send({'error': e.toString(), 'stackTrace': stackTrace.toString()}); + } + }); + } + + /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result. 
+ Future _runInIsolate( + (FaceMlOperation, Map) message, + ) async { + await _ensureSpawnedIsolate(); + return _functionLock.synchronized(() async { + _resetInactivityTimer(); + + final completer = Completer(); + final answerPort = ReceivePort(); + + _activeTasks++; + _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]); + + answerPort.listen((receivedMessage) { + if (receivedMessage is Map && receivedMessage.containsKey('error')) { + // Handle the error + final errorMessage = receivedMessage['error']; + final errorStackTrace = receivedMessage['stackTrace']; + final exception = Exception(errorMessage); + final stackTrace = StackTrace.fromString(errorStackTrace); + completer.completeError(exception, stackTrace); + } else { + completer.complete(receivedMessage); + } + }); + _activeTasks--; + + return completer.future; + }); + } + + /// Resets a timer that kills the isolate after a certain amount of inactivity. + /// + /// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`) + void _resetInactivityTimer() { + _inactivityTimer?.cancel(); + _inactivityTimer = Timer(_inactivityDuration, () { + if (_activeTasks > 0) { + _logger.info('Tasks are still running. Delaying isolate disposal.'); + // Optionally, reschedule the timer to check again later. + _resetInactivityTimer(); + } else { + _logger.info( + 'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. 
Killing isolate.', + ); + _disposeIsolate(); + } + }); + } + + void _disposeIsolate() async { + if (!_isIsolateSpawned) return; + await release(); + + _isIsolateSpawned = false; + _isolate.kill(); + _receivePort.close(); + _inactivityTimer?.cancel(); + } + + Future sync({bool forceSync = true}) async { + if (_isSyncing) { + return; + } + _isSyncing = true; + if (forceSync) { + await PersonService.instance.reconcileClusters(); + _shouldSyncPeople = false; + } + _isSyncing = false; + } + + Future indexAndClusterAll() async { + if (_cannotRunMLFunction()) return; + + await sync(forceSync: _shouldSyncPeople); + await indexAllImages(); + final indexingCompleteRatio = await _getIndexedDoneRatio(); + if (indexingCompleteRatio < 0.95) { + _logger.info( + "Indexing is not far enough to start clustering, skipping clustering. Indexing is at $indexingCompleteRatio", + ); + return; + } else { + await clusterAllImages(); + } + } + + void pauseIndexingAndClustering() { + if (_isIndexingOrClusteringRunning) { + _shouldPauseIndexingAndClustering = true; + } + } + + /// Analyzes all the images in the database with the latest ml version and stores the results in the database. + /// + /// This function first checks if the image has already been analyzed with the lastest faceMlVersion and stored in the database. If so, it skips the image. + Future indexAllImages({int retryFetchCount = 10}) async { + if (_cannotRunMLFunction()) return; + + try { + _isIndexingOrClusteringRunning = true; + _logger.info('starting image indexing'); + + final w = (kDebugMode ? 
EnteWatch('prepare indexing files') : null) + ?..start(); + final Map alreadyIndexedFiles = + await FaceMLDataDB.instance.getIndexedFileIds(); + w?.log('getIndexedFileIds'); + final List enteFiles = + await SearchService.instance.getAllFiles(); + w?.log('getAllFiles'); + + // Make sure the image conversion isolate is spawned + // await ImageMlIsolate.instance.ensureSpawned(); + await ensureInitialized(); + + int fileAnalyzedCount = 0; + int fileSkippedCount = 0; + final stopwatch = Stopwatch()..start(); + final List filesWithLocalID = []; + final List filesWithoutLocalID = []; + final List hiddenFilesToIndex = []; + w?.log('getIndexableFileIDs'); + + for (final EnteFile enteFile in enteFiles) { + if (_skipAnalysisEnteFile(enteFile, alreadyIndexedFiles)) { + fileSkippedCount++; + continue; + } + if ((enteFile.localID ?? '').isEmpty) { + filesWithoutLocalID.add(enteFile); + } else { + filesWithLocalID.add(enteFile); + } + } + w?.log('sifting through all normal files'); + final List hiddenFiles = + await SearchService.instance.getHiddenFiles(); + w?.log('getHiddenFiles: ${hiddenFiles.length} hidden files'); + for (final EnteFile enteFile in hiddenFiles) { + if (_skipAnalysisEnteFile(enteFile, alreadyIndexedFiles)) { + fileSkippedCount++; + continue; + } + hiddenFilesToIndex.add(enteFile); + } + + // list of files where files with localID are first + final sortedBylocalID = []; + sortedBylocalID.addAll(filesWithLocalID); + sortedBylocalID.addAll(filesWithoutLocalID); + sortedBylocalID.addAll(hiddenFilesToIndex); + w?.log('preparing all files to index'); + final List> chunks = + sortedBylocalID.chunks(_embeddingFetchLimit); + int fetchedCount = 0; + outerLoop: + for (final chunk in chunks) { + final futures = >[]; + + if (LocalSettings.instance.remoteFetchEnabled) { + try { + final List fileIds = []; + // Try to find embeddings on the remote server + for (final f in chunk) { + fileIds.add(f.uploadedFileID!); + } + _logger.info('starting remote fetch for 
${fileIds.length} files'); + final res = + await RemoteFileMLService.instance.getFilessEmbedding(fileIds); + _logger.info('fetched ${res.mlData.length} embeddings'); + fetchedCount += res.mlData.length; + final List faces = []; + final remoteFileIdToVersion = {}; + for (FileMl fileMl in res.mlData.values) { + if (_shouldDiscardRemoteEmbedding(fileMl)) continue; + if (fileMl.faceEmbedding.faces.isEmpty) { + faces.add( + Face.empty( + fileMl.fileID, + ), + ); + } else { + for (final f in fileMl.faceEmbedding.faces) { + f.fileInfo = FileInfo( + imageHeight: fileMl.height, + imageWidth: fileMl.width, + ); + faces.add(f); + } + } + remoteFileIdToVersion[fileMl.fileID] = + fileMl.faceEmbedding.version; + } + if (res.noEmbeddingFileIDs.isNotEmpty) { + _logger.info( + 'No embeddings found for ${res.noEmbeddingFileIDs.length} files', + ); + for (final fileID in res.noEmbeddingFileIDs) { + faces.add(Face.empty(fileID, error: false)); + remoteFileIdToVersion[fileID] = faceMlVersion; + } + } + + await FaceMLDataDB.instance.bulkInsertFaces(faces); + _logger.info('stored embeddings'); + for (final entry in remoteFileIdToVersion.entries) { + alreadyIndexedFiles[entry.key] = entry.value; + } + _logger + .info('already indexed files ${remoteFileIdToVersion.length}'); + } catch (e, s) { + _logger.severe("err while getting files embeddings", e, s); + if (retryFetchCount < 1000) { + Future.delayed(Duration(seconds: retryFetchCount), () { + unawaited(indexAllImages(retryFetchCount: retryFetchCount * 2)); + }); + return; + } else { + _logger.severe( + "Failed to fetch embeddings for files after multiple retries", + e, + s, + ); + rethrow; + } + } + } else { + _logger.warning( + 'Not fetching embeddings because user manually disabled it in debug options', + ); + } + final smallerChunks = chunk.chunks(_fileDownloadLimit); + for (final smallestChunk in smallerChunks) { + if (!await canUseHighBandwidth()) { + _logger.info( + 'stopping indexing because user is not connected to wifi', + ); + 
break outerLoop; + } + for (final enteFile in smallestChunk) { + if (_shouldPauseIndexingAndClustering) { + _logger.info("indexAllImages() was paused, stopping"); + break outerLoop; + } + if (_skipAnalysisEnteFile( + enteFile, + alreadyIndexedFiles, + )) { + fileSkippedCount++; + continue; + } + futures.add(processImage(enteFile)); + } + final awaitedFutures = await Future.wait(futures); + final sumFutures = awaitedFutures.fold( + 0, + (previousValue, element) => previousValue + (element ? 1 : 0), + ); + fileAnalyzedCount += sumFutures; + } + } + + stopwatch.stop(); + _logger.info( + "`indexAllImages()` finished. Fetched $fetchedCount and analyzed $fileAnalyzedCount images, in ${stopwatch.elapsed.inSeconds} seconds (avg of ${stopwatch.elapsed.inSeconds / fileAnalyzedCount} seconds per image, skipped $fileSkippedCount images)", + ); + _logStatus(); + } catch (e, s) { + _logger.severe("indexAllImages failed", e, s); + } finally { + _isIndexingOrClusteringRunning = false; + _shouldPauseIndexingAndClustering = false; + } + } + + Future clusterAllImages({ + double minFaceScore = kMinimumQualityFaceScore, + bool clusterInBuckets = true, + }) async { + if (_cannotRunMLFunction()) return; + + _logger.info("`clusterAllImages()` called"); + _isIndexingOrClusteringRunning = true; + final clusterAllImagesTime = DateTime.now(); + + try { + // Get a sense of the total number of faces in the database + final int totalFaces = await FaceMLDataDB.instance + .getTotalFaceCount(minFaceScore: minFaceScore); + final fileIDToCreationTime = + await FilesDB.instance.getFileIDToCreationTime(); + final startEmbeddingFetch = DateTime.now(); + // read all embeddings + final result = await FaceMLDataDB.instance.getFaceInfoForClustering( + minScore: minFaceScore, + maxFaces: totalFaces, + ); + final Set missingFileIDs = {}; + final allFaceInfoForClustering = []; + for (final faceInfo in result) { + if (!fileIDToCreationTime.containsKey(faceInfo.fileID)) { + missingFileIDs.add(faceInfo.fileID); + 
} else { + allFaceInfoForClustering.add(faceInfo); + } + } + // sort the embeddings based on file creation time, oldest first + allFaceInfoForClustering.sort((a, b) { + return fileIDToCreationTime[a.fileID]! + .compareTo(fileIDToCreationTime[b.fileID]!); + }); + _logger.info( + 'Getting and sorting embeddings took ${DateTime.now().difference(startEmbeddingFetch).inMilliseconds} ms for ${allFaceInfoForClustering.length} embeddings' + 'and ${missingFileIDs.length} missing fileIDs', + ); + + // Get the current cluster statistics + final Map oldClusterSummaries = + await FaceMLDataDB.instance.getAllClusterSummary(); + + if (clusterInBuckets) { + const int bucketSize = 20000; + const int offsetIncrement = 7500; + int offset = 0; + int bucket = 1; + + while (true) { + if (_shouldPauseIndexingAndClustering) { + _logger.info( + "MLController does not allow running ML, stopping before clustering bucket $bucket", + ); + break; + } + if (offset > allFaceInfoForClustering.length - 1) { + _logger.warning( + 'faceIdToEmbeddingBucket is empty, this should ideally not happen as it should have stopped earlier. offset: $offset, totalFaces: $totalFaces', + ); + break; + } + if (offset > totalFaces) { + _logger.warning( + 'offset > totalFaces, this should ideally not happen. 
offset: $offset, totalFaces: $totalFaces', + ); + break; + } + + final bucketStartTime = DateTime.now(); + final faceInfoForClustering = allFaceInfoForClustering.sublist( + offset, + min(offset + bucketSize, allFaceInfoForClustering.length), + ); + + final clusteringResult = + await FaceClusteringService.instance.predictLinear( + faceInfoForClustering.toSet(), + fileIDToCreationTime: fileIDToCreationTime, + offset: offset, + oldClusterSummaries: oldClusterSummaries, + ); + if (clusteringResult == null) { + _logger.warning("faceIdToCluster is null"); + return; + } + + await FaceMLDataDB.instance + .updateFaceIdToClusterId(clusteringResult.newFaceIdToCluster); + await FaceMLDataDB.instance + .clusterSummaryUpdate(clusteringResult.newClusterSummaries!); + for (final faceInfo in faceInfoForClustering) { + faceInfo.clusterId ??= + clusteringResult.newFaceIdToCluster[faceInfo.faceID]; + } + for (final clusterUpdate + in clusteringResult.newClusterSummaries!.entries) { + oldClusterSummaries[clusterUpdate.key] = clusterUpdate.value; + } + _logger.info( + 'Done with clustering ${offset + faceInfoForClustering.length} embeddings (${(100 * (offset + faceInfoForClustering.length) / totalFaces).toStringAsFixed(0)}%) in bucket $bucket, offset: $offset, in ${DateTime.now().difference(bucketStartTime).inSeconds} seconds', + ); + if (offset + bucketSize >= totalFaces) { + _logger.info('All faces clustered'); + break; + } + offset += offsetIncrement; + bucket++; + } + } else { + final clusterStartTime = DateTime.now(); + // Cluster the embeddings using the linear clustering algorithm, returning a map from faceID to clusterID + final clusteringResult = + await FaceClusteringService.instance.predictLinear( + allFaceInfoForClustering.toSet(), + fileIDToCreationTime: fileIDToCreationTime, + oldClusterSummaries: oldClusterSummaries, + ); + if (clusteringResult == null) { + _logger.warning("faceIdToCluster is null"); + return; + } + final clusterDoneTime = DateTime.now(); + _logger.info( 
+ 'done with clustering ${allFaceInfoForClustering.length} in ${clusterDoneTime.difference(clusterStartTime).inSeconds} seconds ', + ); + + // Store the updated clusterIDs in the database + _logger.info( + 'Updating ${clusteringResult.newFaceIdToCluster.length} FaceIDs with clusterIDs in the DB', + ); + await FaceMLDataDB.instance + .updateFaceIdToClusterId(clusteringResult.newFaceIdToCluster); + await FaceMLDataDB.instance + .clusterSummaryUpdate(clusteringResult.newClusterSummaries!); + _logger.info('Done updating FaceIDs with clusterIDs in the DB, in ' + '${DateTime.now().difference(clusterDoneTime).inSeconds} seconds'); + } + Bus.instance.fire(PeopleChangedEvent()); + _logger.info('clusterAllImages() finished, in ' + '${DateTime.now().difference(clusterAllImagesTime).inSeconds} seconds'); + } catch (e, s) { + _logger.severe("`clusterAllImages` failed", e, s); + } finally { + _isIndexingOrClusteringRunning = false; + _shouldPauseIndexingAndClustering = false; + } + } + + bool _shouldDiscardRemoteEmbedding(FileMl fileMl) { + if (fileMl.faceEmbedding.version < faceMlVersion) { + debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} " + "because version is ${fileMl.faceEmbedding.version} and we need $faceMlVersion"); + return true; + } + // are all landmarks equal? 
+ bool allLandmarksEqual = true; + if (fileMl.faceEmbedding.faces.isEmpty) { + debugPrint("No face for ${fileMl.fileID}"); + allLandmarksEqual = false; + } + for (final face in fileMl.faceEmbedding.faces) { + if (face.detection.landmarks.isEmpty) { + allLandmarksEqual = false; + break; + } + if (face.detection.landmarks + .any((landmark) => landmark.x != landmark.y)) { + allLandmarksEqual = false; + break; + } + } + if (allLandmarksEqual) { + debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} " + "because landmarks are equal"); + debugPrint( + fileMl.faceEmbedding.faces + .map((e) => e.detection.landmarks.toString()) + .toList() + .toString(), + ); + return true; + } + if (fileMl.width == null || fileMl.height == null) { + debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} " + "because width is null"); + return true; + } + return false; + } + + Future processImage(EnteFile enteFile) async { + _logger.info( + "`processImage` start processing image with uploadedFileID: ${enteFile.uploadedFileID}", + ); + + try { + final FaceMlResult? result = await _analyzeImageInSingleIsolate( + enteFile, + // preferUsingThumbnailForEverything: false, + // disposeImageIsolateAfterUse: false, + ); + if (result == null) { + _logger.severe( + "Failed to analyze image with uploadedFileID: ${enteFile.uploadedFileID}", + ); + return false; + } + final List faces = []; + if (!result.hasFaces) { + debugPrint( + 'No faces detected for file with name:${enteFile.displayName}', + ); + faces.add( + Face.empty(result.fileId, error: result.errorOccured), + ); + } else { + if (result.decodedImageSize.width == -1 || + result.decodedImageSize.height == -1) { + _logger + .severe("decodedImageSize is not stored correctly for image with " + "ID: ${enteFile.uploadedFileID}"); + _logger.info( + "Using aligned image size for image with ID: ${enteFile.uploadedFileID}. 
This size is ${result.decodedImageSize.width}x${result.decodedImageSize.height} compared to size of ${enteFile.width}x${enteFile.height} in the metadata", + ); + } + for (int i = 0; i < result.faces.length; ++i) { + final FaceResult faceRes = result.faces[i]; + final detection = face_detection.Detection( + box: FaceBox( + x: faceRes.detection.xMinBox, + y: faceRes.detection.yMinBox, + width: faceRes.detection.width, + height: faceRes.detection.height, + ), + landmarks: faceRes.detection.allKeypoints + .map( + (keypoint) => Landmark( + x: keypoint[0], + y: keypoint[1], + ), + ) + .toList(), + ); + faces.add( + Face( + faceRes.faceId, + result.fileId, + faceRes.embedding, + faceRes.detection.score, + detection, + faceRes.blurValue, + fileInfo: FileInfo( + imageHeight: result.decodedImageSize.height, + imageWidth: result.decodedImageSize.width, + ), + ), + ); + } + } + _logger.info("inserting ${faces.length} faces for ${result.fileId}"); + if (!result.errorOccured) { + await RemoteFileMLService.instance.putFileEmbedding( + enteFile, + FileMl( + enteFile.uploadedFileID!, + FaceEmbeddings( + faces, + result.mlVersion, + client: client, + ), + height: result.decodedImageSize.height, + width: result.decodedImageSize.width, + ), + ); + } else { + _logger.warning( + 'Skipped putting embedding because of error ${result.toJsonString()}', + ); + } + await FaceMLDataDB.instance.bulkInsertFaces(faces); + return true; + } catch (e, s) { + _logger.severe( + "Failed to analyze using FaceML for image with ID: ${enteFile.uploadedFileID}", + e, + s, + ); + return true; + } + } + + /// Analyzes the given image data by running the full pipeline for faces, using [analyzeImageSync] in the isolate. + Future _analyzeImageInSingleIsolate(EnteFile enteFile) async { + _checkEnteFileForID(enteFile); + await ensureInitialized(); + + final String? 
filePath = + await _getImagePathForML(enteFile, typeOfData: FileDataForML.fileData); + + if (filePath == null) { + _logger.severe( + "Failed to get any data for enteFile with uploadedFileID ${enteFile.uploadedFileID}", + ); + throw CouldNotRetrieveAnyFileData(); + } + + final Stopwatch stopwatch = Stopwatch()..start(); + late FaceMlResult result; + + try { + final resultJsonString = await _runInIsolate( + ( + FaceMlOperation.analyzeImage, + { + "enteFileID": enteFile.uploadedFileID ?? -1, + "filePath": filePath, + "faceDetectionAddress": + FaceDetectionService.instance.sessionAddress, + "faceEmbeddingAddress": + FaceEmbeddingService.instance.sessionAddress, + } + ), + ) as String?; + if (resultJsonString == null) { + _logger.severe('Analyzing image in isolate is giving back null'); + return null; + } + result = FaceMlResult.fromJsonString(resultJsonString); + } catch (e, s) { + _logger.severe( + "Could not analyze image with ID ${enteFile.uploadedFileID} \n", + e, + s, + ); + debugPrint( + "This image with ID ${enteFile.uploadedFileID} has name ${enteFile.displayName}.", + ); + final resultBuilder = FaceMlResultBuilder.fromEnteFile(enteFile); + return resultBuilder.buildErrorOccurred(); + } + stopwatch.stop(); + _logger.info( + "Finished Analyze image (${result.faces.length} faces) with uploadedFileID ${enteFile.uploadedFileID}, in " + "${stopwatch.elapsedMilliseconds} ms (including time waiting for inference engine availability)", + ); + + return result; + } + + static Future analyzeImageSync(Map args) async { + try { + final int enteFileID = args["enteFileID"] as int; + final String imagePath = args["filePath"] as String; + final int faceDetectionAddress = args["faceDetectionAddress"] as int; + final int faceEmbeddingAddress = args["faceEmbeddingAddress"] as int; + + final resultBuilder = FaceMlResultBuilder.fromEnteFileID(enteFileID); + + dev.log( + "Start analyzing image with uploadedFileID: $enteFileID inside the isolate", + ); + final stopwatchTotal = 
Stopwatch()..start(); + final stopwatch = Stopwatch()..start(); + + // Decode the image once to use for both face detection and alignment + final imageData = await File(imagePath).readAsBytes(); + final image = await decodeImageFromData(imageData); + final ByteData imgByteData = await getByteDataFromImage(image); + dev.log('Reading and decoding image took ' + '${stopwatch.elapsedMilliseconds} ms'); + stopwatch.reset(); + + // Get the faces + final List faceDetectionResult = + await FaceMlService.detectFacesSync( + image, + imgByteData, + faceDetectionAddress, + resultBuilder: resultBuilder, + ); + + dev.log( + "${faceDetectionResult.length} faces detected with scores ${faceDetectionResult.map((e) => e.score).toList()}: completed `detectFacesSync` function, in " + "${stopwatch.elapsedMilliseconds} ms"); + + // If no faces were detected, return a result with no faces. Otherwise, continue. + if (faceDetectionResult.isEmpty) { + dev.log( + "No faceDetectionResult, Completed analyzing image with uploadedFileID $enteFileID, in " + "${stopwatch.elapsedMilliseconds} ms"); + return resultBuilder.buildNoFaceDetected(); + } + + stopwatch.reset(); + // Align the faces + final Float32List faceAlignmentResult = + await FaceMlService.alignFacesSync( + image, + imgByteData, + faceDetectionResult, + resultBuilder: resultBuilder, + ); + + dev.log("Completed `alignFacesSync` function, in " + "${stopwatch.elapsedMilliseconds} ms"); + + stopwatch.reset(); + // Get the embeddings of the faces + final embeddings = await FaceMlService.embedFacesSync( + faceAlignmentResult, + faceEmbeddingAddress, + resultBuilder: resultBuilder, + ); + + dev.log("Completed `embedFacesSync` function, in " + "${stopwatch.elapsedMilliseconds} ms"); + + stopwatch.stop(); + stopwatchTotal.stop(); + dev.log("Finished Analyze image (${embeddings.length} faces) with " + "uploadedFileID $enteFileID, in " + "${stopwatchTotal.elapsedMilliseconds} ms"); + + return resultBuilder.build(); + } catch (e, s) { + 
dev.log("Could not analyze image: \n e: $e \n s: $s"); + rethrow; + } + } + + Future _getImagePathForML( + EnteFile enteFile, { + FileDataForML typeOfData = FileDataForML.fileData, + }) async { + String? imagePath; + + switch (typeOfData) { + case FileDataForML.fileData: + final stopwatch = Stopwatch()..start(); + File? file; + if (enteFile.fileType == FileType.video) { + file = await getThumbnailForUploadedFile(enteFile); + } else { + file = await getFile(enteFile, isOrigin: true); + // TODO: This is returning null for Pragadees for all files, so something is wrong here! + } + if (file == null) { + _logger.warning("Could not get file for $enteFile"); + imagePath = null; + break; + } + imagePath = file.path; + stopwatch.stop(); + _logger.info( + "Getting file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms", + ); + break; + + case FileDataForML.thumbnailData: + final stopwatch = Stopwatch()..start(); + final File? thumbnail = await getThumbnailForUploadedFile(enteFile); + if (thumbnail == null) { + _logger.warning("Could not get thumbnail for $enteFile"); + imagePath = null; + break; + } + imagePath = thumbnail.path; + stopwatch.stop(); + _logger.info( + "Getting thumbnail data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms", + ); + break; + + case FileDataForML.compressedFileData: + _logger.warning( + "Getting compressed file data for uploadedFileID ${enteFile.uploadedFileID} is not implemented yet", + ); + imagePath = null; + break; + } + + return imagePath; + } + + /// Detects faces in the given image data. + /// + /// `imageData`: The image data to analyze. + /// + /// Returns a list of face detection results. + /// + /// Throws [CouldNotInitializeFaceDetector], [CouldNotRunFaceDetector] or [GeneralFaceMlException] if something goes wrong. + static Future> detectFacesSync( + Image image, + ByteData imageByteData, + int interpreterAddress, { + FaceMlResultBuilder? 
resultBuilder, + }) async { + try { + // Get the bounding boxes of the faces + final (List faces, dataSize) = + await FaceDetectionService.predictSync( + image, + imageByteData, + interpreterAddress, + ); + + // Add detected faces to the resultBuilder + if (resultBuilder != null) { + resultBuilder.addNewlyDetectedFaces(faces, dataSize); + } + + return faces; + } on YOLOFaceInterpreterInitializationException { + throw CouldNotInitializeFaceDetector(); + } on YOLOFaceInterpreterRunException { + throw CouldNotRunFaceDetector(); + } catch (e) { + dev.log('[SEVERE] Face detection failed: $e'); + throw GeneralFaceMlException('Face detection failed: $e'); + } + } + + /// Aligns multiple faces from the given image data. + /// + /// `imageData`: The image data in [Uint8List] that contains the faces. + /// `faces`: The face detection results in a list of [FaceDetectionAbsolute] for the faces to align. + /// + /// Returns a list of the aligned faces as image data. + /// + /// Throws [CouldNotWarpAffine] or [GeneralFaceMlException] if the face alignment fails. + static Future alignFacesSync( + Image image, + ByteData imageByteData, + List faces, { + FaceMlResultBuilder? resultBuilder, + }) async { + try { + final stopwatch = Stopwatch()..start(); + final (alignedFaces, alignmentResults, _, blurValues, _) = + await preprocessToMobileFaceNetFloat32List( + image, + imageByteData, + faces, + ); + stopwatch.stop(); + dev.log( + "Face alignment image decoding and processing took ${stopwatch.elapsedMilliseconds} ms", + ); + + if (resultBuilder != null) { + resultBuilder.addAlignmentResults( + alignmentResults, + blurValues, + ); + } + + return alignedFaces; + } catch (e, s) { + dev.log('[SEVERE] Face alignment failed: $e $s'); + throw CouldNotWarpAffine(); + } + } + + static Future>> embedFacesSync( + Float32List facesList, + int interpreterAddress, { + FaceMlResultBuilder? 
resultBuilder, + }) async { + try { + // Get the embedding of the faces + final List> embeddings = + await FaceEmbeddingService.predictSync(facesList, interpreterAddress); + + // Add the embeddings to the resultBuilder + if (resultBuilder != null) { + resultBuilder.addEmbeddingsToExistingFaces(embeddings); + } + + return embeddings; + } on MobileFaceNetInterpreterInitializationException { + throw CouldNotInitializeFaceEmbeddor(); + } on MobileFaceNetInterpreterRunException { + throw CouldNotRunFaceEmbeddor(); + } on MobileFaceNetEmptyInput { + throw InputProblemFaceEmbeddor("Input is empty"); + } on MobileFaceNetWrongInputSize { + throw InputProblemFaceEmbeddor("Input size is wrong"); + } on MobileFaceNetWrongInputRange { + throw InputProblemFaceEmbeddor("Input range is wrong"); + // ignore: avoid_catches_without_on_clauses + } catch (e) { + dev.log('[SEVERE] Face embedding (batch) failed: $e'); + throw GeneralFaceMlException('Face embedding (batch) failed: $e'); + } + } + + /// Checks if the ente file to be analyzed actually can be analyzed: it must be uploaded and in the correct format. + void _checkEnteFileForID(EnteFile enteFile) { + if (_skipAnalysisEnteFile(enteFile, {})) { + _logger.warning( + '''Skipped analysis of image with enteFile, it might be the wrong format or has no uploadedFileID, or MLController doesn't allow it to run. + enteFile: ${enteFile.toString()} + ''', + ); + _logStatus(); + throw CouldNotRetrieveAnyFileData(); + } + } + + Future _getIndexedDoneRatio() async { + final w = (kDebugMode ? 
EnteWatch('_getIndexedDoneRatio') : null)?..start(); + + final int alreadyIndexedCount = await FaceMLDataDB.instance + .getIndexedFileCount(minimumMlVersion: faceMlVersion); + final int totalIndexableCount = (await getIndexableFileIDs()).length; + final ratio = alreadyIndexedCount / totalIndexableCount; + + w?.log('getIndexedDoneRatio'); + + return ratio; + } + + static Future> getIndexableFileIDs() async { + return FilesDB.instance + .getOwnedFileIDs(Configuration.instance.getUserID()!); + } + + bool _skipAnalysisEnteFile(EnteFile enteFile, Map indexedFileIds) { + if (_isIndexingOrClusteringRunning == false || + _mlControllerStatus == false) { + return true; + } + // Skip if the file is not uploaded or not owned by the user + if (!enteFile.isUploaded || enteFile.isOwner == false) { + return true; + } + // I don't know how motionPhotos and livePhotos work, so I'm also just skipping them for now + if (enteFile.fileType == FileType.other) { + return true; + } + // Skip if the file is already analyzed with the latest ml version + final id = enteFile.uploadedFileID!; + + return indexedFileIds.containsKey(id) && + indexedFileIds[id]! >= faceMlVersion; + } + + bool _cannotRunMLFunction({String function = ""}) { + if (_isIndexingOrClusteringRunning) { + _logger.info( + "Cannot run $function because indexing or clustering is already running", + ); + _logStatus(); + return true; + } + if (_mlControllerStatus == false) { + _logger.info( + "Cannot run $function because MLController does not allow it", + ); + _logStatus(); + return true; + } + if (debugIndexingDisabled) { + _logger.info( + "Cannot run $function because debugIndexingDisabled is true", + ); + _logStatus(); + return true; + } + if (_shouldPauseIndexingAndClustering) { + // This should ideally not be triggered, because one of the above should be triggered instead. 
+ _logger.warning( + "Cannot run $function because indexing and clustering is being paused", + ); + _logStatus(); + return true; + } + return false; + } + + void _logStatus() { + final String status = ''' + isInternalUser: ${flagService.internalUser} + isFaceIndexingEnabled: ${LocalSettings.instance.isFaceIndexingEnabled} + canRunMLController: $_mlControllerStatus + isIndexingOrClusteringRunning: $_isIndexingOrClusteringRunning + shouldPauseIndexingAndClustering: $_shouldPauseIndexingAndClustering + debugIndexingDisabled: $debugIndexingDisabled + shouldSyncPeople: $_shouldSyncPeople + '''; + _logger.info(status); + } +} diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_version.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_version.dart new file mode 100644 index 0000000000..a91c4c8434 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/face_ml_version.dart @@ -0,0 +1,15 @@ +abstract class VersionedMethod { + final String method; + final int version; + + VersionedMethod(this.method, [this.version = 0]); + + const VersionedMethod.empty() + : method = 'Empty method', + version = 0; + + Map toJson() => { + 'method': method, + 'version': version, + }; +} diff --git a/mobile/lib/services/machine_learning/face_ml/feedback/cluster_feedback.dart b/mobile/lib/services/machine_learning/face_ml/feedback/cluster_feedback.dart new file mode 100644 index 0000000000..8567e88685 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/feedback/cluster_feedback.dart @@ -0,0 +1,1226 @@ +import 'dart:developer' as dev; +import "dart:math" show Random, min; + +import "package:computer/computer.dart"; +import "package:flutter/foundation.dart"; +import "package:logging/logging.dart"; +import "package:ml_linalg/linalg.dart"; +import "package:photos/core/event_bus.dart"; +import "package:photos/db/files_db.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/extensions/stop_watch.dart"; +import 
"package:photos/face/db.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/generated/protos/ente/common/vector.pb.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/services/machine_learning/face_ml/face_clustering/cosine_distance.dart"; +import "package:photos/services/machine_learning/face_ml/face_clustering/face_clustering_service.dart"; +import "package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart"; +import "package:photos/services/machine_learning/face_ml/face_ml_result.dart"; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import "package:photos/services/search_service.dart"; + +class ClusterSuggestion { + final int clusterIDToMerge; + final double distancePersonToCluster; + final bool usedOnlyMeanForSuggestion; + final List filesInCluster; + final List faceIDsInCluster; + + ClusterSuggestion( + this.clusterIDToMerge, + this.distancePersonToCluster, + this.usedOnlyMeanForSuggestion, + this.filesInCluster, + this.faceIDsInCluster, + ); +} + +class ClusterFeedbackService { + final Logger _logger = Logger("ClusterFeedbackService"); + final _computer = Computer.shared(); + ClusterFeedbackService._privateConstructor(); + + static final ClusterFeedbackService instance = + ClusterFeedbackService._privateConstructor(); + + static int lastViewedClusterID = -1; + static setLastViewedClusterID(int clusterID) { + lastViewedClusterID = clusterID; + } + + static resetLastViewedClusterID() { + lastViewedClusterID = -1; + } + + /// Returns a list of cluster suggestions for a person. Each suggestion is a tuple of the following elements: + /// 1. clusterID: the ID of the cluster + /// 2. distance: the distance between the person's cluster and the suggestion + /// 3. bool: whether the suggestion was found using the mean (true) or the median (false) + /// 4. 
List: the files in the cluster + Future> getSuggestionForPerson( + PersonEntity person, { + bool extremeFilesFirst = true, + }) async { + _logger.info( + 'getSuggestionForPerson ${kDebugMode ? person.data.name : person.remoteID}', + ); + + try { + // Get the suggestions for the person using centroids and median + final startTime = DateTime.now(); + final List<(int, double, bool)> foundSuggestions = + await _getSuggestions(person); + final findSuggestionsTime = DateTime.now(); + _logger.info( + 'getSuggestionForPerson `_getSuggestions`: Found ${foundSuggestions.length} suggestions in ${findSuggestionsTime.difference(startTime).inMilliseconds} ms', + ); + + // Get the files for the suggestions + final suggestionClusterIDs = foundSuggestions.map((e) => e.$1).toSet(); + final Map> fileIdToClusterID = + await FaceMLDataDB.instance.getFileIdToClusterIDSetForCluster( + suggestionClusterIDs, + ); + final clusterIdToFaceIDs = + await FaceMLDataDB.instance.getClusterToFaceIDs(suggestionClusterIDs); + final Map> clusterIDToFiles = {}; + final allFiles = await SearchService.instance.getAllFiles(); + for (final f in allFiles) { + if (!fileIdToClusterID.containsKey(f.uploadedFileID ?? -1)) { + continue; + } + final cluserIds = fileIdToClusterID[f.uploadedFileID ?? 
-1]!; + for (final cluster in cluserIds) { + if (clusterIDToFiles.containsKey(cluster)) { + clusterIDToFiles[cluster]!.add(f); + } else { + clusterIDToFiles[cluster] = [f]; + } + } + } + + final List finalSuggestions = []; + for (final clusterSuggestion in foundSuggestions) { + if (clusterIDToFiles.containsKey(clusterSuggestion.$1)) { + finalSuggestions.add( + ClusterSuggestion( + clusterSuggestion.$1, + clusterSuggestion.$2, + clusterSuggestion.$3, + clusterIDToFiles[clusterSuggestion.$1]!, + clusterIdToFaceIDs[clusterSuggestion.$1]!.toList(), + ), + ); + } + } + final getFilesTime = DateTime.now(); + + final sortingStartTime = DateTime.now(); + if (extremeFilesFirst) { + await _sortSuggestionsOnDistanceToPerson(person, finalSuggestions); + } + _logger.info( + 'getSuggestionForPerson post-processing suggestions took ${DateTime.now().difference(findSuggestionsTime).inMilliseconds} ms, of which sorting took ${DateTime.now().difference(sortingStartTime).inMilliseconds} ms and getting files took ${getFilesTime.difference(findSuggestionsTime).inMilliseconds} ms', + ); + + return finalSuggestions; + } catch (e, s) { + _logger.severe("Error in getClusterFilesForPersonID", e, s); + rethrow; + } + } + + Future removeFilesFromPerson( + List files, + PersonEntity p, + ) async { + try { + // Get the relevant faces to be removed + final faceIDs = await FaceMLDataDB.instance + .getFaceIDsForPerson(p.remoteID) + .then((iterable) => iterable.toList()); + faceIDs.retainWhere((faceID) { + final fileID = getFileIdFromFaceId(faceID); + return files.any((file) => file.uploadedFileID == fileID); + }); + final embeddings = + await FaceMLDataDB.instance.getFaceEmbeddingMapForFaces(faceIDs); + + final fileIDToCreationTime = + await FilesDB.instance.getFileIDToCreationTime(); + + // Re-cluster within the deleted faces + final clusterResult = + await FaceClusteringService.instance.predictWithinClusterComputer( + embeddings, + fileIDToCreationTime: fileIDToCreationTime, + distanceThreshold: 
0.20, + ); + if (clusterResult == null || clusterResult.isEmpty) { + return; + } + final newFaceIdToClusterID = clusterResult.newFaceIdToCluster; + + // Update the deleted faces + await FaceMLDataDB.instance.forceUpdateClusterIds(newFaceIdToClusterID); + await FaceMLDataDB.instance + .clusterSummaryUpdate(clusterResult.newClusterSummaries!); + + // Make sure the deleted faces don't get suggested in the future + final notClusterIdToPersonId = {}; + for (final clusterId in newFaceIdToClusterID.values.toSet()) { + notClusterIdToPersonId[clusterId] = p.remoteID; + } + await FaceMLDataDB.instance + .bulkCaptureNotPersonFeedback(notClusterIdToPersonId); + + Bus.instance.fire(PeopleChangedEvent()); + return; + } catch (e, s) { + _logger.severe("Error in removeFilesFromPerson", e, s); + rethrow; + } + } + + Future removeFilesFromCluster( + List files, + int clusterID, + ) async { + try { + // Get the relevant faces to be removed + final faceIDs = await FaceMLDataDB.instance + .getFaceIDsForCluster(clusterID) + .then((iterable) => iterable.toList()); + faceIDs.retainWhere((faceID) { + final fileID = getFileIdFromFaceId(faceID); + return files.any((file) => file.uploadedFileID == fileID); + }); + final embeddings = + await FaceMLDataDB.instance.getFaceEmbeddingMapForFaces(faceIDs); + + final fileIDToCreationTime = + await FilesDB.instance.getFileIDToCreationTime(); + + // Re-cluster within the deleted faces + final clusterResult = + await FaceClusteringService.instance.predictWithinClusterComputer( + embeddings, + fileIDToCreationTime: fileIDToCreationTime, + distanceThreshold: 0.20, + ); + if (clusterResult == null || clusterResult.isEmpty) { + return; + } + final newFaceIdToClusterID = clusterResult.newFaceIdToCluster; + + // Update the deleted faces + await FaceMLDataDB.instance.forceUpdateClusterIds(newFaceIdToClusterID); + await FaceMLDataDB.instance + .clusterSummaryUpdate(clusterResult.newClusterSummaries!); + + Bus.instance.fire( + PeopleChangedEvent( + 
relevantFiles: files, + type: PeopleEventType.removedFilesFromCluster, + source: "$clusterID", + ), + ); + // Bus.instance.fire( + // LocalPhotosUpdatedEvent( + // files, + // type: EventType.peopleClusterChanged, + // source: "$clusterID", + // ), + // ); + return; + } catch (e, s) { + _logger.severe("Error in removeFilesFromCluster", e, s); + rethrow; + } + } + + Future addFilesToCluster(List faceIDs, int clusterID) async { + await FaceMLDataDB.instance.addFacesToCluster(faceIDs, clusterID); + Bus.instance.fire(PeopleChangedEvent()); + return; + } + + Future checkAndDoAutomaticMerges( + PersonEntity p, { + required int personClusterID, + }) async { + final faceMlDb = FaceMLDataDB.instance; + final faceIDs = await faceMlDb.getFaceIDsForCluster(personClusterID); + final ignoredClusters = await faceMlDb.getPersonIgnoredClusters(p.remoteID); + if (faceIDs.length < 2 * kMinimumClusterSizeSearchResult) { + final fileIDs = faceIDs.map(getFileIdFromFaceId).toSet(); + if (fileIDs.length < kMinimumClusterSizeSearchResult) { + _logger.info( + 'Cluster $personClusterID has less than $kMinimumClusterSizeSearchResult faces, not doing automatic merges', + ); + return false; + } + } + final allClusterIdsToCountMap = (await faceMlDb.clusterIdToFaceCount()); + _logger.info( + '${kDebugMode ? 
p.data.name : "private"} has existing clusterID $personClusterID, checking if we can automatically merge more', + ); + + // Get and update the cluster summary to get the avg (centroid) and count + final EnteWatch watch = EnteWatch("ClusterFeedbackService")..start(); + final Map clusterAvg = await _getUpdateClusterAvg( + allClusterIdsToCountMap, + ignoredClusters, + minClusterSize: kMinimumClusterSizeSearchResult, + ); + watch.log('computed avg for ${clusterAvg.length} clusters'); + + // Find the actual closest clusters for the person + final List<(int, double)> suggestions = await calcSuggestionsMeanInComputer( + clusterAvg, + {personClusterID}, + ignoredClusters, + 0.24, + ); + + if (suggestions.isEmpty) { + _logger.info( + 'No automatic merge suggestions for ${kDebugMode ? p.data.name : "private"}', + ); + return false; + } + + // log suggestions + _logger.info( + 'suggestions for ${kDebugMode ? p.data.name : "private"} for cluster ID ${p.remoteID} are suggestions $suggestions}', + ); + + for (final suggestion in suggestions) { + final clusterID = suggestion.$1; + await FaceMLDataDB.instance.assignClusterToPerson( + personID: p.remoteID, + clusterID: clusterID, + ); + } + + Bus.instance.fire(PeopleChangedEvent()); + + return true; + } + + Future ignoreCluster(int clusterID) async { + await PersonService.instance.addPerson('', clusterID); + Bus.instance.fire(PeopleChangedEvent()); + return; + } + + Future> checkForMixedClusters() async { + final faceMlDb = FaceMLDataDB.instance; + final allClusterToFaceCount = await faceMlDb.clusterIdToFaceCount(); + final clustersToInspect = []; + for (final clusterID in allClusterToFaceCount.keys) { + if (allClusterToFaceCount[clusterID]! > 20 && + allClusterToFaceCount[clusterID]! 
< 500) { + clustersToInspect.add(clusterID); + } + } + + final fileIDToCreationTime = + await FilesDB.instance.getFileIDToCreationTime(); + + final susClusters = <(int, int)>[]; + + final inspectionStart = DateTime.now(); + for (final clusterID in clustersToInspect) { + final int originalClusterSize = allClusterToFaceCount[clusterID]!; + final faceIDs = await faceMlDb.getFaceIDsForCluster(clusterID); + + final embeddings = await faceMlDb.getFaceEmbeddingMapForFaces(faceIDs); + + final clusterResult = + await FaceClusteringService.instance.predictWithinClusterComputer( + embeddings, + fileIDToCreationTime: fileIDToCreationTime, + distanceThreshold: 0.22, + ); + + if (clusterResult == null || + clusterResult.newClusterIdToFaceIds == null || + clusterResult.isEmpty) { + _logger.warning( + '[CheckMixedClusters] Clustering did not seem to work for cluster $clusterID of size ${allClusterToFaceCount[clusterID]}', + ); + continue; + } + + final newClusterIdToCount = + clusterResult.newClusterIdToFaceIds!.map((key, value) { + return MapEntry(key, value.length); + }); + final amountOfNewClusters = newClusterIdToCount.length; + + _logger.info( + '[CheckMixedClusters] Broke up cluster $clusterID into $amountOfNewClusters clusters \n ${newClusterIdToCount.toString()}', + ); + + // Now find the sizes of the biggest and second biggest cluster + final int biggestClusterID = newClusterIdToCount.keys.reduce((a, b) { + return newClusterIdToCount[a]! > newClusterIdToCount[b]! ? a : b; + }); + final int biggestSize = newClusterIdToCount[biggestClusterID]!; + final biggestRatio = biggestSize / originalClusterSize; + if (newClusterIdToCount.length > 1) { + final List clusterIDs = newClusterIdToCount.keys.toList(); + clusterIDs.remove(biggestClusterID); + final int secondBiggestClusterID = clusterIDs.reduce((a, b) { + return newClusterIdToCount[a]! > newClusterIdToCount[b]! ? 
a : b; + }); + final int secondBiggestSize = + newClusterIdToCount[secondBiggestClusterID]!; + final secondBiggestRatio = secondBiggestSize / originalClusterSize; + + if (biggestRatio < 0.5 || secondBiggestRatio > 0.2) { + final faceIdsOfCluster = + await faceMlDb.getFaceIDsForCluster(clusterID); + final uniqueFileIDs = + faceIdsOfCluster.map(getFileIdFromFaceId).toSet(); + susClusters.add((clusterID, uniqueFileIDs.length)); + _logger.info( + '[CheckMixedClusters] Detected that cluster $clusterID with size ${uniqueFileIDs.length} might be mixed', + ); + } + } else { + _logger.info( + '[CheckMixedClusters] For cluster $clusterID we only found one cluster after reclustering', + ); + } + } + _logger.info( + '[CheckMixedClusters] Inspection took ${DateTime.now().difference(inspectionStart).inSeconds} seconds', + ); + if (susClusters.isNotEmpty) { + _logger.info( + '[CheckMixedClusters] Found ${susClusters.length} clusters that might be mixed: $susClusters', + ); + } else { + _logger.info('[CheckMixedClusters] No mixed clusters found'); + } + return susClusters; + } + + // TODO: iterate over this method to find sweet spot + Future breakUpCluster( + int clusterID, { + bool useDbscan = false, + }) async { + _logger.info( + 'breakUpCluster called for cluster $clusterID with dbscan $useDbscan', + ); + final faceMlDb = FaceMLDataDB.instance; + + final faceIDs = await faceMlDb.getFaceIDsForCluster(clusterID); + final originalFaceIDsSet = faceIDs.toSet(); + + final embeddings = await faceMlDb.getFaceEmbeddingMapForFaces(faceIDs); + + final fileIDToCreationTime = + await FilesDB.instance.getFileIDToCreationTime(); + + final clusterResult = + await FaceClusteringService.instance.predictWithinClusterComputer( + embeddings, + fileIDToCreationTime: fileIDToCreationTime, + distanceThreshold: 0.22, + ); + + if (clusterResult == null || clusterResult.newClusterIdToFaceIds == null || clusterResult.isEmpty) { + _logger.warning('No clusters found or something went wrong'); + return 
ClusteringResult(newFaceIdToCluster: {}); + } + + final clusterIdToCount = + clusterResult.newClusterIdToFaceIds!.map((key, value) { + return MapEntry(key, value.length); + }); + final amountOfNewClusters = clusterIdToCount.length; + + _logger.info( + 'Broke up cluster $clusterID into $amountOfNewClusters clusters \n ${clusterIdToCount.toString()}', + ); + + if (kDebugMode) { + final Set allClusteredFaceIDsSet = {}; + for (final List value + in clusterResult.newClusterIdToFaceIds!.values) { + allClusteredFaceIDsSet.addAll(value); + } + assert((originalFaceIDsSet.difference(allClusteredFaceIDsSet)).isEmpty); + } + + return clusterResult; + } + + /// WARNING: this method is purely for debugging purposes, never use in production + Future createFakeClustersByBlurValue() async { + try { + // Delete old clusters + await FaceMLDataDB.instance.dropClustersAndPersonTable(); + final List persons = + await PersonService.instance.getPersons(); + for (final PersonEntity p in persons) { + await PersonService.instance.deletePerson(p.remoteID); + } + + // Create new fake clusters based on blur value. One for values between 0 and 10, one for 10-20, etc till 200 + final int startClusterID = DateTime.now().microsecondsSinceEpoch; + final faceIDsToBlurValues = + await FaceMLDataDB.instance.getFaceIDsToBlurValues(200); + final faceIdToCluster = {}; + for (final entry in faceIDsToBlurValues.entries) { + final faceID = entry.key; + final blurValue = entry.value; + final newClusterID = startClusterID + blurValue ~/ 10; + faceIdToCluster[faceID] = newClusterID; + } + await FaceMLDataDB.instance.updateFaceIdToClusterId(faceIdToCluster); + + Bus.instance.fire(PeopleChangedEvent()); + } catch (e, s) { + _logger.severe("Error in createFakeClustersByBlurValue", e, s); + rethrow; + } + } + + Future debugLogClusterBlurValues( + int clusterID, { + int? 
clusterSize, + bool logClusterSummary = false, + bool logBlurValues = false, + }) async { + if (!kDebugMode) return; + + // Logging the clusterID + _logger.info( + "Debug logging for cluster $clusterID${clusterSize != null ? ' with $clusterSize photos' : ''}", + ); + const int biggestClusterID = 1715061228725148; + + // Logging the cluster summary for the cluster + if (logClusterSummary) { + final summaryMap = await FaceMLDataDB.instance.getClusterToClusterSummary( + [clusterID, biggestClusterID], + ); + final summary = summaryMap[clusterID]; + if (summary != null) { + _logger.info( + "Cluster summary for cluster $clusterID says the amount of faces is: ${summary.$2}", + ); + } + + final biggestClusterSummary = summaryMap[biggestClusterID]; + final clusterSummary = summaryMap[clusterID]; + if (biggestClusterSummary != null && clusterSummary != null) { + _logger.info( + "Cluster summary for biggest cluster $biggestClusterID says the size is: ${biggestClusterSummary.$2}", + ); + _logger.info( + "Cluster summary for current cluster $clusterID says the size is: ${clusterSummary.$2}", + ); + + // Mean distance + final biggestMean = Vector.fromList( + EVector.fromBuffer(biggestClusterSummary.$1).values, + dtype: DType.float32, + ); + final currentMean = Vector.fromList( + EVector.fromBuffer(clusterSummary.$1).values, + dtype: DType.float32, + ); + final bigClustersMeanDistance = + cosineDistanceSIMD(biggestMean, currentMean); + _logger.info( + "Mean distance between biggest cluster and current cluster: $bigClustersMeanDistance", + ); + _logger.info( + 'Element differences between the two means are ${biggestMean - currentMean}', + ); + final currentL2Norm = currentMean.norm(); + _logger.info( + 'L2 norm of current mean: $currentL2Norm', + ); + final trueDistance = + biggestMean.distanceTo(currentMean, distance: Distance.cosine); + _logger.info('True distance between the two means: $trueDistance'); + + // Median distance + const sampleSize = 100; + final Iterable 
biggestEmbeddings = await FaceMLDataDB + .instance + .getFaceEmbeddingsForCluster(biggestClusterID); + final List biggestSampledEmbeddingsProto = + _randomSampleWithoutReplacement( + biggestEmbeddings, + sampleSize, + ); + final List biggestSampledEmbeddings = + biggestSampledEmbeddingsProto + .map( + (embedding) => Vector.fromList( + EVector.fromBuffer(embedding).values, + dtype: DType.float32, + ), + ) + .toList(growable: false); + + final Iterable currentEmbeddings = + await FaceMLDataDB.instance.getFaceEmbeddingsForCluster(clusterID); + final List currentSampledEmbeddingsProto = + _randomSampleWithoutReplacement( + currentEmbeddings, + sampleSize, + ); + final List currentSampledEmbeddings = + currentSampledEmbeddingsProto + .map( + (embedding) => Vector.fromList( + EVector.fromBuffer(embedding).values, + dtype: DType.float32, + ), + ) + .toList(growable: false); + + // Calculate distances and find the median + final List distances = []; + final List trueDistances = []; + for (final biggestEmbedding in biggestSampledEmbeddings) { + for (final currentEmbedding in currentSampledEmbeddings) { + distances + .add(cosineDistanceSIMD(biggestEmbedding, currentEmbedding)); + trueDistances.add( + biggestEmbedding.distanceTo( + currentEmbedding, + distance: Distance.cosine, + ), + ); + } + } + distances.sort(); + trueDistances.sort(); + final double medianDistance = distances[distances.length ~/ 2]; + final double trueMedianDistance = + trueDistances[trueDistances.length ~/ 2]; + _logger.info( + "Median distance between biggest cluster and current cluster: $medianDistance (using sample of $sampleSize)", + ); + _logger.info( + 'True distance median between the two embeddings: $trueMedianDistance', + ); + } + } + + // Logging the blur values for the cluster + if (logBlurValues) { + final List blurValues = await FaceMLDataDB.instance + .getBlurValuesForCluster(clusterID) + .then((value) => value.toList()); + final blurValuesIntegers = + blurValues.map((value) => 
value.round()).toList(); + blurValuesIntegers.sort(); + _logger.info( + "Blur values for cluster $clusterID${clusterSize != null ? ' with $clusterSize photos' : ''}: $blurValuesIntegers", + ); + } + + return; + } + + /// Returns a list of suggestions. For each suggestion we return a record consisting of the following elements: + /// 1. clusterID: the ID of the cluster + /// 2. distance: the distance between the person's cluster and the suggestion + /// 3. usedMean: whether the suggestion was found using the mean (true) or the median (false) + Future> _getSuggestions( + PersonEntity p, { + int sampleSize = 50, + double maxMedianDistance = 0.62, + double goodMedianDistance = 0.55, + double maxMeanDistance = 0.65, + double goodMeanDistance = 0.45, + }) async { + final w = (kDebugMode ? EnteWatch('getSuggestions') : null)?..start(); + // Get all the cluster data + final faceMlDb = FaceMLDataDB.instance; + final allClusterIdsToCountMap = await faceMlDb.clusterIdToFaceCount(); + final ignoredClusters = await faceMlDb.getPersonIgnoredClusters(p.remoteID); + final personClusters = await faceMlDb.getPersonClusterIDs(p.remoteID); + final personFaceIDs = + await FaceMLDataDB.instance.getFaceIDsForPerson(p.remoteID); + final personFileIDs = personFaceIDs.map(getFileIdFromFaceId).toSet(); + w?.log( + '${p.data.name} has ${personClusters.length} existing clusters, getting all database data done', + ); + final allClusterIdToFaceIDs = + await FaceMLDataDB.instance.getAllClusterIdToFaceIDs(); + w?.log('getAllClusterIdToFaceIDs done'); + + // First only do a simple check on the big clusters, if the person does not have small clusters yet + final smallestPersonClusterSize = personClusters + .map((clusterID) => allClusterIdsToCountMap[clusterID] ?? 
0) + .reduce((value, element) => min(value, element)); + final checkSizes = [100, 20, kMinimumClusterSizeSearchResult, 10, 5, 1]; + late Map clusterAvgBigClusters; + final List<(int, double)> suggestionsMean = []; + for (final minimumSize in checkSizes.toSet()) { + if (smallestPersonClusterSize >= + min(minimumSize, kMinimumClusterSizeSearchResult)) { + clusterAvgBigClusters = await _getUpdateClusterAvg( + allClusterIdsToCountMap, + ignoredClusters, + minClusterSize: minimumSize, + ); + w?.log( + 'Calculate avg for ${clusterAvgBigClusters.length} clusters of min size $minimumSize', + ); + final List<(int, double)> suggestionsMeanBigClusters = + await calcSuggestionsMeanInComputer( + clusterAvgBigClusters, + personClusters, + ignoredClusters, + (minimumSize == 100) ? goodMeanDistance + 0.15 : goodMeanDistance, + ); + w?.log( + 'Calculate suggestions using mean for ${clusterAvgBigClusters.length} clusters of min size $minimumSize', + ); + for (final suggestion in suggestionsMeanBigClusters) { + // Skip suggestions that have a high overlap with the person's files + final suggestionSet = allClusterIdToFaceIDs[suggestion.$1]! 
+ .map((faceID) => getFileIdFromFaceId(faceID)) + .toSet(); + final overlap = personFileIDs.intersection(suggestionSet); + if (overlap.isNotEmpty && + ((overlap.length / suggestionSet.length) > 0.5)) { + await FaceMLDataDB.instance.captureNotPersonFeedback( + personID: p.remoteID, + clusterID: suggestion.$1, + ); + continue; + } + suggestionsMean.add(suggestion); + } + if (suggestionsMean.isNotEmpty) { + return suggestionsMean + .map((e) => (e.$1, e.$2, true)) + .toList(growable: false); + } + } + } + w?.reset(); + + // Find the other cluster candidates based on the median + final clusterAvg = clusterAvgBigClusters; + final List<(int, double)> moreSuggestionsMean = + await calcSuggestionsMeanInComputer( + clusterAvg, + personClusters, + ignoredClusters, + maxMeanDistance, + ); + if (moreSuggestionsMean.isEmpty) { + _logger + .info("No suggestions found using mean, even with higher threshold"); + return []; + } + + moreSuggestionsMean.sort((a, b) => a.$2.compareTo(b.$2)); + final otherClusterIdsCandidates = moreSuggestionsMean + .map( + (e) => e.$1, + ) + .toList(growable: false); + _logger.info( + "Found potential suggestions from loose mean for median test: $otherClusterIdsCandidates", + ); + + w?.logAndReset("Starting median test"); + // Take the embeddings from the person's clusters in one big list and sample from it + final List personEmbeddingsProto = []; + for (final clusterID in personClusters) { + final Iterable embeddings = + await FaceMLDataDB.instance.getFaceEmbeddingsForCluster(clusterID); + personEmbeddingsProto.addAll(embeddings); + } + final List sampledEmbeddingsProto = + _randomSampleWithoutReplacement( + personEmbeddingsProto, + sampleSize, + ); + final List sampledEmbeddings = sampledEmbeddingsProto + .map( + (embedding) => Vector.fromList( + EVector.fromBuffer(embedding).values, + dtype: DType.float32, + ), + ) + .toList(growable: false); + + // Find the actual closest clusters for the person using median + final List<(int, double)> 
suggestionsMedian = []; + final List<(int, double)> greatSuggestionsMedian = []; + double minMedianDistance = maxMedianDistance; + for (final otherClusterId in otherClusterIdsCandidates) { + final Iterable otherEmbeddingsProto = + await FaceMLDataDB.instance.getFaceEmbeddingsForCluster( + otherClusterId, + ); + final sampledOtherEmbeddingsProto = _randomSampleWithoutReplacement( + otherEmbeddingsProto, + sampleSize, + ); + final List sampledOtherEmbeddings = sampledOtherEmbeddingsProto + .map( + (embedding) => Vector.fromList( + EVector.fromBuffer(embedding).values, + dtype: DType.float32, + ), + ) + .toList(growable: false); + + // Calculate distances and find the median + final List distances = []; + for (final otherEmbedding in sampledOtherEmbeddings) { + for (final embedding in sampledEmbeddings) { + distances.add(cosineDistanceSIMD(embedding, otherEmbedding)); + } + } + distances.sort(); + final double medianDistance = distances[distances.length ~/ 2]; + if (medianDistance < minMedianDistance) { + suggestionsMedian.add((otherClusterId, medianDistance)); + minMedianDistance = medianDistance; + if (medianDistance < goodMedianDistance) { + greatSuggestionsMedian.add((otherClusterId, medianDistance)); + break; + } + } + } + w?.log("Finished median test"); + if (suggestionsMedian.isEmpty) { + _logger.info("No suggestions found using median"); + return []; + } else { + _logger.info("Found suggestions using median: $suggestionsMedian"); + } + + final List<(int, double, bool)> finalSuggestionsMedian = suggestionsMedian + .map(((e) => (e.$1, e.$2, false))) + .toList(growable: false) + .reversed + .toList(growable: false); + + if (greatSuggestionsMedian.isNotEmpty) { + _logger.info( + "Found great suggestion using median: $greatSuggestionsMedian", + ); + // // Return the largest size cluster by using allClusterIdsToCountMap + // final List greatSuggestionsMedianClusterIds = + // greatSuggestionsMedian.map((e) => e.$1).toList(growable: false); + // 
greatSuggestionsMedianClusterIds.sort( + // (a, b) => + // allClusterIdsToCountMap[b]!.compareTo(allClusterIdsToCountMap[a]!), + // ); + + // return [greatSuggestionsMedian.last.$1, ...finalSuggestionsMedian]; + } + + return finalSuggestionsMedian; + } + + Future> _getUpdateClusterAvg( + Map allClusterIdsToCountMap, + Set ignoredClusters, { + int minClusterSize = 1, + int maxClusterInCurrentRun = 500, + int maxEmbeddingToRead = 10000, + }) async { + final w = (kDebugMode ? EnteWatch('_getUpdateClusterAvg') : null)?..start(); + final startTime = DateTime.now(); + final faceMlDb = FaceMLDataDB.instance; + _logger.info( + 'start getUpdateClusterAvg for ${allClusterIdsToCountMap.length} clusters, minClusterSize $minClusterSize, maxClusterInCurrentRun $maxClusterInCurrentRun', + ); + + final Map clusterToSummary = + await faceMlDb.getAllClusterSummary(minClusterSize); + final Map updatesForClusterSummary = {}; + + w?.log( + 'getUpdateClusterAvg database call for getAllClusterSummary', + ); + + final serializationEmbeddings = await _computer.compute( + checkAndSerializeCurrentClusterMeans, + param: { + 'allClusterIdsToCountMap': allClusterIdsToCountMap, + 'minClusterSize': minClusterSize, + 'ignoredClusters': ignoredClusters, + 'clusterToSummary': clusterToSummary, + }, + ) as (Map, Set, int, int, int); + final clusterAvg = serializationEmbeddings.$1; + final allClusterIds = serializationEmbeddings.$2; + final ignoredClustersCnt = serializationEmbeddings.$3; + final alreadyUpdatedClustersCnt = serializationEmbeddings.$4; + final smallerClustersCnt = serializationEmbeddings.$5; + + // Assert that all existing clusterAvg are normalized + for (final avg in clusterAvg.values) { + assert((avg.norm() - 1.0).abs() < 1e-5); + } + + w?.log( + 'serialization of embeddings', + ); + _logger.info( + 'Ignored $ignoredClustersCnt clusters, already updated $alreadyUpdatedClustersCnt clusters, $smallerClustersCnt clusters are smaller than $minClusterSize', + ); + + if 
(allClusterIds.isEmpty) { + _logger.info( + 'No clusters to update, getUpdateClusterAvg done in ${DateTime.now().difference(startTime).inMilliseconds} ms', + ); + return clusterAvg; + } + + // get clusterIDs sorted by count in descending order + final sortedClusterIDs = allClusterIds.toList(); + sortedClusterIDs.sort( + (a, b) => + allClusterIdsToCountMap[b]!.compareTo(allClusterIdsToCountMap[a]!), + ); + int indexedInCurrentRun = 0; + w?.reset(); + + int currentPendingRead = 0; + final List clusterIdsToRead = []; + for (final clusterID in sortedClusterIDs) { + if (maxClusterInCurrentRun-- <= 0) { + break; + } + if (currentPendingRead == 0) { + currentPendingRead = allClusterIdsToCountMap[clusterID] ?? 0; + clusterIdsToRead.add(clusterID); + } else { + if ((currentPendingRead + allClusterIdsToCountMap[clusterID]!) < + maxEmbeddingToRead) { + clusterIdsToRead.add(clusterID); + currentPendingRead += allClusterIdsToCountMap[clusterID]!; + } else { + break; + } + } + } + + final Map> clusterEmbeddings = await FaceMLDataDB + .instance + .getFaceEmbeddingsForClusters(clusterIdsToRead); + + w?.logAndReset( + 'read $currentPendingRead embeddings for ${clusterEmbeddings.length} clusters', + ); + + for (final clusterID in clusterEmbeddings.keys) { + final Iterable embeddings = clusterEmbeddings[clusterID]!; + final Iterable vectors = embeddings.map( + (e) => Vector.fromList( + EVector.fromBuffer(e).values, + dtype: DType.float32, + ), + ); + final avg = vectors.reduce((a, b) => a + b) / vectors.length; + final avgNormalized = avg / avg.norm(); + final avgEmbeddingBuffer = EVector(values: avgNormalized).writeToBuffer(); + updatesForClusterSummary[clusterID] = + (avgEmbeddingBuffer, embeddings.length); + // store the intermediate updates + indexedInCurrentRun++; + if (updatesForClusterSummary.length > 100) { + await faceMlDb.clusterSummaryUpdate(updatesForClusterSummary); + updatesForClusterSummary.clear(); + if (kDebugMode) { + _logger.info( + 'getUpdateClusterAvg 
$indexedInCurrentRun clusters in current one', + ); + } + } + clusterAvg[clusterID] = avgNormalized; + } + if (updatesForClusterSummary.isNotEmpty) { + await faceMlDb.clusterSummaryUpdate(updatesForClusterSummary); + } + w?.logAndReset('done computing avg '); + _logger.info( + 'end getUpdateClusterAvg for ${clusterAvg.length} clusters, done in ${DateTime.now().difference(startTime).inMilliseconds} ms', + ); + + return clusterAvg; + } + + Future> calcSuggestionsMeanInComputer( + Map clusterAvg, + Set personClusters, + Set ignoredClusters, + double maxClusterDistance, + ) async { + return await _computer.compute( + _calcSuggestionsMean, + param: { + 'clusterAvg': clusterAvg, + 'personClusters': personClusters, + 'ignoredClusters': ignoredClusters, + 'maxClusterDistance': maxClusterDistance, + }, + ); + } + + List _randomSampleWithoutReplacement( + Iterable embeddings, + int sampleSize, + ) { + final random = Random(); + + if (sampleSize >= embeddings.length) { + return embeddings.toList(); + } + + // If sampleSize is more than half the list size, shuffle and take first sampleSize elements + if (sampleSize > embeddings.length / 2) { + final List shuffled = List.from(embeddings)..shuffle(random); + return shuffled.take(sampleSize).toList(growable: false); + } + + // Otherwise, use the set-based method for efficiency + final selectedIndices = {}; + final sampledEmbeddings = []; + while (sampledEmbeddings.length < sampleSize) { + final int index = random.nextInt(embeddings.length); + if (!selectedIndices.contains(index)) { + selectedIndices.add(index); + sampledEmbeddings.add(embeddings.elementAt(index)); + } + } + + return sampledEmbeddings; + } + + Future _sortSuggestionsOnDistanceToPerson( + PersonEntity person, + List suggestions, { + bool onlySortBigSuggestions = true, + }) async { + if (suggestions.isEmpty) { + debugPrint('No suggestions to sort'); + return; + } + if (onlySortBigSuggestions) { + final bigSuggestions = suggestions + .where( + (s) => 
s.filesInCluster.length > kMinimumClusterSizeSearchResult, + ) + .toList(); + if (bigSuggestions.isEmpty) { + debugPrint('No big suggestions to sort'); + return; + } + } + final startTime = DateTime.now(); + final faceMlDb = FaceMLDataDB.instance; + + // Get the cluster averages for the person's clusters and the suggestions' clusters + final personClusters = await faceMlDb.getPersonClusterIDs(person.remoteID); + final Map personClusterToSummary = + await faceMlDb.getClusterToClusterSummary(personClusters); + final clusterSummaryCallTime = DateTime.now(); + + // Calculate the avg embedding of the person + final w = (kDebugMode ? EnteWatch('sortSuggestions') : null)?..start(); + final personEmbeddingsCount = personClusters + .map((e) => personClusterToSummary[e]!.$2) + .reduce((a, b) => a + b); + Vector personAvg = Vector.filled(192, 0); + for (final personClusterID in personClusters) { + final personClusterBlob = personClusterToSummary[personClusterID]!.$1; + final personClusterAvg = Vector.fromList( + EVector.fromBuffer(personClusterBlob).values, + dtype: DType.float32, + ); + final clusterWeight = + personClusterToSummary[personClusterID]!.$2 / personEmbeddingsCount; + personAvg += personClusterAvg * clusterWeight; + } + w?.log('calculated person avg'); + + // Sort the suggestions based on the distance to the person + for (final suggestion in suggestions) { + if (onlySortBigSuggestions) { + if (suggestion.filesInCluster.length <= 8) { + continue; + } + } + final clusterID = suggestion.clusterIDToMerge; + final faceIDs = suggestion.faceIDsInCluster; + final faceIdToEmbeddingMap = await faceMlDb.getFaceEmbeddingMapForFaces( + faceIDs, + ); + final faceIdToVectorMap = faceIdToEmbeddingMap.map( + (key, value) => MapEntry( + key, + Vector.fromList( + EVector.fromBuffer(value).values, + dtype: DType.float32, + ), + ), + ); + w?.log( + 'got ${faceIdToEmbeddingMap.values.length} embeddings for ${suggestion.filesInCluster.length} files for cluster $clusterID', + ); + final 
fileIdToDistanceMap = {}; + for (final entry in faceIdToVectorMap.entries) { + fileIdToDistanceMap[getFileIdFromFaceId(entry.key)] = + cosineDistanceSIMD(personAvg, entry.value); + } + w?.log('calculated distances for cluster $clusterID'); + suggestion.filesInCluster.sort((b, a) { + //todo: review with @laurens, added this to avoid null safety issue + final double distanceA = fileIdToDistanceMap[a.uploadedFileID!] ?? -1; + final double distanceB = fileIdToDistanceMap[b.uploadedFileID!] ?? -1; + return distanceA.compareTo(distanceB); + }); + w?.log('sorted files for cluster $clusterID'); + + debugPrint( + "[${_logger.name}] Sorted suggestions for cluster $clusterID based on distance to person: ${suggestion.filesInCluster.map((e) => fileIdToDistanceMap[e.uploadedFileID]).toList()}", + ); + } + + final endTime = DateTime.now(); + _logger.info( + "Sorting suggestions based on distance to person took ${endTime.difference(startTime).inMilliseconds} ms for ${suggestions.length} suggestions, of which ${clusterSummaryCallTime.difference(startTime).inMilliseconds} ms was spent on the cluster summary call", + ); + } +} + +/// Returns a map of person's clusterID to map of closest clusterID to with disstance +List<(int, double)> _calcSuggestionsMean(Map args) { + // Fill in args + final Map clusterAvg = args['clusterAvg']; + final Set personClusters = args['personClusters']; + final Set ignoredClusters = args['ignoredClusters']; + final double maxClusterDistance = args['maxClusterDistance']; + + final Map> suggestions = {}; + const suggestionMax = 2000; + int suggestionCount = 0; + int comparisons = 0; + final w = (kDebugMode ? EnteWatch('getSuggestions') : null)?..start(); + + // ignore the clusters that belong to the person or is ignored + Set otherClusters = clusterAvg.keys.toSet().difference(personClusters); + otherClusters = otherClusters.difference(ignoredClusters); + + for (final otherClusterID in otherClusters) { + final Vector? 
otherAvg = clusterAvg[otherClusterID]; + if (otherAvg == null) { + dev.log('[WARNING] no avg for othercluster $otherClusterID'); + continue; + } + int? nearestPersonCluster; + double? minDistance; + for (final personCluster in personClusters) { + if (clusterAvg[personCluster] == null) { + dev.log('[WARNING] no avg for personcluster $personCluster'); + continue; + } + final Vector avg = clusterAvg[personCluster]!; + final distance = cosineDistanceSIMD(avg, otherAvg); + comparisons++; + if (distance < maxClusterDistance) { + if (minDistance == null || distance < minDistance) { + minDistance = distance; + nearestPersonCluster = personCluster; + } + } + } + if (nearestPersonCluster != null && minDistance != null) { + suggestions + .putIfAbsent(nearestPersonCluster, () => []) + .add((otherClusterID, minDistance)); + suggestionCount++; + } + if (suggestionCount >= suggestionMax) { + break; + } + } + w?.log( + 'calculation inside calcSuggestionsMean for ${personClusters.length} person clusters and ${otherClusters.length} other clusters (so ${personClusters.length * otherClusters.length} combinations, $comparisons comparisons made resulted in $suggestionCount suggestions)', + ); + + if (suggestions.isNotEmpty) { + final List<(int, double)> suggestClusterIds = []; + for (final List<(int, double)> suggestion in suggestions.values) { + suggestClusterIds.addAll(suggestion); + } + suggestClusterIds.sort( + (a, b) => a.$2.compareTo(b.$2), + ); // sort by distance + + dev.log( + "Already found ${suggestClusterIds.length} good suggestions using mean", + ); + return suggestClusterIds.sublist(0, min(suggestClusterIds.length, 20)); + } else { + dev.log("No suggestions found using mean"); + return <(int, double)>[]; + } +} + +Future<(Map, Set, int, int, int)> + checkAndSerializeCurrentClusterMeans( + Map args, +) async { + final Map allClusterIdsToCountMap = args['allClusterIdsToCountMap']; + final int minClusterSize = args['minClusterSize'] ?? 
1; + final Set ignoredClusters = args['ignoredClusters'] ?? {}; + final Map clusterToSummary = args['clusterToSummary']; + + final Map clusterAvg = {}; + + final allClusterIds = allClusterIdsToCountMap.keys.toSet(); + int ignoredClustersCnt = 0, alreadyUpdatedClustersCnt = 0; + int smallerClustersCnt = 0; + for (final id in allClusterIdsToCountMap.keys) { + if (ignoredClusters.contains(id)) { + allClusterIds.remove(id); + ignoredClustersCnt++; + } + if (clusterToSummary[id]?.$2 == allClusterIdsToCountMap[id]) { + allClusterIds.remove(id); + clusterAvg[id] = Vector.fromList( + EVector.fromBuffer(clusterToSummary[id]!.$1).values, + dtype: DType.float32, + ); + alreadyUpdatedClustersCnt++; + } + if (allClusterIdsToCountMap[id]! < minClusterSize) { + allClusterIds.remove(id); + smallerClustersCnt++; + } + } + + return ( + clusterAvg, + allClusterIds, + ignoredClustersCnt, + alreadyUpdatedClustersCnt, + smallerClustersCnt + ); +} diff --git a/mobile/lib/services/machine_learning/face_ml/person/person_service.dart b/mobile/lib/services/machine_learning/face_ml/person/person_service.dart new file mode 100644 index 0000000000..7517d057d5 --- /dev/null +++ b/mobile/lib/services/machine_learning/face_ml/person/person_service.dart @@ -0,0 +1,294 @@ +import "dart:convert"; +import "dart:developer"; + +import "package:flutter/foundation.dart"; +import "package:logging/logging.dart"; +import "package:photos/core/event_bus.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/extensions/stop_watch.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/models/api/entity/type.dart"; +import "package:photos/services/entity_service.dart"; +import "package:shared_preferences/shared_preferences.dart"; + +class PersonService { + final EntityService entityService; + final FaceMLDataDB faceMLDataDB; + final SharedPreferences prefs; + PersonService(this.entityService, this.faceMLDataDB, 
this.prefs); + // instance + static PersonService? _instance; + static PersonService get instance { + if (_instance == null) { + throw Exception("PersonService not initialized"); + } + return _instance!; + } + + late Logger logger = Logger("PersonService"); + + static init( + EntityService entityService, + FaceMLDataDB faceMLDataDB, + SharedPreferences prefs, + ) { + _instance = PersonService(entityService, faceMLDataDB, prefs); + } + + Future> getPersons() async { + final entities = await entityService.getEntities(EntityType.person); + return entities + .map( + (e) => PersonEntity(e.id, PersonData.fromJson(json.decode(e.data))), + ) + .toList(); + } + + Future getPerson(String id) { + return entityService.getEntity(EntityType.person, id).then((e) { + if (e == null) { + return null; + } + return PersonEntity(e.id, PersonData.fromJson(json.decode(e.data))); + }); + } + + Future> getPersonsMap() async { + final entities = await entityService.getEntities(EntityType.person); + final Map map = {}; + for (var e in entities) { + final person = + PersonEntity(e.id, PersonData.fromJson(json.decode(e.data))); + map[person.remoteID] = person; + } + return map; + } + + Future> personIDs() async { + final entities = await entityService.getEntities(EntityType.person); + return entities.map((e) => e.id).toSet(); + } + + Future reconcileClusters() async { + final EnteWatch? w = kDebugMode ? 
EnteWatch("reconcileClusters") : null; + w?.start(); + await storeRemoteFeedback(); + w?.log("Stored remote feedback"); + final dbPersonClusterInfo = + await faceMLDataDB.getPersonToClusterIdToFaceIds(); + w?.log("Got DB person cluster info"); + final persons = await getPersonsMap(); + w?.log("Got persons"); + for (var personID in dbPersonClusterInfo.keys) { + final person = persons[personID]; + if (person == null) { + logger.warning("Person $personID not found"); + continue; + } + final personData = person.data; + final Map> dbPersonCluster = + dbPersonClusterInfo[personID]!; + if (_shouldUpdateRemotePerson(personData, dbPersonCluster)) { + final personData = person.data; + personData.assigned = dbPersonCluster.entries + .map( + (e) => ClusterInfo( + id: e.key, + faces: e.value, + ), + ) + .toList(); + entityService + .addOrUpdate( + EntityType.person, + json.encode(personData.toJson()), + id: personID, + ) + .ignore(); + personData.logStats(); + } + } + w?.log("Reconciled clusters for ${persons.length} persons"); + } + + bool _shouldUpdateRemotePerson( + PersonData personData, + Map> dbPersonCluster, + ) { + bool result = false; + if ((personData.assigned?.length ?? 0) != dbPersonCluster.length) { + log( + "Person ${personData.name} has ${personData.assigned?.length} clusters, but ${dbPersonCluster.length} clusters found in DB", + name: "PersonService", + ); + result = true; + } else { + for (ClusterInfo info in personData.assigned!) 
{ + final dbCluster = dbPersonCluster[info.id]; + if (dbCluster == null) { + log( + "Cluster ${info.id} not found in DB for person ${personData.name}", + name: "PersonService", + ); + result = true; + continue; + } + if (info.faces.length != dbCluster.length) { + log( + "Cluster ${info.id} has ${info.faces.length} faces, but ${dbCluster.length} faces found in DB", + name: "PersonService", + ); + result = true; + } + for (var faceId in info.faces) { + if (!dbCluster.contains(faceId)) { + log( + "Face $faceId not found in cluster ${info.id} for person ${personData.name}", + name: "PersonService", + ); + result = true; + } + } + } + } + return result; + } + + Future addPerson( + String name, + int clusterID, { + bool isHidden = false, + }) async { + final faceIds = await faceMLDataDB.getFaceIDsForCluster(clusterID); + final data = PersonData( + name: name, + assigned: [ + ClusterInfo( + id: clusterID, + faces: faceIds.toSet(), + ), + ], + isHidden: isHidden, + ); + final result = await entityService.addOrUpdate( + EntityType.person, + json.encode(data.toJson()), + ); + await faceMLDataDB.assignClusterToPerson( + personID: result.id, + clusterID: clusterID, + ); + return PersonEntity(result.id, data); + } + + Future removeClusterToPerson({ + required String personID, + required int clusterID, + }) async { + final person = (await getPerson(personID))!; + final personData = person.data; + personData.assigned!.removeWhere((element) => element.id != clusterID); + await entityService.addOrUpdate( + EntityType.person, + json.encode(personData.toJson()), + id: personID, + ); + await faceMLDataDB.removeClusterToPerson( + personID: personID, + clusterID: clusterID, + ); + personData.logStats(); + } + + Future deletePerson(String personID, {bool onlyMapping = false}) async { + if (onlyMapping) { + final PersonEntity? 
entity = await getPerson(personID); + if (entity == null) { + return; + } + final PersonEntity justName = + PersonEntity(personID, PersonData(name: entity.data.name)); + await entityService.addOrUpdate( + EntityType.person, + json.encode(justName.data.toJson()), + id: personID, + ); + await faceMLDataDB.removePerson(personID); + justName.data.logStats(); + } else { + await entityService.deleteEntry(personID); + await faceMLDataDB.removePerson(personID); + } + + // fire PeopleChangeEvent + Bus.instance.fire(PeopleChangedEvent()); + } + + Future storeRemoteFeedback() async { + await entityService.syncEntities(); + final entities = await entityService.getEntities(EntityType.person); + entities.sort((a, b) => a.updatedAt.compareTo(b.updatedAt)); + final Map faceIdToClusterID = {}; + final Map clusterToPersonID = {}; + for (var e in entities) { + final personData = PersonData.fromJson(json.decode(e.data)); + int faceCount = 0; + for (var cluster in personData.assigned!) { + faceCount += cluster.faces.length; + for (var faceId in cluster.faces) { + if (faceIdToClusterID.containsKey(faceId)) { + final otherPersonID = clusterToPersonID[faceIdToClusterID[faceId]!]; + if (otherPersonID != e.id) { + final otherPerson = await getPerson(otherPersonID!); + throw Exception( + "Face $faceId is already assigned to person $otherPersonID (${otherPerson!.data.name}) and person ${e.id} (${personData.name})", + ); + } + } + faceIdToClusterID[faceId] = cluster.id; + } + clusterToPersonID[cluster.id] = e.id; + } + if (kDebugMode) { + logger.info( + "Person ${e.id} ${personData.name} has ${personData.assigned!.length} clusters with $faceCount faces", + ); + } + } + + logger.info("Storing feedback for ${faceIdToClusterID.length} faces"); + await faceMLDataDB.updateFaceIdToClusterId(faceIdToClusterID); + await faceMLDataDB.bulkAssignClusterToPersonID(clusterToPersonID); + } + + Future updateAttributes( + String id, { + String? name, + String? avatarFaceId, + bool? isHidden, + int? 
version, + String? birthDate, + }) async { + final person = (await getPerson(id))!; + final updatedPerson = person.copyWith( + data: person.data.copyWith( + name: name, + avatarFaceId: avatarFaceId, + isHidden: isHidden, + version: version, + birthDate: birthDate, + ), + ); + await _updatePerson(updatedPerson); + } + + Future _updatePerson(PersonEntity updatePerson) async { + await entityService.addOrUpdate( + EntityType.person, + json.encode(updatePerson.data.toJson()), + id: updatePerson.remoteID, + ); + updatePerson.data.logStats(); + } +} diff --git a/mobile/lib/services/machine_learning/file_ml/file_ml.dart b/mobile/lib/services/machine_learning/file_ml/file_ml.dart new file mode 100644 index 0000000000..9909902766 --- /dev/null +++ b/mobile/lib/services/machine_learning/file_ml/file_ml.dart @@ -0,0 +1,89 @@ +import "package:photos/face/model/face.dart"; + +class FileMl { + final int fileID; + final int? height; + final int? width; + final FaceEmbeddings faceEmbedding; + final ClipEmbedding? clipEmbedding; + + FileMl( + this.fileID, + this.faceEmbedding, { + this.height, + this.width, + this.clipEmbedding, + }); + + // toJson + Map toJson() => { + 'fileID': fileID, + 'height': height, + 'width': width, + 'faceEmbedding': faceEmbedding.toJson(), + 'clipEmbedding': clipEmbedding?.toJson(), + }; + // fromJson + factory FileMl.fromJson(Map json) { + return FileMl( + json['fileID'] as int, + FaceEmbeddings.fromJson(json['faceEmbedding'] as Map), + height: json['height'] as int?, + width: json['width'] as int?, + clipEmbedding: json['clipEmbedding'] == null + ? 
null + : ClipEmbedding.fromJson( + json['clipEmbedding'] as Map, + ), + ); + } +} + +class FaceEmbeddings { + final List faces; + final int version; + // pkgname/version + final String client; + + FaceEmbeddings( + this.faces, + this.version, { + required this.client, + }); + + // toJson + Map toJson() => { + 'faces': faces.map((x) => x.toJson()).toList(), + 'version': version, + 'client': client, + }; + // fromJson + factory FaceEmbeddings.fromJson(Map json) { + return FaceEmbeddings( + List.from( + json['faces'].map((x) => Face.fromJson(x as Map)), + ), + json['version'] as int, + client: json['client'] ?? + 'unknown', + ); + } +} + +class ClipEmbedding { + final int? version; + final List embedding; + ClipEmbedding(this.embedding, {this.version}); + // toJson + Map toJson() => { + 'version': version, + 'embedding': embedding, + }; + // fromJson + factory ClipEmbedding.fromJson(Map json) { + return ClipEmbedding( + List.from(json['embedding'] as List), + version: json['version'] as int?, + ); + } +} diff --git a/mobile/lib/services/machine_learning/file_ml/files_ml_data_response.dart b/mobile/lib/services/machine_learning/file_ml/files_ml_data_response.dart new file mode 100644 index 0000000000..475f52d0a3 --- /dev/null +++ b/mobile/lib/services/machine_learning/file_ml/files_ml_data_response.dart @@ -0,0 +1,19 @@ +import 'package:photos/services/machine_learning/file_ml/file_ml.dart'; + +class FilesMLDataResponse { + final Map mlData; + // fileIDs that were indexed but they don't contain any meaningful embeddings + // and hence should be discarded for re-indexing + final Set noEmbeddingFileIDs; + // fetchErrorFileIDs are the fileIDs for whom we failed failed to fetch embeddings + // from the storage + final Set fetchErrorFileIDs; + // pendingIndexFileIDs are the fileIDs that were never indexed + final Set pendingIndexFileIDs; + FilesMLDataResponse( + this.mlData, { + required this.noEmbeddingFileIDs, + required this.fetchErrorFileIDs, + required 
this.pendingIndexFileIDs, + }); +} diff --git a/mobile/lib/services/machine_learning/file_ml/remote_fileml_service.dart b/mobile/lib/services/machine_learning/file_ml/remote_fileml_service.dart new file mode 100644 index 0000000000..eafbc6323d --- /dev/null +++ b/mobile/lib/services/machine_learning/file_ml/remote_fileml_service.dart @@ -0,0 +1,138 @@ +import "dart:async"; +import "dart:convert"; + +import "package:logging/logging.dart"; +import "package:photos/core/network/network.dart"; +import "package:photos/db/files_db.dart"; +import "package:photos/models/file/file.dart"; +import 'package:photos/services/machine_learning/file_ml/file_ml.dart'; +import "package:photos/services/machine_learning/file_ml/files_ml_data_response.dart"; +import "package:photos/services/machine_learning/semantic_search/embedding_store.dart"; +import "package:photos/services/machine_learning/semantic_search/remote_embedding.dart"; +import "package:photos/utils/crypto_util.dart"; +import "package:photos/utils/file_download_util.dart"; +import "package:shared_preferences/shared_preferences.dart"; + +class RemoteFileMLService { + RemoteFileMLService._privateConstructor(); + + static final RemoteFileMLService instance = + RemoteFileMLService._privateConstructor(); + + final _logger = Logger("RemoteFileMLService"); + final _dio = NetworkClient.instance.enteDio; + + void init(SharedPreferences prefs) {} + + Future putFileEmbedding(EnteFile file, FileMl fileML) async { + final encryptionKey = getFileKey(file); + final embeddingJSON = jsonEncode(fileML.toJson()); + final encryptedEmbedding = await CryptoUtil.encryptChaCha( + utf8.encode(embeddingJSON), + encryptionKey, + ); + final encryptedData = + CryptoUtil.bin2base64(encryptedEmbedding.encryptedData!); + final header = CryptoUtil.bin2base64(encryptedEmbedding.header!); + try { + final _ = await _dio.put( + "/embeddings", + data: { + "fileID": file.uploadedFileID!, + "model": 'file-ml-clip-face', + "encryptedEmbedding": encryptedData, + 
"decryptionHeader": header, + }, + ); + // final updationTime = response.data["updatedAt"]; + } catch (e, s) { + _logger.severe("Failed to put embedding", e, s); + rethrow; + } + } + + Future getFilessEmbedding( + List fileIds, + ) async { + try { + final res = await _dio.post( + "/embeddings/files", + data: { + "fileIDs": fileIds, + "model": 'file-ml-clip-face', + }, + ); + final remoteEmb = res.data['embeddings'] as List; + final pendingIndexFiles = res.data['pendingIndexFileIDs'] as List; + final noEmbeddingFiles = res.data['noEmbeddingFileIDs'] as List; + final errFileIds = res.data['errFileIDs'] as List; + + final List remoteEmbeddings = []; + for (var entry in remoteEmb) { + final embedding = RemoteEmbedding.fromMap(entry); + remoteEmbeddings.add(embedding); + } + + final fileIDToFileMl = await decryptFileMLData(remoteEmbeddings); + return FilesMLDataResponse( + fileIDToFileMl, + noEmbeddingFileIDs: + Set.from(noEmbeddingFiles.map((x) => x as int)), + fetchErrorFileIDs: Set.from(errFileIds.map((x) => x as int)), + pendingIndexFileIDs: + Set.from(pendingIndexFiles.map((x) => x as int)), + ); + } catch (e, s) { + _logger.severe("Failed to get embeddings", e, s); + rethrow; + } + } + + Future> decryptFileMLData( + List remoteEmbeddings, + ) async { + final result = {}; + if (remoteEmbeddings.isEmpty) { + return result; + } + final inputs = []; + final fileMap = await FilesDB.instance + .getFilesFromIDs(remoteEmbeddings.map((e) => e.fileID).toList()); + for (final embedding in remoteEmbeddings) { + final file = fileMap[embedding.fileID]; + if (file == null) { + continue; + } + final fileKey = getFileKey(file); + final input = EmbeddingsDecoderInput(embedding, fileKey); + inputs.add(input); + } + // todo: use compute or isolate + return decryptFileMLComputer( + { + "inputs": inputs, + }, + ); + } + + Future> decryptFileMLComputer( + Map args, + ) async { + final result = {}; + final inputs = args["inputs"] as List; + for (final input in inputs) { + final 
decryptArgs = {}; + decryptArgs["source"] = + CryptoUtil.base642bin(input.embedding.encryptedEmbedding); + decryptArgs["key"] = input.decryptionKey; + decryptArgs["header"] = + CryptoUtil.base642bin(input.embedding.decryptionHeader); + final embeddingData = chachaDecryptData(decryptArgs); + final decodedJson = jsonDecode(utf8.decode(embeddingData)); + final FileMl decodedEmbedding = + FileMl.fromJson(decodedJson as Map); + result[input.embedding.fileID] = decodedEmbedding; + } + return result; + } +} diff --git a/mobile/lib/services/machine_learning/machine_learning_controller.dart b/mobile/lib/services/machine_learning/machine_learning_controller.dart index 145670f2c8..852ebcd5b5 100644 --- a/mobile/lib/services/machine_learning/machine_learning_controller.dart +++ b/mobile/lib/services/machine_learning/machine_learning_controller.dart @@ -3,6 +3,8 @@ import "dart:io"; import "package:battery_info/battery_info_plugin.dart"; import "package:battery_info/model/android_battery_info.dart"; +import "package:battery_info/model/iso_battery_info.dart"; +import "package:flutter/foundation.dart" show kDebugMode; import "package:logging/logging.dart"; import "package:photos/core/event_bus.dart"; import "package:photos/events/machine_learning_control_event.dart"; @@ -17,12 +19,13 @@ class MachineLearningController { static const kMaximumTemperature = 42; // 42 degree celsius static const kMinimumBatteryLevel = 20; // 20% - static const kDefaultInteractionTimeout = Duration(seconds: 15); + static const kDefaultInteractionTimeout = + kDebugMode ? Duration(seconds: 3) : Duration(seconds: 5); static const kUnhealthyStates = ["over_heat", "over_voltage", "dead"]; bool _isDeviceHealthy = true; bool _isUserInteracting = true; - bool _isRunningML = false; + bool _canRunML = false; late Timer _userInteractionTimer; void init() { @@ -31,12 +34,17 @@ class MachineLearningController { BatteryInfoPlugin() .androidBatteryInfoStream .listen((AndroidBatteryInfo? 
batteryInfo) { - _onBatteryStateUpdate(batteryInfo); + _onAndroidBatteryStateUpdate(batteryInfo); }); - } else { - // Always run Machine Learning on iOS - Bus.instance.fire(MachineLearningControlEvent(true)); } + if (Platform.isIOS) { + BatteryInfoPlugin() + .iosBatteryInfoStream + .listen((IosBatteryInfo? batteryInfo) { + _oniOSBatteryStateUpdate(batteryInfo); + }); + } + _fireControlEvent(); } void onUserInteraction() { @@ -52,11 +60,12 @@ class MachineLearningController { } void _fireControlEvent() { - final shouldRunML = _isDeviceHealthy && !_isUserInteracting; - if (shouldRunML != _isRunningML) { - _isRunningML = shouldRunML; + final shouldRunML = + _isDeviceHealthy && (Platform.isAndroid ? !_isUserInteracting : true); + if (shouldRunML != _canRunML) { + _canRunML = shouldRunML; _logger.info( - "Firing event with device health: $_isDeviceHealthy and user interaction: $_isUserInteracting", + "Firing event with $shouldRunML, device health: $_isDeviceHealthy and user interaction: $_isUserInteracting", ); Bus.instance.fire(MachineLearningControlEvent(shouldRunML)); } @@ -75,18 +84,28 @@ class MachineLearningController { _startInteractionTimer(); } - void _onBatteryStateUpdate(AndroidBatteryInfo? batteryInfo) { + void _onAndroidBatteryStateUpdate(AndroidBatteryInfo? batteryInfo) { _logger.info("Battery info: ${batteryInfo!.toJson()}"); - _isDeviceHealthy = _computeIsDeviceHealthy(batteryInfo); + _isDeviceHealthy = _computeIsAndroidDeviceHealthy(batteryInfo); _fireControlEvent(); } - bool _computeIsDeviceHealthy(AndroidBatteryInfo info) { + void _oniOSBatteryStateUpdate(IosBatteryInfo? batteryInfo) { + _logger.info("Battery info: ${batteryInfo!.toJson()}"); + _isDeviceHealthy = _computeIsiOSDeviceHealthy(batteryInfo); + _fireControlEvent(); + } + + bool _computeIsAndroidDeviceHealthy(AndroidBatteryInfo info) { return _hasSufficientBattery(info.batteryLevel ?? kMinimumBatteryLevel) && _isAcceptableTemperature(info.temperature ?? 
kMaximumTemperature) && _isBatteryHealthy(info.health ?? ""); } + bool _computeIsiOSDeviceHealthy(IosBatteryInfo info) { + return _hasSufficientBattery(info.batteryLevel ?? kMinimumBatteryLevel); + } + bool _hasSufficientBattery(int batteryLevel) { return batteryLevel >= kMinimumBatteryLevel; } diff --git a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart index 420b8c97f7..485e1f2c91 100644 --- a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart +++ b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart @@ -145,9 +145,12 @@ class EmbeddingStore { } _logger.info("${remoteEmbeddings.length} embeddings fetched"); + return RemoteEmbeddings( remoteEmbeddings, - remoteEmbeddings.length == limit, + // keep fetching until we get all embeddings. Avoid limit check as + // some embedding fetch might fail on server + remoteEmbeddings.isNotEmpty, ); } diff --git a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart index 337ca913ff..db1713c2c3 100644 --- a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart +++ b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart @@ -1,6 +1,6 @@ import "dart:async"; import "dart:collection"; -import "dart:io"; +import "dart:math" show min; import "package:computer/computer.dart"; import "package:logging/logging.dart"; @@ -102,17 +102,13 @@ class SemanticSearchService { if (shouldSyncImmediately) { unawaited(sync()); } - if (Platform.isAndroid) { - Bus.instance.on().listen((event) { - if (event.shouldRun) { - _startIndexing(); - } else { - _pauseIndexing(); - } - }); - } else { - _startIndexing(); - } + Bus.instance.on().listen((event) { + if (event.shouldRun) { + _startIndexing(); + } else { + _pauseIndexing(); + } + }); } Future 
release() async { @@ -164,8 +160,10 @@ class SemanticSearchService { } Future getIndexStatus() async { + final indexableFileIDs = await FilesDB.instance + .getOwnedFileIDs(Configuration.instance.getUserID()!); return IndexStatus( - _cachedEmbeddings.length, + min(_cachedEmbeddings.length, indexableFileIDs.length), (await _getFileIDsToBeIndexed()).length, ); } @@ -190,6 +188,7 @@ class SemanticSearchService { _logger.info( "Loading ${_cachedEmbeddings.length} took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch)}ms", ); + Bus.instance.fire(EmbeddingCacheUpdatedEvent()); _logger.info("Cached embeddings: " + _cachedEmbeddings.length.toString()); } @@ -225,7 +224,9 @@ class SemanticSearchService { Future> _getFileIDsToBeIndexed() async { final uploadedFileIDs = await FilesDB.instance .getOwnedFileIDs(Configuration.instance.getUserID()!); - final embeddedFileIDs = _cachedEmbeddings.map((e) => e.fileID).toSet(); + final embeddedFileIDs = + await EmbeddingsDB.instance.getFileIDs(_currentModel); + uploadedFileIDs.removeWhere( (id) => embeddedFileIDs.contains(id), ); diff --git a/mobile/lib/services/remote_assets_service.dart b/mobile/lib/services/remote_assets_service.dart index 251ce6c156..1e2cb3b6df 100644 --- a/mobile/lib/services/remote_assets_service.dart +++ b/mobile/lib/services/remote_assets_service.dart @@ -1,5 +1,7 @@ +import "dart:async"; import "dart:io"; +import "package:flutter/foundation.dart"; import "package:logging/logging.dart"; import "package:path_provider/path_provider.dart"; import "package:photos/core/network/network.dart"; @@ -8,6 +10,10 @@ class RemoteAssetsService { static final _logger = Logger("RemoteAssetsService"); RemoteAssetsService._privateConstructor(); + final StreamController<(String, int, int)> _progressController = + StreamController<(String, int, int)>.broadcast(); + + Stream<(String, int, int)> get progressStream => _progressController.stream; static final RemoteAssetsService instance = 
RemoteAssetsService._privateConstructor(); @@ -57,7 +63,19 @@ class RemoteAssetsService { if (await existingFile.exists()) { await existingFile.delete(); } - await NetworkClient.instance.getDio().download(url, savePath); + + await NetworkClient.instance.getDio().download( + url, + savePath, + onReceiveProgress: (received, total) { + if (received > 0 && total > 0) { + _progressController.add((url, received, total)); + } else if (kDebugMode) { + debugPrint("$url Received: $received, Total: $total"); + } + }, + ); + _logger.info("Downloaded " + url); } } diff --git a/mobile/lib/services/search_service.dart b/mobile/lib/services/search_service.dart index e27ca7582f..1ff73dbc89 100644 --- a/mobile/lib/services/search_service.dart +++ b/mobile/lib/services/search_service.dart @@ -11,6 +11,8 @@ import 'package:photos/data/years.dart'; import 'package:photos/db/files_db.dart'; import 'package:photos/events/local_photos_updated_event.dart'; import "package:photos/extensions/string_ext.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/person.dart"; import "package:photos/models/api/collection/user.dart"; import 'package:photos/models/collection/collection.dart'; import 'package:photos/models/collection/collection_items.dart'; @@ -22,19 +24,25 @@ import "package:photos/models/location/location.dart"; import "package:photos/models/location_tag/location_tag.dart"; import 'package:photos/models/search/album_search_result.dart'; import 'package:photos/models/search/generic_search_result.dart'; +import "package:photos/models/search/search_constants.dart"; import "package:photos/models/search/search_types.dart"; import 'package:photos/services/collections_service.dart'; import "package:photos/services/location_service.dart"; +import "package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart"; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; import 
'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart'; import "package:photos/states/location_screen_state.dart"; import "package:photos/ui/viewer/location/add_location_sheet.dart"; import "package:photos/ui/viewer/location/location_screen.dart"; +import "package:photos/ui/viewer/people/cluster_page.dart"; +import "package:photos/ui/viewer/people/people_page.dart"; import 'package:photos/utils/date_time_util.dart'; import "package:photos/utils/navigation_util.dart"; import 'package:tuple/tuple.dart'; class SearchService { Future>? _cachedFilesFuture; + Future>? _cachedHiddenFilesFuture; final _logger = Logger((SearchService).toString()); final _collectionService = CollectionsService.instance; static const _maximumResultsLimit = 20; @@ -47,6 +55,7 @@ class SearchService { Bus.instance.on().listen((event) { // only invalidate, let the load happen on demand _cachedFilesFuture = null; + _cachedHiddenFilesFuture = null; }); } @@ -66,8 +75,21 @@ class SearchService { return _cachedFilesFuture!; } + Future> getHiddenFiles() async { + if (_cachedHiddenFilesFuture != null) { + return _cachedHiddenFilesFuture!; + } + _logger.fine("Reading hidden files from db"); + final hiddenCollections = + CollectionsService.instance.getHiddenCollectionIds(); + _cachedHiddenFilesFuture = + FilesDB.instance.getAllFilesFromCollections(hiddenCollections); + return _cachedHiddenFilesFuture!; + } + void clearCache() { _cachedFilesFuture = null; + _cachedHiddenFilesFuture = null; } // getFilteredCollectionsWithThumbnail removes deleted or archived or @@ -704,6 +726,170 @@ class SearchService { return searchResults; } + Future>> getClusterFilesForPersonID( + String personID, + ) async { + _logger.info('getClusterFilesForPersonID $personID'); + final Map> fileIdToClusterID = + await FaceMLDataDB.instance.getFileIdToClusterIDSet(personID); + _logger.info('faceDbDone getClusterFilesForPersonID $personID'); + final Map> clusterIDToFiles = {}; + final allFiles = await 
getAllFiles(); + for (final f in allFiles) { + if (!fileIdToClusterID.containsKey(f.uploadedFileID ?? -1)) { + continue; + } + final cluserIds = fileIdToClusterID[f.uploadedFileID ?? -1]!; + for (final cluster in cluserIds) { + if (clusterIDToFiles.containsKey(cluster)) { + clusterIDToFiles[cluster]!.add(f); + } else { + clusterIDToFiles[cluster] = [f]; + } + } + } + _logger.info('done getClusterFilesForPersonID $personID'); + return clusterIDToFiles; + } + + Future> getAllFace(int? limit) async { + try { + // Don't return anything if clustering is not nearly complete yet + final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount(); + final clusteredFaces = + await FaceMLDataDB.instance.getClusteredFaceCount(); + final clusteringDoneRatio = clusteredFaces / foundFaces; + if (clusteringDoneRatio < 0.9) { + return []; + } + + debugPrint("getting faces"); + final Map> fileIdToClusterID = + await FaceMLDataDB.instance.getFileIdToClusterIds(); + final Map personIdToPerson = + await PersonService.instance.getPersonsMap(); + final clusterIDToPersonID = + await FaceMLDataDB.instance.getClusterIDToPersonID(); + + final List facesResult = []; + final Map> clusterIdToFiles = {}; + final Map> personIdToFiles = {}; + final allFiles = await getAllFiles(); + for (final f in allFiles) { + if (!fileIdToClusterID.containsKey(f.uploadedFileID ?? -1)) { + continue; + } + final cluserIds = fileIdToClusterID[f.uploadedFileID ?? -1]!; + for (final cluster in cluserIds) { + final PersonEntity? p = + personIdToPerson[clusterIDToPersonID[cluster] ?? 
""]; + if (p != null) { + if (personIdToFiles.containsKey(p.remoteID)) { + personIdToFiles[p.remoteID]!.add(f); + } else { + personIdToFiles[p.remoteID] = [f]; + } + } else { + if (clusterIdToFiles.containsKey(cluster)) { + clusterIdToFiles[cluster]!.add(f); + } else { + clusterIdToFiles[cluster] = [f]; + } + } + } + } + // get sorted personId by files count + final sortedPersonIds = personIdToFiles.keys.toList() + ..sort( + (a, b) => personIdToFiles[b]!.length.compareTo( + personIdToFiles[a]!.length, + ), + ); + for (final personID in sortedPersonIds) { + final files = personIdToFiles[personID]!; + if (files.isEmpty) { + continue; + } + final PersonEntity p = personIdToPerson[personID]!; + if (p.data.isIgnored) continue; + facesResult.add( + GenericSearchResult( + ResultType.faces, + p.data.name, + files, + params: { + kPersonParamID: personID, + kFileID: files.first.uploadedFileID, + }, + onResultTap: (ctx) { + routeToPage( + ctx, + PeoplePage( + tagPrefix: "${ResultType.faces.toString()}_${p.data.name}", + person: p, + ), + ); + }, + ), + ); + } + final sortedClusterIds = clusterIdToFiles.keys.toList() + ..sort( + (a, b) => clusterIdToFiles[b]! + .length + .compareTo(clusterIdToFiles[a]!.length), + ); + + for (final clusterId in sortedClusterIds) { + final files = clusterIdToFiles[clusterId]!; + // final String clusterName = "ID:$clusterId, ${files.length}"; + // final String clusterName = "${files.length}"; + // const String clusterName = ""; + final String clusterName = "$clusterId"; + + if (clusterIDToPersonID[clusterId] != null) { + // This should not happen, means a faceID is assigned to multiple persons. 
+ _logger.severe( + "`getAllFace`: Cluster $clusterId should not have person id ${clusterIDToPersonID[clusterId]}", + ); + } + if (files.length < kMinimumClusterSizeSearchResult && + sortedClusterIds.length > 3) { + continue; + } + facesResult.add( + GenericSearchResult( + ResultType.faces, + clusterName, + files, + params: { + kClusterParamId: clusterId, + kFileID: files.first.uploadedFileID, + }, + onResultTap: (ctx) { + routeToPage( + ctx, + ClusterPage( + files, + tagPrefix: "${ResultType.faces.toString()}_$clusterName", + clusterID: clusterId, + ), + ); + }, + ), + ); + } + if (limit != null) { + return facesResult.sublist(0, min(limit, facesResult.length)); + } else { + return facesResult; + } + } catch (e, s) { + _logger.severe("Error in getAllFace", e, s); + rethrow; + } + } + Future> getAllLocationTags(int? limit) async { try { final Map, List> tagToItemsMap = {}; diff --git a/mobile/lib/states/all_sections_examples_state.dart b/mobile/lib/states/all_sections_examples_state.dart index fdeb6fcdf4..a40ecd9255 100644 --- a/mobile/lib/states/all_sections_examples_state.dart +++ b/mobile/lib/states/all_sections_examples_state.dart @@ -6,6 +6,7 @@ import "package:logging/logging.dart"; import "package:photos/core/constants.dart"; import "package:photos/core/event_bus.dart"; import "package:photos/events/files_updated_event.dart"; +import "package:photos/events/people_changed_event.dart"; import "package:photos/events/tab_changed_event.dart"; import "package:photos/models/search/search_result.dart"; import "package:photos/models/search/search_types.dart"; @@ -31,6 +32,7 @@ class _AllSectionsExamplesProviderState Future>> allSectionsExamplesFuture = Future.value([]); late StreamSubscription _filesUpdatedEvent; + late StreamSubscription _onPeopleChangedEvent; late StreamSubscription _tabChangeEvent; bool hasPendingUpdate = false; bool isOnSearchTab = false; @@ -46,16 +48,11 @@ class _AllSectionsExamplesProviderState super.initState(); //add all common events for 
all search sections to reload to here. _filesUpdatedEvent = Bus.instance.on().listen((event) { - if (!isOnSearchTab) { - if (kDebugMode) { - _logger.finest('Skip reload till user clicks on search tab'); - } - hasPendingUpdate = true; - return; - } else { - hasPendingUpdate = false; - reloadAllSections(); - } + onDataUpdate(); + }); + _onPeopleChangedEvent = + Bus.instance.on().listen((event) { + onDataUpdate(); }); _tabChangeEvent = Bus.instance.on().listen((event) { if (event.source == TabChangedEventSource.pageView && @@ -72,6 +69,18 @@ class _AllSectionsExamplesProviderState reloadAllSections(); } + void onDataUpdate() { + if (!isOnSearchTab) { + if (kDebugMode) { + _logger.finest('Skip reload till user clicks on search tab'); + } + hasPendingUpdate = true; + } else { + hasPendingUpdate = false; + reloadAllSections(); + } + } + void reloadAllSections() { _logger.info('queue reload all sections'); _debouncer.run(() async { @@ -79,22 +88,28 @@ class _AllSectionsExamplesProviderState _logger.info("'_debounceTimer: reloading all sections in search tab"); final allSectionsExamples = >>[]; for (SectionType sectionType in SectionType.values) { - if (sectionType == SectionType.face || - sectionType == SectionType.content) { + if (sectionType == SectionType.content) { continue; } allSectionsExamples.add( sectionType.getData(context, limit: kSearchSectionLimit), ); } - allSectionsExamplesFuture = - Future.wait>(allSectionsExamples); + try { + allSectionsExamplesFuture = Future.wait>( + allSectionsExamples, + eagerError: false, + ); + } catch (e) { + _logger.severe("Error reloading all sections: $e"); + } }); }); } @override void dispose() { + _onPeopleChangedEvent.cancel(); _filesUpdatedEvent.cancel(); _tabChangeEvent.cancel(); _debouncer.cancelDebounce(); diff --git a/mobile/lib/ui/components/bottom_action_bar/bottom_action_bar_widget.dart b/mobile/lib/ui/components/bottom_action_bar/bottom_action_bar_widget.dart index b896e0f1f9..a0c50be21d 100644 --- 
a/mobile/lib/ui/components/bottom_action_bar/bottom_action_bar_widget.dart +++ b/mobile/lib/ui/components/bottom_action_bar/bottom_action_bar_widget.dart @@ -1,5 +1,6 @@ import 'package:flutter/material.dart'; import 'package:photos/core/constants.dart'; +import "package:photos/face/model/person.dart"; import 'package:photos/models/collection/collection.dart'; import "package:photos/models/gallery_type.dart"; import 'package:photos/models/selected_files.dart'; @@ -11,6 +12,8 @@ import "package:photos/ui/viewer/actions/file_selection_actions_widget.dart"; class BottomActionBarWidget extends StatelessWidget { final GalleryType galleryType; final Collection? collection; + final PersonEntity? person; + final int? clusterID; final SelectedFiles selectedFiles; final VoidCallback? onCancel; final Color? backgroundColor; @@ -19,6 +22,8 @@ class BottomActionBarWidget extends StatelessWidget { required this.galleryType, required this.selectedFiles, this.collection, + this.person, + this.clusterID, this.onCancel, this.backgroundColor, super.key, @@ -54,6 +59,8 @@ class BottomActionBarWidget extends StatelessWidget { galleryType, selectedFiles, collection: collection, + person: person, + clusterID: clusterID, ), const DividerWidget(dividerType: DividerType.bottomBar), ActionBarWidget( diff --git a/mobile/lib/ui/components/buttons/icon_button_widget.dart b/mobile/lib/ui/components/buttons/icon_button_widget.dart index 258b339d7a..3e51f87898 100644 --- a/mobile/lib/ui/components/buttons/icon_button_widget.dart +++ b/mobile/lib/ui/components/buttons/icon_button_widget.dart @@ -17,6 +17,7 @@ class IconButtonWidget extends StatefulWidget { final Color? pressedColor; final Color? 
iconColor; final double size; + final bool roundedIcon; const IconButtonWidget({ required this.icon, required this.iconButtonType, @@ -26,6 +27,7 @@ class IconButtonWidget extends StatefulWidget { this.pressedColor, this.iconColor, this.size = 24, + this.roundedIcon = true, super.key, }); @@ -68,22 +70,31 @@ class _IconButtonWidgetState extends State { Widget _iconButton(EnteColorScheme colorTheme) { return Padding( padding: const EdgeInsets.all(4.0), - child: AnimatedContainer( - duration: const Duration(milliseconds: 20), - padding: const EdgeInsets.all(8), - decoration: BoxDecoration( - borderRadius: BorderRadius.circular(widget.size), - color: iconStateColor, - ), - child: Icon( - widget.icon, - color: widget.iconColor ?? - (widget.iconButtonType == IconButtonType.secondary - ? colorTheme.strokeMuted - : colorTheme.strokeBase), - size: widget.size, - ), - ), + child: widget.roundedIcon + ? AnimatedContainer( + duration: const Duration(milliseconds: 20), + padding: const EdgeInsets.all(8), + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(widget.size), + color: iconStateColor, + ), + child: Icon( + widget.icon, + color: widget.iconColor ?? + (widget.iconButtonType == IconButtonType.secondary + ? colorTheme.strokeMuted + : colorTheme.strokeBase), + size: widget.size, + ), + ) + : Icon( + widget.icon, + color: widget.iconColor ?? + (widget.iconButtonType == IconButtonType.secondary + ? colorTheme.strokeMuted + : colorTheme.strokeBase), + size: widget.size, + ), ); } diff --git a/mobile/lib/ui/components/info_item_widget.dart b/mobile/lib/ui/components/info_item_widget.dart index 5bec95ccfc..73517e0520 100644 --- a/mobile/lib/ui/components/info_item_widget.dart +++ b/mobile/lib/ui/components/info_item_widget.dart @@ -11,6 +11,7 @@ class InfoItemWidget extends StatelessWidget { final Widget? endSection; final Future> subtitleSection; final bool hasChipButtons; + final bool biggerSpinner; final VoidCallback? 
onTap; const InfoItemWidget({ required this.leadingIcon, @@ -19,6 +20,7 @@ class InfoItemWidget extends StatelessWidget { this.endSection, required this.subtitleSection, this.hasChipButtons = false, + this.biggerSpinner = false, this.onTap, super.key, }); @@ -57,10 +59,11 @@ class InfoItemWidget extends StatelessWidget { } } else { child = EnteLoadingWidget( - padding: 3, - size: 11, + padding: biggerSpinner ? 6 : 3, + size: biggerSpinner ? 20 : 11, color: getEnteColorScheme(context).strokeMuted, - alignment: Alignment.centerLeft, + alignment: + biggerSpinner ? Alignment.center : Alignment.centerLeft, ); } return AnimatedSwitcher( diff --git a/mobile/lib/ui/components/notification_widget.dart b/mobile/lib/ui/components/notification_widget.dart index 6779a58fae..864e4c29c0 100644 --- a/mobile/lib/ui/components/notification_widget.dart +++ b/mobile/lib/ui/components/notification_widget.dart @@ -10,6 +10,7 @@ import 'package:photos/ui/components/buttons/icon_button_widget.dart'; enum NotificationType { warning, banner, + greenBanner, goldenBanner, notice, } @@ -67,6 +68,18 @@ class NotificationWidget extends StatelessWidget { ); boxShadow = Theme.of(context).colorScheme.enteTheme.shadowMenu; break; + case NotificationType.greenBanner: + backgroundGradient = LinearGradient( + colors: [ + getEnteColorScheme(context).primary700, + getEnteColorScheme(context).primary500, + ], + stops: const [0.25, 1], + begin: Alignment.bottomCenter, + end: Alignment.topCenter, + ); + boxShadow = Theme.of(context).colorScheme.enteTheme.shadowMenu; + break; case NotificationType.notice: backgroundColor = colorScheme.backgroundElevated2; mainTextStyle = textTheme.bodyBold; diff --git a/mobile/lib/ui/settings/debug_section_widget.dart b/mobile/lib/ui/settings/debug/debug_section_widget.dart similarity index 99% rename from mobile/lib/ui/settings/debug_section_widget.dart rename to mobile/lib/ui/settings/debug/debug_section_widget.dart index 039655ca31..56070c214e 100644 --- 
a/mobile/lib/ui/settings/debug_section_widget.dart +++ b/mobile/lib/ui/settings/debug/debug_section_widget.dart @@ -67,7 +67,6 @@ class DebugSectionWidget extends StatelessWidget { showShortToast(context, "Done"); }, ), - sectionOptionSpacing, ], ); } diff --git a/mobile/lib/ui/settings/debug/face_debug_section_widget.dart b/mobile/lib/ui/settings/debug/face_debug_section_widget.dart new file mode 100644 index 0000000000..726a9f2ceb --- /dev/null +++ b/mobile/lib/ui/settings/debug/face_debug_section_widget.dart @@ -0,0 +1,347 @@ +import "dart:async"; + +import "package:flutter/foundation.dart"; +import 'package:flutter/material.dart'; +import "package:logging/logging.dart"; +import "package:photos/core/event_bus.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/person.dart"; +import 'package:photos/services/machine_learning/face_ml/face_ml_service.dart'; +import "package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart"; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import 'package:photos/theme/ente_theme.dart'; +import 'package:photos/ui/components/captioned_text_widget.dart'; +import 'package:photos/ui/components/expandable_menu_item_widget.dart'; +import 'package:photos/ui/components/menu_item_widget/menu_item_widget.dart'; +import 'package:photos/ui/settings/common_settings.dart'; +import "package:photos/utils/dialog_util.dart"; +import "package:photos/utils/local_settings.dart"; +import 'package:photos/utils/toast_util.dart'; + +class FaceDebugSectionWidget extends StatefulWidget { + const FaceDebugSectionWidget({Key? key}) : super(key: key); + + @override + State createState() => _FaceDebugSectionWidgetState(); +} + +class _FaceDebugSectionWidgetState extends State { + Timer? 
_timer; + @override + void initState() { + super.initState(); + _timer = Timer.periodic(const Duration(seconds: 5), (timer) { + setState(() { + // Your state update logic here + }); + }); + } + + @override + void dispose() { + _timer?.cancel(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return ExpandableMenuItemWidget( + title: "Faces Debug", + selectionOptionsWidget: _getSectionOptions(context), + leadingIcon: Icons.bug_report_outlined, + ); + } + + Widget _getSectionOptions(BuildContext context) { + final Logger _logger = Logger("FaceDebugSectionWidget"); + return Column( + children: [ + MenuItemWidget( + captionedTextWidget: FutureBuilder( + future: FaceMLDataDB.instance.getIndexedFileCount(), + builder: (context, snapshot) { + if (snapshot.hasData) { + return CaptionedTextWidget( + title: LocalSettings.instance.isFaceIndexingEnabled + ? "Disable faces (${snapshot.data!} files done)" + : "Enable faces (${snapshot.data!} files done)", + ); + } + return const SizedBox.shrink(); + }, + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + final isEnabled = + await LocalSettings.instance.toggleFaceIndexing(); + if (!isEnabled) { + FaceMlService.instance.pauseIndexingAndClustering(); + } + if (mounted) { + setState(() {}); + } + } catch (e, s) { + _logger.warning('indexing failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: CaptionedTextWidget( + title: LocalSettings.instance.remoteFetchEnabled + ? 
"Remote fetch enabled" + : "Remote fetch disabled", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + await LocalSettings.instance.toggleRemoteFetch(); + if (mounted) { + setState(() {}); + } + } catch (e, s) { + _logger.warning('Remote fetch toggle failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: CaptionedTextWidget( + title: FaceMlService.instance.debugIndexingDisabled + ? "Debug enable indexing again" + : "Debug disable indexing", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + FaceMlService.instance.debugIndexingDisabled = + !FaceMlService.instance.debugIndexingDisabled; + if (FaceMlService.instance.debugIndexingDisabled) { + FaceMlService.instance.pauseIndexingAndClustering(); + } + if (mounted) { + setState(() {}); + } + } catch (e, s) { + _logger.warning('debugIndexingDisabled toggle failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: const CaptionedTextWidget( + title: "Run sync, indexing, clustering", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + FaceMlService.instance.debugIndexingDisabled = false; + unawaited(FaceMlService.instance.indexAndClusterAll()); + } catch (e, s) { + _logger.warning('indexAndClusterAll failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: const CaptionedTextWidget( + title: "Run indexing", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: 
Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + FaceMlService.instance.debugIndexingDisabled = false; + unawaited(FaceMlService.instance.indexAllImages()); + } catch (e, s) { + _logger.warning('indexing failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: FutureBuilder( + future: FaceMLDataDB.instance.getClusteredToTotalFacesRatio(), + builder: (context, snapshot) { + if (snapshot.hasData) { + return CaptionedTextWidget( + title: + "Run clustering (${(100 * snapshot.data!).toStringAsFixed(0)}% done)", + ); + } + return const SizedBox.shrink(); + }, + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + await PersonService.instance.storeRemoteFeedback(); + FaceMlService.instance.debugIndexingDisabled = false; + await FaceMlService.instance + .clusterAllImages(clusterInBuckets: true); + Bus.instance.fire(PeopleChangedEvent()); + showShortToast(context, "Done"); + } catch (e, s) { + _logger.warning('clustering failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: const CaptionedTextWidget( + title: "Check for mixed clusters", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + final susClusters = + await ClusterFeedbackService.instance.checkForMixedClusters(); + for (final clusterinfo in susClusters) { + Future.delayed(const Duration(seconds: 4), () { + showToast( + context, + 'Cluster with ${clusterinfo.$2} photos is sus', + ); + }); + } + } catch (e, s) { + _logger.warning('Checking for mixed clusters failed', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + 
sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: const CaptionedTextWidget( + title: "Sync person mappings ", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + try { + await PersonService.instance.reconcileClusters(); + Bus.instance.fire(PeopleChangedEvent()); + showShortToast(context, "Done"); + } catch (e, s) { + _logger.warning('sync person mappings failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: const CaptionedTextWidget( + title: "Reset feedback", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + alwaysShowSuccessState: true, + onTap: () async { + await showChoiceDialog( + context, + title: "Are you sure?", + body: + "This will drop all people and their related feedback. It will keep clustering labels and embeddings untouched.", + firstButtonLabel: "Yes, confirm", + firstButtonOnTap: () async { + try { + await FaceMLDataDB.instance.dropFeedbackTables(); + Bus.instance.fire(PeopleChangedEvent()); + showShortToast(context, "Done"); + } catch (e, s) { + _logger.warning('reset feedback failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ); + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: const CaptionedTextWidget( + title: "Reset feedback and clustering", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + await showChoiceDialog( + context, + title: "Are you sure?", + body: + "This will delete all people, their related feedback and clustering labels. 
It will keep embeddings untouched.", + firstButtonLabel: "Yes, confirm", + firstButtonOnTap: () async { + try { + final List persons = + await PersonService.instance.getPersons(); + for (final PersonEntity p in persons) { + await PersonService.instance.deletePerson(p.remoteID); + } + await FaceMLDataDB.instance.dropClustersAndPersonTable(); + Bus.instance.fire(PeopleChangedEvent()); + showShortToast(context, "Done"); + } catch (e, s) { + _logger.warning('peopleToPersonMapping remove failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ); + }, + ), + sectionOptionSpacing, + MenuItemWidget( + captionedTextWidget: const CaptionedTextWidget( + title: "Reset everything (embeddings)", + ), + pressedColor: getEnteColorScheme(context).fillFaint, + trailingIcon: Icons.chevron_right_outlined, + trailingIconIsMuted: true, + onTap: () async { + await showChoiceDialog( + context, + title: "Are you sure?", + body: + "You will need to again re-index all the faces. You can drop feedback if you want to label again", + firstButtonLabel: "Yes, confirm", + firstButtonOnTap: () async { + try { + await FaceMLDataDB.instance + .dropClustersAndPersonTable(faces: true); + Bus.instance.fire(PeopleChangedEvent()); + showShortToast(context, "Done"); + } catch (e, s) { + _logger.warning('drop feedback failed ', e, s); + await showGenericErrorDialog(context: context, error: e); + } + }, + ); + }, + ), + ], + ); + } +} diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart index 3306ea36f7..47e2166282 100644 --- a/mobile/lib/ui/settings/machine_learning_settings_page.dart +++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart @@ -1,13 +1,19 @@ import "dart:async"; +import "dart:math" show max, min; import "package:flutter/material.dart"; import "package:intl/intl.dart"; +import "package:logging/logging.dart"; import "package:photos/core/event_bus.dart"; import 
'package:photos/events/embedding_updated_event.dart'; +import "package:photos/face/db.dart"; import "package:photos/generated/l10n.dart"; +import "package:photos/models/ml/ml_versions.dart"; import "package:photos/service_locator.dart"; +import "package:photos/services/machine_learning/face_ml/face_ml_service.dart"; import 'package:photos/services/machine_learning/semantic_search/frameworks/ml_framework.dart'; import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart'; +import "package:photos/services/remote_assets_service.dart"; import "package:photos/theme/ente_theme.dart"; import "package:photos/ui/common/loading_widget.dart"; import "package:photos/ui/components/buttons/icon_button_widget.dart"; @@ -18,8 +24,11 @@ import "package:photos/ui/components/menu_section_title.dart"; import "package:photos/ui/components/title_bar_title_widget.dart"; import "package:photos/ui/components/title_bar_widget.dart"; import "package:photos/ui/components/toggle_switch_widget.dart"; +import "package:photos/utils/data_util.dart"; import "package:photos/utils/local_settings.dart"; +final _logger = Logger("MachineLearningSettingsPage"); + class MachineLearningSettingsPage extends StatefulWidget { const MachineLearningSettingsPage({super.key}); @@ -58,6 +67,8 @@ class _MachineLearningSettingsPageState @override Widget build(BuildContext context) { + final bool facesFlag = flagService.faceSearchEnabled; + _logger.info("On page open, facesFlag: $facesFlag"); return Scaffold( body: CustomScrollView( primary: false, @@ -89,6 +100,10 @@ class _MachineLearningSettingsPageState mainAxisSize: MainAxisSize.min, children: [ _getMagicSearchSettings(context), + const SizedBox(height: 12), + facesFlag + ? 
_getFacesSearchSettings(context) + : const SizedBox.shrink(), ], ), ), @@ -174,9 +189,54 @@ class _MachineLearningSettingsPageState ], ); } + + Widget _getFacesSearchSettings(BuildContext context) { + final colorScheme = getEnteColorScheme(context); + final hasEnabled = LocalSettings.instance.isFaceIndexingEnabled; + return Column( + children: [ + MenuItemWidget( + captionedTextWidget: CaptionedTextWidget( + title: S.of(context).faceRecognition, + ), + menuItemColor: colorScheme.fillFaint, + trailingWidget: ToggleSwitchWidget( + value: () => LocalSettings.instance.isFaceIndexingEnabled, + onChanged: () async { + final isEnabled = + await LocalSettings.instance.toggleFaceIndexing(); + if (isEnabled) { + unawaited(FaceMlService.instance.ensureInitialized()); + } else { + FaceMlService.instance.pauseIndexingAndClustering(); + } + if (mounted) { + setState(() {}); + } + }, + ), + singleBorderRadius: 8, + alignCaptionedTextToLeft: true, + isGestureDetectorDisabled: true, + ), + const SizedBox( + height: 4, + ), + MenuSectionDescriptionWidget( + content: S.of(context).faceRecognitionIndexingDescription, + ), + const SizedBox( + height: 12, + ), + hasEnabled + ? const FaceRecognitionStatusWidget() + : const SizedBox.shrink(), + ], + ); + } } -class ModelLoadingState extends StatelessWidget { +class ModelLoadingState extends StatefulWidget { final InitializationState state; const ModelLoadingState( @@ -184,6 +244,38 @@ class ModelLoadingState extends StatelessWidget { Key? key, }) : super(key: key); + @override + State createState() => _ModelLoadingStateState(); +} + +class _ModelLoadingStateState extends State { + StreamSubscription<(String, int, int)>? 
_progressStream; + final Map _progressMap = {}; + @override + void initState() { + _progressStream = + RemoteAssetsService.instance.progressStream.listen((event) { + final String url = event.$1; + String title = ""; + if (url.contains("clip-image")) { + title = "Image Model"; + } else if (url.contains("clip-text")) { + title = "Text Model"; + } + if (title.isNotEmpty) { + _progressMap[title] = (event.$2, event.$3); + setState(() {}); + } + }); + super.initState(); + } + + @override + void dispose() { + super.dispose(); + _progressStream?.cancel(); + } + @override Widget build(BuildContext context) { return Column( @@ -201,12 +293,31 @@ class ModelLoadingState extends StatelessWidget { alignCaptionedTextToLeft: true, isGestureDetectorDisabled: true, ), + // show the progress map if in debug mode + if (flagService.internalUser) + ..._progressMap.entries.map((entry) { + return MenuItemWidget( + key: ValueKey(entry.value), + captionedTextWidget: CaptionedTextWidget( + title: entry.key, + ), + trailingWidget: Text( + entry.value.$1 == entry.value.$2 + ? "Done" + : "${formatBytes(entry.value.$1)} / ${formatBytes(entry.value.$2)}", + style: Theme.of(context).textTheme.bodySmall, + ), + singleBorderRadius: 8, + alignCaptionedTextToLeft: true, + isGestureDetectorDisabled: true, + ); + }).toList(), ], ); } String _getTitle(BuildContext context) { - switch (state) { + switch (widget.state) { case InitializationState.waitingForNetwork: return S.of(context).waitingForWifi; default: @@ -228,13 +339,13 @@ class MagicSearchIndexStatsWidget extends StatefulWidget { class _MagicSearchIndexStatsWidgetState extends State { IndexStatus? 
_status; - late StreamSubscription _eventSubscription; + late StreamSubscription _eventSubscription; @override void initState() { super.initState(); _eventSubscription = - Bus.instance.on().listen((event) { + Bus.instance.on().listen((event) { _fetchIndexStatus(); }); _fetchIndexStatus(); @@ -303,3 +414,140 @@ class _MagicSearchIndexStatsWidgetState ); } } + +class FaceRecognitionStatusWidget extends StatefulWidget { + const FaceRecognitionStatusWidget({ + super.key, + }); + + @override + State createState() => + FaceRecognitionStatusWidgetState(); +} + +class FaceRecognitionStatusWidgetState + extends State { + Timer? _timer; + @override + void initState() { + super.initState(); + _timer = Timer.periodic(const Duration(seconds: 10), (timer) { + setState(() { + // Your state update logic here + }); + }); + } + + Future<(int, int, int, double)> getIndexStatus() async { + try { + final indexedFiles = await FaceMLDataDB.instance + .getIndexedFileCount(minimumMlVersion: faceMlVersion); + final indexableFiles = (await FaceMlService.getIndexableFileIDs()).length; + final showIndexedFiles = min(indexedFiles, indexableFiles); + final pendingFiles = max(indexableFiles - indexedFiles, 0); + final foundFaces = await FaceMLDataDB.instance.getTotalFaceCount(); + final clusteredFaces = + await FaceMLDataDB.instance.getClusteredFaceCount(); + final clusteringDoneRatio = clusteredFaces / foundFaces; + + return (showIndexedFiles, pendingFiles, foundFaces, clusteringDoneRatio); + } catch (e, s) { + _logger.severe('Error getting face recognition status', e, s); + rethrow; + } + } + + @override + void dispose() { + _timer?.cancel(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return Column( + children: [ + Row( + children: [ + MenuSectionTitle(title: S.of(context).status), + Expanded(child: Container()), + ], + ), + FutureBuilder( + future: getIndexStatus(), + builder: (context, snapshot) { + if (snapshot.hasData) { + final int indexedFiles = 
snapshot.data!.$1; + final int pendingFiles = snapshot.data!.$2; + final int foundFaces = snapshot.data!.$3; + final double clusteringDoneRatio = snapshot.data!.$4; + final double clusteringPercentage = + (clusteringDoneRatio * 100).clamp(0, 100); + + return Column( + children: [ + MenuItemWidget( + captionedTextWidget: CaptionedTextWidget( + title: S.of(context).indexedItems, + ), + trailingWidget: Text( + NumberFormat().format(indexedFiles), + style: Theme.of(context).textTheme.bodySmall, + ), + singleBorderRadius: 8, + alignCaptionedTextToLeft: true, + isGestureDetectorDisabled: true, + key: ValueKey("indexed_items_" + indexedFiles.toString()), + ), + MenuItemWidget( + captionedTextWidget: CaptionedTextWidget( + title: S.of(context).pendingItems, + ), + trailingWidget: Text( + NumberFormat().format(pendingFiles), + style: Theme.of(context).textTheme.bodySmall, + ), + singleBorderRadius: 8, + alignCaptionedTextToLeft: true, + isGestureDetectorDisabled: true, + key: ValueKey("pending_items_" + pendingFiles.toString()), + ), + MenuItemWidget( + captionedTextWidget: CaptionedTextWidget( + title: S.of(context).foundFaces, + ), + trailingWidget: Text( + NumberFormat().format(foundFaces), + style: Theme.of(context).textTheme.bodySmall, + ), + singleBorderRadius: 8, + alignCaptionedTextToLeft: true, + isGestureDetectorDisabled: true, + key: ValueKey("found_faces_" + foundFaces.toString()), + ), + MenuItemWidget( + captionedTextWidget: CaptionedTextWidget( + title: S.of(context).clusteringProgress, + ), + trailingWidget: Text( + "${clusteringPercentage.toStringAsFixed(0)}%", + style: Theme.of(context).textTheme.bodySmall, + ), + singleBorderRadius: 8, + alignCaptionedTextToLeft: true, + isGestureDetectorDisabled: true, + key: ValueKey( + "clustering_progress_" + + clusteringPercentage.toStringAsFixed(0), + ), + ), + ], + ); + } + return const EnteLoadingWidget(); + }, + ), + ], + ); + } +} diff --git a/mobile/lib/ui/settings_page.dart b/mobile/lib/ui/settings_page.dart 
index d5ba1254f6..cc0064a306 100644 --- a/mobile/lib/ui/settings_page.dart +++ b/mobile/lib/ui/settings_page.dart @@ -17,7 +17,8 @@ import 'package:photos/ui/settings/about_section_widget.dart'; import 'package:photos/ui/settings/account_section_widget.dart'; import 'package:photos/ui/settings/app_version_widget.dart'; import 'package:photos/ui/settings/backup/backup_section_widget.dart'; -import 'package:photos/ui/settings/debug_section_widget.dart'; +import 'package:photos/ui/settings/debug/debug_section_widget.dart'; +import "package:photos/ui/settings/debug/face_debug_section_widget.dart"; import "package:photos/ui/settings/developer_settings_widget.dart"; import 'package:photos/ui/settings/general_section_widget.dart'; import 'package:photos/ui/settings/inherited_settings_state.dart'; @@ -53,6 +54,7 @@ class SettingsPage extends StatelessWidget { final hasLoggedIn = Configuration.instance.isLoggedIn(); final enteTextTheme = getEnteTextTheme(context); final List contents = []; + const sectionSpacing = SizedBox(height: 8); contents.add( GestureDetector( onDoubleTap: () { @@ -82,7 +84,7 @@ class SettingsPage extends StatelessWidget { ), ), ); - const sectionSpacing = SizedBox(height: 8); + contents.add(const SizedBox(height: 8)); if (hasLoggedIn) { final showStorageBonusBanner = @@ -142,6 +144,9 @@ class SettingsPage extends StatelessWidget { if (hasLoggedIn && flagService.internalUser) { contents.addAll([sectionSpacing, const DebugSectionWidget()]); + if (flagService.faceSearchEnabled) { + contents.addAll([sectionSpacing, const FaceDebugSectionWidget()]); + } } contents.add(const AppVersionWidget()); contents.add(const DeveloperSettingsWidget()); diff --git a/mobile/lib/ui/tools/app_lock.dart b/mobile/lib/ui/tools/app_lock.dart index c27555df0a..c9af24f711 100644 --- a/mobile/lib/ui/tools/app_lock.dart +++ b/mobile/lib/ui/tools/app_lock.dart @@ -113,6 +113,7 @@ class _AppLockState extends State with WidgetsBindingObserver { theme: widget.lightTheme, darkTheme: 
widget.darkTheme, locale: widget.locale, + debugShowCheckedModeBanner: false, supportedLocales: appSupportedLocales, localeListResolutionCallback: localResolutionCallBack, localizationsDelegates: const [ diff --git a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart index e805927a64..beeb9164d5 100644 --- a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart +++ b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart @@ -1,11 +1,15 @@ import "dart:async"; import 'package:fast_base58/fast_base58.dart'; +import "package:flutter/cupertino.dart"; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; import "package:logging/logging.dart"; import "package:modal_bottom_sheet/modal_bottom_sheet.dart"; import 'package:photos/core/configuration.dart'; +import "package:photos/core/event_bus.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/face/model/person.dart"; import "package:photos/generated/l10n.dart"; import 'package:photos/models/collection/collection.dart'; import 'package:photos/models/device_collection.dart'; @@ -17,6 +21,8 @@ import "package:photos/models/metadata/common_keys.dart"; import 'package:photos/models/selected_files.dart'; import 'package:photos/services/collections_service.dart'; import 'package:photos/services/hidden_service.dart'; +import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart'; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; import "package:photos/theme/colors.dart"; import "package:photos/theme/ente_theme.dart"; import 'package:photos/ui/actions/collection/collection_file_actions.dart'; @@ -42,12 +48,16 @@ class FileSelectionActionsWidget extends StatefulWidget { final Collection? collection; final DeviceCollection? deviceCollection; final SelectedFiles selectedFiles; + final PersonEntity? 
person; + final int? clusterID; const FileSelectionActionsWidget( this.type, this.selectedFiles, { Key? key, this.collection, + this.person, + this.clusterID, this.deviceCollection, }) : super(key: key); @@ -123,7 +133,24 @@ class _FileSelectionActionsWidgetState //and set [shouldShow] to false for items that should not be shown and true //for items that should be shown. final List items = []; - + if (widget.type == GalleryType.peopleTag && widget.person != null) { + items.add( + SelectionActionButton( + icon: Icons.remove_circle_outline, + labelText: 'Not ${widget.person!.data.name}?', + onTap: anyUploadedFiles ? _onNotpersonClicked : null, + ), + ); + if (ownedFilesCount == 1) { + items.add( + SelectionActionButton( + icon: Icons.image_outlined, + labelText: 'Use as cover', + onTap: anyUploadedFiles ? _setPersonCover : null, + ), + ); + } + } if (widget.type.showCreateLink()) { if (_cachedCollectionForSharedLink != null && anyUploadedFiles) { items.add( @@ -390,36 +417,50 @@ class _FileSelectionActionsWidgetState ), ); - final scrollController = ScrollController(); - // h4ck: https://github.com/flutter/flutter/issues/57920#issuecomment-893970066 - return MediaQuery( - data: MediaQuery.of(context).removePadding(removeBottom: true), - child: SafeArea( - child: Scrollbar( - radius: const Radius.circular(1), - thickness: 2, - controller: scrollController, - thumbVisibility: true, - child: SingleChildScrollView( - physics: const BouncingScrollPhysics( - decelerationRate: ScrollDecelerationRate.fast, - ), - scrollDirection: Axis.horizontal, - child: Container( - padding: const EdgeInsets.only(bottom: 24), - child: Row( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - const SizedBox(width: 4), - ...items, - const SizedBox(width: 4), - ], + // if (widget.type == GalleryType.cluster && widget.clusterID != null) { + if (widget.type == GalleryType.cluster && widget.clusterID != null) { + items.add( + SelectionActionButton( + labelText: 'Remove', + icon: 
CupertinoIcons.minus, + onTap: anyUploadedFiles ? _onRemoveFromClusterClicked : null, + ), + ); + } + + if (items.isNotEmpty) { + final scrollController = ScrollController(); + // h4ck: https://github.com/flutter/flutter/issues/57920#issuecomment-893970066 + return MediaQuery( + data: MediaQuery.of(context).removePadding(removeBottom: true), + child: SafeArea( + child: Scrollbar( + radius: const Radius.circular(1), + thickness: 2, + controller: scrollController, + thumbVisibility: true, + child: SingleChildScrollView( + physics: const BouncingScrollPhysics( + decelerationRate: ScrollDecelerationRate.fast, + ), + scrollDirection: Axis.horizontal, + child: Container( + padding: const EdgeInsets.only(bottom: 24), + child: Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + const SizedBox(width: 4), + ...items, + const SizedBox(width: 4), + ], + ), ), ), ), ), - ), - ); + ); + } + return const SizedBox(); } Future _moveFiles() async { @@ -620,6 +661,101 @@ class _FileSelectionActionsWidgetState } } + Future _setPersonCover() async { + final EnteFile file = widget.selectedFiles.files.first; + await PersonService.instance.updateAttributes( + widget.person!.remoteID, + avatarFaceId: file.uploadedFileID.toString(), + ); + widget.selectedFiles.clearAll(); + if (mounted) { + setState(() => {}); + } + Bus.instance.fire(PeopleChangedEvent()); + } + + Future _onNotpersonClicked() async { + final actionResult = await showActionSheet( + context: context, + buttons: [ + ButtonWidget( + labelText: S.of(context).yesRemove, + buttonType: ButtonType.neutral, + buttonSize: ButtonSize.large, + shouldStickToDarkTheme: true, + buttonAction: ButtonAction.first, + isInAlert: true, + ), + ButtonWidget( + labelText: S.of(context).cancel, + buttonType: ButtonType.secondary, + buttonSize: ButtonSize.large, + buttonAction: ButtonAction.second, + shouldStickToDarkTheme: true, + isInAlert: true, + ), + ], + title: "Remove these photos for ${widget.person!.data.name}?", + 
actionSheetType: ActionSheetType.defaultActionSheet, + ); + if (actionResult?.action != null) { + if (actionResult!.action == ButtonAction.first) { + await ClusterFeedbackService.instance.removeFilesFromPerson( + widget.selectedFiles.files.toList(), + widget.person!, + ); + } + Bus.instance.fire(PeopleChangedEvent()); + } + widget.selectedFiles.clearAll(); + if (mounted) { + setState(() => {}); + } + } + + Future _onRemoveFromClusterClicked() async { + if (widget.clusterID == null) { + showShortToast(context, 'Cluster ID is null. Cannot remove files.'); + return; + } + final actionResult = await showActionSheet( + context: context, + buttons: [ + ButtonWidget( + labelText: S.of(context).yesRemove, + buttonType: ButtonType.neutral, + buttonSize: ButtonSize.large, + shouldStickToDarkTheme: true, + buttonAction: ButtonAction.first, + isInAlert: true, + ), + ButtonWidget( + labelText: S.of(context).cancel, + buttonType: ButtonType.secondary, + buttonSize: ButtonSize.large, + buttonAction: ButtonAction.second, + shouldStickToDarkTheme: true, + isInAlert: true, + ), + ], + title: "Remove these photos?", + actionSheetType: ActionSheetType.defaultActionSheet, + ); + if (actionResult?.action != null) { + if (actionResult!.action == ButtonAction.first) { + await ClusterFeedbackService.instance.removeFilesFromCluster( + widget.selectedFiles.files.toList(), + widget.clusterID!, + ); + } + Bus.instance.fire(PeopleChangedEvent()); + } + widget.selectedFiles.clearAll(); + if (mounted) { + setState(() => {}); + } + } + Future _copyLink() async { if (_cachedCollectionForSharedLink != null) { final String collectionKey = Base58Encode( diff --git a/mobile/lib/ui/viewer/actions/file_selection_overlay_bar.dart b/mobile/lib/ui/viewer/actions/file_selection_overlay_bar.dart index bc832c5736..8e2260c74d 100644 --- a/mobile/lib/ui/viewer/actions/file_selection_overlay_bar.dart +++ b/mobile/lib/ui/viewer/actions/file_selection_overlay_bar.dart @@ -1,4 +1,5 @@ import 
'package:flutter/material.dart'; +import "package:photos/face/model/person.dart"; import 'package:photos/models/collection/collection.dart'; import 'package:photos/models/gallery_type.dart'; import 'package:photos/models/selected_files.dart'; @@ -10,12 +11,16 @@ class FileSelectionOverlayBar extends StatefulWidget { final SelectedFiles selectedFiles; final Collection? collection; final Color? backgroundColor; + final PersonEntity? person; + final int? clusterID; const FileSelectionOverlayBar( this.galleryType, this.selectedFiles, { this.collection, this.backgroundColor, + this.person, + this.clusterID, Key? key, }) : super(key: key); @@ -65,6 +70,8 @@ class _FileSelectionOverlayBarState extends State { selectedFiles: widget.selectedFiles, galleryType: widget.galleryType, collection: widget.collection, + person: widget.person, + clusterID: widget.clusterID, onCancel: () { if (widget.selectedFiles.files.isNotEmpty) { widget.selectedFiles.clearAll(); diff --git a/mobile/lib/ui/viewer/file/file_app_bar.dart b/mobile/lib/ui/viewer/file/file_app_bar.dart index 2f2c8d0614..aa46de55a2 100644 --- a/mobile/lib/ui/viewer/file/file_app_bar.dart +++ b/mobile/lib/ui/viewer/file/file_app_bar.dart @@ -131,9 +131,13 @@ class FileAppBarState extends State { ), ); } - // only show fav option for files owned by the user if (!isFileHidden && isFileUploaded) { - _actions.add(FavoriteWidget(widget.file)); + _actions.add( + Padding( + padding: const EdgeInsets.all(8), + child: FavoriteWidget(widget.file), + ), + ); } if (!isFileUploaded) { _actions.add( diff --git a/mobile/lib/ui/viewer/file/file_details_widget.dart b/mobile/lib/ui/viewer/file/file_details_widget.dart index f8e7abb8ee..d87a806cc4 100644 --- a/mobile/lib/ui/viewer/file/file_details_widget.dart +++ b/mobile/lib/ui/viewer/file/file_details_widget.dart @@ -1,7 +1,11 @@ +import "dart:async" show StreamSubscription; + import "package:exif/exif.dart"; import "package:flutter/material.dart"; import "package:logging/logging.dart"; 
import "package:photos/core/configuration.dart"; +import "package:photos/core/event_bus.dart"; +import "package:photos/events/people_changed_event.dart"; import "package:photos/generated/l10n.dart"; import 'package:photos/models/file/file.dart'; import 'package:photos/models/file/file_type.dart'; @@ -18,9 +22,9 @@ import "package:photos/ui/viewer/file_details/albums_item_widget.dart"; import 'package:photos/ui/viewer/file_details/backed_up_time_item_widget.dart'; import "package:photos/ui/viewer/file_details/creation_time_item_widget.dart"; import 'package:photos/ui/viewer/file_details/exif_item_widgets.dart'; +import "package:photos/ui/viewer/file_details/faces_item_widget.dart"; import "package:photos/ui/viewer/file_details/file_properties_item_widget.dart"; import "package:photos/ui/viewer/file_details/location_tags_widget.dart"; -import "package:photos/ui/viewer/file_details/objects_item_widget.dart"; import "package:photos/utils/exif_util.dart"; class FileDetailsWidget extends StatefulWidget { @@ -51,6 +55,8 @@ class _FileDetailsWidgetState extends State { "longRef": null, }; + late final StreamSubscription _peopleChangedEvent; + bool _isImage = false; late int _currentUserID; bool showExifListTile = false; @@ -65,6 +71,10 @@ class _FileDetailsWidgetState extends State { _isImage = widget.file.fileType == FileType.image || widget.file.fileType == FileType.livePhoto; + _peopleChangedEvent = Bus.instance.on().listen((event) { + setState(() {}); + }); + _exifNotifier.addListener(() { if (_exifNotifier.value != null && !widget.file.hasLocation) { _updateLocationFromExif(_exifNotifier.value!).ignore(); @@ -93,6 +103,7 @@ class _FileDetailsWidgetState extends State { @override void dispose() { _exifNotifier.dispose(); + _peopleChangedEvent.cancel(); super.dispose(); } @@ -221,7 +232,8 @@ class _FileDetailsWidgetState extends State { if (!UpdateService.instance.isFdroidFlavor()) { fileDetailsTiles.addAll([ - ObjectsItemWidget(file), + // ObjectsItemWidget(file), + 
FacesItemWidget(file), const FileDetailsDivider(), ]); } diff --git a/mobile/lib/ui/viewer/file_details/face_widget.dart b/mobile/lib/ui/viewer/file_details/face_widget.dart new file mode 100644 index 0000000000..1ec7a2eb2d --- /dev/null +++ b/mobile/lib/ui/viewer/file_details/face_widget.dart @@ -0,0 +1,506 @@ +import "dart:developer" show log; +import "dart:typed_data"; + +import "package:flutter/cupertino.dart"; +import "package:flutter/foundation.dart" show kDebugMode; +import "package:flutter/material.dart"; +import "package:photos/extensions/stop_watch.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/face.dart"; +import "package:photos/face/model/person.dart"; +import 'package:photos/models/file/file.dart'; +import "package:photos/services/machine_learning/face_ml/face_detection/detection.dart"; +import "package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart"; +import "package:photos/services/search_service.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/viewer/file/no_thumbnail_widget.dart"; +import "package:photos/ui/viewer/people/cluster_page.dart"; +import "package:photos/ui/viewer/people/cropped_face_image_view.dart"; +import "package:photos/ui/viewer/people/people_page.dart"; +import "package:photos/utils/face/face_box_crop.dart"; +import "package:photos/utils/thumbnail_util.dart"; +// import "package:photos/utils/toast_util.dart"; + +const useGeneratedFaceCrops = true; + +class FaceWidget extends StatefulWidget { + final EnteFile file; + final Face face; + final Future?>? faceCrops; + final PersonEntity? person; + final int? clusterID; + final bool highlight; + final bool editMode; + + const FaceWidget( + this.file, + this.face, { + this.faceCrops, + this.person, + this.clusterID, + this.highlight = false, + this.editMode = false, + Key? 
key, + }) : super(key: key); + + @override + State createState() => _FaceWidgetState(); +} + +class _FaceWidgetState extends State { + bool isJustRemoved = false; + + @override + Widget build(BuildContext context) { + final bool givenFaces = widget.faceCrops != null; + if (useGeneratedFaceCrops) { + return _buildFaceImageGenerated(givenFaces); + } else { + return _buildFaceImageFlutterZoom(); + } + } + + Widget _buildFaceImageGenerated(bool givenFaces) { + return FutureBuilder?>( + future: givenFaces ? widget.faceCrops : getFaceCrop(), + builder: (context, snapshot) { + if (snapshot.hasData) { + final ImageProvider imageProvider = + MemoryImage(snapshot.data![widget.face.faceID]!); + + return GestureDetector( + onTap: () async { + if (widget.editMode) return; + + log( + "FaceWidget is tapped, with person ${widget.person} and clusterID ${widget.clusterID}", + name: "FaceWidget", + ); + if (widget.person == null && widget.clusterID == null) { + // Get faceID and double check that it doesn't belong to an existing clusterID. If it does, push that cluster page + final w = (kDebugMode ? EnteWatch('FaceWidget') : null) + ?..start(); + final existingClusterID = await FaceMLDataDB.instance + .getClusterIDForFaceID(widget.face.faceID); + w?.log('getting existing clusterID for faceID'); + if (existingClusterID != null) { + final fileIdsToClusterIds = + await FaceMLDataDB.instance.getFileIdToClusterIds(); + final files = await SearchService.instance.getAllFiles(); + final clusterFiles = files + .where( + (file) => + fileIdsToClusterIds[file.uploadedFileID] + ?.contains(existingClusterID) ?? 
+ false, + ) + .toList(); + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + clusterFiles, + clusterID: existingClusterID, + ), + ), + ); + } + + // Create new clusterID for the faceID and update DB to assign the faceID to the new clusterID + final int newClusterID = DateTime.now().microsecondsSinceEpoch; + await FaceMLDataDB.instance.updateFaceIdToClusterId( + {widget.face.faceID: newClusterID}, + ); + + // Push page for the new cluster + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + [widget.file], + clusterID: newClusterID, + ), + ), + ); + } + if (widget.person != null) { + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => PeoplePage( + person: widget.person!, + ), + ), + ); + } else if (widget.clusterID != null) { + final fileIdsToClusterIds = + await FaceMLDataDB.instance.getFileIdToClusterIds(); + final files = await SearchService.instance.getAllFiles(); + final clusterFiles = files + .where( + (file) => + fileIdsToClusterIds[file.uploadedFileID] + ?.contains(widget.clusterID) ?? + false, + ) + .toList(); + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + clusterFiles, + clusterID: widget.clusterID!, + ), + ), + ); + } + }, + child: Column( + children: [ + Stack( + children: [ + Container( + height: 60, + width: 60, + decoration: ShapeDecoration( + shape: RoundedRectangleBorder( + borderRadius: const BorderRadius.all( + Radius.elliptical(16, 12), + ), + side: widget.highlight + ? 
BorderSide( + color: getEnteColorScheme(context).primary700, + width: 1.0, + ) + : BorderSide.none, + ), + ), + child: ClipRRect( + borderRadius: + const BorderRadius.all(Radius.elliptical(16, 12)), + child: SizedBox( + width: 60, + height: 60, + child: Image( + image: imageProvider, + fit: BoxFit.cover, + ), + ), + ), + ), + // TODO: the edges of the green line are still not properly rounded around ClipRRect + if (widget.editMode) + Positioned( + right: 0, + top: 0, + child: GestureDetector( + onTap: _cornerIconPressed, + child: isJustRemoved + ? const Icon( + CupertinoIcons.add_circled_solid, + color: Colors.green, + ) + : const Icon( + Icons.cancel, + color: Colors.red, + ), + ), + ), + ], + ), + const SizedBox(height: 8), + if (widget.person != null) + Text( + widget.person!.data.isIgnored + ? '(ignored)' + : widget.person!.data.name.trim(), + style: Theme.of(context).textTheme.bodySmall, + overflow: TextOverflow.ellipsis, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'S: ${widget.face.score.toStringAsFixed(3)}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'B: ${widget.face.blur.toStringAsFixed(0)}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'D: ${widget.face.detection.getFaceDirection().toDirectionString()}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'Sideways: ${widget.face.detection.faceIsSideways().toString()}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + if (kDebugMode && widget.face.score < 0.75) + Text( + '[Debug only]', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + ], + ), + ); + } else { + if (snapshot.connectionState == ConnectionState.waiting) { + return const ClipRRect( + borderRadius: BorderRadius.all(Radius.elliptical(16, 12)), + child: SizedBox( + width: 60, + height: 60, + child: CircularProgressIndicator(), + ), + ); + } + if 
(snapshot.hasError) { + log('Error getting face: ${snapshot.error}'); + } + return const ClipRRect( + borderRadius: BorderRadius.all(Radius.elliptical(16, 12)), + child: SizedBox( + width: 60, + height: 60, + child: NoThumbnailWidget(), + ), + ); + } + }, + ); + } + + void _cornerIconPressed() async { + log('face widget (file info) corner icon is pressed'); + try { + if (isJustRemoved) { + await ClusterFeedbackService.instance + .addFilesToCluster([widget.face.faceID], widget.clusterID!); + } else { + await ClusterFeedbackService.instance + .removeFilesFromCluster([widget.file], widget.clusterID!); + } + + setState(() { + isJustRemoved = !isJustRemoved; + }); + } catch (e, s) { + log("removing face/file from cluster from file info widget failed: $e, \n $s"); + } + } + + Future?> getFaceCrop() async { + try { + final Uint8List? cachedFace = faceCropCache.get(widget.face.faceID); + if (cachedFace != null) { + return {widget.face.faceID: cachedFace}; + } + final faceCropCacheFile = cachedFaceCropPath(widget.face.faceID); + if ((await faceCropCacheFile.exists())) { + final data = await faceCropCacheFile.readAsBytes(); + faceCropCache.put(widget.face.faceID, data); + return {widget.face.faceID: data}; + } + + final result = await poolFullFileFaceGenerations.withResource( + () async => await getFaceCrops( + widget.file, + { + widget.face.faceID: widget.face.detection.box, + }, + ), + ); + final Uint8List? 
computedCrop = result?[widget.face.faceID]; + if (computedCrop != null) { + faceCropCache.put(widget.face.faceID, computedCrop); + faceCropCacheFile.writeAsBytes(computedCrop).ignore(); + } + return {widget.face.faceID: computedCrop!}; + } catch (e, s) { + log( + "Error getting face for faceID: ${widget.face.faceID}", + error: e, + stackTrace: s, + ); + return null; + } + } + + Widget _buildFaceImageFlutterZoom() { + return Builder( + builder: (context) { + return GestureDetector( + onTap: () async { + log( + "FaceWidget is tapped, with person ${widget.person} and clusterID ${widget.clusterID}", + name: "FaceWidget", + ); + if (widget.person == null && widget.clusterID == null) { + // Get faceID and double check that it doesn't belong to an existing clusterID. If it does, push that cluster page + final w = (kDebugMode ? EnteWatch('FaceWidget') : null)?..start(); + final existingClusterID = await FaceMLDataDB.instance + .getClusterIDForFaceID(widget.face.faceID); + w?.log('getting existing clusterID for faceID'); + if (existingClusterID != null) { + final fileIdsToClusterIds = + await FaceMLDataDB.instance.getFileIdToClusterIds(); + final files = await SearchService.instance.getAllFiles(); + final clusterFiles = files + .where( + (file) => + fileIdsToClusterIds[file.uploadedFileID] + ?.contains(existingClusterID) ?? 
+ false, + ) + .toList(); + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + clusterFiles, + clusterID: existingClusterID, + ), + ), + ); + } + + // Create new clusterID for the faceID and update DB to assign the faceID to the new clusterID + final int newClusterID = DateTime.now().microsecondsSinceEpoch; + await FaceMLDataDB.instance.updateFaceIdToClusterId( + {widget.face.faceID: newClusterID}, + ); + + // Push page for the new cluster + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + [widget.file], + clusterID: newClusterID, + ), + ), + ); + } + if (widget.person != null) { + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => PeoplePage( + person: widget.person!, + ), + ), + ); + } else if (widget.clusterID != null) { + final fileIdsToClusterIds = + await FaceMLDataDB.instance.getFileIdToClusterIds(); + final files = await SearchService.instance.getAllFiles(); + final clusterFiles = files + .where( + (file) => + fileIdsToClusterIds[file.uploadedFileID] + ?.contains(widget.clusterID) ?? + false, + ) + .toList(); + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + clusterFiles, + clusterID: widget.clusterID!, + ), + ), + ); + } + }, + child: Column( + children: [ + Stack( + children: [ + Container( + height: 60, + width: 60, + decoration: ShapeDecoration( + shape: RoundedRectangleBorder( + borderRadius: const BorderRadius.all( + Radius.elliptical(16, 12), + ), + side: widget.highlight + ? 
BorderSide( + color: getEnteColorScheme(context).primary700, + width: 1.0, + ) + : BorderSide.none, + ), + ), + child: ClipRRect( + borderRadius: + const BorderRadius.all(Radius.elliptical(16, 12)), + child: SizedBox( + width: 60, + height: 60, + child: CroppedFaceImageView( + enteFile: widget.file, + face: widget.face, + ), + ), + ), + ), + if (widget.editMode) + Positioned( + right: 0, + top: 0, + child: GestureDetector( + onTap: _cornerIconPressed, + child: isJustRemoved + ? const Icon( + CupertinoIcons.add_circled_solid, + color: Colors.green, + ) + : const Icon( + Icons.cancel, + color: Colors.red, + ), + ), + ), + ], + ), + const SizedBox(height: 8), + if (widget.person != null) + Text( + widget.person!.data.name.trim(), + style: Theme.of(context).textTheme.bodySmall, + overflow: TextOverflow.ellipsis, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'S: ${widget.face.score.toStringAsFixed(3)}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'B: ${widget.face.blur.toStringAsFixed(0)}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'D: ${widget.face.detection.getFaceDirection().toDirectionString()}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + if (kDebugMode) + Text( + 'Sideways: ${widget.face.detection.faceIsSideways().toString()}', + style: Theme.of(context).textTheme.bodySmall, + maxLines: 1, + ), + ], + ), + ); + }, + ); + } +} diff --git a/mobile/lib/ui/viewer/file_details/faces_item_widget.dart b/mobile/lib/ui/viewer/file_details/faces_item_widget.dart new file mode 100644 index 0000000000..ed2fb0f12e --- /dev/null +++ b/mobile/lib/ui/viewer/file_details/faces_item_widget.dart @@ -0,0 +1,229 @@ +import "dart:developer" as dev show log; + +import "package:flutter/foundation.dart" show Uint8List, kDebugMode; +import "package:flutter/material.dart"; +import "package:logging/logging.dart"; +import "package:photos/face/db.dart"; +import 
"package:photos/face/model/box.dart"; +import "package:photos/face/model/face.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart"; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import "package:photos/ui/components/buttons/chip_button_widget.dart"; +import "package:photos/ui/components/info_item_widget.dart"; +import "package:photos/ui/viewer/file_details/face_widget.dart"; +import "package:photos/utils/face/face_box_crop.dart"; +import "package:photos/utils/thumbnail_util.dart"; + +class FacesItemWidget extends StatefulWidget { + final EnteFile file; + const FacesItemWidget(this.file, {super.key}); + + @override + State createState() => _FacesItemWidgetState(); +} + +class _FacesItemWidgetState extends State { + bool editMode = false; + + @override + void initState() { + super.initState(); + setState(() {}); + } + + @override + Widget build(BuildContext context) { + return InfoItemWidget( + key: const ValueKey("Faces"), + leadingIcon: Icons.face_retouching_natural_outlined, + subtitleSection: _faceWidgets(context, widget.file, editMode), + hasChipButtons: true, + biggerSpinner: true, + // editOnTap: _toggleEditMode, // TODO: re-enable at later time when the UI is less ugly + ); + } + + void _toggleEditMode() { + setState(() { + editMode = !editMode; + }); + } + + Future> _faceWidgets( + BuildContext context, + EnteFile file, + bool editMode, + ) async { + try { + if (file.uploadedFileID == null) { + return [ + const ChipButtonWidget( + "File not uploaded yet", + noChips: true, + ), + ]; + } + + final List? 
faces = await FaceMLDataDB.instance + .getFacesForGivenFileID(file.uploadedFileID!); + if (faces == null) { + return [ + const ChipButtonWidget( + "Image not analyzed", + noChips: true, + ), + ]; + } + + // Remove faces with low scores + if (!kDebugMode) { + faces.removeWhere((face) => (face.score < 0.75)); + } else { + faces.removeWhere((face) => (face.score < 0.5)); + } + + if (faces.isEmpty) { + return [ + const ChipButtonWidget( + "No faces found", + noChips: true, + ), + ]; + } + + final faceIdsToClusterIds = await FaceMLDataDB.instance + .getFaceIdsToClusterIds(faces.map((face) => face.faceID)); + final Map persons = + await PersonService.instance.getPersonsMap(); + final clusterIDToPerson = + await FaceMLDataDB.instance.getClusterIDToPersonID(); + + // Sort faces by name and score + final faceIdToPersonID = {}; + for (final face in faces) { + final clusterID = faceIdsToClusterIds[face.faceID]; + if (clusterID != null) { + final personID = clusterIDToPerson[clusterID]; + if (personID != null) { + faceIdToPersonID[face.faceID] = personID; + } + } + } + faces.sort((Face a, Face b) { + final aPersonID = faceIdToPersonID[a.faceID]; + final bPersonID = faceIdToPersonID[b.faceID]; + if (aPersonID != null && bPersonID == null) { + return -1; + } else if (aPersonID == null && bPersonID != null) { + return 1; + } else { + return b.score.compareTo(a.score); + } + }); + // Make sure hidden faces are last + faces.sort((Face a, Face b) { + final aIsHidden = + persons[faceIdToPersonID[a.faceID]]?.data.isIgnored ?? false; + final bIsHidden = + persons[faceIdToPersonID[b.faceID]]?.data.isIgnored ?? 
false; + if (aIsHidden && !bIsHidden) { + return 1; + } else if (!aIsHidden && bIsHidden) { + return -1; + } else { + return 0; + } + }); + + final lastViewedClusterID = ClusterFeedbackService.lastViewedClusterID; + + final faceWidgets = []; + + // await generation of the face crops here, so that the file info shows one central loading spinner + final _ = await getRelevantFaceCrops(faces); + + final faceCrops = getRelevantFaceCrops(faces); + for (final Face face in faces) { + final int? clusterID = faceIdsToClusterIds[face.faceID]; + final PersonEntity? person = clusterIDToPerson[clusterID] != null + ? persons[clusterIDToPerson[clusterID]!] + : null; + final highlight = + (clusterID == lastViewedClusterID) && (person == null); + faceWidgets.add( + FaceWidget( + file, + face, + faceCrops: faceCrops, + clusterID: clusterID, + person: person, + highlight: highlight, + editMode: highlight ? editMode : false, + ), + ); + } + + return faceWidgets; + } catch (e, s) { + Logger("FacesItemWidget").info(e, s); + return []; + } + } + + Future?> getRelevantFaceCrops( + Iterable faces, + ) async { + try { + final faceIdToCrop = {}; + final facesWithoutCrops = {}; + for (final face in faces) { + final Uint8List? cachedFace = faceCropCache.get(face.faceID); + if (cachedFace != null) { + faceIdToCrop[face.faceID] = cachedFace; + } else { + final faceCropCacheFile = cachedFaceCropPath(face.faceID); + if ((await faceCropCacheFile.exists())) { + final data = await faceCropCacheFile.readAsBytes(); + faceCropCache.put(face.faceID, data); + faceIdToCrop[face.faceID] = data; + } else { + facesWithoutCrops[face.faceID] = face.detection.box; + } + } + } + + if (facesWithoutCrops.isEmpty) { + return faceIdToCrop; + } + + final result = await poolFullFileFaceGenerations.withResource( + () async => await getFaceCrops( + widget.file, + facesWithoutCrops, + ), + ); + if (result == null) { + return (faceIdToCrop.isEmpty) ? 
null : faceIdToCrop; + } + for (final entry in result.entries) { + final Uint8List? computedCrop = result[entry.key]; + if (computedCrop != null) { + faceCropCache.put(entry.key, computedCrop); + final faceCropCacheFile = cachedFaceCropPath(entry.key); + faceCropCacheFile.writeAsBytes(computedCrop).ignore(); + faceIdToCrop[entry.key] = computedCrop; + } + } + return (faceIdToCrop.isEmpty) ? null : faceIdToCrop; + } catch (e, s) { + dev.log( + "Error getting face crops for faceIDs: ${faces.map((face) => face.faceID).toList()}", + error: e, + stackTrace: s, + ); + return null; + } + } +} diff --git a/mobile/lib/ui/viewer/file_details/favorite_widget.dart b/mobile/lib/ui/viewer/file_details/favorite_widget.dart index f9d6434908..3371b14421 100644 --- a/mobile/lib/ui/viewer/file_details/favorite_widget.dart +++ b/mobile/lib/ui/viewer/file_details/favorite_widget.dart @@ -50,7 +50,6 @@ class _FavoriteWidgetState extends State { : LikeButton( size: 24, isLiked: isLiked, - padding: const EdgeInsets.all(2), onTap: (oldValue) async { if (widget.file.uploadedFileID == null || widget.file.ownerID != diff --git a/mobile/lib/ui/viewer/file_details/objects_item_widget.dart b/mobile/lib/ui/viewer/file_details/objects_item_widget.dart index 5b91b9b12d..c02576c116 100644 --- a/mobile/lib/ui/viewer/file_details/objects_item_widget.dart +++ b/mobile/lib/ui/viewer/file_details/objects_item_widget.dart @@ -27,6 +27,7 @@ class ObjectsItemWidget extends StatelessWidget { try { final chipButtons = []; var objectTags = {}; + // final thumbnail = await getThumbnail(file); // if (thumbnail != null) { // objectTags = await ObjectDetectionService.instance.predict(thumbnail); diff --git a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart index d2b7a6ec3d..c62d1f7389 100644 --- a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart +++ b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart @@ -3,6 +3,7 @@ import 'dart:io'; 
import 'dart:math' as math; import "package:flutter/cupertino.dart"; +import "package:flutter/foundation.dart"; import 'package:flutter/material.dart'; import 'package:logging/logging.dart'; import 'package:photos/core/configuration.dart'; @@ -736,7 +737,7 @@ class _GalleryAppBarWidgetState extends State { // stop any existing cast session gw.revokeAllTokens().ignore(); - if (!Platform.isAndroid) { + if (!Platform.isAndroid && !kDebugMode) { await _pairWithPin(gw, ''); } else { final result = await showDialog( diff --git a/mobile/lib/ui/viewer/people/add_person_action_sheet.dart b/mobile/lib/ui/viewer/people/add_person_action_sheet.dart new file mode 100644 index 0000000000..7a0c3a4713 --- /dev/null +++ b/mobile/lib/ui/viewer/people/add_person_action_sheet.dart @@ -0,0 +1,324 @@ +import "dart:async"; +import "dart:developer"; +import "dart:math" as math; + +import 'package:flutter/material.dart'; +import "package:logging/logging.dart"; +import 'package:modal_bottom_sheet/modal_bottom_sheet.dart'; +import "package:photos/core/event_bus.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/generated/l10n.dart"; +import "package:photos/models/file/file.dart"; +import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart'; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import "package:photos/services/search_service.dart"; +import 'package:photos/theme/colors.dart'; +import 'package:photos/theme/ente_theme.dart'; +import 'package:photos/ui/common/loading_widget.dart'; +import 'package:photos/ui/components/bottom_of_title_bar_widget.dart'; +import 'package:photos/ui/components/buttons/button_widget.dart'; +import 'package:photos/ui/components/models/button_type.dart'; +import "package:photos/ui/components/text_input_widget.dart"; +import 
'package:photos/ui/components/title_bar_title_widget.dart'; +import "package:photos/ui/viewer/people/new_person_item_widget.dart"; +import "package:photos/ui/viewer/people/person_row_item.dart"; +import "package:photos/utils/dialog_util.dart"; +import "package:photos/utils/toast_util.dart"; + +enum PersonActionType { + assignPerson, +} + +String _actionName( + BuildContext context, + PersonActionType type, +) { + String text = ""; + switch (type) { + case PersonActionType.assignPerson: + text = "Add name"; + break; + } + return text; +} + +Future showAssignPersonAction( + BuildContext context, { + required int clusterID, + PersonActionType actionType = PersonActionType.assignPerson, + bool showOptionToCreateNewAlbum = true, +}) { + return showBarModalBottomSheet( + context: context, + builder: (context) { + return PersonActionSheet( + actionType: actionType, + showOptionToCreateNewPerson: showOptionToCreateNewAlbum, + cluserID: clusterID, + ); + }, + shape: const RoundedRectangleBorder( + side: BorderSide(width: 0), + borderRadius: BorderRadius.vertical( + top: Radius.circular(5), + ), + ), + topControl: const SizedBox.shrink(), + backgroundColor: getEnteColorScheme(context).backgroundElevated, + barrierColor: backdropFaintDark, + enableDrag: false, + ); +} + +class PersonActionSheet extends StatefulWidget { + final PersonActionType actionType; + final int cluserID; + final bool showOptionToCreateNewPerson; + const PersonActionSheet({ + required this.actionType, + required this.cluserID, + required this.showOptionToCreateNewPerson, + super.key, + }); + + @override + State createState() => _PersonActionSheetState(); +} + +class _PersonActionSheetState extends State { + static const int cancelButtonSize = 80; + String _searchQuery = ""; + bool userAlreadyAssigned = false; + + @override + void initState() { + super.initState(); + } + + @override + Widget build(BuildContext context) { + final bottomInset = MediaQuery.of(context).viewInsets.bottom; + final isKeyboardUp 
= bottomInset > 100; + return Padding( + padding: EdgeInsets.only( + bottom: isKeyboardUp ? bottomInset - cancelButtonSize : 0, + ), + child: Row( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + ConstrainedBox( + constraints: BoxConstraints( + maxWidth: math.min(428, MediaQuery.of(context).size.width), + ), + child: Padding( + padding: const EdgeInsets.fromLTRB(0, 32, 0, 8), + child: Column( + mainAxisSize: MainAxisSize.max, + children: [ + Expanded( + child: Column( + children: [ + BottomOfTitleBarWidget( + title: TitleBarTitleWidget( + title: _actionName(context, widget.actionType), + ), + // caption: 'Select or create a ', + ), + Padding( + padding: const EdgeInsets.only( + top: 16, + left: 16, + right: 16, + ), + child: TextInputWidget( + hintText: 'Person name', + prefixIcon: Icons.search_rounded, + onChange: (value) { + setState(() { + _searchQuery = value; + }); + }, + isClearable: true, + shouldUnfocusOnClearOrSubmit: true, + borderRadius: 2, + ), + ), + _getPersonItems(), + ], + ), + ), + SafeArea( + child: Container( + //inner stroke of 1pt + 15 pts of top padding = 16 pts + padding: const EdgeInsets.fromLTRB(16, 15, 16, 8), + decoration: BoxDecoration( + border: Border( + top: BorderSide( + color: getEnteColorScheme(context).strokeFaint, + ), + ), + ), + child: ButtonWidget( + buttonType: ButtonType.secondary, + buttonAction: ButtonAction.cancel, + isInAlert: true, + labelText: S.of(context).cancel, + ), + ), + ), + ], + ), + ), + ), + ], + ), + ); + } + + Flexible _getPersonItems() { + return Flexible( + child: Padding( + padding: const EdgeInsets.fromLTRB(16, 24, 4, 0), + child: FutureBuilder>( + future: _getPersons(), + builder: (context, snapshot) { + if (snapshot.hasError) { + log("Error: ${snapshot.error} ${snapshot.stackTrace}}"); + //Need to show an error on the UI here + return const SizedBox.shrink(); + } else if (snapshot.hasData) { + final persons = snapshot.data!; + final searchResults = _searchQuery.isNotEmpty + ? 
persons + .where( + (element) => element.$1.data.name + .toLowerCase() + .contains(_searchQuery), + ) + .toList() + : persons; + final shouldShowAddPerson = widget.showOptionToCreateNewPerson && + (_searchQuery.isEmpty || searchResults.isEmpty); + + return Scrollbar( + thumbVisibility: true, + radius: const Radius.circular(2), + child: Padding( + padding: const EdgeInsets.only(right: 12), + child: ListView.separated( + itemCount: + searchResults.length + (shouldShowAddPerson ? 1 : 0), + itemBuilder: (context, index) { + if (index == 0 && shouldShowAddPerson) { + return GestureDetector( + behavior: HitTestBehavior.opaque, + child: const NewPersonItemWidget(), + onTap: () async => { + addNewPerson( + context, + initValue: _searchQuery.trim(), + clusterID: widget.cluserID, + ), + }, + ); + } + final person = + searchResults[index - (shouldShowAddPerson ? 1 : 0)]; + return PersonRowItem( + person: person.$1, + personFile: person.$2, + onTap: () async { + if (userAlreadyAssigned) { + return; + } + userAlreadyAssigned = true; + await FaceMLDataDB.instance.assignClusterToPerson( + personID: person.$1.remoteID, + clusterID: widget.cluserID, + ); + Bus.instance.fire(PeopleChangedEvent()); + + Navigator.pop(context, person); + }, + ); + }, + separatorBuilder: (context, index) { + return const SizedBox(height: 6); + }, + ), + ), + ); + } else { + return const EnteLoadingWidget(); + } + }, + ), + ), + ); + } + + Future addNewPerson( + BuildContext context, { + String initValue = '', + required int clusterID, + }) async { + final result = await showTextInputDialog( + context, + title: "New person", + submitButtonLabel: 'Add', + hintText: 'Add name', + alwaysShowSuccessState: false, + initialValue: initValue, + textCapitalization: TextCapitalization.words, + onSubmit: (String text) async { + if (userAlreadyAssigned) { + return; + } + // indicates user cancelled the rename request + if (text.trim() == "") { + return; + } + try { + userAlreadyAssigned = true; + final PersonEntity 
p = + await PersonService.instance.addPerson(text, clusterID); + final bool extraPhotosFound = await ClusterFeedbackService.instance + .checkAndDoAutomaticMerges(p, personClusterID: clusterID); + if (extraPhotosFound) { + showShortToast(context, "Extra photos found for $text"); + } + Bus.instance.fire(PeopleChangedEvent()); + Navigator.pop(context, p); + } catch (e, s) { + Logger("_PersonActionSheetState") + .severe("Failed to add person", e, s); + rethrow; + } + }, + ); + if (result is Exception) { + await showGenericErrorDialog(context: context, error: result); + } + } + + Future> _getPersons({ + bool excludeHidden = true, + }) async { + final persons = await PersonService.instance.getPersons(); + if (excludeHidden) { + persons.removeWhere((person) => person.data.isIgnored); + } + final List<(PersonEntity, EnteFile)> personAndFileID = []; + for (final person in persons) { + final clustersToFiles = + await SearchService.instance.getClusterFilesForPersonID( + person.remoteID, + ); + final files = clustersToFiles.values.expand((e) => e).toList(); + personAndFileID.add((person, files.first)); + } + return personAndFileID; + } +} diff --git a/mobile/lib/ui/viewer/people/cluster_app_bar.dart b/mobile/lib/ui/viewer/people/cluster_app_bar.dart new file mode 100644 index 0000000000..0896d06896 --- /dev/null +++ b/mobile/lib/ui/viewer/people/cluster_app_bar.dart @@ -0,0 +1,341 @@ +import 'dart:async'; + +import "package:flutter/foundation.dart"; +import 'package:flutter/material.dart'; +import 'package:logging/logging.dart'; +import "package:ml_linalg/linalg.dart"; +import 'package:photos/core/configuration.dart'; +import 'package:photos/core/event_bus.dart'; +import "package:photos/db/files_db.dart"; +import "package:photos/events/people_changed_event.dart"; +import 'package:photos/events/subscription_purchased_event.dart'; +import "package:photos/face/db.dart"; +import "package:photos/face/model/person.dart"; +import 
"package:photos/generated/protos/ente/common/vector.pb.dart"; +import "package:photos/models/file/file.dart"; +import 'package:photos/models/gallery_type.dart'; +import 'package:photos/models/selected_files.dart'; +import 'package:photos/services/collections_service.dart'; +import "package:photos/services/machine_learning/face_ml/face_clustering/cosine_distance.dart"; +import "package:photos/services/machine_learning/face_ml/face_ml_result.dart"; +import "package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart"; +import 'package:photos/ui/actions/collection/collection_sharing_actions.dart'; +import "package:photos/ui/common/popup_item.dart"; +import "package:photos/ui/viewer/people/cluster_breakup_page.dart"; +import "package:photos/ui/viewer/people/cluster_page.dart"; +import "package:photos/utils/dialog_util.dart"; + +class ClusterAppBar extends StatefulWidget { + final GalleryType type; + final String? title; + final SelectedFiles selectedFiles; + final int clusterID; + final PersonEntity? person; + + const ClusterAppBar( + this.type, + this.title, + this.selectedFiles, + this.clusterID, { + this.person, + Key? key, + }) : super(key: key); + + @override + State createState() => _AppBarWidgetState(); +} + +enum ClusterPopupAction { + setCover, + breakupCluster, + breakupClusterDebug, + ignore, +} + +class _AppBarWidgetState extends State { + final _logger = Logger("_AppBarWidgetState"); + late StreamSubscription _userAuthEventSubscription; + late Function() _selectedFilesListener; + String? 
_appBarTitle; + late CollectionActions collectionActions; + final GlobalKey shareButtonKey = GlobalKey(); + bool isQuickLink = false; + late GalleryType galleryType; + + @override + void initState() { + super.initState(); + _selectedFilesListener = () { + setState(() {}); + }; + collectionActions = CollectionActions(CollectionsService.instance); + widget.selectedFiles.addListener(_selectedFilesListener); + _userAuthEventSubscription = + Bus.instance.on().listen((event) { + setState(() {}); + }); + _appBarTitle = widget.title; + galleryType = widget.type; + } + + @override + void dispose() { + _userAuthEventSubscription.cancel(); + widget.selectedFiles.removeListener(_selectedFilesListener); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return AppBar( + elevation: 0, + centerTitle: false, + title: Text( + _appBarTitle!, + style: + Theme.of(context).textTheme.headlineSmall!.copyWith(fontSize: 16), + maxLines: 2, + overflow: TextOverflow.ellipsis, + ), + actions: kDebugMode ? 
_getDefaultActions(context) : null, + ); + } + + List _getDefaultActions(BuildContext context) { + final List actions = []; + // If the user has selected files, don't show any actions + if (widget.selectedFiles.files.isNotEmpty || + !Configuration.instance.hasConfiguredAccount()) { + return actions; + } + + final List> items = []; + + items.addAll( + [ + EntePopupMenuItem( + "Ignore person", + value: ClusterPopupAction.ignore, + icon: Icons.hide_image_outlined, + ), + EntePopupMenuItem( + "Mixed grouping?", + value: ClusterPopupAction.breakupCluster, + icon: Icons.analytics_outlined, + ), + ], + ); + if (kDebugMode) { + items.add( + EntePopupMenuItem( + "Debug mixed grouping", + value: ClusterPopupAction.breakupClusterDebug, + icon: Icons.analytics_outlined, + ), + ); + } + + if (items.isNotEmpty) { + actions.add( + PopupMenuButton( + itemBuilder: (context) { + return items; + }, + onSelected: (ClusterPopupAction value) async { + if (value == ClusterPopupAction.breakupCluster) { + // ignore: unawaited_futures + await _breakUpCluster(context); + } else if (value == ClusterPopupAction.ignore) { + await _onIgnoredClusterClicked(context); + } else if (value == ClusterPopupAction.breakupClusterDebug) { + await _breakUpClusterDebug(context); + } + // else if (value == ClusterPopupAction.setCover) { + // await setCoverPhoto(context); + }, + ), + ); + } + + return actions; + } + + @Deprecated( + 'Used for debugging an issue with conflicts on cluster IDs, resolved now', + ) + Future _validateCluster(BuildContext context) async { + _logger.info('_validateCluster called'); + final faceMlDb = FaceMLDataDB.instance; + + final faceIDs = await faceMlDb.getFaceIDsForCluster(widget.clusterID); + final fileIDs = faceIDs.map((e) => getFileIdFromFaceId(e)).toList(); + + final embeddingsBlobs = await faceMlDb.getFaceEmbeddingMapForFile(fileIDs); + embeddingsBlobs.removeWhere((key, value) => !faceIDs.contains(key)); + final embeddings = embeddingsBlobs + .map((key, value) => 
MapEntry(key, EVector.fromBuffer(value).values)); + + for (final MapEntry> embedding in embeddings.entries) { + double closestDistance = double.infinity; + double closestDistance32 = double.infinity; + double closestDistance64 = double.infinity; + String? closestFaceID; + for (final MapEntry> otherEmbedding + in embeddings.entries) { + if (embedding.key == otherEmbedding.key) { + continue; + } + final distance64 = cosineDistanceSIMD( + Vector.fromList(embedding.value, dtype: DType.float64), + Vector.fromList(otherEmbedding.value, dtype: DType.float64), + ); + final distance32 = cosineDistanceSIMD( + Vector.fromList(embedding.value, dtype: DType.float32), + Vector.fromList(otherEmbedding.value, dtype: DType.float32), + ); + final distance = cosineDistForNormVectors( + embedding.value, + otherEmbedding.value, + ); + if (distance < closestDistance) { + closestDistance = distance; + closestDistance32 = distance32; + closestDistance64 = distance64; + closestFaceID = otherEmbedding.key; + } + } + if (closestDistance > 0.3) { + _logger.severe( + "Face ${embedding.key} is similar to $closestFaceID with distance $closestDistance, and float32 distance $closestDistance32, and float64 distance $closestDistance64", + ); + } + } + } + + Future _onIgnoredClusterClicked(BuildContext context) async { + await showChoiceDialog( + context, + title: "Are you sure you want to ignore this person?", + body: + "The person grouping will not be displayed in the discovery tap anymore. 
Photos will remain untouched.", + firstButtonLabel: "Yes, confirm", + firstButtonOnTap: () async { + try { + await ClusterFeedbackService.instance.ignoreCluster(widget.clusterID); + Navigator.of(context).pop(); // Close the cluster page + } catch (e, s) { + _logger.severe('Ignoring a cluster failed', e, s); + // await showGenericErrorDialog(context: context, error: e); + } + }, + ); + } + + Future _breakUpCluster(BuildContext context) async { + bool userConfirmed = false; + List biggestClusterFiles = []; + int biggestClusterID = -1; + await showChoiceDialog( + context, + title: "Does this grouping contain multiple people?", + body: + "We will automatically analyze the grouping to determine if there are multiple people present, and separate them out again. This may take a few seconds.", + firstButtonLabel: "Yes, confirm", + firstButtonOnTap: () async { + try { + final breakupResult = await ClusterFeedbackService.instance + .breakUpCluster(widget.clusterID); + final Map> newClusterIDToFaceIDs = + breakupResult.newClusterIdToFaceIds!; + final Map newFaceIdToClusterID = + breakupResult.newFaceIdToCluster; + + // Update to delete the old clusters and save the new clusters + await FaceMLDataDB.instance.deleteClusterSummary(widget.clusterID); + await FaceMLDataDB.instance + .clusterSummaryUpdate(breakupResult.newClusterSummaries!); + await FaceMLDataDB.instance + .updateFaceIdToClusterId(newFaceIdToClusterID); + + // Find the biggest cluster + biggestClusterID = -1; + int biggestClusterSize = 0; + for (final MapEntry> clusterToFaces + in newClusterIDToFaceIDs.entries) { + if (clusterToFaces.value.length > biggestClusterSize) { + biggestClusterSize = clusterToFaces.value.length; + biggestClusterID = clusterToFaces.key; + } + } + // Get the files for the biggest new cluster + final biggestClusterFileIDs = newClusterIDToFaceIDs[biggestClusterID]! 
+ .map((e) => getFileIdFromFaceId(e)) + .toList(); + biggestClusterFiles = await FilesDB.instance + .getFilesFromIDs( + biggestClusterFileIDs, + ) + .then((mapping) => mapping.values.toList()); + // Sort the files to prevent issues with the order of the files in gallery + biggestClusterFiles + .sort((a, b) => b.creationTime!.compareTo(a.creationTime!)); + + userConfirmed = true; + } catch (e, s) { + _logger.severe('Breakup cluster failed', e, s); + // await showGenericErrorDialog(context: context, error: e); + } + }, + ); + if (userConfirmed) { + // Close the old cluster page + Navigator.of(context).pop(); + + // Push the new cluster page + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + biggestClusterFiles, + clusterID: biggestClusterID, + ), + ), + ); + Bus.instance.fire(PeopleChangedEvent()); + } + } + + Future _breakUpClusterDebug(BuildContext context) async { + final breakupResult = + await ClusterFeedbackService.instance.breakUpCluster(widget.clusterID); + + final Map> newClusterIDToFaceIDs = + breakupResult.newClusterIdToFaceIds!; + + final allFileIDs = newClusterIDToFaceIDs.values + .expand((e) => e) + .map((e) => getFileIdFromFaceId(e)) + .toList(); + + final fileIDtoFile = await FilesDB.instance.getFilesFromIDs( + allFileIDs, + ); + + final newClusterIDToFiles = newClusterIDToFaceIDs.map( + (key, value) => MapEntry( + key, + value + .map((faceId) => fileIDtoFile[getFileIdFromFaceId(faceId)]!) 
+ .toList(), + ), + ); + + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterBreakupPage( + newClusterIDToFiles, + "(Analysis)", + ), + ), + ); + } +} diff --git a/mobile/lib/ui/viewer/people/cluster_breakup_page.dart b/mobile/lib/ui/viewer/people/cluster_breakup_page.dart new file mode 100644 index 0000000000..e91909f471 --- /dev/null +++ b/mobile/lib/ui/viewer/people/cluster_breakup_page.dart @@ -0,0 +1,124 @@ +import "package:flutter/material.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/viewer/file/no_thumbnail_widget.dart"; +import "package:photos/ui/viewer/people/cluster_page.dart"; +import "package:photos/ui/viewer/search/result/person_face_widget.dart"; + +class ClusterBreakupPage extends StatefulWidget { + final Map> newClusterIDsToFiles; + final String title; + + const ClusterBreakupPage( + this.newClusterIDsToFiles, + this.title, { + super.key, + }); + + @override + State createState() => _ClusterBreakupPageState(); +} + +class _ClusterBreakupPageState extends State { + @override + Widget build(BuildContext context) { + final keys = widget.newClusterIDsToFiles.keys.toList(); + final clusterIDsToFiles = widget.newClusterIDsToFiles; + + return Scaffold( + appBar: AppBar( + title: Text(widget.title), + ), + body: ListView.builder( + itemCount: widget.newClusterIDsToFiles.keys.length, + itemBuilder: (context, index) { + final int clusterID = keys[index]; + final List files = clusterIDsToFiles[keys[index]]!; + return InkWell( + onTap: () { + Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + files, + clusterID: index, + appendTitle: "(Analysis)", + ), + ), + ); + }, + child: Container( + padding: const EdgeInsets.all(8.0), + child: Row( + children: [ + SizedBox( + width: 64, + height: 64, + child: files.isNotEmpty + ? 
ClipRRect( + borderRadius: const BorderRadius.all( + Radius.elliptical(16, 12),), + child: PersonFaceWidget( + files.first, + clusterID: clusterID, + ), + ) + : const ClipRRect( + borderRadius: + BorderRadius.all(Radius.elliptical(16, 12)), + child: NoThumbnailWidget( + addBorder: false, + ), + ), + ), + const SizedBox( + width: 8.0, + ), // Add some spacing between the thumbnail and the text + Expanded( + child: Padding( + padding: const EdgeInsets.symmetric(horizontal: 8.0), + child: Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + Text( + "${clusterIDsToFiles[keys[index]]!.length} photos", + style: getEnteTextTheme(context).body, + ), + // GestureDetector( + // onTap: () async { + // try { + // final int result = await FaceMLDataDB + // .instance + // .removeClusterToPerson( + // personID: widget.person.remoteID, + // clusterID: clusterID, + // ); + // _logger.info( + // "Removed cluster $clusterID from person ${widget.person.remoteID}, result: $result", + // ); + // Bus.instance.fire(PeopleChangedEvent()); + // setState(() {}); + // } catch (e) { + // _logger.severe( + // "removing cluster from person,", + // e, + // ); + // } + // }, + // child: const Icon( + // CupertinoIcons.minus_circled, + // color: Colors.red, + // ), + // ), + ], + ), + ), + ), + ], + ), + ), + ); + }, + ), + ); + } +} diff --git a/mobile/lib/ui/viewer/people/cluster_page.dart b/mobile/lib/ui/viewer/people/cluster_page.dart new file mode 100644 index 0000000000..f6b720f023 --- /dev/null +++ b/mobile/lib/ui/viewer/people/cluster_page.dart @@ -0,0 +1,208 @@ +import "dart:async"; + +import "package:flutter/foundation.dart"; +import 'package:flutter/material.dart'; +import 'package:photos/core/event_bus.dart'; +import 'package:photos/events/files_updated_event.dart'; +import 'package:photos/events/local_photos_updated_event.dart'; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/face/model/person.dart"; +import 
"package:photos/generated/l10n.dart"; +import 'package:photos/models/file/file.dart'; +import 'package:photos/models/file_load_result.dart'; +import 'package:photos/models/gallery_type.dart'; +import 'package:photos/models/selected_files.dart'; +import "package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart"; +import 'package:photos/ui/viewer/actions/file_selection_overlay_bar.dart'; +import 'package:photos/ui/viewer/gallery/gallery.dart'; +import "package:photos/ui/viewer/people/add_person_action_sheet.dart"; +import "package:photos/ui/viewer/people/cluster_app_bar.dart"; +import "package:photos/ui/viewer/people/people_banner.dart"; +import "package:photos/ui/viewer/people/people_page.dart"; +import "package:photos/ui/viewer/search/result/person_face_widget.dart"; +import "package:photos/ui/viewer/search/result/search_result_page.dart"; +import "package:photos/utils/navigation_util.dart"; +import "package:photos/utils/toast_util.dart"; + +class ClusterPage extends StatefulWidget { + final List searchResult; + final bool enableGrouping; + final String tagPrefix; + final int clusterID; + final PersonEntity? personID; + final String appendTitle; + final bool showNamingBanner; + + static const GalleryType appBarType = GalleryType.cluster; + static const GalleryType overlayType = GalleryType.cluster; + + const ClusterPage( + this.searchResult, { + this.enableGrouping = true, + this.tagPrefix = "", + required this.clusterID, + this.personID, + this.appendTitle = "", + this.showNamingBanner = true, + Key? 
key, + }) : super(key: key); + + @override + State createState() => _ClusterPageState(); +} + +class _ClusterPageState extends State { + final _selectedFiles = SelectedFiles(); + late final List files; + late final StreamSubscription _filesUpdatedEvent; + late final StreamSubscription _peopleChangedEvent; + + bool get showNamingBanner => + (!userDismissedNamingBanner && widget.showNamingBanner); + + bool userDismissedNamingBanner = false; + + @override + void initState() { + super.initState(); + ClusterFeedbackService.setLastViewedClusterID(widget.clusterID); + files = widget.searchResult; + _filesUpdatedEvent = + Bus.instance.on().listen((event) { + if (event.type == EventType.deletedFromDevice || + event.type == EventType.deletedFromEverywhere || + event.type == EventType.deletedFromRemote || + event.type == EventType.hide) { + for (var updatedFile in event.updatedFiles) { + files.remove(updatedFile); + } + setState(() {}); + } + }); + _peopleChangedEvent = Bus.instance.on().listen((event) { + if (event.type == PeopleEventType.removedFilesFromCluster && + (event.source == widget.clusterID.toString())) { + for (var updatedFile in event.relevantFiles!) { + files.remove(updatedFile); + } + setState(() {}); + } + }); + kDebugMode + ? ClusterFeedbackService.instance.debugLogClusterBlurValues( + widget.clusterID, + clusterSize: files.length, + ) + : null; + } + + @override + void dispose() { + _filesUpdatedEvent.cancel(); + _peopleChangedEvent.cancel(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + final gallery = Gallery( + asyncLoader: (creationStartTime, creationEndTime, {limit, asc}) { + final result = files + .where( + (file) => + file.creationTime! >= creationStartTime && + file.creationTime! 
<= creationEndTime, + ) + .toList(); + return Future.value( + FileLoadResult( + result, + result.length < files.length, + ), + ); + }, + reloadEvent: Bus.instance.on(), + forceReloadEvents: [Bus.instance.on()], + removalEventTypes: const { + EventType.deletedFromRemote, + EventType.deletedFromEverywhere, + EventType.hide, + EventType.peopleClusterChanged, + }, + tagPrefix: widget.tagPrefix + widget.tagPrefix, + selectedFiles: _selectedFiles, + enableFileGrouping: widget.enableGrouping, + initialFiles: [widget.searchResult.first], + ); + return Scaffold( + appBar: PreferredSize( + preferredSize: const Size.fromHeight(50.0), + child: ClusterAppBar( + SearchResultPage.appBarType, + "${files.length} memories${widget.appendTitle}", + _selectedFiles, + widget.clusterID, + key: ValueKey(files.length), + ), + ), + body: Column( + children: [ + Expanded( + child: Stack( + alignment: Alignment.bottomCenter, + children: [ + gallery, + FileSelectionOverlayBar( + ClusterPage.overlayType, + _selectedFiles, + clusterID: widget.clusterID, + ), + ], + ), + ), + showNamingBanner + ? 
Dismissible( + key: const Key("namingBanner"), + direction: DismissDirection.horizontal, + onDismissed: (direction) { + setState(() { + userDismissedNamingBanner = true; + }); + }, + child: PeopleBanner( + type: PeopleBannerType.addName, + faceWidget: PersonFaceWidget( + files.first, + clusterID: widget.clusterID, + ), + actionIcon: Icons.add_outlined, + text: S.of(context).addAName, + subText: S.of(context).findPeopleByName, + onTap: () async { + if (widget.personID == null) { + final result = await showAssignPersonAction( + context, + clusterID: widget.clusterID, + ); + if (result != null && + result is (PersonEntity, EnteFile)) { + Navigator.pop(context); + // ignore: unawaited_futures + routeToPage(context, PeoplePage(person: result.$1)); + } else if (result != null && result is PersonEntity) { + Navigator.pop(context); + // ignore: unawaited_futures + routeToPage(context, PeoplePage(person: result)); + } + } else { + showShortToast(context, "No personID or clusterID"); + } + }, + ), + ) + : const SizedBox.shrink(), + ], + ), + ); + } +} diff --git a/mobile/lib/ui/viewer/people/cropped_face_image_view.dart b/mobile/lib/ui/viewer/people/cropped_face_image_view.dart new file mode 100644 index 0000000000..a76dbe5f0a --- /dev/null +++ b/mobile/lib/ui/viewer/people/cropped_face_image_view.dart @@ -0,0 +1,121 @@ +import 'dart:developer' show log; +import "dart:io" show File; + +import 'package:flutter/material.dart'; +import "package:photos/face/model/face.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/models/file/file_type.dart"; +import "package:photos/ui/viewer/file/thumbnail_widget.dart"; +import "package:photos/utils/file_util.dart"; +import "package:photos/utils/thumbnail_util.dart"; + +class CroppedFaceInfo { + final Image image; + final double scale; + final double offsetX; + final double offsetY; + + const CroppedFaceInfo({ + required this.image, + required this.scale, + required this.offsetX, + required this.offsetY, + }); +} 
+ +class CroppedFaceImageView extends StatelessWidget { + final EnteFile enteFile; + final Face face; + + const CroppedFaceImageView({ + Key? key, + required this.enteFile, + required this.face, + }) : super(key: key); + + @override + Widget build(BuildContext context) { + return FutureBuilder( + future: getImage(), + builder: (context, snapshot) { + if (snapshot.hasData) { + return LayoutBuilder( + builder: ((context, constraints) { + final double imageAspectRatio = enteFile.width / enteFile.height; + final Image image = snapshot.data!; + + final double viewWidth = constraints.maxWidth; + final double viewHeight = constraints.maxHeight; + + final faceBox = face.detection.box; + + final double relativeFaceCenterX = faceBox.x + faceBox.width / 2; + final double relativeFaceCenterY = faceBox.y + faceBox.height / 2; + + const double desiredFaceHeightRelativeToWidget = 8 / 10; + final double scale = + (1 / faceBox.height) * desiredFaceHeightRelativeToWidget; + + final double widgetCenterX = viewWidth / 2; + final double widgetCenterY = viewHeight / 2; + + final double widgetAspectRatio = viewWidth / viewHeight; + final double imageToWidgetRatio = + imageAspectRatio / widgetAspectRatio; + + double offsetX = + (widgetCenterX - relativeFaceCenterX * viewWidth) * scale; + double offsetY = + (widgetCenterY - relativeFaceCenterY * viewHeight) * scale; + + if (imageAspectRatio < widgetAspectRatio) { + // Landscape Image: Adjust offsetX more conservatively + offsetX = offsetX * imageToWidgetRatio; + } else { + // Portrait Image: Adjust offsetY more conservatively + offsetY = offsetY / imageToWidgetRatio; + } + return ClipRRect( + borderRadius: const BorderRadius.all(Radius.elliptical(16, 12)), + child: Transform.translate( + offset: Offset( + offsetX, + offsetY, + ), + child: Transform.scale( + scale: scale, + child: image, + ), + ), + ); + }), + ); + } else { + if (snapshot.hasError) { + log('Error getting cover face for person: ${snapshot.error}'); + } + return 
ThumbnailWidget( + enteFile, + ); + } + }, + ); + } + + Future getImage() async { + final File? ioFile; + if (enteFile.fileType == FileType.video) { + ioFile = await getThumbnailForUploadedFile(enteFile); + } else { + ioFile = await getFile(enteFile); + } + if (ioFile == null) { + return null; + } + + final imageData = await ioFile.readAsBytes(); + final image = Image.memory(imageData, fit: BoxFit.contain); + + return image; + } +} diff --git a/mobile/lib/ui/viewer/people/new_person_item_widget.dart b/mobile/lib/ui/viewer/people/new_person_item_widget.dart new file mode 100644 index 0000000000..c60f892590 --- /dev/null +++ b/mobile/lib/ui/viewer/people/new_person_item_widget.dart @@ -0,0 +1,73 @@ +import 'package:dotted_border/dotted_border.dart'; +import 'package:flutter/material.dart'; +import 'package:photos/theme/ente_theme.dart'; + +///https://www.figma.com/file/SYtMyLBs5SAOkTbfMMzhqt/ente-Visual-Design?node-id=10854%3A57947&t=H5AvR79OYDnB9ekw-4 +class NewPersonItemWidget extends StatelessWidget { + const NewPersonItemWidget({ + super.key, + }); + + @override + Widget build(BuildContext context) { + final textTheme = getEnteTextTheme(context); + final colorScheme = getEnteColorScheme(context); + const sideOfThumbnail = 60.0; + return LayoutBuilder( + builder: (context, constraints) { + return Stack( + alignment: Alignment.center, + children: [ + Row( + children: [ + ClipRRect( + borderRadius: const BorderRadius.horizontal( + left: Radius.circular(4), + ), + child: SizedBox( + height: sideOfThumbnail, + width: sideOfThumbnail, + child: Icon( + Icons.add_outlined, + color: colorScheme.strokeMuted, + ), + ), + ), + Padding( + padding: const EdgeInsets.only(left: 12), + child: Text( + 'Add new person', + style: + textTheme.body.copyWith(color: colorScheme.textMuted), + ), + ), + ], + ), + IgnorePointer( + child: DottedBorder( + dashPattern: const [4], + color: colorScheme.strokeFainter, + strokeWidth: 1, + padding: const EdgeInsets.all(0), + borderType: 
BorderType.RRect, + radius: const Radius.circular(4), + child: SizedBox( + //Have to decrease the height and width by 1 pt as the stroke + //dotted border gives is of strokeAlign.center, so 0.5 inside and + // outside. Here for the row, stroke should be inside so we + //decrease the size of this sizedBox by 1 (so it shrinks 0.5 from + //every side) so that the strokeAlign.center of this sizedBox + //looks like a strokeAlign.inside in the row. + height: sideOfThumbnail - 1, + //This width will work for this only if the row widget takes up the + //full size it's parent (stack). + width: constraints.maxWidth - 1, + ), + ), + ), + ], + ); + }, + ); + } +} diff --git a/mobile/lib/ui/viewer/people/people_app_bar.dart b/mobile/lib/ui/viewer/people/people_app_bar.dart new file mode 100644 index 0000000000..d53059327f --- /dev/null +++ b/mobile/lib/ui/viewer/people/people_app_bar.dart @@ -0,0 +1,337 @@ +import 'dart:async'; + +import "package:flutter/cupertino.dart"; +import 'package:flutter/material.dart'; +import 'package:logging/logging.dart'; +import 'package:photos/core/configuration.dart'; +import 'package:photos/core/event_bus.dart'; +import "package:photos/events/people_changed_event.dart"; +import 'package:photos/events/subscription_purchased_event.dart'; +import "package:photos/face/model/person.dart"; +import "package:photos/generated/l10n.dart"; +import "package:photos/models/file/file.dart"; +import 'package:photos/models/gallery_type.dart'; +import 'package:photos/models/selected_files.dart'; +import 'package:photos/services/collections_service.dart'; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import 'package:photos/ui/actions/collection/collection_sharing_actions.dart'; +import "package:photos/ui/viewer/people/add_person_action_sheet.dart"; +import "package:photos/ui/viewer/people/people_page.dart"; +import "package:photos/ui/viewer/people/person_cluster_suggestion.dart"; +import 
'package:photos/ui/viewer/people/person_clusters_page.dart'; +import "package:photos/utils/dialog_util.dart"; +import "package:photos/utils/navigation_util.dart"; + +class PeopleAppBar extends StatefulWidget { + final GalleryType type; + final String? title; + final SelectedFiles selectedFiles; + final PersonEntity person; + + bool get isIgnored => person.data.isIgnored; + + const PeopleAppBar( + this.type, + this.title, + this.selectedFiles, + this.person, { + Key? key, + }) : super(key: key); + + @override + State createState() => _AppBarWidgetState(); +} + +enum PeoplePopupAction { + rename, + setCover, + removeLabel, + viewPhotos, + confirmPhotos, + unignore, +} + +class _AppBarWidgetState extends State { + final _logger = Logger("_AppBarWidgetState"); + late StreamSubscription _userAuthEventSubscription; + late Function() _selectedFilesListener; + String? _appBarTitle; + late CollectionActions collectionActions; + final GlobalKey shareButtonKey = GlobalKey(); + bool isQuickLink = false; + late GalleryType galleryType; + + @override + void initState() { + super.initState(); + _selectedFilesListener = () { + setState(() {}); + }; + collectionActions = CollectionActions(CollectionsService.instance); + widget.selectedFiles.addListener(_selectedFilesListener); + _userAuthEventSubscription = + Bus.instance.on().listen((event) { + setState(() {}); + }); + _appBarTitle = widget.title; + galleryType = widget.type; + } + + @override + void dispose() { + _userAuthEventSubscription.cancel(); + widget.selectedFiles.removeListener(_selectedFilesListener); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return AppBar( + elevation: 0, + centerTitle: false, + title: Text( + _appBarTitle!, + style: + Theme.of(context).textTheme.headlineSmall!.copyWith(fontSize: 16), + maxLines: 2, + overflow: TextOverflow.ellipsis, + ), + actions: _getDefaultActions(context), + ); + } + + Future _renamePerson(BuildContext context) async { + final result = await 
showTextInputDialog( + context, + title: S.of(context).rename, + submitButtonLabel: S.of(context).done, + hintText: S.of(context).enterPersonName, + alwaysShowSuccessState: true, + initialValue: widget.person.data.name, + textCapitalization: TextCapitalization.words, + onSubmit: (String text) async { + // indicates user cancelled the rename request + if (text == "" || text == _appBarTitle!) { + return; + } + + try { + await PersonService.instance + .updateAttributes(widget.person.remoteID, name: text); + if (mounted) { + _appBarTitle = text; + setState(() {}); + } + Bus.instance.fire(PeopleChangedEvent()); + } catch (e, s) { + _logger.severe("Failed to rename album", e, s); + rethrow; + } + }, + ); + if (result is Exception) { + await showGenericErrorDialog(context: context, error: result); + } + } + + List _getDefaultActions(BuildContext context) { + final List actions = []; + // If the user has selected files, don't show any actions + if (widget.selectedFiles.files.isNotEmpty || + !Configuration.instance.hasConfiguredAccount()) { + return actions; + } + + final List> items = []; + + if (!widget.isIgnored) { + items.addAll( + [ + PopupMenuItem( + value: PeoplePopupAction.rename, + child: Row( + children: [ + const Icon(Icons.edit), + const Padding( + padding: EdgeInsets.all(8), + ), + Text(S.of(context).rename), + ], + ), + ), + // PopupMenuItem( + // value: PeoplPopupAction.setCover, + // child: Row( + // children: [ + // const Icon(Icons.image_outlined), + // const Padding( + // padding: EdgeInsets.all(8), + // ), + // Text(S.of(context).setCover), + // ], + // ), + // ), + + PopupMenuItem( + value: PeoplePopupAction.removeLabel, + child: Row( + children: [ + const Icon(Icons.remove_circle_outline), + const Padding( + padding: EdgeInsets.all(8), + ), + Text(S.of(context).removePersonLabel), + ], + ), + ), + const PopupMenuItem( + value: PeoplePopupAction.viewPhotos, + child: Row( + children: [ + Icon(Icons.view_array_outlined), + Padding( + padding: 
EdgeInsets.all(8), + ), + Text('View confirmed photos'), + ], + ), + ), + const PopupMenuItem( + value: PeoplePopupAction.confirmPhotos, + child: Row( + children: [ + Icon(CupertinoIcons.square_stack_3d_down_right), + Padding( + padding: EdgeInsets.all(8), + ), + Text('Review suggestions'), + ], + ), + ), + ], + ); + } else { + items.addAll( + [ + const PopupMenuItem( + value: PeoplePopupAction.unignore, + child: Row( + children: [ + Icon(Icons.visibility_outlined), + Padding( + padding: EdgeInsets.all(8), + ), + Text("Show person"), + ], + ), + ), + ], + ); + } + + if (items.isNotEmpty) { + actions.add( + PopupMenuButton( + itemBuilder: (context) { + return items; + }, + onSelected: (PeoplePopupAction value) async { + if (value == PeoplePopupAction.viewPhotos) { + // ignore: unawaited_futures + unawaited( + Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => PersonClustersPage(widget.person), + ), + ), + ); + } else if (value == PeoplePopupAction.confirmPhotos) { + // ignore: unawaited_futures + unawaited( + Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => + PersonReviewClusterSuggestion(widget.person), + ), + ), + ); + } else if (value == PeoplePopupAction.rename) { + await _renamePerson(context); + } else if (value == PeoplePopupAction.setCover) { + await setCoverPhoto(context); + } else if (value == PeoplePopupAction.unignore) { + await _showPerson(context); + } else if (value == PeoplePopupAction.removeLabel) { + await _removePersonLabel(context); + } + }, + ), + ); + } + + return actions; + } + + Future _removePersonLabel(BuildContext context) async { + await showChoiceDialog( + context, + title: "Are you sure you want to remove this person label?", + body: + "All groupings for this person will be reset, and you will lose all suggestions made for this person", + firstButtonLabel: "Yes, remove person", + firstButtonOnTap: () async { + try { + await PersonService.instance.deletePerson(widget.person.remoteID); + 
Navigator.of(context).pop(); + } catch (e, s) { + _logger.severe('Removing person label failed', e, s); + } + }, + ); + } + + Future _showPerson(BuildContext context) async { + bool assignName = false; + await showChoiceDialog( + context, + title: + "Are you sure you want to show this person in people section again?", + firstButtonLabel: "Yes, show person", + firstButtonOnTap: () async { + try { + await PersonService.instance + .deletePerson(widget.person.remoteID, onlyMapping: false); + Bus.instance.fire(PeopleChangedEvent()); + assignName = true; + } catch (e, s) { + _logger.severe('Unignoring/showing and naming person failed', e, s); + // await showGenericErrorDialog(context: context, error: e); + } + }, + ); + if (assignName) { + final result = await showAssignPersonAction( + context, + clusterID: widget.person.data.assigned!.first.id, + ); + Navigator.pop(context); + if (result != null && result is (PersonEntity, EnteFile)) { + // ignore: unawaited_futures + routeToPage(context, PeoplePage(person: result.$1)); + } else if (result != null && result is PersonEntity) { + // ignore: unawaited_futures + routeToPage(context, PeoplePage(person: result)); + } + } + } + + Future setCoverPhoto(BuildContext context) async { + // final int? 
coverPhotoID = await showPickCoverPhotoSheet( + // context, + // widget.collection!, + // ); + // if (coverPhotoID != null) { + // unawaited(changeCoverPhoto(context, widget.collection!, coverPhotoID)); + // } + } +} diff --git a/mobile/lib/ui/viewer/people/people_banner.dart b/mobile/lib/ui/viewer/people/people_banner.dart new file mode 100644 index 0000000000..db242a5230 --- /dev/null +++ b/mobile/lib/ui/viewer/people/people_banner.dart @@ -0,0 +1,132 @@ +import "package:flutter/material.dart"; +import "package:flutter_animate/flutter_animate.dart"; +import "package:photos/ente_theme_data.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/components/buttons/icon_button_widget.dart"; +import "package:photos/ui/viewer/search/result/person_face_widget.dart"; + +enum PeopleBannerType { + addName, + suggestion, +} + +class PeopleBanner extends StatelessWidget { + final PeopleBannerType type; + final IconData? startIcon; + final PersonFaceWidget? faceWidget; + final IconData actionIcon; + final String text; + final String? subText; + final GestureTapCallback onTap; + + const PeopleBanner({ + Key? 
key, + required this.type, + this.startIcon, + this.faceWidget, + required this.actionIcon, + required this.text, + required this.onTap, + this.subText, + }) : super(key: key); + + @override + Widget build(BuildContext context) { + final colorScheme = getEnteColorScheme(context); + final textTheme = getEnteTextTheme(context); + final backgroundColor = colorScheme.backgroundElevated2; + final TextStyle mainTextStyle = textTheme.bodyBold; + final TextStyle subTextStyle = textTheme.miniMuted; + late final Widget startWidget; + late final bool roundedActionIcon; + switch (type) { + case PeopleBannerType.suggestion: + assert(startIcon != null); + startWidget = Padding( + padding: + const EdgeInsets.only(top: 10, bottom: 10, left: 6, right: 4), + child: Icon( + startIcon!, + size: 40, + color: colorScheme.primary500, + ), + ); + roundedActionIcon = true; + break; + case PeopleBannerType.addName: + assert(faceWidget != null); + startWidget = SizedBox( + width: 56, + height: 56, + child: ClipRRect( + borderRadius: const BorderRadius.all( + Radius.circular(4), + ), + child: faceWidget!, + ), + ); + roundedActionIcon = false; + } + + return RepaintBoundary( + child: Center( + child: GestureDetector( + onTap: onTap, + child: Container( + decoration: BoxDecoration( + boxShadow: Theme.of(context).colorScheme.enteTheme.shadowMenu, + color: backgroundColor, + ), + child: Padding( + padding: const EdgeInsets.symmetric(horizontal: 2, vertical: 2), + child: Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + startWidget, + const SizedBox(width: 12), + Expanded( + child: Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + text, + style: mainTextStyle, + textAlign: TextAlign.left, + ), + subText != null + ? const SizedBox(height: 6) + : const SizedBox.shrink(), + subText != null + ? 
Text( + subText!, + style: subTextStyle, + ) + : const SizedBox.shrink(), + ], + ), + ), + const SizedBox(width: 12), + IconButtonWidget( + icon: actionIcon, + iconButtonType: IconButtonType.primary, + iconColor: colorScheme.strokeBase, + defaultColor: colorScheme.fillFaint, + pressedColor: colorScheme.fillMuted, + roundedIcon: roundedActionIcon, + onTap: onTap, + ), + const SizedBox(width: 6), + ], + ), + ), + ), + ), + ).animate(onPlay: (controller) => controller.repeat()).shimmer( + duration: 1000.ms, + delay: 3200.ms, + size: 0.6, + ), + ); + } +} diff --git a/mobile/lib/ui/viewer/people/people_page.dart b/mobile/lib/ui/viewer/people/people_page.dart new file mode 100644 index 0000000000..8b399ced0d --- /dev/null +++ b/mobile/lib/ui/viewer/people/people_page.dart @@ -0,0 +1,215 @@ +import "dart:async"; +import "dart:developer"; + +import 'package:flutter/material.dart'; +import "package:logging/logging.dart"; +import 'package:photos/core/event_bus.dart'; +import 'package:photos/events/files_updated_event.dart'; +import 'package:photos/events/local_photos_updated_event.dart'; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/face/model/person.dart"; +import 'package:photos/models/file/file.dart'; +import 'package:photos/models/file_load_result.dart'; +import 'package:photos/models/gallery_type.dart'; +import 'package:photos/models/selected_files.dart'; +import "package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart"; +import "package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart"; +import "package:photos/services/search_service.dart"; +import 'package:photos/ui/viewer/actions/file_selection_overlay_bar.dart'; +import 'package:photos/ui/viewer/gallery/gallery.dart'; +import "package:photos/ui/viewer/people/people_app_bar.dart"; +import "package:photos/ui/viewer/people/people_banner.dart"; +import "package:photos/ui/viewer/people/person_cluster_suggestion.dart"; + 
+class PeoplePage extends StatefulWidget { + final String tagPrefix; + final PersonEntity person; + + static const GalleryType appBarType = GalleryType.peopleTag; + static const GalleryType overlayType = GalleryType.peopleTag; + + const PeoplePage({ + this.tagPrefix = "", + required this.person, + Key? key, + }) : super(key: key); + + @override + State createState() => _PeoplePageState(); +} + +class _PeoplePageState extends State { + final Logger _logger = Logger("_PeoplePageState"); + final _selectedFiles = SelectedFiles(); + List? files; + int? smallestClusterSize; + Future> filesFuture = Future.value([]); + + bool get showSuggestionBanner => (!userDismissedSuggestionBanner && + smallestClusterSize != null && + smallestClusterSize! >= kMinimumClusterSizeSearchResult && + files != null && + files!.isNotEmpty && + files!.length > 200); + + bool userDismissedSuggestionBanner = false; + + late final StreamSubscription _filesUpdatedEvent; + late final StreamSubscription _peopleChangedEvent; + + @override + void initState() { + super.initState(); + ClusterFeedbackService.resetLastViewedClusterID(); + _peopleChangedEvent = Bus.instance.on().listen((event) { + setState(() {}); + }); + + filesFuture = loadPersonFiles(); + + _filesUpdatedEvent = + Bus.instance.on().listen((event) { + if (event.type == EventType.deletedFromDevice || + event.type == EventType.deletedFromEverywhere || + event.type == EventType.deletedFromRemote || + event.type == EventType.hide) { + for (var updatedFile in event.updatedFiles) { + files?.remove(updatedFile); + } + setState(() {}); + } + }); + } + + Future> loadPersonFiles() async { + log("loadPersonFiles"); + final result = await SearchService.instance + .getClusterFilesForPersonID(widget.person.remoteID); + smallestClusterSize = result.values.fold(result.values.first.length, + (previousValue, element) { + return element.length < previousValue ? 
element.length : previousValue; + }); + final List resultFiles = []; + for (final e in result.entries) { + resultFiles.addAll(e.value); + } + final List sortedFiles = List.from(resultFiles); + sortedFiles.sort((a, b) => b.creationTime!.compareTo(a.creationTime!)); + files = sortedFiles; + return sortedFiles; + } + + @override + void dispose() { + _filesUpdatedEvent.cancel(); + _peopleChangedEvent.cancel(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + _logger.info("Building for ${widget.person.data.name}"); + return Scaffold( + appBar: PreferredSize( + preferredSize: const Size.fromHeight(50.0), + child: PeopleAppBar( + GalleryType.peopleTag, + widget.person.data.name, + _selectedFiles, + widget.person, + ), + ), + body: FutureBuilder>( + future: filesFuture, + builder: (context, snapshot) { + if (snapshot.hasData) { + final personFiles = snapshot.data as List; + return Column( + children: [ + Expanded( + child: Stack( + alignment: Alignment.bottomCenter, + children: [ + Gallery( + asyncLoader: ( + creationStartTime, + creationEndTime, { + limit, + asc, + }) async { + final result = await loadPersonFiles(); + return Future.value( + FileLoadResult( + result, + false, + ), + ); + }, + reloadEvent: Bus.instance.on(), + forceReloadEvents: [ + Bus.instance.on(), + ], + removalEventTypes: const { + EventType.deletedFromRemote, + EventType.deletedFromEverywhere, + EventType.hide, + }, + tagPrefix: widget.tagPrefix + widget.tagPrefix, + selectedFiles: _selectedFiles, + initialFiles: + personFiles.isNotEmpty ? [personFiles.first] : [], + ), + FileSelectionOverlayBar( + PeoplePage.overlayType, + _selectedFiles, + person: widget.person, + ), + ], + ), + ), + showSuggestionBanner + ? 
Dismissible( + key: const Key("suggestionBanner"), + direction: DismissDirection.horizontal, + onDismissed: (direction) { + setState(() { + userDismissedSuggestionBanner = true; + }); + }, + child: PeopleBanner( + type: PeopleBannerType.suggestion, + startIcon: Icons.face_retouching_natural, + actionIcon: Icons.search_outlined, + text: "Review suggestions", + subText: "Improve the results", + onTap: () async { + unawaited( + Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => + PersonReviewClusterSuggestion( + widget.person, + ), + ), + ), + ); + }, + ), + ) + : const SizedBox.shrink(), + ], + ); + } else if (snapshot.hasError) { + log("Error: ${snapshot.error} ${snapshot.stackTrace}}"); + //Need to show an error on the UI here + return const SizedBox.shrink(); + } else { + return const Center( + child: CircularProgressIndicator(), + ); + } + }, + ), + ); + } +} diff --git a/mobile/lib/ui/viewer/people/person_cluster_suggestion.dart b/mobile/lib/ui/viewer/people/person_cluster_suggestion.dart new file mode 100644 index 0000000000..2a904720bb --- /dev/null +++ b/mobile/lib/ui/viewer/people/person_cluster_suggestion.dart @@ -0,0 +1,452 @@ +import "dart:async" show StreamSubscription, unawaited; +import "dart:math"; +import "dart:typed_data"; + +import "package:flutter/foundation.dart" show kDebugMode; +import "package:flutter/material.dart"; +import "package:photos/core/event_bus.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/models/file/file.dart"; +import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart'; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/components/buttons/button_widget.dart"; +import "package:photos/ui/components/models/button_type.dart"; +// import "package:photos/ui/viewer/people/add_person_action_sheet.dart"; +import 
"package:photos/ui/viewer/people/cluster_page.dart"; +import "package:photos/ui/viewer/people/person_clusters_page.dart"; +import "package:photos/ui/viewer/search/result/person_face_widget.dart"; + +class PersonReviewClusterSuggestion extends StatefulWidget { + final PersonEntity person; + + const PersonReviewClusterSuggestion( + this.person, { + super.key, + }); + + @override + State createState() => _PersonClustersState(); +} + +class _PersonClustersState extends State { + int currentSuggestionIndex = 0; + bool fetch = true; + Key futureBuilderKeySuggestions = UniqueKey(); + Key futureBuilderKeyFaceThumbnails = UniqueKey(); + bool canGiveFeedback = true; + + // Declare a variable for the future + late Future> futureClusterSuggestions; + late StreamSubscription _peopleChangedEvent; + + @override + void initState() { + super.initState(); + // Initialize the future in initState + if (fetch) _fetchClusterSuggestions(); + fetch = true; + } + + @override + void dispose() { + _peopleChangedEvent.cancel(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Review suggestions'), + actions: [ + IconButton( + icon: const Icon(Icons.history_outlined), + onPressed: () { + Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => PersonClustersPage(widget.person), + ), + ); + }, + ), + ], + ), + body: FutureBuilder>( + key: futureBuilderKeySuggestions, + future: futureClusterSuggestions, + builder: (context, snapshot) { + if (snapshot.hasData) { + if (snapshot.data!.isEmpty) { + return Center( + child: Text( + "No suggestions for ${widget.person.data.name}", + style: getEnteTextTheme(context).largeMuted, + ), + ); + } + + final allSuggestions = snapshot.data!; + final numberOfDifferentSuggestions = allSuggestions.length; + final currentSuggestion = allSuggestions[currentSuggestionIndex]; + final int clusterID = currentSuggestion.clusterIDToMerge; + final double distance = 
currentSuggestion.distancePersonToCluster; + final bool usingMean = currentSuggestion.usedOnlyMeanForSuggestion; + final List files = currentSuggestion.filesInCluster; + + final Future> generateFacedThumbnails = + _generateFaceThumbnails( + files.sublist(0, min(files.length, 8)), + clusterID, + ); + + _peopleChangedEvent = + Bus.instance.on().listen((event) { + if (event.type == PeopleEventType.removedFilesFromCluster && + (event.source == clusterID.toString())) { + for (var updatedFile in event.relevantFiles!) { + files.remove(updatedFile); + } + fetch = false; + setState(() {}); + } + }); + return InkWell( + onTap: () { + final List sortedFiles = + List.from(currentSuggestion.filesInCluster); + sortedFiles.sort( + (a, b) => b.creationTime!.compareTo(a.creationTime!), + ); + Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + sortedFiles, + personID: widget.person, + clusterID: clusterID, + showNamingBanner: false, + ), + ), + ); + }, + child: Container( + padding: const EdgeInsets.symmetric( + horizontal: 8.0, + vertical: 20, + ), + child: _buildSuggestionView( + clusterID, + distance, + usingMean, + files, + numberOfDifferentSuggestions, + allSuggestions, + generateFacedThumbnails, + ), + ), + ); + } else if (snapshot.hasError) { + // log the error + return const Center(child: Text("Error")); + } else { + return const Center(child: CircularProgressIndicator()); + } + }, + ), + ); + } + + Future _handleUserClusterChoice( + int clusterID, + bool yesOrNo, + int numberOfSuggestions, + ) async { + // Perform the action based on clusterID, e.g., assignClusterToPerson or captureNotPersonFeedback + if (!canGiveFeedback) { + return; + } + if (yesOrNo) { + canGiveFeedback = false; + await FaceMLDataDB.instance.assignClusterToPerson( + personID: widget.person.remoteID, + clusterID: clusterID, + ); + Bus.instance.fire(PeopleChangedEvent()); + // Increment the suggestion index + if (mounted) { + setState(() => currentSuggestionIndex++); + 
} + + // Check if we need to fetch new data + if (currentSuggestionIndex >= (numberOfSuggestions)) { + setState(() { + currentSuggestionIndex = 0; + futureBuilderKeySuggestions = + UniqueKey(); // Reset to trigger FutureBuilder + futureBuilderKeyFaceThumbnails = UniqueKey(); + _fetchClusterSuggestions(); + }); + } else { + futureBuilderKeyFaceThumbnails = UniqueKey(); + fetch = false; + setState(() {}); + } + } else { + await _rejectSuggestion(clusterID, numberOfSuggestions); + } + } + + Future _rejectSuggestion( + int clusterID, + int numberOfSuggestions, + ) async { + canGiveFeedback = false; + await FaceMLDataDB.instance.captureNotPersonFeedback( + personID: widget.person.remoteID, + clusterID: clusterID, + ); + // Recalculate the suggestions when a suggestion is rejected + setState(() { + currentSuggestionIndex = 0; + futureBuilderKeySuggestions = + UniqueKey(); // Reset to trigger FutureBuilder + futureBuilderKeyFaceThumbnails = UniqueKey(); + _fetchClusterSuggestions(); + }); + } + + // Method to fetch cluster suggestions + void _fetchClusterSuggestions() { + futureClusterSuggestions = + ClusterFeedbackService.instance.getSuggestionForPerson(widget.person); + } + + Widget _buildSuggestionView( + int clusterID, + double distance, + bool usingMean, + List files, + int numberOfSuggestions, + List allSuggestions, + Future> generateFaceThumbnails, + ) { + final widgetToReturn = Column( + key: ValueKey("cluster_id-$clusterID-files-${files.length}"), + children: [ + if (kDebugMode) + Text( + "ClusterID: $clusterID, Distance: ${distance.toStringAsFixed(3)}, usingMean: $usingMean", + style: getEnteTextTheme(context).smallMuted, + ), + Text( + // TODO: come up with a better copy for strings below! 
+ "${widget.person.data.name}?", + style: getEnteTextTheme(context).largeMuted, + ), + const SizedBox(height: 24), + _buildThumbnailWidget( + files, + clusterID, + generateFaceThumbnails, + ), + const SizedBox( + height: 24.0, + ), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 24.0), + child: Row( + children: [ + Expanded( + child: ButtonWidget( + buttonType: ButtonType.critical, + labelText: 'No', + buttonSize: ButtonSize.large, + onTap: () async => { + await _handleUserClusterChoice( + clusterID, + false, + numberOfSuggestions, + ), + }, + ), + ), + const SizedBox(width: 12.0), + Expanded( + child: ButtonWidget( + buttonType: ButtonType.primary, + labelText: 'Yes', + buttonSize: ButtonSize.large, + onTap: () async => { + await _handleUserClusterChoice( + clusterID, + true, + numberOfSuggestions, + ), + }, + ), + ), + ], + ), + ), + // const SizedBox( + // height: 24.0, + // ), + // ButtonWidget( + // shouldSurfaceExecutionStates: false, + // buttonType: ButtonType.neutral, + // labelText: 'Assign different person', + // buttonSize: ButtonSize.small, + // onTap: () async { + // final result = await showAssignPersonAction( + // context, + // clusterID: clusterID, + // ); + // if (result != null && + // (result is (PersonEntity, EnteFile) || + // result is PersonEntity)) { + // await _rejectSuggestion(clusterID, numberOfSuggestions); + // } + // }, + // ), + ], + ); + // Precompute face thumbnails for next suggestions, in case there are + const precomputeSuggestions = 8; + const maxPrecomputations = 8; + int compCount = 0; + if (allSuggestions.length > currentSuggestionIndex + 1) { + outerLoop: + for (final suggestion in allSuggestions.sublist( + currentSuggestionIndex + 1, + min( + allSuggestions.length, + currentSuggestionIndex + precomputeSuggestions, + ), + )) { + final files = suggestion.filesInCluster; + final clusterID = suggestion.clusterIDToMerge; + for (final file in files.sublist(0, min(files.length, 8))) { + unawaited( + 
PersonFaceWidget.precomputeNextFaceCrops( + file, + clusterID, + useFullFile: false, + ), + ); + compCount++; + if (compCount >= maxPrecomputations) { + debugPrint( + 'Prefetching $compCount face thumbnails for suggestions', + ); + break outerLoop; + } + } + } + } + return widgetToReturn; + } + + Widget _buildThumbnailWidget( + List files, + int clusterID, + Future> generateFaceThumbnails, + ) { + return SizedBox( + height: MediaQuery.of(context).size.height * 0.4, + child: FutureBuilder>( + key: futureBuilderKeyFaceThumbnails, + future: generateFaceThumbnails, + builder: (context, snapshot) { + if (snapshot.hasData) { + final faceThumbnails = snapshot.data!; + canGiveFeedback = true; + return Column( + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.center, + children: _buildThumbnailWidgetsRow( + files, + clusterID, + faceThumbnails, + ), + ), + if (files.length > 4) const SizedBox(height: 24), + if (files.length > 4) + Row( + mainAxisAlignment: MainAxisAlignment.center, + children: _buildThumbnailWidgetsRow( + files, + clusterID, + faceThumbnails, + start: 4, + ), + ), + const SizedBox(height: 24.0), + Text( + "${files.length} photos", + style: getEnteTextTheme(context).body, + ), + ], + ); + } else if (snapshot.hasError) { + // log the error + return const Center(child: Text("Error")); + } else { + canGiveFeedback = false; + return const Center(child: CircularProgressIndicator()); + } + }, + ), + ); + } + + List _buildThumbnailWidgetsRow( + List files, + int cluserId, + Map faceThumbnails, { + int start = 0, + }) { + return List.generate( + min(4, max(0, files.length - start)), + (index) => Padding( + padding: const EdgeInsets.all(8.0), + child: SizedBox( + width: 72, + height: 72, + child: ClipOval( + child: PersonFaceWidget( + files[start + index], + clusterID: cluserId, + useFullFile: false, + thumbnailFallback: false, + faceCrop: faceThumbnails[files[start + index].uploadedFileID!], + ), + ), + ), + ), + ); + } + + Future> _generateFaceThumbnails( 
+ List files, + int clusterID, + ) async { + final futures = >[]; + for (final file in files) { + futures.add( + PersonFaceWidget.precomputeNextFaceCrops( + file, + clusterID, + useFullFile: false, + ), + ); + } + final faceCropsList = await Future.wait(futures); + final faceCrops = {}; + for (var i = 0; i < faceCropsList.length; i++) { + faceCrops[files[i].uploadedFileID!] = faceCropsList[i]; + } + return faceCrops; + } +} diff --git a/mobile/lib/ui/viewer/people/person_clusters_page.dart b/mobile/lib/ui/viewer/people/person_clusters_page.dart new file mode 100644 index 0000000000..2c493fc21f --- /dev/null +++ b/mobile/lib/ui/viewer/people/person_clusters_page.dart @@ -0,0 +1,144 @@ +import "package:flutter/cupertino.dart"; +import "package:flutter/material.dart"; +import "package:logging/logging.dart"; +import "package:photos/core/event_bus.dart"; +import "package:photos/events/people_changed_event.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import "package:photos/services/search_service.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/viewer/file/no_thumbnail_widget.dart"; +// import "package:photos/ui/viewer/file/thumbnail_widget.dart"; +import "package:photos/ui/viewer/people/cluster_page.dart"; +import "package:photos/ui/viewer/search/result/person_face_widget.dart"; + +class PersonClustersPage extends StatefulWidget { + final PersonEntity person; + + const PersonClustersPage( + this.person, { + super.key, + }); + + @override + State createState() => _PersonClustersPageState(); +} + +class _PersonClustersPageState extends State { + final Logger _logger = Logger("_PersonClustersState"); + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text(widget.person.data.name), + ), + body: FutureBuilder>>( + future: SearchService.instance + 
.getClusterFilesForPersonID(widget.person.remoteID), + builder: (context, snapshot) { + if (snapshot.hasData) { + final List keys = snapshot.data!.keys.toList(); + return ListView.builder( + itemCount: keys.length, + itemBuilder: (context, index) { + final int clusterID = keys[index]; + final List files = snapshot.data![keys[index]]!; + return InkWell( + onTap: () { + Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => ClusterPage( + files, + personID: widget.person, + clusterID: index, + ), + ), + ); + }, + child: Container( + padding: const EdgeInsets.all(8.0), + child: Row( + children: [ + SizedBox( + width: 64, + height: 64, + child: files.isNotEmpty + ? ClipRRect( + borderRadius: const BorderRadius.all( + Radius.elliptical(16, 12), + ), + child: PersonFaceWidget( + files.first, + clusterID: clusterID, + ), + ) + : const ClipRRect( + borderRadius: BorderRadius.all( + Radius.elliptical(16, 12), + ), + child: NoThumbnailWidget( + addBorder: false, + ), + ), + ), + const SizedBox( + width: 8.0, + ), // Add some spacing between the thumbnail and the text + Expanded( + child: Padding( + padding: + const EdgeInsets.symmetric(horizontal: 8.0), + child: Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + Text( + "${snapshot.data![keys[index]]!.length} photos", + style: getEnteTextTheme(context).body, + ), + GestureDetector( + onTap: () async { + try { + await PersonService.instance + .removeClusterToPerson( + personID: widget.person.remoteID, + clusterID: clusterID, + ); + _logger.info( + "Removed cluster $clusterID from person ${widget.person.remoteID}", + ); + Bus.instance.fire(PeopleChangedEvent()); + setState(() {}); + } catch (e) { + _logger.severe( + "removing cluster from person,", + e, + ); + } + }, + child: const Icon( + CupertinoIcons.minus_circled, + color: Colors.red, + ), + ), + ], + ), + ), + ), + ], + ), + ), + ); + }, + ); + } else if (snapshot.hasError) { + _logger.warning("Failed to get cluster", 
snapshot.error); + return const Center(child: Text("Error")); + } else { + return const Center(child: CircularProgressIndicator()); + } + }, + ), + ); + } +} diff --git a/mobile/lib/ui/viewer/people/person_row_item.dart b/mobile/lib/ui/viewer/people/person_row_item.dart new file mode 100644 index 0000000000..831fe97298 --- /dev/null +++ b/mobile/lib/ui/viewer/people/person_row_item.dart @@ -0,0 +1,36 @@ +import "package:flutter/material.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/ui/viewer/search/result/person_face_widget.dart"; + +class PersonRowItem extends StatelessWidget { + final PersonEntity person; + final EnteFile personFile; + final VoidCallback onTap; + + const PersonRowItem({ + Key? key, + required this.person, + required this.personFile, + required this.onTap, + }) : super(key: key); + + @override + Widget build(BuildContext context) { + return ListTile( + dense: false, + leading: SizedBox( + width: 56, + height: 56, + child: ClipRRect( + borderRadius: const BorderRadius.all( + Radius.elliptical(16, 12), + ), + child: PersonFaceWidget(personFile, personId: person.remoteID), + ), + ), + title: Text(person.data.name), + onTap: onTap, + ); + } +} diff --git a/mobile/lib/ui/viewer/search/result/no_result_widget.dart b/mobile/lib/ui/viewer/search/result/no_result_widget.dart index 9ebb9cf80d..48ba811df5 100644 --- a/mobile/lib/ui/viewer/search/result/no_result_widget.dart +++ b/mobile/lib/ui/viewer/search/result/no_result_widget.dart @@ -21,7 +21,6 @@ class _NoResultWidgetState extends State { super.initState(); searchTypes = SectionType.values.toList(growable: true); // remove face and content sectionType - searchTypes.remove(SectionType.face); searchTypes.remove(SectionType.content); } diff --git a/mobile/lib/ui/viewer/search/result/person_face_widget.dart b/mobile/lib/ui/viewer/search/result/person_face_widget.dart new file mode 100644 index 0000000000..8be99e5f6e --- 
/dev/null +++ b/mobile/lib/ui/viewer/search/result/person_face_widget.dart @@ -0,0 +1,270 @@ +import "dart:developer"; +// import "dart:io"; +import "dart:typed_data"; + +import 'package:flutter/widgets.dart'; +import "package:photos/db/files_db.dart"; +import "package:photos/face/db.dart"; +import "package:photos/face/model/face.dart"; +import "package:photos/face/model/person.dart"; +import 'package:photos/models/file/file.dart'; +import "package:photos/services/machine_learning/face_ml/person/person_service.dart"; +import "package:photos/ui/common/loading_widget.dart"; +import "package:photos/ui/viewer/file/thumbnail_widget.dart"; +import "package:photos/ui/viewer/file_details/face_widget.dart"; +import "package:photos/ui/viewer/people/cropped_face_image_view.dart"; +import "package:photos/utils/face/face_box_crop.dart"; +import "package:photos/utils/thumbnail_util.dart"; +import "package:pool/pool.dart"; + +class PersonFaceWidget extends StatelessWidget { + final EnteFile file; + final String? personId; + final int? clusterID; + final bool useFullFile; + final bool thumbnailFallback; + final Uint8List? faceCrop; + + // PersonFaceWidget constructor checks that both personId and clusterID are not null + // and that the file is not null + const PersonFaceWidget( + this.file, { + this.personId, + this.clusterID, + this.useFullFile = true, + this.thumbnailFallback = true, + this.faceCrop, + Key? 
key, + }) : assert( + personId != null || clusterID != null, + "PersonFaceWidget requires either personId or clusterID to be non-null", + ), + super(key: key); + + @override + Widget build(BuildContext context) { + if (faceCrop != null) { + return Stack( + fit: StackFit.expand, + children: [ + Image( + image: MemoryImage(faceCrop!), + fit: BoxFit.cover, + ), + ], + ); + } + if (useGeneratedFaceCrops) { + return FutureBuilder( + future: getFaceCrop(), + builder: (context, snapshot) { + if (snapshot.hasData) { + final ImageProvider imageProvider = MemoryImage(snapshot.data!); + return Stack( + fit: StackFit.expand, + children: [ + Image( + image: imageProvider, + fit: BoxFit.cover, + ), + ], + ); + } else { + if (snapshot.hasError) { + log('Error getting cover face for person: ${snapshot.error}'); + } + return thumbnailFallback + ? ThumbnailWidget(file) + : const EnteLoadingWidget(); + } + }, + ); + } else { + return FutureBuilder( + future: _getFace(), + builder: (context, snapshot) { + if (snapshot.hasData) { + final Face face = snapshot.data!; + return Stack( + fit: StackFit.expand, + children: [ + CroppedFaceImageView(enteFile: file, face: face), + ], + ); + } else { + if (snapshot.hasError) { + log('Error getting cover face for person: ${snapshot.error}'); + } + return thumbnailFallback + ? ThumbnailWidget(file) + : const EnteLoadingWidget(); + } + }, + ); + } + } + + Future _getFace() async { + String? personAvatarFaceID; + if (personId != null) { + final PersonEntity? personEntity = + await PersonService.instance.getPerson(personId!); + if (personEntity != null) { + personAvatarFaceID = personEntity.data.avatarFaceId; + } + } + return await FaceMLDataDB.instance.getCoverFaceForPerson( + recentFileID: file.uploadedFileID!, + avatarFaceId: personAvatarFaceID, + personID: personId, + clusterID: clusterID, + ); + } + + Future getFaceCrop() async { + try { + final Face? 
face = await _getFace(); + if (face == null) { + debugPrint( + "No cover face for person: $personId and cluster $clusterID and recentFile ${file.uploadedFileID}", + ); + return null; + } + final Uint8List? cachedFace = faceCropCache.get(face.faceID); + if (cachedFace != null) { + return cachedFace; + } + final faceCropCacheFile = cachedFaceCropPath(face.faceID); + if ((await faceCropCacheFile.exists())) { + final data = await faceCropCacheFile.readAsBytes(); + faceCropCache.put(face.faceID, data); + return data; + } + if (!useFullFile) { + final Uint8List? cachedFaceThumbnail = + faceCropThumbnailCache.get(face.faceID); + if (cachedFaceThumbnail != null) { + return cachedFaceThumbnail; + } + } + EnteFile? fileForFaceCrop = file; + if (face.fileID != file.uploadedFileID!) { + fileForFaceCrop = + await FilesDB.instance.getAnyUploadedFile(face.fileID); + } + if (fileForFaceCrop == null) { + return null; + } + + late final Pool relevantResourcePool; + if (useFullFile) { + relevantResourcePool = poolFullFileFaceGenerations; + } else { + relevantResourcePool = poolThumbnailFaceGenerations; + } + final result = await relevantResourcePool.withResource( + () async => await getFaceCrops( + fileForFaceCrop!, + { + face.faceID: face.detection.box, + }, + useFullFile: useFullFile, + ), + ); + final Uint8List? computedCrop = result?[face.faceID]; + if (computedCrop != null) { + if (useFullFile) { + faceCropCache.put(face.faceID, computedCrop); + faceCropCacheFile.writeAsBytes(computedCrop).ignore(); + } else { + faceCropThumbnailCache.put(face.faceID, computedCrop); + } + } + return computedCrop; + } catch (e, s) { + log( + "Error getting cover face for person: $personId and cluster $clusterID", + error: e, + stackTrace: s, + ); + return null; + } + } + + static Future precomputeNextFaceCrops( + file, + clusterID, { + required bool useFullFile, + }) async { + try { + final Face? 
face = await FaceMLDataDB.instance.getCoverFaceForPerson( + recentFileID: file.uploadedFileID!, + clusterID: clusterID, + ); + if (face == null) { + debugPrint( + "No cover face for cluster $clusterID and recentFile ${file.uploadedFileID}", + ); + return null; + } + final Uint8List? cachedFace = faceCropCache.get(face.faceID); + if (cachedFace != null) { + return cachedFace; + } + final faceCropCacheFile = cachedFaceCropPath(face.faceID); + if ((await faceCropCacheFile.exists())) { + final data = await faceCropCacheFile.readAsBytes(); + faceCropCache.put(face.faceID, data); + return data; + } + if (!useFullFile) { + final Uint8List? cachedFaceThumbnail = + faceCropThumbnailCache.get(face.faceID); + if (cachedFaceThumbnail != null) { + return cachedFaceThumbnail; + } + } + EnteFile? fileForFaceCrop = file; + if (face.fileID != file.uploadedFileID!) { + fileForFaceCrop = + await FilesDB.instance.getAnyUploadedFile(face.fileID); + } + if (fileForFaceCrop == null) { + return null; + } + + late final Pool relevantResourcePool; + if (useFullFile) { + relevantResourcePool = poolFullFileFaceGenerations; + } else { + relevantResourcePool = poolThumbnailFaceGenerations; + } + final result = await relevantResourcePool.withResource( + () async => await getFaceCrops( + fileForFaceCrop!, + { + face.faceID: face.detection.box, + }, + useFullFile: useFullFile, + ), + ); + final Uint8List? 
computedCrop = result?[face.faceID]; + if (computedCrop != null) { + if (useFullFile) { + faceCropCache.put(face.faceID, computedCrop); + faceCropCacheFile.writeAsBytes(computedCrop).ignore(); + } else { + faceCropThumbnailCache.put(face.faceID, computedCrop); + } + } + return computedCrop; + } catch (e, s) { + log( + "Error getting cover face for cluster $clusterID", + error: e, + stackTrace: s, + ); + return null; + } + } +} diff --git a/mobile/lib/ui/viewer/search/result/search_result_widget.dart b/mobile/lib/ui/viewer/search/result/search_result_widget.dart index 5564af7c9c..fbd77531a8 100644 --- a/mobile/lib/ui/viewer/search/result/search_result_widget.dart +++ b/mobile/lib/ui/viewer/search/result/search_result_widget.dart @@ -13,12 +13,14 @@ class SearchResultWidget extends StatelessWidget { final SearchResult searchResult; final Future? resultCount; final Function? onResultTap; + final Map? params; const SearchResultWidget( this.searchResult, { Key? key, this.resultCount, this.onResultTap, + this.params, }) : super(key: key); @override @@ -42,6 +44,7 @@ class SearchResultWidget extends StatelessWidget { SearchThumbnailWidget( searchResult.previewThumbnail(), heroTagPrefix, + searchResult: searchResult, ), const SizedBox(width: 12), Padding( @@ -143,6 +146,8 @@ class SearchResultWidget extends StatelessWidget { return "Magic"; case ResultType.shared: return "Shared"; + case ResultType.faces: + return "Person"; default: return type.name.toUpperCase(); } diff --git a/mobile/lib/ui/viewer/search/result/search_section_all_page.dart b/mobile/lib/ui/viewer/search/result/search_section_all_page.dart index 59761009af..17dea1f84f 100644 --- a/mobile/lib/ui/viewer/search/result/search_section_all_page.dart +++ b/mobile/lib/ui/viewer/search/result/search_section_all_page.dart @@ -1,5 +1,6 @@ import "dart:async"; +import "package:collection/collection.dart"; import "package:flutter/material.dart"; import "package:flutter_animate/flutter_animate.dart"; import 
"package:photos/events/event.dart"; @@ -109,7 +110,12 @@ class _SearchSectionAllPageState extends State { builder: (context, snapshot) { if (snapshot.hasData) { List sectionResults = snapshot.data!; - sectionResults.sort((a, b) => a.name().compareTo(b.name())); + if (widget.sectionType.sortByName) { + sectionResults.sort( + (a, b) => + compareAsciiLowerCaseNatural(b.name(), a.name()), + ); + } if (widget.sectionType == SectionType.location) { final result = sectionResults.splitMatch( (e) => e.type() == ResultType.location, diff --git a/mobile/lib/ui/viewer/search/result/search_thumbnail_widget.dart b/mobile/lib/ui/viewer/search/result/search_thumbnail_widget.dart index 13b303fecc..514c65b996 100644 --- a/mobile/lib/ui/viewer/search/result/search_thumbnail_widget.dart +++ b/mobile/lib/ui/viewer/search/result/search_thumbnail_widget.dart @@ -1,15 +1,22 @@ import 'package:flutter/widgets.dart'; import 'package:photos/models/file/file.dart'; +import "package:photos/models/search/generic_search_result.dart"; +import "package:photos/models/search/search_constants.dart"; +import "package:photos/models/search/search_result.dart"; +import "package:photos/models/search/search_types.dart"; import 'package:photos/ui/viewer/file/no_thumbnail_widget.dart'; import 'package:photos/ui/viewer/file/thumbnail_widget.dart'; +import 'package:photos/ui/viewer/search/result/person_face_widget.dart'; class SearchThumbnailWidget extends StatelessWidget { final EnteFile? file; + final SearchResult? searchResult; final String tagPrefix; const SearchThumbnailWidget( this.file, this.tagPrefix, { + this.searchResult, Key? key, }) : super(key: key); @@ -23,9 +30,18 @@ class SearchThumbnailWidget extends StatelessWidget { child: ClipRRect( borderRadius: const BorderRadius.horizontal(left: Radius.circular(4)), child: file != null - ? ThumbnailWidget( - file!, - ) + ? (searchResult != null && + searchResult!.type() == ResultType.faces) + ? 
PersonFaceWidget( + file!, + personId: (searchResult as GenericSearchResult) + .params[kPersonParamID], + clusterID: (searchResult as GenericSearchResult) + .params[kClusterParamId], + ) + : ThumbnailWidget( + file!, + ) : const NoThumbnailWidget( addBorder: false, ), diff --git a/mobile/lib/ui/viewer/search/result/searchable_item.dart b/mobile/lib/ui/viewer/search/result/searchable_item.dart index 1124d925e2..f8e2ed1acb 100644 --- a/mobile/lib/ui/viewer/search/result/searchable_item.dart +++ b/mobile/lib/ui/viewer/search/result/searchable_item.dart @@ -30,6 +30,8 @@ class SearchableItemWidget extends StatelessWidget { final heroTagPrefix = additionalPrefix + searchResult.heroTag(); final textTheme = getEnteTextTheme(context); final colorScheme = getEnteColorScheme(context); + final bool isCluster = (searchResult.type() == ResultType.faces && + int.tryParse(searchResult.name()) != null); return GestureDetector( onTap: () { @@ -66,6 +68,7 @@ class SearchableItemWidget extends StatelessWidget { child: SearchThumbnailWidget( searchResult.previewThumbnail(), heroTagPrefix, + searchResult: searchResult, ), ), const SizedBox(width: 12), @@ -75,14 +78,16 @@ class SearchableItemWidget extends StatelessWidget { child: Column( crossAxisAlignment: CrossAxisAlignment.start, children: [ - Text( - searchResult.name(), - style: searchResult.type() == - ResultType.locationSuggestion - ? textTheme.bodyFaint - : textTheme.body, - overflow: TextOverflow.ellipsis, - ), + isCluster + ? const SizedBox.shrink() + : Text( + searchResult.name(), + style: searchResult.type() == + ResultType.locationSuggestion + ? 
textTheme.bodyFaint + : textTheme.body, + overflow: TextOverflow.ellipsis, + ), const SizedBox( height: 2, ), diff --git a/mobile/lib/ui/viewer/search/search_widget.dart b/mobile/lib/ui/viewer/search/search_widget.dart index 1c6c7b693f..c917d60e91 100644 --- a/mobile/lib/ui/viewer/search/search_widget.dart +++ b/mobile/lib/ui/viewer/search/search_widget.dart @@ -2,10 +2,12 @@ import "dart:async"; import "package:flutter/material.dart"; import "package:flutter/scheduler.dart"; +import "package:logging/logging.dart"; import "package:photos/core/event_bus.dart"; import "package:photos/events/clear_and_unfocus_search_bar_event.dart"; import "package:photos/events/tab_changed_event.dart"; import "package:photos/generated/l10n.dart"; +import "package:photos/models/search/generic_search_result.dart"; import "package:photos/models/search/index_of_indexed_stack.dart"; import "package:photos/models/search/search_result.dart"; import "package:photos/services/search_service.dart"; @@ -41,6 +43,7 @@ class SearchWidgetState extends State { TextEditingController textController = TextEditingController(); late final StreamSubscription _clearAndUnfocusSearchBar; + late final Logger _logger = Logger("SearchWidgetState"); @override void initState() { @@ -200,7 +203,7 @@ class SearchWidgetState extends State { String query, ) { int resultCount = 0; - final maxResultCount = _isYearValid(query) ? 11 : 10; + final maxResultCount = _isYearValid(query) ? 13 : 12; final streamController = StreamController>(); if (query.isEmpty) { @@ -215,6 +218,11 @@ class SearchWidgetState extends State { if (resultCount == maxResultCount) { streamController.close(); } + if (resultCount > maxResultCount) { + _logger.warning( + "More results than expected. 
Expected: $maxResultCount, actual: $resultCount", + ); + } } if (_isYearValid(query)) { @@ -252,6 +260,17 @@ class SearchWidgetState extends State { onResultsReceived(locationResult); }, ); + _searchService.getAllFace(null).then( + (locationResult) { + final List filteredResults = []; + for (final result in locationResult) { + if (result.name().toLowerCase().contains(query.toLowerCase())) { + filteredResults.add(result); + } + } + onResultsReceived(filteredResults); + }, + ); _searchService.getCollectionSearchResults(query).then( (collectionResults) { diff --git a/mobile/lib/ui/viewer/search_tab/people_section.dart b/mobile/lib/ui/viewer/search_tab/people_section.dart new file mode 100644 index 0000000000..13e2f8a813 --- /dev/null +++ b/mobile/lib/ui/viewer/search_tab/people_section.dart @@ -0,0 +1,329 @@ +import "dart:async"; + +import "package:collection/collection.dart"; +import "package:flutter/material.dart"; +import "package:photos/core/constants.dart"; +import "package:photos/events/event.dart"; +import "package:photos/face/model/person.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/models/search/album_search_result.dart"; +import "package:photos/models/search/generic_search_result.dart"; +import "package:photos/models/search/recent_searches.dart"; +import "package:photos/models/search/search_constants.dart"; +import "package:photos/models/search/search_result.dart"; +import "package:photos/models/search/search_types.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/settings/machine_learning_settings_page.dart"; +import "package:photos/ui/viewer/file/no_thumbnail_widget.dart"; +import "package:photos/ui/viewer/file/thumbnail_widget.dart"; +import "package:photos/ui/viewer/people/add_person_action_sheet.dart"; +import "package:photos/ui/viewer/people/people_page.dart"; +import 'package:photos/ui/viewer/search/result/person_face_widget.dart'; +import 
"package:photos/ui/viewer/search/result/search_result_page.dart"; +import 'package:photos/ui/viewer/search/result/search_section_all_page.dart'; +import "package:photos/ui/viewer/search/search_section_cta.dart"; +import "package:photos/utils/navigation_util.dart"; + +class PeopleSection extends StatefulWidget { + final SectionType sectionType = SectionType.face; + final List examples; + final int limit; + + const PeopleSection({ + Key? key, + required this.examples, + this.limit = 7, + }) : super(key: key); + + @override + State createState() => _PeopleSectionState(); +} + +class _PeopleSectionState extends State { + late List _examples; + final streamSubscriptions = []; + + @override + void initState() { + super.initState(); + _examples = widget.examples; + + final streamsToListenTo = widget.sectionType.sectionUpdateEvents(); + for (Stream stream in streamsToListenTo) { + streamSubscriptions.add( + stream.listen((event) async { + _examples = await widget.sectionType.getData( + context, + limit: kSearchSectionLimit, + ); + setState(() {}); + }), + ); + } + } + + @override + void dispose() { + for (var subscriptions in streamSubscriptions) { + subscriptions.cancel(); + } + super.dispose(); + } + + @override + void didUpdateWidget(covariant PeopleSection oldWidget) { + super.didUpdateWidget(oldWidget); + _examples = widget.examples; + } + + @override + Widget build(BuildContext context) { + debugPrint("Building section for ${widget.sectionType.name}"); + final shouldShowMore = _examples.length >= widget.limit - 1; + final textTheme = getEnteTextTheme(context); + return _examples.isNotEmpty + ? 
GestureDetector( + behavior: HitTestBehavior.opaque, + onTap: () { + if (shouldShowMore) { + routeToPage( + context, + SearchSectionAllPage( + sectionType: widget.sectionType, + ), + ); + } + }, + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + Padding( + padding: const EdgeInsets.all(12), + child: Text( + widget.sectionType.sectionTitle(context), + style: textTheme.largeBold, + ), + ), + shouldShowMore + ? Padding( + padding: const EdgeInsets.all(12), + child: Icon( + Icons.chevron_right_outlined, + color: getEnteColorScheme(context).strokeMuted, + ), + ) + : const SizedBox.shrink(), + ], + ), + const SizedBox(height: 2), + SearchExampleRow(_examples, widget.sectionType), + ], + ), + ) + : GestureDetector( + behavior: HitTestBehavior.opaque, + onTap: () { + routeToPage( + context, + const MachineLearningSettingsPage(), + ); + }, + child: Padding( + padding: const EdgeInsets.only(left: 16, right: 8), + child: Row( + children: [ + Expanded( + child: Padding( + padding: const EdgeInsets.symmetric(vertical: 12), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + widget.sectionType.sectionTitle(context), + style: textTheme.largeBold, + ), + const SizedBox(height: 24), + Text( + widget.sectionType.getEmptyStateText(context), + style: textTheme.smallMuted, + ), + ], + ), + ), + ), + const SizedBox(width: 8), + SearchSectionEmptyCTAIcon(widget.sectionType), + ], + ), + ), + ); + } +} + +class SearchExampleRow extends StatelessWidget { + final SectionType sectionType; + final List examples; + + const SearchExampleRow(this.examples, this.sectionType, {super.key}); + + @override + Widget build(BuildContext context) { + //Cannot use listView.builder here + final scrollableExamples = []; + examples.forEachIndexed((index, element) { + scrollableExamples.add( + SearchExample( + searchResult: examples.elementAt(index), + ), + ); + }); + 
return SizedBox( + child: SingleChildScrollView( + physics: const BouncingScrollPhysics(), + scrollDirection: Axis.horizontal, + child: Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: scrollableExamples, + ), + ), + ); + } +} + +class SearchExample extends StatelessWidget { + final SearchResult searchResult; + const SearchExample({required this.searchResult, super.key}); + + @override + Widget build(BuildContext context) { + final textScaleFactor = MediaQuery.textScaleFactorOf(context); + final bool isCluster = (searchResult.type() == ResultType.faces && + int.tryParse(searchResult.name()) != null); + late final double width; + if (textScaleFactor <= 1.0) { + width = 85.0; + } else { + width = 85.0 + ((textScaleFactor - 1.0) * 64); + } + final heroTag = + searchResult.heroTag() + (searchResult.previewThumbnail()?.tag ?? ""); + return GestureDetector( + onTap: () { + RecentSearches().add(searchResult.name()); + + if (searchResult is GenericSearchResult) { + final genericSearchResult = searchResult as GenericSearchResult; + if (genericSearchResult.onResultTap != null) { + genericSearchResult.onResultTap!(context); + } else { + routeToPage( + context, + SearchResultPage(searchResult), + ); + } + } else if (searchResult is AlbumSearchResult) { + final albumSearchResult = searchResult as GenericSearchResult; + routeToPage( + context, + SearchResultPage( + albumSearchResult, + tagPrefix: albumSearchResult.heroTag(), + ), + ); + } + }, + child: SizedBox( + width: width, + child: Padding( + padding: const EdgeInsets.only(left: 6, right: 6, top: 8), + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + SizedBox( + width: 64, + height: 64, + child: searchResult.previewThumbnail() != null + ? Hero( + tag: heroTag, + child: ClipRRect( + borderRadius: + const BorderRadius.all(Radius.elliptical(16, 12)), + child: searchResult.type() != ResultType.faces + ? 
ThumbnailWidget( + searchResult.previewThumbnail()!, + shouldShowSyncStatus: false, + ) + : FaceSearchResult(searchResult, heroTag), + ), + ) + : const ClipRRect( + borderRadius: + BorderRadius.all(Radius.elliptical(16, 12)), + child: NoThumbnailWidget( + addBorder: false, + ), + ), + ), + isCluster + ? GestureDetector( + behavior: HitTestBehavior.translucent, + onTap: () async { + final result = await showAssignPersonAction( + context, + clusterID: int.parse(searchResult.name()), + ); + if (result != null && + result is (PersonEntity, EnteFile)) { + // ignore: unawaited_futures + routeToPage(context, PeoplePage(person: result.$1)); + } else if (result != null && result is PersonEntity) { + // ignore: unawaited_futures + routeToPage(context, PeoplePage(person: result)); + } + }, + child: Padding( + padding: const EdgeInsets.only(top: 10, bottom: 16), + child: Text( + "Add name", + maxLines: 1, + textAlign: TextAlign.center, + overflow: TextOverflow.ellipsis, + style: getEnteTextTheme(context).mini, + ), + ), + ) + : Padding( + padding: const EdgeInsets.only(top: 10, bottom: 16), + child: Text( + searchResult.name(), + maxLines: 2, + textAlign: TextAlign.center, + overflow: TextOverflow.ellipsis, + style: getEnteTextTheme(context).mini, + ), + ), + ], + ), + ), + ), + ); + } +} + +class FaceSearchResult extends StatelessWidget { + final SearchResult searchResult; + final String heroTagPrefix; + const FaceSearchResult(this.searchResult, this.heroTagPrefix, {super.key}); + + @override + Widget build(BuildContext context) { + return PersonFaceWidget( + searchResult.previewThumbnail()!, + personId: (searchResult as GenericSearchResult).params[kPersonParamID], + clusterID: (searchResult as GenericSearchResult).params[kClusterParamId], + ); + } +} diff --git a/mobile/lib/ui/viewer/search_tab/search_tab.dart b/mobile/lib/ui/viewer/search_tab/search_tab.dart index bfb35600a5..46dcfda036 100644 --- a/mobile/lib/ui/viewer/search_tab/search_tab.dart +++ 
b/mobile/lib/ui/viewer/search_tab/search_tab.dart @@ -1,9 +1,12 @@ import "package:fade_indexed_stack/fade_indexed_stack.dart"; +import "package:flutter/foundation.dart"; import "package:flutter/material.dart"; import "package:flutter_animate/flutter_animate.dart"; +import "package:logging/logging.dart"; import "package:photos/models/search/album_search_result.dart"; import "package:photos/models/search/generic_search_result.dart"; import "package:photos/models/search/index_of_indexed_stack.dart"; +import "package:photos/models/search/search_result.dart"; import "package:photos/models/search/search_types.dart"; import "package:photos/states/all_sections_examples_state.dart"; import "package:photos/ui/common/loading_widget.dart"; @@ -16,6 +19,8 @@ import "package:photos/ui/viewer/search_tab/descriptions_section.dart"; import "package:photos/ui/viewer/search_tab/file_type_section.dart"; import "package:photos/ui/viewer/search_tab/locations_section.dart"; import "package:photos/ui/viewer/search_tab/moments_section.dart"; +import "package:photos/ui/viewer/search_tab/people_section.dart"; +import "package:photos/utils/local_settings.dart"; class SearchTab extends StatefulWidget { const SearchTab({Key? 
key}) : super(key: key); @@ -73,17 +78,17 @@ class AllSearchSections extends StatefulWidget { } class _AllSearchSectionsState extends State { + final Logger _logger = Logger('_AllSearchSectionsState'); @override Widget build(BuildContext context) { final searchTypes = SectionType.values.toList(growable: true); - // remove face and content sectionType - searchTypes.remove(SectionType.face); searchTypes.remove(SectionType.content); + return Padding( padding: const EdgeInsets.only(top: 8), child: Stack( children: [ - FutureBuilder( + FutureBuilder>>( future: InheritedAllSectionsExamples.of(context) .allSectionsExamplesFuture, builder: (context, snapshot) { @@ -94,6 +99,14 @@ class _AllSearchSectionsState extends State { child: SearchTabEmptyState(), ); } + if (snapshot.data!.length != searchTypes.length) { + return Padding( + padding: const EdgeInsets.only(bottom: 72), + child: Text( + 'Sections length mismatch: ${snapshot.data!.length} != ${searchTypes.length}', + ), + ); + } return ListView.builder( padding: const EdgeInsets.only(bottom: 180), physics: const BouncingScrollPhysics(), @@ -101,7 +114,16 @@ class _AllSearchSectionsState extends State { // ignore: body_might_complete_normally_nullable itemBuilder: (context, index) { switch (searchTypes[index]) { + case SectionType.face: + if (!LocalSettings.instance.isFaceIndexingEnabled) { + return const SizedBox.shrink(); + } + return PeopleSection( + examples: snapshot.data!.elementAt(index) + as List, + ); case SectionType.album: + // return const SizedBox.shrink(); return AlbumsSection( snapshot.data!.elementAt(index) as List, @@ -150,6 +172,17 @@ class _AllSearchSectionsState extends State { curve: Curves.easeOut, ); } else if (snapshot.hasError) { + _logger.severe( + 'Failed to load sections: ', + snapshot.error, + snapshot.stackTrace, + ); + if (kDebugMode) { + return Padding( + padding: const EdgeInsets.only(bottom: 72), + child: Text('Error: ${snapshot.error}'), + ); + } //Errors are handled and this else if 
condition will be false always //is the understanding. return const Padding( diff --git a/mobile/lib/utils/debug_ml_export_data.dart b/mobile/lib/utils/debug_ml_export_data.dart new file mode 100644 index 0000000000..f7a5e96460 --- /dev/null +++ b/mobile/lib/utils/debug_ml_export_data.dart @@ -0,0 +1,40 @@ +import "dart:convert"; +import "dart:developer" show log; +import "dart:io"; + +import "package:path_provider/path_provider.dart"; + +Future encodeAndSaveData( + dynamic nestedData, + String fileName, [ + String? service, +]) async { + // Convert map keys to strings if nestedData is a map + final dataToEncode = nestedData is Map + ? nestedData.map((key, value) => MapEntry(key.toString(), value)) + : nestedData; + // Step 1: Serialize Your Data + final String jsonData = jsonEncode(dataToEncode); + + // Step 2: Encode the JSON String to Base64 + // final String base64String = base64Encode(utf8.encode(jsonData)); + + // Step 3 & 4: Write the Base64 String to a File and Execute the Function + try { + final File file = await _writeStringToFile(jsonData, fileName); + // Success, handle the file, e.g., print the file path + log('[$service]: File saved at ${file.path}'); + } catch (e) { + // If an error occurs, handle it. 
+ log('[$service]: Error saving file: $e'); + } +} + +Future _writeStringToFile( + String dataString, + String fileName, +) async { + final directory = await getExternalStorageDirectory(); + final file = File('${directory!.path}/$fileName.json'); + return file.writeAsString(dataString); +} diff --git a/mobile/lib/utils/dialog_util.dart b/mobile/lib/utils/dialog_util.dart index f6e9eb021c..d57a6990a8 100644 --- a/mobile/lib/utils/dialog_util.dart +++ b/mobile/lib/utils/dialog_util.dart @@ -109,7 +109,11 @@ String parseErrorForUI( errorInfo = "Reason: " + dioError.type.toString(); } } else { - errorInfo = error.toString().split('Source stack')[0]; + if (kDebugMode) { + errorInfo = error.toString(); + } else { + errorInfo = error.toString().split('Source stack')[0]; + } } if (errorInfo.isNotEmpty) { return "$genericError\n\n$errorInfo"; diff --git a/mobile/lib/utils/face/face_box_crop.dart b/mobile/lib/utils/face/face_box_crop.dart new file mode 100644 index 0000000000..281c0ef495 --- /dev/null +++ b/mobile/lib/utils/face/face_box_crop.dart @@ -0,0 +1,56 @@ +import "dart:io" show File; + +import "package:flutter/foundation.dart"; +import "package:photos/core/cache/lru_map.dart"; +import "package:photos/face/model/box.dart"; +import "package:photos/models/file/file.dart"; +import "package:photos/models/file/file_type.dart"; +// import "package:photos/utils/face/face_util.dart"; +import "package:photos/utils/file_util.dart"; +import "package:photos/utils/image_ml_isolate.dart"; +import "package:photos/utils/thumbnail_util.dart"; +import "package:pool/pool.dart"; + +final LRUMap faceCropCache = LRUMap(1000); +final LRUMap faceCropThumbnailCache = LRUMap(1000); +final poolFullFileFaceGenerations = + Pool(20, timeout: const Duration(seconds: 15)); +final poolThumbnailFaceGenerations = + Pool(100, timeout: const Duration(seconds: 15)); +Future?> getFaceCrops( + EnteFile file, + Map faceBoxeMap, { + bool useFullFile = true, +}) async { + late String? 
imagePath; + if (useFullFile && file.fileType != FileType.video) { + final File? ioFile = await getFile(file); + if (ioFile == null) { + return null; + } + imagePath = ioFile.path; + } else { + final thumbnail = await getThumbnailForUploadedFile(file); + if (thumbnail == null) { + return null; + } + imagePath = thumbnail.path; + } + final List faceIds = []; + final List faceBoxes = []; + for (final e in faceBoxeMap.entries) { + faceIds.add(e.key); + faceBoxes.add(e.value); + } + final List faceCrop = + await ImageMlIsolate.instance.generateFaceThumbnailsForImageUsingCanvas( + // await generateJpgFaceThumbnails( + imagePath, + faceBoxes, + ); + final Map result = {}; + for (int i = 0; i < faceIds.length; i++) { + result[faceIds[i]] = faceCrop[i]; + } + return result; +} diff --git a/mobile/lib/utils/face/face_util.dart b/mobile/lib/utils/face/face_util.dart new file mode 100644 index 0000000000..56dc8f3bf0 --- /dev/null +++ b/mobile/lib/utils/face/face_util.dart @@ -0,0 +1,175 @@ +import "dart:math"; +import "dart:typed_data"; + +import "package:computer/computer.dart"; +import "package:flutter_image_compress/flutter_image_compress.dart"; +import "package:image/image.dart" as img; +import "package:logging/logging.dart"; +import "package:photos/face/model/box.dart"; + +/// Bounding box of a face. +/// +/// [xMin] and [yMin] are the coordinates of the top left corner of the box, and +/// [width] and [height] are the width and height of the box. +/// +/// One unit is equal to one pixel in the original image. +class FaceBoxImage { + final int xMin; + final int yMin; + final int width; + final int height; + + FaceBoxImage({ + required this.xMin, + required this.yMin, + required this.width, + required this.height, + }); +} + +final _logger = Logger("FaceUtil"); +final _computer = Computer.shared(); +const _faceImageBufferFactor = 0.2; + +///Convert img.Image to ui.Image and use RawImage to display. 
+Future> generateImgFaceThumbnails( + String imagePath, + List faceBoxes, +) async { + final faceThumbnails = []; + + final image = await decodeToImgImage(imagePath); + + for (FaceBox faceBox in faceBoxes) { + final croppedImage = cropFaceBoxFromImage(image, faceBox); + faceThumbnails.add(croppedImage); + } + + return faceThumbnails; +} + +Future> generateJpgFaceThumbnails( + String imagePath, + List faceBoxes, +) async { + final image = await decodeToImgImage(imagePath); + final croppedImages = []; + for (FaceBox faceBox in faceBoxes) { + final croppedImage = cropFaceBoxFromImage(image, faceBox); + croppedImages.add(croppedImage); + } + + return await _computer + .compute(_encodeImagesToJpg, param: {"images": croppedImages}); +} + +Future decodeToImgImage(String imagePath) async { + img.Image? image = + await _computer.compute(_decodeImageFile, param: {"filePath": imagePath}); + + if (image == null) { + _logger.info( + "Failed to decode image. Compressing to jpg and decoding", + ); + final compressedJPGImage = + await FlutterImageCompress.compressWithFile(imagePath); + image = await _computer.compute( + _decodeJpg, + param: {"image": compressedJPGImage}, + ); + + if (image == null) { + throw Exception("Failed to decode image"); + } else { + return image; + } + } else { + return image; + } +} + +/// Returns an Image from 'package:image/image.dart' +img.Image cropFaceBoxFromImage(img.Image image, FaceBox faceBox) { + final squareFaceBox = _getSquareFaceBoxImage(image, faceBox); + final squareFaceBoxWithBuffer = + _addBufferAroundFaceBox(squareFaceBox, _faceImageBufferFactor); + return img.copyCrop( + image, + x: squareFaceBoxWithBuffer.xMin, + y: squareFaceBoxWithBuffer.yMin, + width: squareFaceBoxWithBuffer.width, + height: squareFaceBoxWithBuffer.height, + antialias: false, + ); +} + +/// Returns a square face box image from the original image with +/// side length equal to the maximum of the width and height of the face box in +/// the OG image. 
+FaceBoxImage _getSquareFaceBoxImage(img.Image image, FaceBox faceBox) { + final width = (image.width * faceBox.width).round(); + final height = (image.height * faceBox.height).round(); + final side = max(width, height); + final xImage = (image.width * faceBox.x).round(); + final yImage = (image.height * faceBox.y).round(); + + if (height >= width) { + final xImageAdj = (xImage - (height - width) / 2).round(); + return FaceBoxImage( + xMin: xImageAdj, + yMin: yImage, + width: side, + height: side, + ); + } else { + final yImageAdj = (yImage - (width - height) / 2).round(); + return FaceBoxImage( + xMin: xImage, + yMin: yImageAdj, + width: side, + height: side, + ); + } +} + +///To add some buffer around the face box so that the face isn't cropped +///too close to the face. +FaceBoxImage _addBufferAroundFaceBox( + FaceBoxImage faceBoxImage, + double bufferFactor, +) { + final heightBuffer = faceBoxImage.height * bufferFactor; + final widthBuffer = faceBoxImage.width * bufferFactor; + final xMinWithBuffer = faceBoxImage.xMin - widthBuffer; + final yMinWithBuffer = faceBoxImage.yMin - heightBuffer; + final widthWithBuffer = faceBoxImage.width + 2 * widthBuffer; + final heightWithBuffer = faceBoxImage.height + 2 * heightBuffer; + //Do not add buffer if the top left edge of the image is out of bounds + //after adding the buffer. + if (xMinWithBuffer < 0 || yMinWithBuffer < 0) { + return faceBoxImage; + } + //Another similar case that can be handled is when the bottom right edge + //of the image is out of bounds after adding the buffer. But the + //the visual difference is not as significant as when the top left edge + //is out of bounds, so we are not handling that case. 
+ return FaceBoxImage( + xMin: xMinWithBuffer.round(), + yMin: yMinWithBuffer.round(), + width: widthWithBuffer.round(), + height: heightWithBuffer.round(), + ); +} + +List _encodeImagesToJpg(Map args) { + final images = args["images"] as List; + return images.map((img.Image image) => img.encodeJpg(image)).toList(); +} + +Future _decodeImageFile(Map args) async { + return await img.decodeImageFile(args["filePath"]); +} + +img.Image? _decodeJpg(Map args) { + return img.decodeJpg(args["image"])!; +} diff --git a/mobile/lib/utils/file_download_util.dart b/mobile/lib/utils/file_download_util.dart index a8847e3fdb..6db6ecbe09 100644 --- a/mobile/lib/utils/file_download_util.dart +++ b/mobile/lib/utils/file_download_util.dart @@ -47,9 +47,9 @@ Future downloadAndDecrypt( ), onReceiveProgress: (a, b) { if (kDebugMode && a >= 0 && b >= 0) { - _logger.fine( - "$logPrefix download progress: ${formatBytes(a)} / ${formatBytes(b)}", - ); + // _logger.fine( + // "$logPrefix download progress: ${formatBytes(a)} / ${formatBytes(b)}", + // ); } progressCallback?.call(a, b); }, @@ -89,7 +89,8 @@ Future downloadAndDecrypt( getFileKey(file), ); fakeProgress?.stop(); - _logger.info('$logPrefix decryption completed'); + _logger + .info('$logPrefix decryption completed (genID ${file.generatedID})'); } catch (e, s) { fakeProgress?.stop(); _logger.severe("Critical: $logPrefix failed to decrypt", e, s); diff --git a/mobile/lib/utils/file_uploader.dart b/mobile/lib/utils/file_uploader.dart index bcd5bb1219..9b1b37fb4d 100644 --- a/mobile/lib/utils/file_uploader.dart +++ b/mobile/lib/utils/file_uploader.dart @@ -2,10 +2,9 @@ import 'dart:async'; import 'dart:collection'; import 'dart:convert'; import 'dart:io'; -import 'dart:math'; +import 'dart:math' as math; import 'package:collection/collection.dart'; -import 'package:connectivity_plus/connectivity_plus.dart'; import 'package:dio/dio.dart'; import 'package:flutter/foundation.dart'; import 'package:logging/logging.dart'; @@ -28,6 +27,8 @@ 
import 'package:photos/models/file/file_type.dart'; import "package:photos/models/metadata/file_magic.dart"; import 'package:photos/models/upload_url.dart'; import "package:photos/models/user_details.dart"; +import "package:photos/module/upload/service/multipart.dart"; +import "package:photos/service_locator.dart"; import 'package:photos/services/collections_service.dart'; import "package:photos/services/file_magic_service.dart"; import 'package:photos/services/local_sync_service.dart'; @@ -37,7 +38,7 @@ import 'package:photos/utils/crypto_util.dart'; import 'package:photos/utils/file_download_util.dart'; import 'package:photos/utils/file_uploader_util.dart'; import "package:photos/utils/file_util.dart"; -import "package:photos/utils/multipart_upload_util.dart"; +import "package:photos/utils/network_util.dart"; import 'package:shared_preferences/shared_preferences.dart'; import 'package:tuple/tuple.dart'; import "package:uuid/uuid.dart"; @@ -51,7 +52,7 @@ class FileUploader { static const kBlockedUploadsPollFrequency = Duration(seconds: 2); static const kFileUploadTimeout = Duration(minutes: 50); static const k20MBStorageBuffer = 20 * 1024 * 1024; - static const kUploadTempPrefix = "upload_file_"; + static const _lastStaleFileCleanupTime = "lastStaleFileCleanupTime"; final _logger = Logger("FileUploader"); final _dio = NetworkClient.instance.getDio(); @@ -79,6 +80,7 @@ class FileUploader { // cases, we don't want to clear the stale upload files. See #removeStaleFiles // as it can result in clearing files which are still being force uploaded. 
bool _hasInitiatedForceUpload = false; + late MultiPartUploader _multiPartUploader; FileUploader._privateConstructor() { Bus.instance.on().listen((event) { @@ -114,6 +116,17 @@ class FileUploader { // ignore: unawaited_futures _pollBackgroundUploadStatus(); } + _multiPartUploader = MultiPartUploader( + _enteDio, + _dio, + UploadLocksDB.instance, + flagService, + ); + if (currentTime - (_prefs.getInt(_lastStaleFileCleanupTime) ?? 0) > + tempDirCleanUpInterval) { + await removeStaleFiles(); + await _prefs.setInt(_lastStaleFileCleanupTime, currentTime); + } Bus.instance.on().listen((event) { if (event.type == EventType.deletedFromDevice || event.type == EventType.deletedFromEverywhere) { @@ -309,13 +322,28 @@ class FileUploader { // ends with .encrypted. Fetch files in async manner final files = await Directory(dir).list().toList(); final filesToDelete = files.where((file) { - return file.path.contains(kUploadTempPrefix) && + return file.path.contains(uploadTempFilePrefix) && file.path.contains(".encrypted"); }); if (filesToDelete.isNotEmpty) { - _logger.info('cleaning up state files ${filesToDelete.length}'); + _logger.info('Deleting ${filesToDelete.length} stale upload files '); + final fileNameToLastAttempt = + await _uploadLocks.getFileNameToLastAttemptedAtMap(); for (final file in filesToDelete) { - await file.delete(); + final fileName = file.path.split('/').last; + final lastAttemptTime = fileNameToLastAttempt[fileName] != null + ? 
DateTime.fromMillisecondsSinceEpoch( + fileNameToLastAttempt[fileName]!, + ) + : null; + if (lastAttemptTime == null || + DateTime.now().difference(lastAttemptTime).inDays > 1) { + await file.delete(); + } else { + _logger.info( + 'Skipping file $fileName as it was attempted recently on $lastAttemptTime', + ); + } } } @@ -354,18 +382,7 @@ class FileUploader { if (isForceUpload) { return; } - final List connections = - await (Connectivity().checkConnectivity()); - bool canUploadUnderCurrentNetworkConditions = true; - if (!Configuration.instance.shouldBackupOverMobileData()) { - if (connections.any((element) => element == ConnectivityResult.mobile)) { - canUploadUnderCurrentNetworkConditions = false; - } else { - _logger.info( - "mobileBackupDisabled, backing up with connections: ${connections.map((e) => e.name).toString()}", - ); - } - } + final canUploadUnderCurrentNetworkConditions = await canUseHighBandwidth(); if (!canUploadUnderCurrentNetworkConditions) { throw WiFiUnavailableError(); @@ -405,7 +422,7 @@ class FileUploader { (fileOnDisk.updationTime ?? -1) != -1 && (fileOnDisk.collectionID ?? -1) == collectionID; if (wasAlreadyUploaded) { - debugPrint("File is already uploaded ${fileOnDisk.tag}"); + _logger.info("File is already uploaded ${fileOnDisk.tag}"); return fileOnDisk; } } @@ -425,6 +442,7 @@ class FileUploader { } final String lockKey = file.localID!; + bool _isMultipartUpload = false; try { await _uploadLocks.acquireLock( @@ -438,12 +456,27 @@ class FileUploader { } final tempDirectory = Configuration.instance.getTempDirectory(); - final String uniqueID = const Uuid().v4().toString(); - final encryptedFilePath = - '$tempDirectory$kUploadTempPrefix${uniqueID}_file.encrypted'; - final encryptedThumbnailPath = - '$tempDirectory$kUploadTempPrefix${uniqueID}_thumb.encrypted'; MediaUploadData? mediaUploadData; + mediaUploadData = await getUploadDataFromEnteFile(file); + + final String? 
existingMultipartEncFileName = + mediaUploadData.hashData?.fileHash != null + ? await _uploadLocks.getEncryptedFileName( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ) + : null; + bool multipartEntryExists = existingMultipartEncFileName != null; + + final String uniqueID = const Uuid().v4().toString(); + + final encryptedFilePath = multipartEntryExists + ? '$tempDirectory$existingMultipartEncFileName' + : '$tempDirectory$uploadTempFilePrefix${uniqueID}_file.encrypted'; + final encryptedThumbnailPath = + '$tempDirectory$uploadTempFilePrefix${uniqueID}_thumb.encrypted'; + var uploadCompleted = false; // This flag is used to decide whether to clear the iOS origin file cache // or not. @@ -457,13 +490,18 @@ class FileUploader { '${isUpdatedFile ? 're-upload' : 'upload'} of ${file.toString()}', ); - mediaUploadData = await getUploadDataFromEnteFile(file); - Uint8List? key; + EncryptionResult? multiPartFileEncResult = multipartEntryExists + ? await _multiPartUploader.getEncryptionResult( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ) + : null; if (isUpdatedFile) { key = getFileKey(file); } else { - key = null; + key = multiPartFileEncResult?.key; // check if the file is already uploaded and can be mapped to existing // uploaded file. If map is found, it also returns the corresponding // mapped or update file entry. 
@@ -482,16 +520,40 @@ class FileUploader { } } - if (File(encryptedFilePath).existsSync()) { + final encryptedFileExists = File(encryptedFilePath).existsSync(); + + // If the multipart entry exists but the encrypted file doesn't, it means + // that we'll have to reupload as the nonce is lost + if (multipartEntryExists) { + final bool updateWithDiffKey = isUpdatedFile && + multiPartFileEncResult != null && + !listEquals(key, multiPartFileEncResult.key); + if (!encryptedFileExists || updateWithDiffKey) { + if (updateWithDiffKey) { + _logger.severe('multiPart update resumed with differentKey'); + } else { + _logger.warning( + 'multiPart EncryptedFile missing, discard multipart entry', + ); + } + await _uploadLocks.deleteMultipartTrack(lockKey); + multipartEntryExists = false; + multiPartFileEncResult = null; + } + } else if (encryptedFileExists) { + // otherwise just delete the file for singlepart upload await File(encryptedFilePath).delete(); } await _checkIfWithinStorageLimit(mediaUploadData.sourceFile!); final encryptedFile = File(encryptedFilePath); - final EncryptionResult fileAttributes = await CryptoUtil.encryptFile( - mediaUploadData.sourceFile!.path, - encryptedFilePath, - key: key, - ); + + final EncryptionResult fileAttributes = multiPartFileEncResult ?? + await CryptoUtil.encryptFile( + mediaUploadData.sourceFile!.path, + encryptedFilePath, + key: key, + ); + late final Uint8List? thumbnailData; if (mediaUploadData.thumbnail == null && file.fileType == FileType.video) { @@ -512,31 +574,63 @@ class FileUploader { await encryptedThumbnailFile .writeAsBytes(encryptedThumbnailData.encryptedData!); - final thumbnailUploadURL = await _getUploadURL(); - final String thumbnailObjectKey = - await _putFile(thumbnailUploadURL, encryptedThumbnailFile); - - // Calculate the number of parts for the file. Multiple part upload - // is only enabled for internal users and debug builds till it's battle tested. - final count = kDebugMode - ? 
await calculatePartCount( - await encryptedFile.length(), - ) - : 1; + // Calculate the number of parts for the file. + final count = await _multiPartUploader.calculatePartCount( + await encryptedFile.length(), + ); late String fileObjectKey; + late String thumbnailObjectKey; if (count <= 1) { + final thumbnailUploadURL = await _getUploadURL(); + thumbnailObjectKey = + await _putFile(thumbnailUploadURL, encryptedThumbnailFile); final fileUploadURL = await _getUploadURL(); fileObjectKey = await _putFile(fileUploadURL, encryptedFile); } else { - final fileUploadURLs = await getMultipartUploadURLs(count); - fileObjectKey = await putMultipartFile(fileUploadURLs, encryptedFile); + _isMultipartUpload = true; + _logger.finest( + "Init multipartUpload $multipartEntryExists, isUpdate $isUpdatedFile", + ); + if (multipartEntryExists) { + fileObjectKey = await _multiPartUploader.putExistingMultipartFile( + encryptedFile, + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ); + } else { + final fileUploadURLs = + await _multiPartUploader.getMultipartUploadURLs(count); + final encFileName = encryptedFile.path.split('/').last; + await _multiPartUploader.createTableEntry( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + fileUploadURLs, + encFileName, + await encryptedFile.length(), + fileAttributes.key!, + fileAttributes.header!, + ); + fileObjectKey = await _multiPartUploader.putMultipartFile( + fileUploadURLs, + encryptedFile, + ); + } + // in case of multipart, upload the thumbnail towards the end to avoid + // re-uploading the thumbnail in case of failure. 
+ // In regular upload, always upload the thumbnail first to keep existing behaviour + // + final thumbnailUploadURL = await _getUploadURL(); + thumbnailObjectKey = + await _putFile(thumbnailUploadURL, encryptedThumbnailFile); } final metadata = await file.getMetadataForUpload(mediaUploadData); final encryptedMetadataResult = await CryptoUtil.encryptChaCha( - utf8.encode(jsonEncode(metadata)) as Uint8List, + utf8.encode(jsonEncode(metadata)), fileAttributes.key!, ); final fileDecryptionHeader = @@ -618,6 +712,8 @@ class FileUploader { } await FilesDB.instance.update(remoteFile); } + await UploadLocksDB.instance.deleteMultipartTrack(lockKey); + if (!_isBackground) { Bus.instance.fire( LocalPhotosUpdatedEvent( @@ -659,6 +755,7 @@ class FileUploader { encryptedFilePath, encryptedThumbnailPath, lockKey: lockKey, + isMultiPartUpload: _isMultipartUpload, ); } } @@ -803,6 +900,7 @@ class FileUploader { String encryptedFilePath, String encryptedThumbnailPath, { required String lockKey, + bool isMultiPartUpload = false, }) async { if (mediaUploadData != null && mediaUploadData.sourceFile != null) { // delete the file from app's internal cache if it was copied to app @@ -816,7 +914,14 @@ class FileUploader { } } if (File(encryptedFilePath).existsSync()) { - await File(encryptedFilePath).delete(); + if (isMultiPartUpload && !uploadCompleted) { + _logger.fine( + "skip delete for multipart encrypted file $encryptedFilePath", + ); + } else { + _logger.fine("deleting encrypted file $encryptedFilePath"); + await File(encryptedFilePath).delete(); + } } if (File(encryptedThumbnailPath).existsSync()) { await File(encryptedThumbnailPath).delete(); @@ -1039,7 +1144,7 @@ class FileUploader { if (_uploadURLs.isEmpty) { // the queue is empty, fetch at least for one file to handle force uploads // that are not in the queue. 
This is to also avoid - await fetchUploadURLs(max(_queue.length, 1)); + await fetchUploadURLs(math.max(_queue.length, 1)); } try { return _uploadURLs.removeFirst(); @@ -1061,7 +1166,7 @@ class FileUploader { final response = await _enteDio.get( "/files/upload-urls", queryParameters: { - "count": min(42, fileCount * 2), // m4gic number + "count": math.min(42, fileCount * 2), // m4gic number }, ); final urls = (response.data["urls"] as List) diff --git a/mobile/lib/utils/file_util.dart b/mobile/lib/utils/file_util.dart index 5c9dcede19..35240a3cc6 100644 --- a/mobile/lib/utils/file_util.dart +++ b/mobile/lib/utils/file_util.dart @@ -37,25 +37,30 @@ Future getFile( bool isOrigin = false, } // only relevant for live photos ) async { - if (file.isRemoteFile) { - return getFileFromServer(file, liveVideo: liveVideo); - } else { - final String key = file.tag + liveVideo.toString() + isOrigin.toString(); - final cachedFile = FileLruCache.get(key); - if (cachedFile == null) { - final diskFile = await _getLocalDiskFile( - file, - liveVideo: liveVideo, - isOrigin: isOrigin, - ); - // do not cache origin file for IOS as they are immediately deleted - // after usage - if (!(isOrigin && Platform.isIOS) && diskFile != null) { - FileLruCache.put(key, diskFile); + try { + if (file.isRemoteFile) { + return getFileFromServer(file, liveVideo: liveVideo); + } else { + final String key = file.tag + liveVideo.toString() + isOrigin.toString(); + final cachedFile = FileLruCache.get(key); + if (cachedFile == null) { + final diskFile = await _getLocalDiskFile( + file, + liveVideo: liveVideo, + isOrigin: isOrigin, + ); + // do not cache origin file for IOS as they are immediately deleted + // after usage + if (!(isOrigin && Platform.isIOS) && diskFile != null) { + FileLruCache.put(key, diskFile); + } + return diskFile; } - return diskFile; + return cachedFile; } - return cachedFile; + } catch (e, s) { + _logger.warning("Failed to get file", e, s); + return null; } } @@ -278,7 +283,9 @@ 
Future<_LivePhoto?> _downloadLivePhoto( if (imageFileCache != null && videoFileCache != null) { return _LivePhoto(imageFileCache, videoFileCache); } else { - debugPrint("Warning: Either image or video is missing from remoteLive"); + debugPrint( + "Warning: ${file.tag} either image ${imageFileCache == null} or video ${videoFileCache == null} is missing from remoteLive", + ); return null; } }).catchError((e) { diff --git a/mobile/lib/utils/image_ml_isolate.dart b/mobile/lib/utils/image_ml_isolate.dart new file mode 100644 index 0000000000..66de0c2558 --- /dev/null +++ b/mobile/lib/utils/image_ml_isolate.dart @@ -0,0 +1,562 @@ +import 'dart:async'; +import "dart:io" show File; +import 'dart:isolate'; +import 'dart:typed_data' show Float32List, Uint8List; +import 'dart:ui'; + +import "package:dart_ui_isolate/dart_ui_isolate.dart"; +import "package:logging/logging.dart"; +import "package:photos/face/model/box.dart"; +import "package:photos/face/model/dimension.dart"; +import 'package:photos/models/ml/ml_typedefs.dart'; +import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; +import "package:photos/utils/image_ml_util.dart"; +import "package:synchronized/synchronized.dart"; + +enum ImageOperation { + @Deprecated("No longer using BlazeFace`") + preprocessBlazeFace, + preprocessYoloOnnx, + preprocessFaceAlign, + preprocessMobileFaceNet, + preprocessMobileFaceNetOnnx, + generateFaceThumbnails, + generateFaceThumbnailsUsingCanvas, + cropAndPadFace, +} + +/// The isolate below uses functions from ["package:photos/utils/image_ml_util.dart"] to preprocess images for ML models. + +/// This class is responsible for all image operations needed for ML models. It runs in a separate isolate to avoid jank. +/// +/// It can be accessed through the singleton `ImageConversionIsolate.instance`. e.g. 
`ImageConversionIsolate.instance.convert(imageData)` +/// +/// IMPORTANT: Make sure to dispose of the isolate when you're done with it with `dispose()`, e.g. `ImageConversionIsolate.instance.dispose();` +class ImageMlIsolate { + // static const String debugName = 'ImageMlIsolate'; + + final _logger = Logger('ImageMlIsolate'); + + Timer? _inactivityTimer; + final Duration _inactivityDuration = const Duration(seconds: 60); + int _activeTasks = 0; + + final _initLock = Lock(); + final _functionLock = Lock(); + + late DartUiIsolate _isolate; + late ReceivePort _receivePort = ReceivePort(); + late SendPort _mainSendPort; + + bool isSpawned = false; + + // singleton pattern + ImageMlIsolate._privateConstructor(); + + /// Use this instance to access the ImageConversionIsolate service. Make sure to call `init()` before using it. + /// e.g. `await ImageConversionIsolate.instance.init();` + /// And kill the isolate when you're done with it with `dispose()`, e.g. `ImageConversionIsolate.instance.dispose();` + /// + /// Then you can use `convert()` to get the image, so `ImageConversionIsolate.instance.convert(imageData, imagePath: imagePath)` + static final ImageMlIsolate instance = ImageMlIsolate._privateConstructor(); + factory ImageMlIsolate() => instance; + + Future init() async { + return _initLock.synchronized(() async { + if (isSpawned) return; + + _receivePort = ReceivePort(); + + try { + _isolate = await DartUiIsolate.spawn( + _isolateMain, + _receivePort.sendPort, + ); + _mainSendPort = await _receivePort.first as SendPort; + isSpawned = true; + + _resetInactivityTimer(); + } catch (e) { + _logger.severe('Could not spawn isolate', e); + isSpawned = false; + } + }); + } + + Future ensureSpawned() async { + if (!isSpawned) { + await init(); + } + } + + @pragma('vm:entry-point') + static void _isolateMain(SendPort mainSendPort) async { + final receivePort = ReceivePort(); + mainSendPort.send(receivePort.sendPort); + + receivePort.listen((message) async { + final 
functionIndex = message[0] as int; + final function = ImageOperation.values[functionIndex]; + final args = message[1] as Map; + final sendPort = message[2] as SendPort; + + try { + switch (function) { + case ImageOperation.preprocessBlazeFace: + final imageData = args['imageData'] as Uint8List; + final normalize = args['normalize'] as bool; + final int normalization = normalize ? 2 : -1; + final requiredWidth = args['requiredWidth'] as int; + final requiredHeight = args['requiredHeight'] as int; + final qualityIndex = args['quality'] as int; + final maintainAspectRatio = args['maintainAspectRatio'] as bool; + final quality = FilterQuality.values[qualityIndex]; + final (result, originalSize, newSize) = + await preprocessImageToMatrix( + imageData, + normalization: normalization, + requiredWidth: requiredWidth, + requiredHeight: requiredHeight, + quality: quality, + maintainAspectRatio: maintainAspectRatio, + ); + sendPort.send({ + 'inputs': result, + 'originalWidth': originalSize.width, + 'originalHeight': originalSize.height, + 'newWidth': newSize.width, + 'newHeight': newSize.height, + }); + case ImageOperation.preprocessYoloOnnx: + final imageData = args['imageData'] as Uint8List; + final normalize = args['normalize'] as bool; + final int normalization = normalize ? 
1 : -1; + final requiredWidth = args['requiredWidth'] as int; + final requiredHeight = args['requiredHeight'] as int; + final maintainAspectRatio = args['maintainAspectRatio'] as bool; + final Image image = await decodeImageFromData(imageData); + final imageByteData = await getByteDataFromImage(image); + final (result, originalSize, newSize) = + await preprocessImageToFloat32ChannelsFirst( + image, + imageByteData, + normalization: normalization, + requiredWidth: requiredWidth, + requiredHeight: requiredHeight, + maintainAspectRatio: maintainAspectRatio, + ); + sendPort.send({ + 'inputs': result, + 'originalWidth': originalSize.width, + 'originalHeight': originalSize.height, + 'newWidth': newSize.width, + 'newHeight': newSize.height, + }); + case ImageOperation.preprocessFaceAlign: + final imageData = args['imageData'] as Uint8List; + final faceLandmarks = + args['faceLandmarks'] as List>>; + final List result = await preprocessFaceAlignToUint8List( + imageData, + faceLandmarks, + ); + sendPort.send(List.from(result)); + case ImageOperation.preprocessMobileFaceNet: + final imageData = args['imageData'] as Uint8List; + final facesJson = args['facesJson'] as List>; + final ( + inputs, + alignmentResults, + isBlurs, + blurValues, + originalSize + ) = await preprocessToMobileFaceNetInput( + imageData, + facesJson, + ); + final List> alignmentResultsJson = + alignmentResults.map((result) => result.toJson()).toList(); + sendPort.send({ + 'inputs': inputs, + 'alignmentResultsJson': alignmentResultsJson, + 'isBlurs': isBlurs, + 'blurValues': blurValues, + 'originalWidth': originalSize.width, + 'originalHeight': originalSize.height, + }); + case ImageOperation.preprocessMobileFaceNetOnnx: + final imagePath = args['imagePath'] as String; + final facesJson = args['facesJson'] as List>; + final List relativeFaces = facesJson + .map((face) => FaceDetectionRelative.fromJson(face)) + .toList(); + final imageData = await File(imagePath).readAsBytes(); + final Image image = await 
decodeImageFromData(imageData); + final imageByteData = await getByteDataFromImage(image); + final ( + inputs, + alignmentResults, + isBlurs, + blurValues, + originalSize + ) = await preprocessToMobileFaceNetFloat32List( + image, + imageByteData, + relativeFaces, + ); + final List> alignmentResultsJson = + alignmentResults.map((result) => result.toJson()).toList(); + sendPort.send({ + 'inputs': inputs, + 'alignmentResultsJson': alignmentResultsJson, + 'isBlurs': isBlurs, + 'blurValues': blurValues, + 'originalWidth': originalSize.width, + 'originalHeight': originalSize.height, + }); + case ImageOperation.generateFaceThumbnails: + final imagePath = args['imagePath'] as String; + final Uint8List imageData = await File(imagePath).readAsBytes(); + final faceBoxesJson = + args['faceBoxesList'] as List>; + final List faceBoxes = + faceBoxesJson.map((json) => FaceBox.fromJson(json)).toList(); + final List results = await generateFaceThumbnails( + imageData, + faceBoxes, + ); + sendPort.send(List.from(results)); + case ImageOperation.generateFaceThumbnailsUsingCanvas: + final imagePath = args['imagePath'] as String; + final Uint8List imageData = await File(imagePath).readAsBytes(); + final faceBoxesJson = + args['faceBoxesList'] as List>; + final List faceBoxes = + faceBoxesJson.map((json) => FaceBox.fromJson(json)).toList(); + final List results = + await generateFaceThumbnailsUsingCanvas( + imageData, + faceBoxes, + ); + sendPort.send(List.from(results)); + case ImageOperation.cropAndPadFace: + final imageData = args['imageData'] as Uint8List; + final faceBox = args['faceBox'] as List; + final Uint8List result = + await cropAndPadFaceData(imageData, faceBox); + sendPort.send([result]); + } + } catch (e, stackTrace) { + sendPort + .send({'error': e.toString(), 'stackTrace': stackTrace.toString()}); + } + }); + } + + /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result. 
+ Future _runInIsolate( + (ImageOperation, Map) message, + ) async { + await ensureSpawned(); + return _functionLock.synchronized(() async { + _resetInactivityTimer(); + final completer = Completer(); + final answerPort = ReceivePort(); + + _activeTasks++; + _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]); + + answerPort.listen((receivedMessage) { + if (receivedMessage is Map && receivedMessage.containsKey('error')) { + // Handle the error + final errorMessage = receivedMessage['error']; + final errorStackTrace = receivedMessage['stackTrace']; + final exception = Exception(errorMessage); + final stackTrace = StackTrace.fromString(errorStackTrace); + completer.completeError(exception, stackTrace); + } else { + completer.complete(receivedMessage); + } + }); + _activeTasks--; + + return completer.future; + }); + } + + /// Resets a timer that kills the isolate after a certain amount of inactivity. + /// + /// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`) + void _resetInactivityTimer() { + _inactivityTimer?.cancel(); + _inactivityTimer = Timer(_inactivityDuration, () { + if (_activeTasks > 0) { + _logger.info('Tasks are still running. Delaying isolate disposal.'); + // Optionally, reschedule the timer to check again later. + _resetInactivityTimer(); + } else { + _logger.info( + 'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.', + ); + dispose(); + } + }); + } + + /// Disposes the isolate worker. + void dispose() { + if (!isSpawned) return; + + isSpawned = false; + _isolate.kill(); + _receivePort.close(); + _inactivityTimer?.cancel(); + } + + /// Preprocesses [imageData] for standard ML models inside a separate isolate. + /// + /// Returns a [Num3DInputMatrix] image usable for ML inference with BlazeFace. + /// + /// Uses [preprocessImageToMatrix] inside the isolate. 
+ @Deprecated("No longer using BlazeFace") + Future<(Num3DInputMatrix, Size, Size)> preprocessImageBlazeFace( + Uint8List imageData, { + required bool normalize, + required int requiredWidth, + required int requiredHeight, + FilterQuality quality = FilterQuality.medium, + bool maintainAspectRatio = true, + }) async { + final Map results = await _runInIsolate( + ( + ImageOperation.preprocessBlazeFace, + { + 'imageData': imageData, + 'normalize': normalize, + 'requiredWidth': requiredWidth, + 'requiredHeight': requiredHeight, + 'quality': quality.index, + 'maintainAspectRatio': maintainAspectRatio, + }, + ), + ); + final inputs = results['inputs'] as Num3DInputMatrix; + final originalSize = Size( + results['originalWidth'] as double, + results['originalHeight'] as double, + ); + final newSize = Size( + results['newWidth'] as double, + results['newHeight'] as double, + ); + return (inputs, originalSize, newSize); + } + + /// Uses [preprocessImageToFloat32ChannelsFirst] inside the isolate. + @Deprecated( + "Old method, not needed since we now run the whole ML pipeline for faces in a single isolate", + ) + Future<(Float32List, Dimensions, Dimensions)> preprocessImageYoloOnnx( + Uint8List imageData, { + required bool normalize, + required int requiredWidth, + required int requiredHeight, + FilterQuality quality = FilterQuality.medium, + bool maintainAspectRatio = true, + }) async { + final Map results = await _runInIsolate( + ( + ImageOperation.preprocessYoloOnnx, + { + 'imageData': imageData, + 'normalize': normalize, + 'requiredWidth': requiredWidth, + 'requiredHeight': requiredHeight, + 'quality': quality.index, + 'maintainAspectRatio': maintainAspectRatio, + }, + ), + ); + final inputs = results['inputs'] as Float32List; + final originalSize = Dimensions( + width: results['originalWidth'] as int, + height: results['originalHeight'] as int, + ); + final newSize = Dimensions( + width: results['newWidth'] as int, + height: results['newHeight'] as int, + ); + return 
(inputs, originalSize, newSize); + } + + /// Preprocesses [imageData] for face alignment inside a separate isolate, to display the aligned faces. Mostly used for debugging. + /// + /// Returns a list of [Uint8List] images, one for each face, in png format. + /// + /// Uses [preprocessFaceAlignToUint8List] inside the isolate. + /// + /// WARNING: For preprocessing for MobileFaceNet, use [preprocessMobileFaceNet] instead! + @Deprecated( + "Old method, not needed since we now run the whole ML pipeline for faces in a single isolate", + ) + Future> preprocessFaceAlign( + Uint8List imageData, + List faces, + ) async { + final faceLandmarks = faces.map((face) => face.allKeypoints).toList(); + return await _runInIsolate( + ( + ImageOperation.preprocessFaceAlign, + { + 'imageData': imageData, + 'faceLandmarks': faceLandmarks, + }, + ), + ).then((value) => value.cast()); + } + + /// Preprocesses [imageData] for MobileFaceNet input inside a separate isolate. + /// + /// Returns a list of [Num3DInputMatrix] images, one for each face. + /// + /// Uses [preprocessToMobileFaceNetInput] inside the isolate. 
+ @Deprecated("Old method used in TensorFlow Lite") + Future< + ( + List, + List, + List, + List, + Size, + )> preprocessMobileFaceNet( + Uint8List imageData, + List faces, + ) async { + final List> facesJson = + faces.map((face) => face.toJson()).toList(); + final Map results = await _runInIsolate( + ( + ImageOperation.preprocessMobileFaceNet, + { + 'imageData': imageData, + 'facesJson': facesJson, + }, + ), + ); + final inputs = results['inputs'] as List; + final alignmentResultsJson = + results['alignmentResultsJson'] as List>; + final alignmentResults = alignmentResultsJson.map((json) { + return AlignmentResult.fromJson(json); + }).toList(); + final isBlurs = results['isBlurs'] as List; + final blurValues = results['blurValues'] as List; + final originalSize = Size( + results['originalWidth'] as double, + results['originalHeight'] as double, + ); + return (inputs, alignmentResults, isBlurs, blurValues, originalSize); + } + + /// Uses [preprocessToMobileFaceNetFloat32List] inside the isolate. 
+ @Deprecated( + "Old method, not needed since we now run the whole ML pipeline for faces in a single isolate", + ) + Future<(Float32List, List, List, List, Size)> + preprocessMobileFaceNetOnnx( + String imagePath, + List faces, + ) async { + final List> facesJson = + faces.map((face) => face.toJson()).toList(); + final Map results = await _runInIsolate( + ( + ImageOperation.preprocessMobileFaceNetOnnx, + { + 'imagePath': imagePath, + 'facesJson': facesJson, + }, + ), + ); + final inputs = results['inputs'] as Float32List; + final alignmentResultsJson = + results['alignmentResultsJson'] as List>; + final alignmentResults = alignmentResultsJson.map((json) { + return AlignmentResult.fromJson(json); + }).toList(); + final isBlurs = results['isBlurs'] as List; + final blurValues = results['blurValues'] as List; + final originalSize = Size( + results['originalWidth'] as double, + results['originalHeight'] as double, + ); + + return (inputs, alignmentResults, isBlurs, blurValues, originalSize); + } + + /// Generates face thumbnails for all [faceBoxes] in [imageData]. + /// + /// Uses [generateFaceThumbnails] inside the isolate. + Future> generateFaceThumbnailsForImage( + String imagePath, + List faceBoxes, + ) async { + final List> faceBoxesJson = + faceBoxes.map((box) => box.toJson()).toList(); + return await _runInIsolate( + ( + ImageOperation.generateFaceThumbnails, + { + 'imagePath': imagePath, + 'faceBoxesList': faceBoxesJson, + }, + ), + ).then((value) => value.cast()); + } + + /// Generates face thumbnails for all [faceBoxes] in [imageData]. + /// + /// Uses [generateFaceThumbnailsUsingCanvas] inside the isolate. 
+ Future> generateFaceThumbnailsForImageUsingCanvas( + String imagePath, + List faceBoxes, + ) async { + final List> faceBoxesJson = + faceBoxes.map((box) => box.toJson()).toList(); + return await _runInIsolate( + ( + ImageOperation.generateFaceThumbnailsUsingCanvas, + { + 'imagePath': imagePath, + 'faceBoxesList': faceBoxesJson, + }, + ), + ).then((value) => value.cast()); + } + + @Deprecated('For second pass of BlazeFace, no longer used') + + /// Generates cropped and padded image data from [imageData] and a [faceBox]. + /// + /// The steps are: + /// 1. Crop the image to the face bounding box + /// 2. Resize this cropped image to a square that is half the BlazeFace input size + /// 3. Pad the image to the BlazeFace input size + /// + /// Uses [cropAndPadFaceData] inside the isolate. + Future cropAndPadFace( + Uint8List imageData, + List faceBox, + ) async { + return await _runInIsolate( + ( + ImageOperation.cropAndPadFace, + { + 'imageData': imageData, + 'faceBox': List.from(faceBox), + }, + ), + ).then((value) => value[0] as Uint8List); + } +} diff --git a/mobile/lib/utils/image_ml_util.dart b/mobile/lib/utils/image_ml_util.dart new file mode 100644 index 0000000000..916b9099cd --- /dev/null +++ b/mobile/lib/utils/image_ml_util.dart @@ -0,0 +1,1584 @@ +import "dart:async"; +import "dart:developer" show log; +import "dart:io" show File; +import "dart:math" show min, max; +import "dart:typed_data" show Float32List, Uint8List, ByteData; +import "dart:ui"; + +// import 'package:flutter/material.dart' +// show +// ImageProvider, +// ImageStream, +// ImageStreamListener, +// ImageInfo, +// MemoryImage, +// ImageConfiguration; +// import 'package:flutter/material.dart' as material show Image; +import 'package:flutter/painting.dart' as paint show decodeImageFromList; +import 'package:ml_linalg/linalg.dart'; +import "package:photos/face/model/box.dart"; +import "package:photos/face/model/dimension.dart"; +import 'package:photos/models/ml/ml_typedefs.dart'; +import 
'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart'; +import 'package:photos/services/machine_learning/face_ml/face_alignment/similarity_transform.dart'; +import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart'; +import 'package:photos/services/machine_learning/face_ml/face_filtering/blur_detection_service.dart'; + +/// All of the functions in this file are helper functions for the [ImageMlIsolate] isolate. +/// Don't use them outside of the isolate, unless you are okay with UI jank!!!! + +/// Reads the pixel color at the specified coordinates. +Color readPixelColor( + Image image, + ByteData byteData, + int x, + int y, +) { + if (x < 0 || x >= image.width || y < 0 || y >= image.height) { + // throw ArgumentError('Invalid pixel coordinates.'); + if (y != -1) { + log('[WARNING] `readPixelColor`: Invalid pixel coordinates, out of bounds'); + } + return const Color.fromARGB(0, 0, 0, 0); + } + assert(byteData.lengthInBytes == 4 * image.width * image.height); + + final int byteOffset = 4 * (image.width * y + x); + return Color(_rgbaToArgb(byteData.getUint32(byteOffset))); +} + +void setPixelColor( + Size imageSize, + ByteData byteData, + int x, + int y, + Color color, +) { + if (x < 0 || x >= imageSize.width || y < 0 || y >= imageSize.height) { + log('[WARNING] `setPixelColor`: Invalid pixel coordinates, out of bounds'); + return; + } + assert(byteData.lengthInBytes == 4 * imageSize.width * imageSize.height); + + final int byteOffset = 4 * (imageSize.width.toInt() * y + x); + byteData.setUint32(byteOffset, _argbToRgba(color.value)); +} + +int _rgbaToArgb(int rgbaColor) { + final int a = rgbaColor & 0xFF; + final int rgb = rgbaColor >> 8; + return rgb + (a << 24); +} + +int _argbToRgba(int argbColor) { + final int r = (argbColor >> 16) & 0xFF; + final int g = (argbColor >> 8) & 0xFF; + final int b = argbColor & 0xFF; + final int a = (argbColor >> 24) & 0xFF; + return (r << 24) + (g << 16) + (b << 8) + a; 
+} + +@Deprecated('Used in TensorFlow Lite only, no longer needed') + +/// Creates an empty matrix with the specified shape. +/// +/// The `shape` argument must be a list of length 2 or 3, where the first +/// element represents the number of rows, the second element represents +/// the number of columns, and the optional third element represents the +/// number of channels. The function returns a matrix filled with zeros. +/// +/// Throws an [ArgumentError] if the `shape` argument is invalid. +List createEmptyOutputMatrix(List shape, [double fillValue = 0.0]) { + if (shape.length > 5) { + throw ArgumentError('Shape must have length 1-5'); + } + + if (shape.length == 1) { + return List.filled(shape[0], fillValue); + } else if (shape.length == 2) { + return List.generate(shape[0], (_) => List.filled(shape[1], fillValue)); + } else if (shape.length == 3) { + return List.generate( + shape[0], + (_) => List.generate(shape[1], (_) => List.filled(shape[2], fillValue)), + ); + } else if (shape.length == 4) { + return List.generate( + shape[0], + (_) => List.generate( + shape[1], + (_) => List.generate(shape[2], (_) => List.filled(shape[3], fillValue)), + ), + ); + } else if (shape.length == 5) { + return List.generate( + shape[0], + (_) => List.generate( + shape[1], + (_) => List.generate( + shape[2], + (_) => + List.generate(shape[3], (_) => List.filled(shape[4], fillValue)), + ), + ), + ); + } else { + throw ArgumentError('Shape must have length 2 or 3'); + } +} + +/// Creates an input matrix from the specified image, which can be used for inference +/// +/// Returns a matrix with the shape [image.height, image.width, 3], where the third dimension represents the RGB channels, as [Num3DInputMatrix]. +/// In fact, this is either a [Double3DInputMatrix] or a [Int3DInputMatrix] depending on the `normalize` argument. +/// If `normalize` is true, the pixel values are normalized doubles in range [-1, 1]. Otherwise, they are integers in range [0, 255]. 
+/// +/// The `image` argument must be an ui.[Image] object. The function returns a matrix +/// with the shape `[image.height, image.width, 3]`, where the third dimension +/// represents the RGB channels. +/// +/// bool `normalize`: Normalize the image to range [-1, 1] +Num3DInputMatrix createInputMatrixFromImage( + Image image, + ByteData byteDataRgba, { + double Function(num) normFunction = normalizePixelRange2, +}) { + return List.generate( + image.height, + (y) => List.generate( + image.width, + (x) { + final pixel = readPixelColor(image, byteDataRgba, x, y); + return [ + normFunction(pixel.red), + normFunction(pixel.green), + normFunction(pixel.blue), + ]; + }, + ), + ); +} + +void addInputImageToFloat32List( + Image image, + ByteData byteDataRgba, + Float32List float32List, + int startIndex, { + double Function(num) normFunction = normalizePixelRange2, +}) { + int pixelIndex = startIndex; + for (var h = 0; h < image.height; h++) { + for (var w = 0; w < image.width; w++) { + final pixel = readPixelColor(image, byteDataRgba, w, h); + float32List[pixelIndex] = normFunction(pixel.red); + float32List[pixelIndex + 1] = normFunction(pixel.green); + float32List[pixelIndex + 2] = normFunction(pixel.blue); + pixelIndex += 3; + } + } + return; +} + +List> createGrayscaleIntMatrixFromImage( + Image image, + ByteData byteDataRgba, +) { + return List.generate( + image.height, + (y) => List.generate( + image.width, + (x) { + // 0.299 ∙ Red + 0.587 ∙ Green + 0.114 ∙ Blue + final pixel = readPixelColor(image, byteDataRgba, x, y); + return (0.299 * pixel.red + 0.587 * pixel.green + 0.114 * pixel.blue) + .round() + .clamp(0, 255); + }, + ), + ); +} + +List> createGrayscaleIntMatrixFromNormalized2List( + Float32List imageList, + int startIndex, { + int width = 112, + int height = 112, +}) { + return List.generate( + height, + (y) => List.generate( + width, + (x) { + // 0.299 ∙ Red + 0.587 ∙ Green + 0.114 ∙ Blue + final pixelIndex = startIndex + 3 * (y * width + x); + return 
(0.299 * unnormalizePixelRange2(imageList[pixelIndex]) + + 0.587 * unnormalizePixelRange2(imageList[pixelIndex + 1]) + + 0.114 * unnormalizePixelRange2(imageList[pixelIndex + 2])) + .round() + .clamp(0, 255); + // return unnormalizePixelRange2( + // (0.299 * imageList[pixelIndex] + + // 0.587 * imageList[pixelIndex + 1] + + // 0.114 * imageList[pixelIndex + 2]), + // ).round().clamp(0, 255); + }, + ), + ); +} + +Float32List createFloat32ListFromImageChannelsFirst( + Image image, + ByteData byteDataRgba, { + double Function(num) normFunction = normalizePixelRange2, +}) { + final convertedBytes = Float32List(3 * image.height * image.width); + final buffer = Float32List.view(convertedBytes.buffer); + + int pixelIndex = 0; + final int channelOffsetGreen = image.height * image.width; + final int channelOffsetBlue = 2 * image.height * image.width; + for (var h = 0; h < image.height; h++) { + for (var w = 0; w < image.width; w++) { + final pixel = readPixelColor(image, byteDataRgba, w, h); + buffer[pixelIndex] = normFunction(pixel.red); + buffer[pixelIndex + channelOffsetGreen] = normFunction(pixel.green); + buffer[pixelIndex + channelOffsetBlue] = normFunction(pixel.blue); + pixelIndex++; + } + } + return convertedBytes.buffer.asFloat32List(); +} + +/// Creates an input matrix from the specified image, which can be used for inference +/// +/// Returns a matrix with the shape `[3, image.height, image.width]`, where the first dimension represents the RGB channels, as [Num3DInputMatrix]. +/// In fact, this is either a [Double3DInputMatrix] or a [Int3DInputMatrix] depending on the `normalize` argument. +/// If `normalize` is true, the pixel values are normalized doubles in range [-1, 1]. Otherwise, they are integers in range [0, 255]. +/// +/// The `image` argument must be an ui.[Image] object. The function returns a matrix +/// with the shape `[3, image.height, image.width]`, where the first dimension +/// represents the RGB channels. 
+/// +/// bool `normalize`: Normalize the image to range [-1, 1] +Num3DInputMatrix createInputMatrixFromImageChannelsFirst( + Image image, + ByteData byteDataRgba, { + bool normalize = true, +}) { + // Create an empty 3D list. + final Num3DInputMatrix imageMatrix = List.generate( + 3, + (i) => List.generate( + image.height, + (j) => List.filled(image.width, 0), + ), + ); + + // Determine which function to use to get the pixel value. + final pixelValue = normalize ? normalizePixelRange2 : (num value) => value; + + for (int y = 0; y < image.height; y++) { + for (int x = 0; x < image.width; x++) { + // Get the pixel at (x, y). + final pixel = readPixelColor(image, byteDataRgba, x, y); + + // Assign the color channels to the respective lists. + imageMatrix[0][y][x] = pixelValue(pixel.red); + imageMatrix[1][y][x] = pixelValue(pixel.green); + imageMatrix[2][y][x] = pixelValue(pixel.blue); + } + } + return imageMatrix; +} + +/// Function normalizes the pixel value to be in range [-1, 1]. +/// +/// It assumes that the pixel value is originally in range [0, 255] +double normalizePixelRange2(num pixelValue) { + return (pixelValue / 127.5) - 1; +} + +/// Function unnormalizes the pixel value to be in range [0, 255]. +/// +/// It assumes that the pixel value is originally in range [-1, 1] +int unnormalizePixelRange2(double pixelValue) { + return ((pixelValue + 1) * 127.5).round().clamp(0, 255); +} + +/// Function normalizes the pixel value to be in range [0, 1]. +/// +/// It assumes that the pixel value is originally in range [0, 255] +double normalizePixelRange1(num pixelValue) { + return (pixelValue / 255); +} + +double normalizePixelNoRange(num pixelValue) { + return pixelValue.toDouble(); +} + +/// Decodes [Uint8List] image data to an ui.[Image] object. +Future decodeImageFromData(Uint8List imageData) async { + // Decoding using flutter paint. This is the fastest and easiest method. 
+ final Image image = await paint.decodeImageFromList(imageData); + return image; + + // // Similar decoding as above, but without using flutter paint. This is not faster than the above. + // final Codec codec = await instantiateImageCodecFromBuffer( + // await ImmutableBuffer.fromUint8List(imageData), + // ); + // final FrameInfo frameInfo = await codec.getNextFrame(); + // return frameInfo.image; + + // Decoding using the ImageProvider, same as `image_pixels` package. This is not faster than the above. + // final Completer completer = Completer(); + // final ImageProvider provider = MemoryImage(imageData); + // final ImageStream stream = provider.resolve(const ImageConfiguration()); + // final ImageStreamListener listener = + // ImageStreamListener((ImageInfo info, bool _) { + // completer.complete(info.image); + // }); + // stream.addListener(listener); + // final Image image = await completer.future; + // stream.removeListener(listener); + // return image; + + // // Decoding using the ImageProvider from material.Image. This is not faster than the above, and also the code below is not finished! + // final materialImage = material.Image.memory(imageData); + // final ImageProvider uiImage = await materialImage.image; +} + +/// Decodes [Uint8List] RGBA bytes to an ui.[Image] object. +Future decodeImageFromRgbaBytes( + Uint8List rgbaBytes, + int width, + int height, +) { + final Completer completer = Completer(); + decodeImageFromPixels( + rgbaBytes, + width, + height, + PixelFormat.rgba8888, + (Image image) { + completer.complete(image); + }, + ); + return completer.future; +} + +/// Returns the [ByteData] object of the image, in rawRgba format. +/// +/// Throws an exception if the image could not be converted to ByteData. +Future getByteDataFromImage( + Image image, { + ImageByteFormat format = ImageByteFormat.rawRgba, +}) async { + final ByteData? 
byteDataRgba = await image.toByteData(format: format); + if (byteDataRgba == null) { + log('[ImageMlUtils] Could not convert image to ByteData'); + throw Exception('Could not convert image to ByteData'); + } + return byteDataRgba; +} + +/// Encodes an [Image] object to a [Uint8List], by default in the png format. +/// +/// Note that the result can be used with `Image.memory()` only if the [format] is png. +Future encodeImageToUint8List( + Image image, { + ImageByteFormat format = ImageByteFormat.png, +}) async { + final ByteData byteDataPng = + await getByteDataFromImage(image, format: format); + final encodedImage = byteDataPng.buffer.asUint8List(); + + return encodedImage; +} + +/// Resizes the [image] to the specified [width] and [height]. +/// Returns the resized image and its size as a [Size] object. Note that this size excludes any empty pixels, hence it can be different than the actual image size if [maintainAspectRatio] is true. +/// +/// [quality] determines the interpolation quality. The default [FilterQuality.medium] works best for most cases, unless you're scaling by a factor of 5-10 or more +/// [maintainAspectRatio] determines whether to maintain the aspect ratio of the original image or not. 
Note that maintaining aspect ratio here does not change the size of the image, but instead often means empty pixels that have to be taken into account +Future<(Image, Size)> resizeImage( + Image image, + int width, + int height, { + FilterQuality quality = FilterQuality.medium, + bool maintainAspectRatio = false, +}) async { + if (image.width == width && image.height == height) { + return (image, Size(width.toDouble(), height.toDouble())); + } + final recorder = PictureRecorder(); + final canvas = Canvas( + recorder, + Rect.fromPoints( + const Offset(0, 0), + Offset(width.toDouble(), height.toDouble()), + ), + ); + // Pre-fill the canvas with RGB color (114, 114, 114) + canvas.drawRect( + Rect.fromPoints( + const Offset(0, 0), + Offset(width.toDouble(), height.toDouble()), + ), + Paint()..color = const Color.fromARGB(255, 114, 114, 114), + ); + + double scaleW = width / image.width; + double scaleH = height / image.height; + if (maintainAspectRatio) { + final scale = min(width / image.width, height / image.height); + scaleW = scale; + scaleH = scale; + } + final scaledWidth = (image.width * scaleW).round(); + final scaledHeight = (image.height * scaleH).round(); + + canvas.drawImageRect( + image, + Rect.fromPoints( + const Offset(0, 0), + Offset(image.width.toDouble(), image.height.toDouble()), + ), + Rect.fromPoints( + const Offset(0, 0), + Offset(scaledWidth.toDouble(), scaledHeight.toDouble()), + ), + Paint()..filterQuality = quality, + ); + + final picture = recorder.endRecording(); + final resizedImage = await picture.toImage(width, height); + return (resizedImage, Size(scaledWidth.toDouble(), scaledHeight.toDouble())); +} + +Future resizeAndCenterCropImage( + Image image, + int size, { + FilterQuality quality = FilterQuality.medium, +}) async { + if (image.width == size && image.height == size) { + return image; + } + final recorder = PictureRecorder(); + final canvas = Canvas( + recorder, + Rect.fromPoints( + const Offset(0, 0), + Offset(size.toDouble(), 
size.toDouble()), + ), + ); + + final scale = max(size / image.width, size / image.height); + final scaledWidth = (image.width * scale).round(); + final scaledHeight = (image.height * scale).round(); + + canvas.drawImageRect( + image, + Rect.fromPoints( + const Offset(0, 0), + Offset(image.width.toDouble(), image.height.toDouble()), + ), + Rect.fromPoints( + const Offset(0, 0), + Offset(scaledWidth.toDouble(), scaledHeight.toDouble()), + ), + Paint()..filterQuality = quality, + ); + + final picture = recorder.endRecording(); + final resizedImage = await picture.toImage(size, size); + return resizedImage; +} + +/// Crops an [image] based on the specified [x], [y], [width] and [height]. +Future cropImage( + Image image, + ByteData imgByteData, { + required int x, + required int y, + required int width, + required int height, +}) async { + final newByteData = ByteData(width * height * 4); + for (var h = y; h < y + height; h++) { + for (var w = x; w < x + width; w++) { + final pixel = readPixelColor(image, imgByteData, w, h); + setPixelColor( + Size(width.toDouble(), height.toDouble()), + newByteData, + w - x, + h - y, + pixel, + ); + } + } + final newImage = await decodeImageFromRgbaBytes( + newByteData.buffer.asUint8List(), + width, + height, + ); + + return newImage; +} + +Future cropImageWithCanvasSimple( + Image image, { + required double x, + required double y, + required double width, + required double height, +}) async { + final recorder = PictureRecorder(); + final canvas = Canvas( + recorder, + Rect.fromPoints( + const Offset(0, 0), + Offset(width, height), + ), + ); + + canvas.drawImageRect( + image, + Rect.fromPoints( + Offset(x, y), + Offset(x + width, y + height), + ), + Rect.fromPoints( + const Offset(0, 0), + Offset(width, height), + ), + Paint()..filterQuality = FilterQuality.medium, + ); + + final picture = recorder.endRecording(); + return picture.toImage(width.toInt(), height.toInt()); +} + +@Deprecated('Old image processing method, use `cropImage` 
instead!') + +/// Crops an [image] based on the specified [x], [y], [width] and [height]. +/// Optionally, the cropped image can be resized to comply with a [maxSize] and/or [minSize]. +/// Optionally, the cropped image can be rotated from the center by [rotation] radians. +/// Optionally, the [quality] of the resizing interpolation can be specified. +Future cropImageWithCanvas( + Image image, { + required double x, + required double y, + required double width, + required double height, + Size? maxSize, + Size? minSize, + double rotation = 0.0, // rotation in radians + FilterQuality quality = FilterQuality.medium, +}) async { + // Calculate the scale for resizing based on maxSize and minSize + double scaleX = 1.0; + double scaleY = 1.0; + if (maxSize != null) { + final minScale = min(maxSize.width / width, maxSize.height / height); + if (minScale < 1.0) { + scaleX = minScale; + scaleY = minScale; + } + } + if (minSize != null) { + final maxScale = max(minSize.width / width, minSize.height / height); + if (maxScale > 1.0) { + scaleX = maxScale; + scaleY = maxScale; + } + } + + // Calculate the final dimensions + final targetWidth = (width * scaleX).round(); + final targetHeight = (height * scaleY).round(); + + // Create the canvas + final recorder = PictureRecorder(); + final canvas = Canvas( + recorder, + Rect.fromPoints( + const Offset(0, 0), + Offset(targetWidth.toDouble(), targetHeight.toDouble()), + ), + ); + + // Apply rotation + final center = Offset(targetWidth / 2, targetHeight / 2); + canvas.translate(center.dx, center.dy); + canvas.rotate(rotation); + + // Enlarge both the source and destination boxes to account for the rotation (i.e. 
avoid cropping the corners of the image) + final List enlargedSrc = + getEnlargedAbsoluteBox([x, y, x + width, y + height], 1.5); + final List enlargedDst = getEnlargedAbsoluteBox( + [ + -center.dx, + -center.dy, + -center.dx + targetWidth, + -center.dy + targetHeight, + ], + 1.5, + ); + + canvas.drawImageRect( + image, + Rect.fromPoints( + Offset(enlargedSrc[0], enlargedSrc[1]), + Offset(enlargedSrc[2], enlargedSrc[3]), + ), + Rect.fromPoints( + Offset(enlargedDst[0], enlargedDst[1]), + Offset(enlargedDst[2], enlargedDst[3]), + ), + Paint()..filterQuality = quality, + ); + + final picture = recorder.endRecording(); + + return picture.toImage(targetWidth, targetHeight); +} + +/// Adds padding around an [Image] object. +Future addPaddingToImage( + Image image, [ + double padding = 0.5, +]) async { + const Color paddingColor = Color.fromARGB(0, 0, 0, 0); + final originalWidth = image.width; + final originalHeight = image.height; + + final paddedWidth = (originalWidth + 2 * padding * originalWidth).toInt(); + final paddedHeight = (originalHeight + 2 * padding * originalHeight).toInt(); + + final recorder = PictureRecorder(); + final canvas = Canvas( + recorder, + Rect.fromPoints( + const Offset(0, 0), + Offset(paddedWidth.toDouble(), paddedHeight.toDouble()), + ), + ); + + final paint = Paint(); + paint.color = paddingColor; + + // Draw the padding + canvas.drawRect( + Rect.fromPoints( + const Offset(0, 0), + Offset(paddedWidth.toDouble(), paddedHeight.toDouble()), + ), + paint, + ); + + // Draw the original image on top of the padding + canvas.drawImageRect( + image, + Rect.fromPoints( + const Offset(0, 0), + Offset(image.width.toDouble(), image.height.toDouble()), + ), + Rect.fromPoints( + Offset(padding * originalWidth, padding * originalHeight), + Offset( + (1 + padding) * originalWidth, + (1 + padding) * originalHeight, + ), + ), + Paint()..filterQuality = FilterQuality.none, + ); + + final picture = recorder.endRecording(); + return picture.toImage(paddedWidth, 
paddedHeight); +} + +/// Preprocesses [imageData] for standard ML models. +/// Returns a [Num3DInputMatrix] image, ready for inference. +/// Also returns the original image size and the new image size, respectively. +/// +/// The [imageData] argument must be a [Uint8List] object. +/// The [normalize] argument determines whether the image is normalized to range [-1, 1]. +/// The [requiredWidth] and [requiredHeight] arguments determine the size of the output image. +/// The [quality] argument determines the quality of the resizing interpolation. +/// The [maintainAspectRatio] argument determines whether the aspect ratio of the image is maintained. +@Deprecated("Old method used in blazeface") +Future<(Num3DInputMatrix, Size, Size)> preprocessImageToMatrix( + Uint8List imageData, { + required int normalization, + required int requiredWidth, + required int requiredHeight, + FilterQuality quality = FilterQuality.medium, + maintainAspectRatio = true, +}) async { + final normFunction = normalization == 2 + ? normalizePixelRange2 + : normalization == 1 + ? 
normalizePixelRange1 + : normalizePixelNoRange; + final Image image = await decodeImageFromData(imageData); + final originalSize = Size(image.width.toDouble(), image.height.toDouble()); + + if (image.width == requiredWidth && image.height == requiredHeight) { + final ByteData imgByteData = await getByteDataFromImage(image); + return ( + createInputMatrixFromImage( + image, + imgByteData, + normFunction: normFunction, + ), + originalSize, + originalSize + ); + } + + final (resizedImage, newSize) = await resizeImage( + image, + requiredWidth, + requiredHeight, + quality: quality, + maintainAspectRatio: maintainAspectRatio, + ); + + final ByteData imgByteData = await getByteDataFromImage(resizedImage); + final Num3DInputMatrix imageMatrix = createInputMatrixFromImage( + resizedImage, + imgByteData, + normFunction: normFunction, + ); + + return (imageMatrix, originalSize, newSize); +} + +Future<(Float32List, Dimensions, Dimensions)> + preprocessImageToFloat32ChannelsFirst( + Image image, + ByteData imgByteData, { + required int normalization, + required int requiredWidth, + required int requiredHeight, + Color Function(num, num, Image, ByteData) getPixel = getPixelBilinear, + maintainAspectRatio = true, +}) async { + final normFunction = normalization == 2 + ? normalizePixelRange2 + : normalization == 1 + ? 
normalizePixelRange1 + : normalizePixelNoRange; + final originalSize = Dimensions(width: image.width, height: image.height); + + if (image.width == requiredWidth && image.height == requiredHeight) { + return ( + createFloat32ListFromImageChannelsFirst( + image, + imgByteData, + normFunction: normFunction, + ), + originalSize, + originalSize + ); + } + + double scaleW = requiredWidth / image.width; + double scaleH = requiredHeight / image.height; + if (maintainAspectRatio) { + final scale = + min(requiredWidth / image.width, requiredHeight / image.height); + scaleW = scale; + scaleH = scale; + } + final scaledWidth = (image.width * scaleW).round().clamp(0, requiredWidth); + final scaledHeight = (image.height * scaleH).round().clamp(0, requiredHeight); + + final processedBytes = Float32List(3 * requiredHeight * requiredWidth); + + final buffer = Float32List.view(processedBytes.buffer); + int pixelIndex = 0; + final int channelOffsetGreen = requiredHeight * requiredWidth; + final int channelOffsetBlue = 2 * requiredHeight * requiredWidth; + for (var h = 0; h < requiredHeight; h++) { + for (var w = 0; w < requiredWidth; w++) { + late Color pixel; + if (w >= scaledWidth || h >= scaledHeight) { + pixel = const Color.fromRGBO(114, 114, 114, 1.0); + } else { + pixel = getPixel( + w / scaleW, + h / scaleH, + image, + imgByteData, + ); + } + buffer[pixelIndex] = normFunction(pixel.red); + buffer[pixelIndex + channelOffsetGreen] = normFunction(pixel.green); + buffer[pixelIndex + channelOffsetBlue] = normFunction(pixel.blue); + pixelIndex++; + } + } + + return ( + processedBytes, + originalSize, + Dimensions(width: scaledWidth, height: scaledHeight) + ); +} + +@Deprecated( + 'Replaced by `preprocessImageToFloat32ChannelsFirst` to avoid issue with iOS canvas', +) +Future<(Float32List, Size, Size)> preprocessImageToFloat32ChannelsFirstCanvas( + Uint8List imageData, { + required int normalization, + required int requiredWidth, + required int requiredHeight, + FilterQuality 
quality = FilterQuality.medium, + maintainAspectRatio = true, +}) async { + final normFunction = normalization == 2 + ? normalizePixelRange2 + : normalization == 1 + ? normalizePixelRange1 + : normalizePixelNoRange; + final stopwatch = Stopwatch()..start(); + final Image image = await decodeImageFromData(imageData); + stopwatch.stop(); + log("Face Detection decoding ui image took: ${stopwatch.elapsedMilliseconds} ms"); + final originalSize = Size(image.width.toDouble(), image.height.toDouble()); + late final Image resizedImage; + late final Size newSize; + + if (image.width == requiredWidth && image.height == requiredHeight) { + resizedImage = image; + newSize = originalSize; + } else { + (resizedImage, newSize) = await resizeImage( + image, + requiredWidth, + requiredHeight, + quality: quality, + maintainAspectRatio: maintainAspectRatio, + ); + } + final ByteData imgByteData = await getByteDataFromImage(resizedImage); + final Float32List imageFloat32List = createFloat32ListFromImageChannelsFirst( + resizedImage, + imgByteData, + normFunction: normFunction, + ); + + return (imageFloat32List, originalSize, newSize); +} + +/// Preprocesses [imageData] based on [faceLandmarks] to align the faces in the images. +/// +/// Returns a list of [Uint8List] images, one for each face, in png format. 
+@Deprecated("Old method used in blazeface") +Future> preprocessFaceAlignToUint8List( + Uint8List imageData, + List>> faceLandmarks, { + int width = 112, + int height = 112, +}) async { + final alignedImages = []; + final Image image = await decodeImageFromData(imageData); + + for (final faceLandmark in faceLandmarks) { + final (alignmentResult, correctlyEstimated) = + SimilarityTransform.instance.estimate(faceLandmark); + if (!correctlyEstimated) { + alignedImages.add(Uint8List(0)); + continue; + } + final alignmentBox = getAlignedFaceBox(alignmentResult); + final Image alignedFace = await cropImageWithCanvas( + image, + x: alignmentBox[0], + y: alignmentBox[1], + width: alignmentBox[2] - alignmentBox[0], + height: alignmentBox[3] - alignmentBox[1], + maxSize: Size(width.toDouble(), height.toDouble()), + minSize: Size(width.toDouble(), height.toDouble()), + rotation: alignmentResult.rotation, + ); + final Uint8List alignedFacePng = await encodeImageToUint8List(alignedFace); + alignedImages.add(alignedFacePng); + + // final Uint8List alignedImageRGBA = await warpAffineToUint8List( + // image, + // imgByteData, + // alignmentResult.affineMatrix + // .map( + // (row) => row.map((e) { + // if (e != 1.0) { + // return e * 112; + // } else { + // return 1.0; + // } + // }).toList(), + // ) + // .toList(), + // width: width, + // height: height, + // ); + // final Image alignedImage = + // await decodeImageFromRgbaBytes(alignedImageRGBA, width, height); + // final Uint8List alignedImagePng = + // await encodeImageToUint8List(alignedImage); + + // alignedImages.add(alignedImagePng); + } + return alignedImages; +} + +/// Preprocesses [imageData] based on [faceLandmarks] to align the faces in the images +/// +/// Returns a list of [Num3DInputMatrix] images, one for each face, ready for MobileFaceNet inference +@Deprecated("Old method used in TensorFlow Lite") +Future< + ( + List, + List, + List, + List, + Size, + )> preprocessToMobileFaceNetInput( + Uint8List imageData, + 
List> facesJson, { + int width = 112, + int height = 112, +}) async { + final Image image = await decodeImageFromData(imageData); + final Size originalSize = + Size(image.width.toDouble(), image.height.toDouble()); + + final List relativeFaces = + facesJson.map((face) => FaceDetectionRelative.fromJson(face)).toList(); + + final List absoluteFaces = + relativeToAbsoluteDetections( + relativeDetections: relativeFaces, + imageWidth: image.width, + imageHeight: image.height, + ); + + final List>> faceLandmarks = + absoluteFaces.map((face) => face.allKeypoints).toList(); + + final alignedImages = []; + final alignmentResults = []; + final isBlurs = []; + final blurValues = []; + + for (final faceLandmark in faceLandmarks) { + final (alignmentResult, correctlyEstimated) = + SimilarityTransform.instance.estimate(faceLandmark); + if (!correctlyEstimated) { + alignedImages.add([]); + alignmentResults.add(AlignmentResult.empty()); + continue; + } + final alignmentBox = getAlignedFaceBox(alignmentResult); + final Image alignedFace = await cropImageWithCanvas( + image, + x: alignmentBox[0], + y: alignmentBox[1], + width: alignmentBox[2] - alignmentBox[0], + height: alignmentBox[3] - alignmentBox[1], + maxSize: Size(width.toDouble(), height.toDouble()), + minSize: Size(width.toDouble(), height.toDouble()), + rotation: alignmentResult.rotation, + quality: FilterQuality.medium, + ); + final alignedFaceByteData = await getByteDataFromImage(alignedFace); + final alignedFaceMatrix = createInputMatrixFromImage( + alignedFace, + alignedFaceByteData, + normFunction: normalizePixelRange2, + ); + alignedImages.add(alignedFaceMatrix); + alignmentResults.add(alignmentResult); + final faceGrayMatrix = createGrayscaleIntMatrixFromImage( + alignedFace, + alignedFaceByteData, + ); + final (isBlur, blurValue) = await BlurDetectionService.instance + .predictIsBlurGrayLaplacian(faceGrayMatrix); + isBlurs.add(isBlur); + blurValues.add(blurValue); + + // final Double3DInputMatrix alignedImage = 
await warpAffineToMatrix( + // image, + // imgByteData, + // transformationMatrix, + // width: width, + // height: height, + // normalize: true, + // ); + // alignedImages.add(alignedImage); + // transformationMatrices.add(transformationMatrix); + } + return (alignedImages, alignmentResults, isBlurs, blurValues, originalSize); +} + +@Deprecated("Old image manipulation that used canvas, causing issues on iOS") +Future<(Float32List, List, List, List, Size)> + preprocessToMobileFaceNetFloat32ListCanvas( + String imagePath, + List relativeFaces, { + int width = 112, + int height = 112, +}) async { + final Uint8List imageData = await File(imagePath).readAsBytes(); + final stopwatch = Stopwatch()..start(); + final Image image = await decodeImageFromData(imageData); + stopwatch.stop(); + log("Face Alignment decoding ui image took: ${stopwatch.elapsedMilliseconds} ms"); + final Size originalSize = + Size(image.width.toDouble(), image.height.toDouble()); + + final List absoluteFaces = + relativeToAbsoluteDetections( + relativeDetections: relativeFaces, + imageWidth: image.width, + imageHeight: image.height, + ); + + final List>> faceLandmarks = + absoluteFaces.map((face) => face.allKeypoints).toList(); + + final alignedImagesFloat32List = + Float32List(3 * width * height * faceLandmarks.length); + final alignmentResults = []; + final isBlurs = []; + final blurValues = []; + + int alignedImageIndex = 0; + for (final faceLandmark in faceLandmarks) { + final (alignmentResult, correctlyEstimated) = + SimilarityTransform.instance.estimate(faceLandmark); + if (!correctlyEstimated) { + alignedImageIndex += 3 * width * height; + alignmentResults.add(AlignmentResult.empty()); + continue; + } + final alignmentBox = getAlignedFaceBox(alignmentResult); + final Image alignedFace = await cropImageWithCanvas( + image, + x: alignmentBox[0], + y: alignmentBox[1], + width: alignmentBox[2] - alignmentBox[0], + height: alignmentBox[3] - alignmentBox[1], + maxSize: Size(width.toDouble(), 
height.toDouble()), + minSize: Size(width.toDouble(), height.toDouble()), + rotation: alignmentResult.rotation, + quality: FilterQuality.medium, + ); + final alignedFaceByteData = await getByteDataFromImage(alignedFace); + addInputImageToFloat32List( + alignedFace, + alignedFaceByteData, + alignedImagesFloat32List, + alignedImageIndex, + normFunction: normalizePixelRange2, + ); + alignedImageIndex += 3 * width * height; + alignmentResults.add(alignmentResult); + final blurDetectionStopwatch = Stopwatch()..start(); + final faceGrayMatrix = createGrayscaleIntMatrixFromImage( + alignedFace, + alignedFaceByteData, + ); + final grascalems = blurDetectionStopwatch.elapsedMilliseconds; + log('creating grayscale matrix took $grascalems ms'); + final (isBlur, blurValue) = await BlurDetectionService.instance + .predictIsBlurGrayLaplacian(faceGrayMatrix); + final blurms = blurDetectionStopwatch.elapsedMilliseconds - grascalems; + log('blur detection took $blurms ms'); + log( + 'total blur detection took ${blurDetectionStopwatch.elapsedMilliseconds} ms', + ); + blurDetectionStopwatch.stop(); + isBlurs.add(isBlur); + blurValues.add(blurValue); + } + return ( + alignedImagesFloat32List, + alignmentResults, + isBlurs, + blurValues, + originalSize + ); +} + +Future<(Float32List, List, List, List, Size)> + preprocessToMobileFaceNetFloat32List( + Image image, + ByteData imageByteData, + List relativeFaces, { + int width = 112, + int height = 112, +}) async { + final stopwatch = Stopwatch()..start(); + + final Size originalSize = + Size(image.width.toDouble(), image.height.toDouble()); + + final List absoluteFaces = + relativeToAbsoluteDetections( + relativeDetections: relativeFaces, + imageWidth: image.width, + imageHeight: image.height, + ); + + final alignedImagesFloat32List = + Float32List(3 * width * height * absoluteFaces.length); + final alignmentResults = []; + final isBlurs = []; + final blurValues = []; + + int alignedImageIndex = 0; + for (final face in absoluteFaces) { + 
final (alignmentResult, correctlyEstimated) = + SimilarityTransform.instance.estimate(face.allKeypoints); + if (!correctlyEstimated) { + alignedImageIndex += 3 * width * height; + alignmentResults.add(AlignmentResult.empty()); + continue; + } + alignmentResults.add(alignmentResult); + + warpAffineFloat32List( + image, + imageByteData, + alignmentResult.affineMatrix, + alignedImagesFloat32List, + alignedImageIndex, + ); + + final blurDetectionStopwatch = Stopwatch()..start(); + final faceGrayMatrix = createGrayscaleIntMatrixFromNormalized2List( + alignedImagesFloat32List, + alignedImageIndex, + ); + + alignedImageIndex += 3 * width * height; + final grayscalems = blurDetectionStopwatch.elapsedMilliseconds; + log('creating grayscale matrix took $grayscalems ms'); + final (isBlur, blurValue) = + await BlurDetectionService.instance.predictIsBlurGrayLaplacian( + faceGrayMatrix, + faceDirection: face.getFaceDirection(), + ); + final blurms = blurDetectionStopwatch.elapsedMilliseconds - grayscalems; + log('blur detection took $blurms ms'); + log( + 'total blur detection took ${blurDetectionStopwatch.elapsedMilliseconds} ms', + ); + blurDetectionStopwatch.stop(); + isBlurs.add(isBlur); + blurValues.add(blurValue); + } + stopwatch.stop(); + log("Face Alignment took: ${stopwatch.elapsedMilliseconds} ms"); + return ( + alignedImagesFloat32List, + alignmentResults, + isBlurs, + blurValues, + originalSize + ); +} + +void warpAffineFloat32List( + Image inputImage, + ByteData imgByteDataRgba, + List> affineMatrix, + Float32List outputList, + int startIndex, { + int width = 112, + int height = 112, +}) { + if (width != 112 || height != 112) { + throw Exception( + 'Width and height must be 112, other transformations are not supported yet.', + ); + } + + final transformationMatrix = affineMatrix + .map( + (row) => row.map((e) { + if (e != 1.0) { + return e * 112; + } else { + return 1.0; + } + }).toList(), + ) + .toList(); + + final A = Matrix.fromList([ + 
[transformationMatrix[0][0], transformationMatrix[0][1]], + [transformationMatrix[1][0], transformationMatrix[1][1]], + ]); + final aInverse = A.inverse(); + // final aInverseMinus = aInverse * -1; + final B = Vector.fromList( + [transformationMatrix[0][2], transformationMatrix[1][2]], + ); + final b00 = B[0]; + final b10 = B[1]; + final a00Prime = aInverse[0][0]; + final a01Prime = aInverse[0][1]; + final a10Prime = aInverse[1][0]; + final a11Prime = aInverse[1][1]; + + for (int yTrans = 0; yTrans < height; ++yTrans) { + for (int xTrans = 0; xTrans < width; ++xTrans) { + // Perform inverse affine transformation (original implementation, intuitive but slow) + // final X = aInverse * (Vector.fromList([xTrans, yTrans]) - B); + // final X = aInverseMinus * (B - [xTrans, yTrans]); + // final xList = X.asFlattenedList; + // num xOrigin = xList[0]; + // num yOrigin = xList[1]; + + // Perform inverse affine transformation (fast implementation, less intuitive) + final num xOrigin = (xTrans - b00) * a00Prime + (yTrans - b10) * a01Prime; + final num yOrigin = (xTrans - b00) * a10Prime + (yTrans - b10) * a11Prime; + + final Color pixel = + getPixelBicubic(xOrigin, yOrigin, inputImage, imgByteDataRgba); + + // Set the new pixel + outputList[startIndex + 3 * (yTrans * width + xTrans)] = + normalizePixelRange2(pixel.red); + outputList[startIndex + 3 * (yTrans * width + xTrans) + 1] = + normalizePixelRange2(pixel.green); + outputList[startIndex + 3 * (yTrans * width + xTrans) + 2] = + normalizePixelRange2(pixel.blue); + } + } +} + +Future> generateFaceThumbnails( + Uint8List imageData, + List faceBoxes, +) async { + final stopwatch = Stopwatch()..start(); + + final Image img = await decodeImageFromData(imageData); + final ByteData imgByteData = await getByteDataFromImage(img); + + try { + final List faceThumbnails = []; + + for (final faceBox in faceBoxes) { + // Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first + 
final double xMinAbs = faceBox.x * img.width; + final double yMinAbs = faceBox.y * img.height; + final double widthAbs = faceBox.width * img.width; + final double heightAbs = faceBox.height * img.height; + + final int xCrop = (xMinAbs - widthAbs / 2).round().clamp(0, img.width); + final int yCrop = (yMinAbs - heightAbs / 2).round().clamp(0, img.height); + final int widthCrop = min((widthAbs * 2).round(), img.width - xCrop); + final int heightCrop = min((heightAbs * 2).round(), img.height - yCrop); + final Image faceThumbnail = await cropImage( + img, + imgByteData, + x: xCrop, + y: yCrop, + width: widthCrop, + height: heightCrop, + ); + final Uint8List faceThumbnailPng = await encodeImageToUint8List( + faceThumbnail, + format: ImageByteFormat.png, + ); + faceThumbnails.add(faceThumbnailPng); + } + stopwatch.stop(); + log('Face thumbnail generation took: ${stopwatch.elapsedMilliseconds} ms'); + + return faceThumbnails; + } catch (e, s) { + log('[ImageMlUtils] Error generating face thumbnails: $e, \n stackTrace: $s'); + rethrow; + } +} + +/// Generates a face thumbnail from [imageData] and a [faceDetection]. +/// +/// Returns a [Uint8List] image, in png format. 
+Future> generateFaceThumbnailsUsingCanvas( + Uint8List imageData, + List faceBoxes, +) async { + final Image img = await decodeImageFromData(imageData); + int i = 0; + + try { + final futureFaceThumbnails = >[]; + for (final faceBox in faceBoxes) { + // Note that the faceBox values are relative to the image size, so we need to convert them to absolute values first + final double xMinAbs = faceBox.x * img.width; + final double yMinAbs = faceBox.y * img.height; + final double widthAbs = faceBox.width * img.width; + final double heightAbs = faceBox.height * img.height; + + // Calculate the crop values by adding some padding around the face and making sure it's centered + const regularPadding = 0.4; + const minimumPadding = 0.1; + final num xCrop = (xMinAbs - widthAbs * regularPadding); + final num xOvershoot = min(0, xCrop).abs() / widthAbs; + final num widthCrop = widthAbs * (1 + 2 * regularPadding) - + 2 * min(xOvershoot, regularPadding - minimumPadding) * widthAbs; + final num yCrop = (yMinAbs - heightAbs * regularPadding); + final num yOvershoot = min(0, yCrop).abs() / heightAbs; + final num heightCrop = heightAbs * (1 + 2 * regularPadding) - + 2 * min(yOvershoot, regularPadding - minimumPadding) * heightAbs; + + // Prevent the face from going out of image bounds + final xCropSafe = xCrop.clamp(0, img.width); + final yCropSafe = yCrop.clamp(0, img.height); + final widthCropSafe = widthCrop.clamp(0, img.width - xCropSafe); + final heightCropSafe = heightCrop.clamp(0, img.height - yCropSafe); + + futureFaceThumbnails.add( + cropAndEncodeCanvas( + img, + x: xCropSafe.toDouble(), + y: yCropSafe.toDouble(), + width: widthCropSafe.toDouble(), + height: heightCropSafe.toDouble(), + ), + ); + i++; + } + final List faceThumbnails = + await Future.wait(futureFaceThumbnails); + return faceThumbnails; + } catch (e) { + log('[ImageMlUtils] Error generating face thumbnails: $e'); + log('[ImageMlUtils] cropImage problematic input argument: ${faceBoxes[i]}'); + return []; + } +} 
+ +Future cropAndEncodeCanvas( + Image image, { + required double x, + required double y, + required double width, + required double height, +}) async { + final croppedImage = await cropImageWithCanvasSimple( + image, + x: x, + y: y, + width: width, + height: height, + ); + return await encodeImageToUint8List( + croppedImage, + format: ImageByteFormat.png, + ); +} + +@Deprecated('For second pass of BlazeFace, no longer used') + +/// Generates cropped and padded image data from [imageData] and a [faceBox]. +/// +/// The steps are: +/// 1. Crop the image to the face bounding box +/// 2. Resize this cropped image to a square that is half the BlazeFace input size +/// 3. Pad the image to the BlazeFace input size +/// +/// Note that [faceBox] is a list of the following values: [xMinBox, yMinBox, xMaxBox, yMaxBox]. +Future cropAndPadFaceData( + Uint8List imageData, + List faceBox, +) async { + final Image image = await decodeImageFromData(imageData); + + final Image faceCrop = await cropImageWithCanvas( + image, + x: (faceBox[0] * image.width), + y: (faceBox[1] * image.height), + width: ((faceBox[2] - faceBox[0]) * image.width), + height: ((faceBox[3] - faceBox[1]) * image.height), + maxSize: const Size(128, 128), + minSize: const Size(128, 128), + ); + + final Image facePadded = await addPaddingToImage( + faceCrop, + 0.5, + ); + + return await encodeImageToUint8List(facePadded); +} + +Color getPixelBilinear(num fx, num fy, Image image, ByteData byteDataRgba) { + // Clamp to image boundaries + fx = fx.clamp(0, image.width - 1); + fy = fy.clamp(0, image.height - 1); + + // Get the surrounding coordinates and their weights + final int x0 = fx.floor(); + final int x1 = fx.ceil(); + final int y0 = fy.floor(); + final int y1 = fy.ceil(); + final dx = fx - x0; + final dy = fy - y0; + final dx1 = 1.0 - dx; + final dy1 = 1.0 - dy; + + // Get the original pixels + final Color pixel1 = readPixelColor(image, byteDataRgba, x0, y0); + final Color pixel2 = readPixelColor(image, 
byteDataRgba, x1, y0); + final Color pixel3 = readPixelColor(image, byteDataRgba, x0, y1); + final Color pixel4 = readPixelColor(image, byteDataRgba, x1, y1); + + int bilinear( + num val1, + num val2, + num val3, + num val4, + ) => + (val1 * dx1 * dy1 + val2 * dx * dy1 + val3 * dx1 * dy + val4 * dx * dy) + .round(); + + // Calculate the weighted sum of pixels + final int r = bilinear(pixel1.red, pixel2.red, pixel3.red, pixel4.red); + final int g = + bilinear(pixel1.green, pixel2.green, pixel3.green, pixel4.green); + final int b = bilinear(pixel1.blue, pixel2.blue, pixel3.blue, pixel4.blue); + + return Color.fromRGBO(r, g, b, 1.0); +} + +/// Get the pixel value using Bicubic Interpolation. Code taken mainly from https://github.com/brendan-duncan/image/blob/6e407612752ffdb90b28cd5863c7f65856349348/lib/src/image/image.dart#L697 +Color getPixelBicubic(num fx, num fy, Image image, ByteData byteDataRgba) { + fx = fx.clamp(0, image.width - 1); + fy = fy.clamp(0, image.height - 1); + + final x = fx.toInt() - (fx >= 0.0 ? 0 : 1); + final px = x - 1; + final nx = x + 1; + final ax = x + 2; + final y = fy.toInt() - (fy >= 0.0 ? 0 : 1); + final py = y - 1; + final ny = y + 1; + final ay = y + 2; + final dx = fx - x; + final dy = fy - y; + num cubic(num dx, num ipp, num icp, num inp, num iap) => + icp + + 0.5 * + (dx * (-ipp + inp) + + dx * dx * (2 * ipp - 5 * icp + 4 * inp - iap) + + dx * dx * dx * (-ipp + 3 * icp - 3 * inp + iap)); + + final icc = readPixelColor(image, byteDataRgba, x, y); + + final ipp = + px < 0 || py < 0 ? icc : readPixelColor(image, byteDataRgba, px, py); + final icp = px < 0 ? icc : readPixelColor(image, byteDataRgba, x, py); + final inp = py < 0 || nx >= image.width + ? icc + : readPixelColor(image, byteDataRgba, nx, py); + final iap = ax >= image.width || py < 0 + ? 
icc + : readPixelColor(image, byteDataRgba, ax, py); + + final ip0 = cubic(dx, ipp.red, icp.red, inp.red, iap.red); + final ip1 = cubic(dx, ipp.green, icp.green, inp.green, iap.green); + final ip2 = cubic(dx, ipp.blue, icp.blue, inp.blue, iap.blue); + // final ip3 = cubic(dx, ipp.a, icp.a, inp.a, iap.a); + + final ipc = px < 0 ? icc : readPixelColor(image, byteDataRgba, px, y); + final inc = + nx >= image.width ? icc : readPixelColor(image, byteDataRgba, nx, y); + final iac = + ax >= image.width ? icc : readPixelColor(image, byteDataRgba, ax, y); + + final ic0 = cubic(dx, ipc.red, icc.red, inc.red, iac.red); + final ic1 = cubic(dx, ipc.green, icc.green, inc.green, iac.green); + final ic2 = cubic(dx, ipc.blue, icc.blue, inc.blue, iac.blue); + // final ic3 = cubic(dx, ipc.a, icc.a, inc.a, iac.a); + + final ipn = px < 0 || ny >= image.height + ? icc + : readPixelColor(image, byteDataRgba, px, ny); + final icn = + ny >= image.height ? icc : readPixelColor(image, byteDataRgba, x, ny); + final inn = nx >= image.width || ny >= image.height + ? icc + : readPixelColor(image, byteDataRgba, nx, ny); + final ian = ax >= image.width || ny >= image.height + ? icc + : readPixelColor(image, byteDataRgba, ax, ny); + + final in0 = cubic(dx, ipn.red, icn.red, inn.red, ian.red); + final in1 = cubic(dx, ipn.green, icn.green, inn.green, ian.green); + final in2 = cubic(dx, ipn.blue, icn.blue, inn.blue, ian.blue); + // final in3 = cubic(dx, ipn.a, icn.a, inn.a, ian.a); + + final ipa = px < 0 || ay >= image.height + ? icc + : readPixelColor(image, byteDataRgba, px, ay); + final ica = + ay >= image.height ? icc : readPixelColor(image, byteDataRgba, x, ay); + final ina = nx >= image.width || ay >= image.height + ? icc + : readPixelColor(image, byteDataRgba, nx, ay); + final iaa = ax >= image.width || ay >= image.height + ? 
icc + : readPixelColor(image, byteDataRgba, ax, ay); + + final ia0 = cubic(dx, ipa.red, ica.red, ina.red, iaa.red); + final ia1 = cubic(dx, ipa.green, ica.green, ina.green, iaa.green); + final ia2 = cubic(dx, ipa.blue, ica.blue, ina.blue, iaa.blue); + // final ia3 = cubic(dx, ipa.a, ica.a, ina.a, iaa.a); + + final c0 = cubic(dy, ip0, ic0, in0, ia0).clamp(0, 255).toInt(); + final c1 = cubic(dy, ip1, ic1, in1, ia1).clamp(0, 255).toInt(); + final c2 = cubic(dy, ip2, ic2, in2, ia2).clamp(0, 255).toInt(); + // final c3 = cubic(dy, ip3, ic3, in3, ia3); + + return Color.fromRGBO(c0, c1, c2, 1.0); +} + +@Deprecated('Old method only used in other deprecated methods') +List getAlignedFaceBox(AlignmentResult alignment) { + final List box = [ + // [xMinBox, yMinBox, xMaxBox, yMaxBox] + alignment.center[0] - alignment.size / 2, + alignment.center[1] - alignment.size / 2, + alignment.center[0] + alignment.size / 2, + alignment.center[1] + alignment.size / 2, + ]; + box.roundBoxToDouble(); + return box; +} + +/// Returns an enlarged version of the [box] by a factor of [factor]. +/// The [box] is in absolute coordinates: [xMinBox, yMinBox, xMaxBox, yMaxBox]. +List getEnlargedAbsoluteBox(List box, [double factor = 2]) { + final boxCopy = List.from(box, growable: false); + // The four values of the box in order are: [xMinBox, yMinBox, xMaxBox, yMaxBox]. 
+ + final width = boxCopy[2] - boxCopy[0]; + final height = boxCopy[3] - boxCopy[1]; + + boxCopy[0] -= width * (factor - 1) / 2; + boxCopy[1] -= height * (factor - 1) / 2; + boxCopy[2] += width * (factor - 1) / 2; + boxCopy[3] += height * (factor - 1) / 2; + + return boxCopy; +} diff --git a/mobile/lib/utils/image_util.dart b/mobile/lib/utils/image_util.dart index a5bcb03a75..e5b0d72fac 100644 --- a/mobile/lib/utils/image_util.dart +++ b/mobile/lib/utils/image_util.dart @@ -1,6 +1,8 @@ import 'dart:async'; +import 'dart:ui' as ui; import 'package:flutter/widgets.dart'; +import 'package:image/image.dart' as img; Future getImageInfo(ImageProvider imageProvider) { final completer = Completer(); @@ -14,3 +16,35 @@ Future getImageInfo(ImageProvider imageProvider) { completer.future.whenComplete(() => imageStream.removeListener(listener)); return completer.future; } + +Future convertImageToFlutterUi(img.Image image) async { + if (image.format != img.Format.uint8 || image.numChannels != 4) { + final cmd = img.Command() + ..image(image) + ..convert(format: img.Format.uint8, numChannels: 4); + final rgba8 = await cmd.getImageThread(); + if (rgba8 != null) { + image = rgba8; + } + } + + final ui.ImmutableBuffer buffer = + await ui.ImmutableBuffer.fromUint8List(image.toUint8List()); + + final ui.ImageDescriptor id = ui.ImageDescriptor.raw( + buffer, + height: image.height, + width: image.width, + pixelFormat: ui.PixelFormat.rgba8888, + ); + + final ui.Codec codec = await id.instantiateCodec( + targetHeight: image.height, + targetWidth: image.width, + ); + + final ui.FrameInfo fi = await codec.getNextFrame(); + final ui.Image uiImage = fi.image; + + return uiImage; +} diff --git a/mobile/lib/utils/local_settings.dart b/mobile/lib/utils/local_settings.dart index 2f277c80ba..6b81e76971 100644 --- a/mobile/lib/utils/local_settings.dart +++ b/mobile/lib/utils/local_settings.dart @@ -14,6 +14,8 @@ class LocalSettings { static const kCollectionSortPref = "collection_sort_pref"; 
static const kPhotoGridSize = "photo_grid_size"; static const kEnableMagicSearch = "enable_magic_search"; + static const kEnableFaceIndexing = "enable_face_indexing"; + static const kEnableFaceClustering = "enable_face_clustering"; static const kRateUsShownCount = "rate_us_shown_count"; static const kRateUsPromptThreshold = 2; @@ -69,4 +71,30 @@ class LocalSettings { bool shouldPromptToRateUs() { return getRateUsShownCount() < kRateUsPromptThreshold; } + + bool get isFaceIndexingEnabled => + _prefs.getBool(kEnableFaceIndexing) ?? false; + + bool get isFaceClusteringEnabled => + _prefs.getBool(kEnableFaceClustering) ?? false; + + /// toggleFaceIndexing toggles the face indexing setting and returns the new value + Future toggleFaceIndexing() async { + await _prefs.setBool(kEnableFaceIndexing, !isFaceIndexingEnabled); + return isFaceIndexingEnabled; + } + + //#region todo:(NG) remove this section, only needed for internal testing to see + // if the OS stops the app during indexing + bool get remoteFetchEnabled => _prefs.getBool("remoteFetchEnabled") ?? 
true; + Future toggleRemoteFetch() async { + await _prefs.setBool("remoteFetchEnabled", !remoteFetchEnabled); + } + //#endregion + + /// toggleFaceClustering toggles the face clustering setting and returns the new value + Future toggleFaceClustering() async { + await _prefs.setBool(kEnableFaceClustering, !isFaceClusteringEnabled); + return isFaceClusteringEnabled; + } } diff --git a/mobile/lib/utils/multipart_upload_util.dart b/mobile/lib/utils/multipart_upload_util.dart index 102c08d8d8..6b9ccafb97 100644 --- a/mobile/lib/utils/multipart_upload_util.dart +++ b/mobile/lib/utils/multipart_upload_util.dart @@ -6,8 +6,8 @@ import "package:dio/dio.dart"; import "package:logging/logging.dart"; import "package:photos/core/constants.dart"; import "package:photos/core/network/network.dart"; +import 'package:photos/module/upload/model/xml.dart'; import "package:photos/service_locator.dart"; -import "package:photos/utils/xml_parser_util.dart"; final _enteDio = NetworkClient.instance.enteDio; final _dio = NetworkClient.instance.getDio(); diff --git a/mobile/lib/utils/network_util.dart b/mobile/lib/utils/network_util.dart new file mode 100644 index 0000000000..a3b28561cf --- /dev/null +++ b/mobile/lib/utils/network_util.dart @@ -0,0 +1,21 @@ +import "package:connectivity_plus/connectivity_plus.dart"; +import "package:flutter/foundation.dart" show debugPrint; +import "package:photos/core/configuration.dart"; + +Future canUseHighBandwidth() async { + // Connections will contain a list of currently active connections. 
+ // could be vpn and wifi or mobile and vpn, but should not be wifi and mobile + final List connections = + await (Connectivity().checkConnectivity()); + bool canUploadUnderCurrentNetworkConditions = true; + if (!Configuration.instance.shouldBackupOverMobileData()) { + if (connections.any((element) => element == ConnectivityResult.mobile)) { + canUploadUnderCurrentNetworkConditions = false; + } else { + debugPrint( + "[canUseHighBandwidth] mobileBackupDisabled, backing up with connections: ${connections.map((e) => e.name).toString()}", + ); + } + } + return canUploadUnderCurrentNetworkConditions; +} diff --git a/mobile/lib/utils/thumbnail_util.dart b/mobile/lib/utils/thumbnail_util.dart index dc21676325..db7648b92b 100644 --- a/mobile/lib/utils/thumbnail_util.dart +++ b/mobile/lib/utils/thumbnail_util.dart @@ -217,3 +217,11 @@ File cachedThumbnailPath(EnteFile file) { thumbnailCacheDirectory + "/" + file.uploadedFileID.toString(), ); } + +File cachedFaceCropPath(String faceID) { + final thumbnailCacheDirectory = + Configuration.instance.getThumbnailCacheDirectory(); + return File( + thumbnailCacheDirectory + "/" + faceID, + ); +} diff --git a/mobile/lib/utils/xml_parser_util.dart b/mobile/lib/utils/xml_parser_util.dart index 9490fc40cb..8b13789179 100644 --- a/mobile/lib/utils/xml_parser_util.dart +++ b/mobile/lib/utils/xml_parser_util.dart @@ -1,41 +1 @@ -// ignore_for_file: implementation_imports -import "package:xml/xml.dart"; - -// used for classes that can be converted to xml -abstract class XmlParsableObject { - Map toMap(); - String get elementName; -} - -// for converting the response to xml -String convertJs2Xml(Map json) { - final builder = XmlBuilder(); - buildXml(builder, json); - return builder.buildDocument().toXmlString( - pretty: true, - indent: ' ', - ); -} - -// for building the xml node tree recursively -void buildXml(XmlBuilder builder, dynamic node) { - if (node is Map) { - node.forEach((key, value) { - builder.element(key, nest: () => 
buildXml(builder, value)); - }); - } else if (node is List) { - for (var item in node) { - buildXml(builder, item); - } - } else if (node is XmlParsableObject) { - builder.element( - node.elementName, - nest: () { - buildXml(builder, node.toMap()); - }, - ); - } else { - builder.text(node.toString()); - } -} diff --git a/mobile/plugins/ente_feature_flag/lib/src/service.dart b/mobile/plugins/ente_feature_flag/lib/src/service.dart index 47539eeb5f..ce90352030 100644 --- a/mobile/plugins/ente_feature_flag/lib/src/service.dart +++ b/mobile/plugins/ente_feature_flag/lib/src/service.dart @@ -67,7 +67,7 @@ class FlagService { bool get mapEnabled => flags.mapEnabled; - bool get faceSearchEnabled => flags.faceSearchEnabled; + bool get faceSearchEnabled => internalUser || flags.betaUser; bool get passKeyEnabled => flags.passKeyEnabled || internalOrBetaUser; diff --git a/mobile/pubspec.lock b/mobile/pubspec.lock index 5c9c955c9e..c8b2c912c4 100644 --- a/mobile/pubspec.lock +++ b/mobile/pubspec.lock @@ -37,10 +37,10 @@ packages: dependency: "direct main" description: name: animated_list_plus - sha256: fe66f9c300d715254727fbdf050487844d17b013fec344fa28081d29bddbdf1a + sha256: fb3d7f1fbaf5af84907f3c739236bacda8bf32cbe1f118dd51510752883ff50c url: "https://pub.dev" source: hosted - version: "0.4.5" + version: "0.5.2" animated_stack_widget: dependency: transitive description: @@ -355,6 +355,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.3.2" + dart_ui_isolate: + dependency: "direct main" + description: + name: dart_ui_isolate + sha256: bd531558002a00de0ac7dd73c84887dd01e652bd254d3098d7763881535196d7 + url: "https://pub.dev" + source: hosted + version: "1.1.1" dbus: dependency: transitive description: @@ -907,10 +915,10 @@ packages: dependency: "direct main" description: name: home_widget - sha256: "29565bfee4b32eaf9e7e8b998d504618b779a74b2b1ac62dd4dac7468e66f1a3" + sha256: "2a0fdd6267ff975bd07bedf74686bd5577200f504f5de36527ac1b56bdbe68e3" url: "https://pub.dev" 
source: hosted - version: "0.5.0" + version: "0.6.0" html: dependency: transitive description: @@ -1056,26 +1064,26 @@ packages: dependency: transitive description: name: leak_tracker - sha256: "78eb209deea09858f5269f5a5b02be4049535f568c07b275096836f01ea323fa" + sha256: "7f0df31977cb2c0b88585095d168e689669a2cc9b97c309665e3386f3e9d341a" url: "https://pub.dev" source: hosted - version: "10.0.0" + version: "10.0.4" leak_tracker_flutter_testing: dependency: transitive description: name: leak_tracker_flutter_testing - sha256: b46c5e37c19120a8a01918cfaf293547f47269f7cb4b0058f21531c2465d6ef0 + sha256: "06e98f569d004c1315b991ded39924b21af84cf14cc94791b8aea337d25b57f8" url: "https://pub.dev" source: hosted - version: "2.0.1" + version: "3.0.3" leak_tracker_testing: dependency: transitive description: name: leak_tracker_testing - sha256: a597f72a664dbd293f3bfc51f9ba69816f84dcd403cdac7066cb3f6003f3ab47 + sha256: "6ba465d5d76e67ddf503e1161d1f4a6bc42306f9d66ca1e8f079a47290fb06d3" url: "https://pub.dev" source: hosted - version: "2.0.1" + version: "3.0.1" like_button: dependency: "direct main" description: @@ -1272,10 +1280,10 @@ packages: dependency: transitive description: name: meta - sha256: d584fa6707a52763a52446f02cc621b077888fb63b93bbcb1143a7be5a0c0c04 + sha256: "7687075e408b093f36e6bbf6c91878cc0d4cd10f409506f7bc996f68220b9136" url: "https://pub.dev" source: hosted - version: "1.11.0" + version: "1.12.0" mgrs_dart: dependency: transitive description: @@ -1292,6 +1300,14 @@ packages: url: "https://pub.dev" source: hosted version: "1.0.5" + ml_linalg: + dependency: "direct main" + description: + name: ml_linalg + sha256: "304cb8a2a172f2303226d672d0b6f18dbfe558e2db49d27c8aa9f3e15475c0cd" + url: "https://pub.dev" + source: hosted + version: "13.12.2" modal_bottom_sheet: dependency: "direct main" description: @@ -1624,7 +1640,7 @@ packages: source: hosted version: "1.0.1" pool: - dependency: transitive + dependency: "direct main" description: name: pool sha256: 
"20fe868b6314b322ea036ba325e6fc0711a22948856475e2c2b6306e8ab39c2a" @@ -1648,7 +1664,7 @@ packages: source: hosted version: "2.1.0" protobuf: - dependency: transitive + dependency: "direct main" description: name: protobuf sha256: "68645b24e0716782e58948f8467fd42a880f255096a821f9e7d0ec625b00c84d" @@ -1887,6 +1903,14 @@ packages: url: "https://pub.dev" source: hosted version: "1.0.4" + simple_cluster: + dependency: "direct main" + description: + name: simple_cluster + sha256: "64d6b7d60d641299ad8c3f012417c711532792c1bc61ac6a7f52b942cdba65da" + url: "https://pub.dev" + source: hosted + version: "0.3.0" sky_engine: dependency: transitive description: flutter @@ -2032,10 +2056,10 @@ packages: dependency: "direct main" description: name: styled_text - sha256: f72928d1ebe8cb149e3b34a689cb1ddca696b808187cf40ac3a0bd183dff379c + sha256: fd624172cf629751b4f171dd0ecf9acf02a06df3f8a81bb56c0caa4f1df706c3 url: "https://pub.dev" source: hosted - version: "7.0.0" + version: "8.1.0" sync_http: dependency: transitive description: @@ -2048,20 +2072,20 @@ packages: dependency: "direct main" description: name: syncfusion_flutter_core - sha256: "9be1bb9bbdb42823439a18da71484f1964c14dbe1c255ab1b931932b12fa96e8" + sha256: "63108a33f9b0d89f7b6b56cce908b8e519fe433dbbe0efcf41ad3e8bb2081bd9" url: "https://pub.dev" source: hosted - version: "19.4.56" + version: "25.2.5" syncfusion_flutter_sliders: dependency: "direct main" description: name: syncfusion_flutter_sliders - sha256: "1f6a63ccab4180b544074b9264a20f01ee80b553de154192fe1d7b434089d3c2" + sha256: f27310bedc0e96e84054f0a70ac593d1a3c38397c158c5226ba86027ad77b2c1 url: "https://pub.dev" source: hosted - version: "19.4.56" + version: "25.2.5" synchronized: - dependency: transitive + dependency: "direct main" description: name: synchronized sha256: "539ef412b170d65ecdafd780f924e5be3f60032a1128df156adad6c5b373d558" @@ -2080,26 +2104,26 @@ packages: dependency: "direct dev" description: name: test - sha256: 
a1f7595805820fcc05e5c52e3a231aedd0b72972cb333e8c738a8b1239448b6f + sha256: "7ee446762c2c50b3bd4ea96fe13ffac69919352bd3b4b17bac3f3465edc58073" url: "https://pub.dev" source: hosted - version: "1.24.9" + version: "1.25.2" test_api: dependency: transitive description: name: test_api - sha256: "5c2f730018264d276c20e4f1503fd1308dfbbae39ec8ee63c5236311ac06954b" + sha256: "9955ae474176f7ac8ee4e989dadfb411a58c30415bcfb648fa04b2b8a03afa7f" url: "https://pub.dev" source: hosted - version: "0.6.1" + version: "0.7.0" test_core: dependency: transitive description: name: test_core - sha256: a757b14fc47507060a162cc2530d9a4a2f92f5100a952c7443b5cad5ef5b106a + sha256: "2bc4b4ecddd75309300d8096f781c0e3280ca1ef85beda558d33fcbedc2eead4" url: "https://pub.dev" source: hosted - version: "0.5.9" + version: "0.6.0" timezone: dependency: transitive description: @@ -2329,10 +2353,10 @@ packages: dependency: transitive description: name: vm_service - sha256: b3d56ff4341b8f182b96aceb2fa20e3dcb336b9f867bc0eafc0de10f1048e957 + sha256: "3923c89304b715fb1eb6423f017651664a03bf5f4b29983627c4da791f74a4ec" url: "https://pub.dev" source: hosted - version: "13.0.0" + version: "14.2.1" volume_controller: dependency: transitive description: @@ -2479,4 +2503,4 @@ packages: version: "3.1.2" sdks: dart: ">=3.3.0 <4.0.0" - flutter: ">=3.19.0" + flutter: ">=3.20.0-1.2.pre" diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 35cce0ada9..5faea5a6db 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -12,7 +12,7 @@ description: ente photos application # Read more about iOS versioning at # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html -version: 0.8.95+615 +version: 0.8.108+632 publish_to: none environment: @@ -21,7 +21,7 @@ environment: dependencies: adaptive_theme: ^3.1.0 animate_do: ^2.0.0 - animated_list_plus: ^0.4.5 + animated_list_plus: ^0.5.2 archive: ^3.1.2 background_fetch: ^1.2.1 battery_info: ^1.1.1 @@ 
-39,7 +39,8 @@ dependencies: connectivity_plus: ^6.0.2 cross_file: ^0.3.3 crypto: ^3.0.2 - cupertino_icons: ^1.0.8 + cupertino_icons: ^1.0.0 + dart_ui_isolate: ^1.1.1 defer_pointer: ^0.0.2 device_info_plus: ^9.0.3 dio: ^4.0.6 @@ -90,12 +91,12 @@ dependencies: fluttertoast: ^8.0.6 freezed_annotation: ^2.4.1 google_nav_bar: ^5.0.5 - home_widget: ^0.5.0 + home_widget: ^0.6.0 html_unescape: ^2.0.0 http: ^1.1.0 image: ^4.0.17 image_editor: ^1.3.0 - intl: ^0.18.0 + intl: ^0.19.0 json_annotation: ^4.8.0 latlong2: ^0.9.0 like_button: ^2.0.5 @@ -109,6 +110,7 @@ dependencies: media_kit: ^1.1.10+1 media_kit_libs_video: ^1.0.4 media_kit_video: ^1.2.4 + ml_linalg: ^13.11.31 modal_bottom_sheet: ^3.0.0-pre motion_photos: git: "https://github.com/ente-io/motion_photo.git" @@ -131,6 +133,8 @@ dependencies: photo_view: ^0.14.0 pinput: ^1.2.2 pointycastle: ^3.7.3 + pool: ^1.5.1 + protobuf: ^3.1.0 provider: ^6.0.0 quiver: ^3.0.1 receive_sharing_intent: ^1.7.0 @@ -139,19 +143,16 @@ dependencies: sentry_flutter: ^7.9.0 share_plus: 7.2.2 shared_preferences: ^2.0.5 + simple_cluster: ^0.3.0 sqflite: ^2.3.0 sqflite_migration: ^0.3.0 sqlite3_flutter_libs: ^0.5.20 sqlite_async: ^0.6.1 step_progress_indicator: ^1.0.2 - styled_text: ^7.0.0 - syncfusion_flutter_core: ^19.2.49 - syncfusion_flutter_sliders: ^19.2.49 - # tflite_flutter: ^0.9.0 - # tflite_flutter_helper: - # git: - # url: https://github.com/pnyompen/tflite_flutter_helper.git - # ref: 43e87d4b9627539266dc20250beb35bf36320dce + styled_text: ^8.1.0 + syncfusion_flutter_core: ^25.2.5 + syncfusion_flutter_sliders: ^25.2.5 + synchronized: ^3.1.0 tuple: ^2.0.0 uni_links: ^0.5.1 url_launcher: ^6.0.3 @@ -173,6 +174,7 @@ dependency_overrides: # Remove this after removing dependency from flutter_sodium. 
# Newer flutter packages depends on ffi > 2.0.0 while flutter_sodium depends on ffi < 2.0.0 ffi: 2.1.0 + intl: 0.18.1 video_player: git: url: https://github.com/ente-io/packages.git @@ -225,9 +227,6 @@ flutter_native_splash: flutter: assets: - assets/ - - assets/models/cocossd/ - - assets/models/mobilenet/ - - assets/models/scenes/ - assets/models/clip/ fonts: - family: Inter diff --git a/mobile/thirdparty/flutter b/mobile/thirdparty/flutter index ba39319843..367f9ea16b 160000 --- a/mobile/thirdparty/flutter +++ b/mobile/thirdparty/flutter @@ -1 +1 @@ -Subproject commit ba393198430278b6595976de84fe170f553cc728 +Subproject commit 367f9ea16bfae1ca451b9cc27c1366870b187ae2 diff --git a/server/cmd/museum/main.go b/server/cmd/museum/main.go index 84c34189d2..8ccb43cc09 100644 --- a/server/cmd/museum/main.go +++ b/server/cmd/museum/main.go @@ -678,7 +678,7 @@ func main() { pushHandler := &api.PushHandler{PushController: pushController} privateAPI.POST("/push/token", pushHandler.AddToken) - embeddingController := &embeddingCtrl.Controller{Repo: embeddingRepo, AccessCtrl: accessCtrl, ObjectCleanupController: objectCleanupController, S3Config: s3Config, FileRepo: fileRepo, CollectionRepo: collectionRepo, QueueRepo: queueRepo, TaskLockingRepo: taskLockingRepo, HostName: hostName} + embeddingController := embeddingCtrl.New(embeddingRepo, accessCtrl, objectCleanupController, s3Config, queueRepo, taskLockingRepo, fileRepo, collectionRepo, hostName) embeddingHandler := &api.EmbeddingHandler{Controller: embeddingController} privateAPI.PUT("/embeddings", embeddingHandler.InsertOrUpdate) diff --git a/server/configurations/local.yaml b/server/configurations/local.yaml index 7785f56019..87502c2713 100644 --- a/server/configurations/local.yaml +++ b/server/configurations/local.yaml @@ -125,6 +125,16 @@ s3: endpoint: region: bucket: + wasabi-eu-central-2-derived: + key: + secret: + endpoint: + region: + bucket: + # Derived storage bucket is used for storing derived data like embeddings, 
preview etc. + # By default, it is the same as the hot storage bucket. + # derived-storage: wasabi-eu-central-2-derived + # If true, enable some workarounds to allow us to use a local minio instance # for object storage. # @@ -180,6 +190,9 @@ smtp: port: username: password: + # The email address from which to send the email. Set this to an email + # address whose credentials you're providing. + email: # Zoho Zeptomail config (optional) # diff --git a/server/docs/publish.md b/server/docs/publish.md index de4849d900..3a49a47611 100644 --- a/server/docs/publish.md +++ b/server/docs/publish.md @@ -39,3 +39,7 @@ combine both these steps too. Once the workflow completes, the resultant image will be available at `ghcr.io/ente-io/server`. The image will be tagged by the commit SHA. The latest image will also be tagged, well, "latest". + +The workflow will also tag the commit it used to build the image with +`server/ghcr`. This tag will be overwritten on each publish, and it'll point to +the code that was used in the most recent publish. 
diff --git a/server/ente/billing.go b/server/ente/billing.go index 20c37bdb5a..f623a92e85 100644 --- a/server/ente/billing.go +++ b/server/ente/billing.go @@ -11,7 +11,7 @@ import ( const ( // FreePlanStorage is the amount of storage in free plan - FreePlanStorage = 1 * 1024 * 1024 * 1024 + FreePlanStorage = 5 * 1024 * 1024 * 1024 // FreePlanProductID is the product ID of free plan FreePlanProductID = "free" // FreePlanTransactionID is the dummy transaction ID for the free plan diff --git a/server/ente/embedding.go b/server/ente/embedding.go index 2990a779a3..fabde44a58 100644 --- a/server/ente/embedding.go +++ b/server/ente/embedding.go @@ -7,6 +7,7 @@ type Embedding struct { DecryptionHeader string `json:"decryptionHeader"` UpdatedAt int64 `json:"updatedAt"` Version *int `json:"version,omitempty"` + Size *int64 } type InsertOrUpdateEmbeddingRequest struct { @@ -30,9 +31,10 @@ type GetFilesEmbeddingRequest struct { } type GetFilesEmbeddingResponse struct { - Embeddings []Embedding `json:"embeddings"` - NoDataFileIDs []int64 `json:"noDataFileIDs"` - ErrFileIDs []int64 `json:"errFileIDs"` + Embeddings []Embedding `json:"embeddings"` + PendingIndexFileIDs []int64 `json:"pendingIndexFileIDs"` + ErrFileIDs []int64 `json:"errFileIDs"` + NoEmbeddingFileIDs []int64 `json:"noEmbeddingFileIDs"` } type Model string diff --git a/server/ente/file.go b/server/ente/file.go index 4a69473e3a..a0e67c71cf 100644 --- a/server/ente/file.go +++ b/server/ente/file.go @@ -134,6 +134,7 @@ type UpdateMagicMetadata struct { // UpdateMultipleMagicMetadataRequest request payload for updating magic metadata for list of files type UpdateMultipleMagicMetadataRequest struct { MetadataList []UpdateMagicMetadata `json:"metadataList" binding:"required"` + SkipVersion *bool `json:"skipVersion"` } // UploadURL represents the upload url for a specific object diff --git a/server/migrations/85_increase_free_storage.down.sql b/server/migrations/85_increase_free_storage.down.sql new file mode 100644 index 
0000000000..9f7060a47f --- /dev/null +++ b/server/migrations/85_increase_free_storage.down.sql @@ -0,0 +1 @@ +-- no-op diff --git a/server/migrations/85_increase_free_storage.up.sql b/server/migrations/85_increase_free_storage.up.sql new file mode 100644 index 0000000000..395033c8dd --- /dev/null +++ b/server/migrations/85_increase_free_storage.up.sql @@ -0,0 +1 @@ +UPDATE subscriptions SET storage = 5368709120, expiry_time = 1749355117000000 where storage = 1073741824 and product_id = 'free'; diff --git a/server/migrations/86_add_dc_embedding.down.sql b/server/migrations/86_add_dc_embedding.down.sql new file mode 100644 index 0000000000..b705b29b6e --- /dev/null +++ b/server/migrations/86_add_dc_embedding.down.sql @@ -0,0 +1,18 @@ +-- Add types for the new dcs that are introduced for the derived data +ALTER TABLE embeddings DROP COLUMN IF EXISTS datacenters; + +DO +$$ + BEGIN + IF NOT EXISTS (SELECT 1 FROM pg_trigger WHERE tgname = 'update_embeddings_updated_at') THEN + CREATE TRIGGER update_embeddings_updated_at + BEFORE UPDATE + ON embeddings + FOR EACH ROW + EXECUTE PROCEDURE + trigger_updated_at_microseconds_column(); + ELSE + RAISE NOTICE 'Trigger update_embeddings_updated_at already exists.'; + END IF; + END +$$; \ No newline at end of file diff --git a/server/migrations/86_add_dc_embedding.up.sql b/server/migrations/86_add_dc_embedding.up.sql new file mode 100644 index 0000000000..9d8e28ba77 --- /dev/null +++ b/server/migrations/86_add_dc_embedding.up.sql @@ -0,0 +1,4 @@ +-- Add types for the new dcs that are introduced for the derived data +ALTER TYPE s3region ADD VALUE 'wasabi-eu-central-2-derived'; +DROP TRIGGER IF EXISTS update_embeddings_updated_at ON embeddings; +ALTER TABLE embeddings ADD COLUMN IF NOT EXISTS datacenters s3region[] default '{b2-eu-cen}'; diff --git a/server/pkg/api/file.go b/server/pkg/api/file.go index a253c71c2a..990336e372 100644 --- a/server/pkg/api/file.go +++ b/server/pkg/api/file.go @@ -110,7 +110,7 @@ func (h *FileHandler) 
GetUploadURLs(c *gin.Context) { userID := auth.GetUserID(c.Request.Header) count, _ := strconv.Atoi(c.Query("count")) - urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp) + urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp, false) if err != nil { handler.Error(c, stacktrace.Propagate(err, "")) return diff --git a/server/pkg/api/public_collection.go b/server/pkg/api/public_collection.go index 7a38f43808..9290d64560 100644 --- a/server/pkg/api/public_collection.go +++ b/server/pkg/api/public_collection.go @@ -57,7 +57,7 @@ func (h *PublicCollectionHandler) GetUploadUrls(c *gin.Context) { } userID := collection.Owner.ID count, _ := strconv.Atoi(c.Query("count")) - urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp) + urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp, false) if err != nil { handler.Error(c, stacktrace.Propagate(err, "")) return diff --git a/server/pkg/controller/embedding/controller.go b/server/pkg/controller/embedding/controller.go index d6e78209fa..6f3de3ca78 100644 --- a/server/pkg/controller/embedding/controller.go +++ b/server/pkg/controller/embedding/controller.go @@ -2,12 +2,16 @@ package embedding import ( "bytes" + "context" "encoding/json" "errors" "fmt" + "github.com/aws/aws-sdk-go/aws/awserr" "github.com/ente-io/museum/pkg/utils/array" "strconv" + "strings" "sync" + gTime "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/s3" @@ -20,23 +24,62 @@ import ( "github.com/ente-io/museum/pkg/utils/auth" "github.com/ente-io/museum/pkg/utils/network" "github.com/ente-io/museum/pkg/utils/s3config" - "github.com/ente-io/museum/pkg/utils/time" "github.com/ente-io/stacktrace" "github.com/gin-gonic/gin" log "github.com/sirupsen/logrus" ) +const ( + // maxEmbeddingDataSize is the min size of an embedding object in bytes + minEmbeddingDataSize = 2048 + embeddingFetchTimeout = 10 * gTime.Second +) + +// _fetchConfig is the configuration for the fetching objects from S3 +type 
_fetchConfig struct { + RetryCount int + InitialTimeout gTime.Duration + MaxTimeout gTime.Duration +} + +var _defaultFetchConfig = _fetchConfig{RetryCount: 3, InitialTimeout: 10 * gTime.Second, MaxTimeout: 30 * gTime.Second} +var _b2FetchConfig = _fetchConfig{RetryCount: 3, InitialTimeout: 15 * gTime.Second, MaxTimeout: 30 * gTime.Second} + type Controller struct { - Repo *embedding.Repository - AccessCtrl access.Controller - ObjectCleanupController *controller.ObjectCleanupController - S3Config *s3config.S3Config - QueueRepo *repo.QueueRepository - TaskLockingRepo *repo.TaskLockRepository - FileRepo *repo.FileRepository - CollectionRepo *repo.CollectionRepository - HostName string - cleanupCronRunning bool + Repo *embedding.Repository + AccessCtrl access.Controller + ObjectCleanupController *controller.ObjectCleanupController + S3Config *s3config.S3Config + QueueRepo *repo.QueueRepository + TaskLockingRepo *repo.TaskLockRepository + FileRepo *repo.FileRepository + CollectionRepo *repo.CollectionRepository + HostName string + cleanupCronRunning bool + derivedStorageDataCenter string + downloadManagerCache map[string]*s3manager.Downloader +} + +func New(repo *embedding.Repository, accessCtrl access.Controller, objectCleanupController *controller.ObjectCleanupController, s3Config *s3config.S3Config, queueRepo *repo.QueueRepository, taskLockingRepo *repo.TaskLockRepository, fileRepo *repo.FileRepository, collectionRepo *repo.CollectionRepository, hostName string) *Controller { + embeddingDcs := []string{s3Config.GetHotBackblazeDC(), s3Config.GetHotWasabiDC(), s3Config.GetWasabiDerivedDC(), s3Config.GetDerivedStorageDataCenter()} + cache := make(map[string]*s3manager.Downloader, len(embeddingDcs)) + for i := range embeddingDcs { + s3Client := s3Config.GetS3Client(embeddingDcs[i]) + cache[embeddingDcs[i]] = s3manager.NewDownloaderWithClient(&s3Client) + } + return &Controller{ + Repo: repo, + AccessCtrl: accessCtrl, + ObjectCleanupController: objectCleanupController, + 
S3Config: s3Config, + QueueRepo: queueRepo, + TaskLockingRepo: taskLockingRepo, + FileRepo: fileRepo, + CollectionRepo: collectionRepo, + HostName: hostName, + derivedStorageDataCenter: s3Config.GetDerivedStorageDataCenter(), + downloadManagerCache: cache, + } } func (c *Controller) InsertOrUpdate(ctx *gin.Context, req ente.InsertOrUpdateEmbeddingRequest) (*ente.Embedding, error) { @@ -69,12 +112,12 @@ func (c *Controller) InsertOrUpdate(ctx *gin.Context, req ente.InsertOrUpdateEmb DecryptionHeader: req.DecryptionHeader, Client: network.GetPrettyUA(ctx.GetHeader("User-Agent")) + "/" + ctx.GetHeader("X-Client-Version"), } - size, uploadErr := c.uploadObject(obj, c.getObjectKey(userID, req.FileID, req.Model)) + size, uploadErr := c.uploadObject(obj, c.getObjectKey(userID, req.FileID, req.Model), c.derivedStorageDataCenter) if uploadErr != nil { log.Error(uploadErr) return nil, stacktrace.Propagate(uploadErr, "") } - embedding, err := c.Repo.InsertOrUpdate(ctx, userID, req, size, version) + embedding, err := c.Repo.InsertOrUpdate(ctx, userID, req, size, version, c.derivedStorageDataCenter) embedding.Version = &version if err != nil { return nil, stacktrace.Propagate(err, "") @@ -105,7 +148,7 @@ func (c *Controller) GetDiff(ctx *gin.Context, req ente.GetEmbeddingDiffRequest) // Fetch missing embeddings in parallel if len(objectKeys) > 0 { - embeddingObjects, err := c.getEmbeddingObjectsParallel(objectKeys) + embeddingObjects, err := c.getEmbeddingObjectsParallel(objectKeys, c.derivedStorageDataCenter) if err != nil { return nil, stacktrace.Propagate(err, "") } @@ -135,15 +178,23 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd return nil, stacktrace.Propagate(err, "") } + embeddingsWithData := make([]ente.Embedding, 0) + noEmbeddingFileIds := make([]int64, 0) dbFileIds := make([]int64, 0) - for _, embedding := range userFileEmbeddings { - dbFileIds = append(dbFileIds, embedding.FileID) + // fileIDs that were indexed, but they don't 
contain any embedding information + for i := range userFileEmbeddings { + dbFileIds = append(dbFileIds, userFileEmbeddings[i].FileID) + if userFileEmbeddings[i].Size != nil && *userFileEmbeddings[i].Size < minEmbeddingDataSize { + noEmbeddingFileIds = append(noEmbeddingFileIds, userFileEmbeddings[i].FileID) + } else { + embeddingsWithData = append(embeddingsWithData, userFileEmbeddings[i]) + } } - missingFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds) + pendingIndexFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds) errFileIds := make([]int64, 0) // Fetch missing userFileEmbeddings in parallel - embeddingObjects, err := c.getEmbeddingObjectsParallelV2(userID, userFileEmbeddings) + embeddingObjects, err := c.getEmbeddingObjectsParallelV2(userID, embeddingsWithData, c.derivedStorageDataCenter) if err != nil { return nil, stacktrace.Propagate(err, "") } @@ -166,88 +217,13 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd } return &ente.GetFilesEmbeddingResponse{ - Embeddings: fetchedEmbeddings, - NoDataFileIDs: missingFileIds, - ErrFileIDs: errFileIds, + Embeddings: fetchedEmbeddings, + PendingIndexFileIDs: pendingIndexFileIds, + ErrFileIDs: errFileIds, + NoEmbeddingFileIDs: noEmbeddingFileIds, }, nil } -func (c *Controller) DeleteAll(ctx *gin.Context) error { - userID := auth.GetUserID(ctx.Request.Header) - - err := c.Repo.DeleteAll(ctx, userID) - if err != nil { - return stacktrace.Propagate(err, "") - } - return nil -} - -// CleanupDeletedEmbeddings clears all embeddings for deleted files from the object store -func (c *Controller) CleanupDeletedEmbeddings() { - log.Info("Cleaning up deleted embeddings") - if c.cleanupCronRunning { - log.Info("Skipping CleanupDeletedEmbeddings cron run as another instance is still running") - return - } - c.cleanupCronRunning = true - defer func() { - c.cleanupCronRunning = false - }() - items, err := 
c.QueueRepo.GetItemsReadyForDeletion(repo.DeleteEmbeddingsQueue, 200) - if err != nil { - log.WithError(err).Error("Failed to fetch items from queue") - return - } - for _, i := range items { - c.deleteEmbedding(i) - } -} - -func (c *Controller) deleteEmbedding(qItem repo.QueueItem) { - lockName := fmt.Sprintf("Embedding:%s", qItem.Item) - lockStatus, err := c.TaskLockingRepo.AcquireLock(lockName, time.MicrosecondsAfterHours(1), c.HostName) - ctxLogger := log.WithField("item", qItem.Item).WithField("queue_id", qItem.Id) - if err != nil || !lockStatus { - ctxLogger.Warn("unable to acquire lock") - return - } - defer func() { - err = c.TaskLockingRepo.ReleaseLock(lockName) - if err != nil { - ctxLogger.Errorf("Error while releasing lock %s", err) - } - }() - ctxLogger.Info("Deleting all embeddings") - - fileID, _ := strconv.ParseInt(qItem.Item, 10, 64) - ownerID, err := c.FileRepo.GetOwnerID(fileID) - if err != nil { - ctxLogger.WithError(err).Error("Failed to fetch ownerID") - return - } - prefix := c.getEmbeddingObjectPrefix(ownerID, fileID) - - err = c.ObjectCleanupController.DeleteAllObjectsWithPrefix(prefix, c.S3Config.GetHotDataCenter()) - if err != nil { - ctxLogger.WithError(err).Error("Failed to delete all objects") - return - } - - err = c.Repo.Delete(fileID) - if err != nil { - ctxLogger.WithError(err).Error("Failed to remove from db") - return - } - - err = c.QueueRepo.DeleteItem(repo.DeleteEmbeddingsQueue, qItem.Item) - if err != nil { - ctxLogger.WithError(err).Error("Failed to remove item from the queue") - return - } - - ctxLogger.Info("Successfully deleted all embeddings") -} - func (c *Controller) getObjectKey(userID int64, fileID int64, model string) string { return c.getEmbeddingObjectPrefix(userID, fileID) + model + ".json" } @@ -256,12 +232,23 @@ func (c *Controller) getEmbeddingObjectPrefix(userID int64, fileID int64) string return strconv.FormatInt(userID, 10) + "/ml-data/" + strconv.FormatInt(fileID, 10) + "/" } +// Get userId, model and 
fileID from the object key +func (c *Controller) getEmbeddingObjectDetails(objectKey string) (userID int64, model string, fileID int64) { + split := strings.Split(objectKey, "/") + userID, _ = strconv.ParseInt(split[0], 10, 64) + fileID, _ = strconv.ParseInt(split[2], 10, 64) + model = strings.Split(split[3], ".")[0] + return userID, model, fileID +} + // uploadObject uploads the embedding object to the object store and returns the object size -func (c *Controller) uploadObject(obj ente.EmbeddingObject, key string) (int, error) { +func (c *Controller) uploadObject(obj ente.EmbeddingObject, key string, dc string) (int, error) { embeddingObj, _ := json.Marshal(obj) - uploader := s3manager.NewUploaderWithClient(c.S3Config.GetHotS3Client()) + s3Client := c.S3Config.GetS3Client(dc) + s3Bucket := c.S3Config.GetBucket(dc) + uploader := s3manager.NewUploaderWithClient(&s3Client) up := s3manager.UploadInput{ - Bucket: c.S3Config.GetHotBucket(), + Bucket: s3Bucket, Key: &key, Body: bytes.NewReader(embeddingObj), } @@ -279,12 +266,10 @@ var globalDiffFetchSemaphore = make(chan struct{}, 300) var globalFileFetchSemaphore = make(chan struct{}, 400) -func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string) ([]ente.EmbeddingObject, error) { +func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string, dc string) ([]ente.EmbeddingObject, error) { var wg sync.WaitGroup var errs []error embeddingObjects := make([]ente.EmbeddingObject, len(objectKeys)) - downloader := s3manager.NewDownloaderWithClient(c.S3Config.GetHotS3Client()) - for i, objectKey := range objectKeys { wg.Add(1) globalDiffFetchSemaphore <- struct{}{} // Acquire from global semaphore @@ -292,7 +277,7 @@ func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string) ([]ente.Em defer wg.Done() defer func() { <-globalDiffFetchSemaphore }() // Release back to global semaphore - obj, err := c.getEmbeddingObject(objectKey, downloader) + obj, err := c.getEmbeddingObject(context.Background(), 
objectKey, dc) if err != nil { errs = append(errs, err) log.Error("error fetching embedding object: "+objectKey, err) @@ -317,10 +302,9 @@ type embeddingObjectResult struct { err error } -func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows []ente.Embedding) ([]embeddingObjectResult, error) { +func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows []ente.Embedding, dc string) ([]embeddingObjectResult, error) { var wg sync.WaitGroup embeddingObjects := make([]embeddingObjectResult, len(dbEmbeddingRows)) - downloader := s3manager.NewDownloaderWithClient(c.S3Config.GetHotS3Client()) for i, dbEmbeddingRow := range dbEmbeddingRows { wg.Add(1) @@ -329,7 +313,7 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows defer wg.Done() defer func() { <-globalFileFetchSemaphore }() // Release back to global semaphore objectKey := c.getObjectKey(userID, dbEmbeddingRow.FileID, dbEmbeddingRow.Model) - obj, err := c.getEmbeddingObject(objectKey, downloader) + obj, err := c.getEmbeddingObject(context.Background(), objectKey, dc) if err != nil { log.Error("error fetching embedding object: "+objectKey, err) embeddingObjects[i] = embeddingObjectResult{ @@ -349,25 +333,125 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows return embeddingObjects, nil } -func (c *Controller) getEmbeddingObject(objectKey string, downloader *s3manager.Downloader) (ente.EmbeddingObject, error) { +func (c *Controller) getEmbeddingObject(ctx context.Context, objectKey string, dc string) (ente.EmbeddingObject, error) { + opt := _defaultFetchConfig + if dc == c.S3Config.GetHotBackblazeDC() { + opt = _b2FetchConfig + } + ctxLogger := log.WithField("objectKey", objectKey).WithField("dc", dc) + totalAttempts := opt.RetryCount + 1 + timeout := opt.InitialTimeout + for i := 0; i < totalAttempts; i++ { + if i > 0 { + timeout = timeout * 2 + if timeout > opt.MaxTimeout { + timeout = opt.MaxTimeout + } + 
} + fetchCtx, cancel := context.WithTimeout(ctx, timeout) + select { + case <-ctx.Done(): + cancel() + return ente.EmbeddingObject{}, stacktrace.Propagate(ctx.Err(), "") + default: + obj, err := c.downloadObject(fetchCtx, objectKey, dc) + cancel() // Ensure cancel is called to release resources + if err == nil { + if i > 0 { + ctxLogger.Infof("Fetched object after %d attempts", i) + } + return obj, nil + } + // Check if the error is due to context timeout or cancellation + if err == nil && fetchCtx.Err() != nil { + ctxLogger.Error("Fetch timed out or cancelled: ", fetchCtx.Err()) + } else { + // check if the error is due to object not found + if s3Err, ok := err.(awserr.RequestFailure); ok { + if s3Err.Code() == s3.ErrCodeNoSuchKey { + var srcDc, destDc string + destDc = c.S3Config.GetDerivedStorageDataCenter() + // todo:(neeraj) Refactor this later to get available the DC from the DB instead of + // querying the DB. This will help in case of multiple DCs and avoid querying the DB + // for each object. + // For initial migration, as we know that original DC was b2, and if the embedding is not found + // in the new derived DC, we can try to fetch it from the B2 DC. 
+ if c.derivedStorageDataCenter != c.S3Config.GetHotBackblazeDC() { + // embeddings ideally should ideally be in the default hot bucket b2 + srcDc = c.S3Config.GetHotBackblazeDC() + } else { + _, modelName, fileID := c.getEmbeddingObjectDetails(objectKey) + activeDcs, err := c.Repo.GetOtherDCsForFileAndModel(context.Background(), fileID, modelName, c.derivedStorageDataCenter) + if err != nil { + return ente.EmbeddingObject{}, stacktrace.Propagate(err, "failed to get other dc") + } + if len(activeDcs) > 0 { + srcDc = activeDcs[0] + } else { + ctxLogger.Error("Object not found in any dc ", s3Err) + return ente.EmbeddingObject{}, stacktrace.Propagate(errors.New("object not found"), "") + } + } + copyEmbeddingObject, err := c.copyEmbeddingObject(ctx, objectKey, srcDc, destDc) + if err == nil { + ctxLogger.Infof("Got object from dc %s", srcDc) + return *copyEmbeddingObject, nil + } else { + ctxLogger.WithError(err).Errorf("Failed to get object from fallback dc %s", srcDc) + } + return ente.EmbeddingObject{}, stacktrace.Propagate(errors.New("object not found"), "") + } + } + ctxLogger.Error("Failed to fetch object: ", err) + } + } + } + return ente.EmbeddingObject{}, stacktrace.Propagate(errors.New("failed to fetch object"), "") +} + +func (c *Controller) downloadObject(ctx context.Context, objectKey string, dc string) (ente.EmbeddingObject, error) { var obj ente.EmbeddingObject buff := &aws.WriteAtBuffer{} - _, err := downloader.Download(buff, &s3.GetObjectInput{ - Bucket: c.S3Config.GetHotBucket(), + bucket := c.S3Config.GetBucket(dc) + downloader := c.downloadManagerCache[dc] + _, err := downloader.DownloadWithContext(ctx, buff, &s3.GetObjectInput{ + Bucket: bucket, Key: &objectKey, }) if err != nil { - log.Error(err) - return obj, stacktrace.Propagate(err, "") + return obj, err } err = json.Unmarshal(buff.Bytes(), &obj) if err != nil { - log.Error(err) - return obj, stacktrace.Propagate(err, "") + return obj, stacktrace.Propagate(err, "unmarshal failed") } return 
obj, nil } +// download the embedding object from hot bucket and upload to embeddings bucket +func (c *Controller) copyEmbeddingObject(ctx context.Context, objectKey string, srcDC, destDC string) (*ente.EmbeddingObject, error) { + if srcDC == destDC { + return nil, stacktrace.Propagate(errors.New("src and dest dc can not be same"), "") + } + obj, err := c.downloadObject(ctx, objectKey, srcDC) + if err != nil { + return nil, stacktrace.Propagate(err, fmt.Sprintf("failed to download object from %s", srcDC)) + } + go func() { + userID, modelName, fileID := c.getEmbeddingObjectDetails(objectKey) + size, uploadErr := c.uploadObject(obj, objectKey, c.derivedStorageDataCenter) + if uploadErr != nil { + log.WithField("object", objectKey).Error("Failed to copy to embeddings bucket: ", uploadErr) + } + updateDcErr := c.Repo.AddNewDC(context.Background(), fileID, ente.Model(modelName), userID, size, destDC) + if updateDcErr != nil { + log.WithField("object", objectKey).Error("Failed to update dc in db: ", updateDcErr) + return + } + }() + return &obj, nil +} + func (c *Controller) _validateGetFileEmbeddingsRequest(ctx *gin.Context, userID int64, req ente.GetFilesEmbeddingRequest) error { if req.Model == "" { return ente.NewBadRequestWithMessage("model is required") diff --git a/server/pkg/controller/embedding/delete.go b/server/pkg/controller/embedding/delete.go new file mode 100644 index 0000000000..91a70963fe --- /dev/null +++ b/server/pkg/controller/embedding/delete.go @@ -0,0 +1,110 @@ +package embedding + +import ( + "context" + "fmt" + "github.com/ente-io/museum/pkg/repo" + "github.com/ente-io/museum/pkg/utils/auth" + "github.com/ente-io/museum/pkg/utils/time" + "github.com/ente-io/stacktrace" + "github.com/gin-gonic/gin" + log "github.com/sirupsen/logrus" + "strconv" +) + +func (c *Controller) DeleteAll(ctx *gin.Context) error { + userID := auth.GetUserID(ctx.Request.Header) + + err := c.Repo.DeleteAll(ctx, userID) + if err != nil { + return stacktrace.Propagate(err, 
"") + } + return nil +} + +// CleanupDeletedEmbeddings clears all embeddings for deleted files from the object store +func (c *Controller) CleanupDeletedEmbeddings() { + log.Info("Cleaning up deleted embeddings") + if c.cleanupCronRunning { + log.Info("Skipping CleanupDeletedEmbeddings cron run as another instance is still running") + return + } + c.cleanupCronRunning = true + defer func() { + c.cleanupCronRunning = false + }() + items, err := c.QueueRepo.GetItemsReadyForDeletion(repo.DeleteEmbeddingsQueue, 200) + if err != nil { + log.WithError(err).Error("Failed to fetch items from queue") + return + } + for _, i := range items { + c.deleteEmbedding(i) + } +} + +func (c *Controller) deleteEmbedding(qItem repo.QueueItem) { + lockName := fmt.Sprintf("Embedding:%s", qItem.Item) + lockStatus, err := c.TaskLockingRepo.AcquireLock(lockName, time.MicrosecondsAfterHours(1), c.HostName) + ctxLogger := log.WithField("item", qItem.Item).WithField("queue_id", qItem.Id) + if err != nil || !lockStatus { + ctxLogger.Warn("unable to acquire lock") + return + } + defer func() { + err = c.TaskLockingRepo.ReleaseLock(lockName) + if err != nil { + ctxLogger.Errorf("Error while releasing lock %s", err) + } + }() + ctxLogger.Info("Deleting all embeddings") + + fileID, _ := strconv.ParseInt(qItem.Item, 10, 64) + ownerID, err := c.FileRepo.GetOwnerID(fileID) + if err != nil { + ctxLogger.WithError(err).Error("Failed to fetch ownerID") + return + } + prefix := c.getEmbeddingObjectPrefix(ownerID, fileID) + datacenters, err := c.Repo.GetDatacenters(context.Background(), fileID) + if err != nil { + ctxLogger.WithError(err).Error("Failed to fetch datacenters") + return + } + ctxLogger.Infof("Deleting from all datacenters %v", datacenters) + for i := range datacenters { + dc := datacenters[i] + err = c.ObjectCleanupController.DeleteAllObjectsWithPrefix(prefix, dc) + if err != nil { + ctxLogger.WithError(err). + WithField("dc", dc). 
+ Errorf("Failed to delete all objects from %s", datacenters[i]) + return + } else { + removeErr := c.Repo.RemoveDatacenter(context.Background(), fileID, datacenters[i]) + if removeErr != nil { + ctxLogger.WithError(removeErr). + WithField("dc", dc). + Error("Failed to remove datacenter from db") + return + } + } + } + + noDcs, noDcErr := c.Repo.GetDatacenters(context.Background(), fileID) + if len(noDcs) > 0 || noDcErr != nil { + ctxLogger.Errorf("Failed to delete from all datacenters %s", noDcs) + return + } + err = c.Repo.Delete(fileID) + if err != nil { + ctxLogger.WithError(err).Error("Failed to remove from db") + return + } + err = c.QueueRepo.DeleteItem(repo.DeleteEmbeddingsQueue, qItem.Item) + if err != nil { + ctxLogger.WithError(err).Error("Failed to remove item from the queue") + return + } + ctxLogger.Info("Successfully deleted all embeddings") +} diff --git a/server/pkg/controller/file.go b/server/pkg/controller/file.go index e91d299f15..b3fec115d0 100644 --- a/server/pkg/controller/file.go +++ b/server/pkg/controller/file.go @@ -258,7 +258,7 @@ func (c *FileController) Update(ctx context.Context, userID int64, file ente.Fil } // GetUploadURLs returns a bunch of presigned URLs for uploading files -func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App) ([]ente.UploadURL, error) { +func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App, ignoreLimit bool) ([]ente.UploadURL, error) { err := c.UsageCtrl.CanUploadFile(ctx, userID, nil, app) if err != nil { return []ente.UploadURL{}, stacktrace.Propagate(err, "") @@ -268,7 +268,7 @@ func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count bucket := c.S3Config.GetHotBucket() urls := make([]ente.UploadURL, 0) objectKeys := make([]string, 0) - if count > MaxUploadURLsLimit { + if count > MaxUploadURLsLimit && !ignoreLimit { count = MaxUploadURLsLimit } for i := 0; i < count; i++ { @@ -502,7 +502,7 @@ 
func (c *FileController) UpdateMagicMetadata(ctx *gin.Context, req ente.UpdateMu if err != nil { return stacktrace.Propagate(err, "") } - err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata) + err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata, req.SkipVersion) if err != nil { return stacktrace.Propagate(err, "failed to update magic attributes") } diff --git a/server/pkg/controller/file_copy/file_copy.go b/server/pkg/controller/file_copy/file_copy.go index afab10efee..4f9267e2e9 100644 --- a/server/pkg/controller/file_copy/file_copy.go +++ b/server/pkg/controller/file_copy/file_copy.go @@ -92,7 +92,7 @@ func (fc *FileCopyController) CopyFiles(c *gin.Context, req ente.CopyFileSyncReq // request the uploadUrls using existing method. This is to ensure that orphan objects are automatically cleaned up // todo:(neeraj) optimize this method by removing the need for getting a signed url for each object - uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app) + uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app, true) if err != nil { return nil, err } diff --git a/server/pkg/controller/object_cleanup.go b/server/pkg/controller/object_cleanup.go index a1ba2dba57..91426cb56c 100644 --- a/server/pkg/controller/object_cleanup.go +++ b/server/pkg/controller/object_cleanup.go @@ -260,7 +260,10 @@ func (c *ObjectCleanupController) DeleteAllObjectsWithPrefix(prefix string, dc s Prefix: &prefix, }) if err != nil { - log.Error(err) + log.WithFields(log.Fields{ + "prefix": prefix, + "dc": dc, + }).WithError(err).Error("Failed to list objects") return stacktrace.Propagate(err, "") } var keys []string @@ -270,7 +273,10 @@ func (c *ObjectCleanupController) DeleteAllObjectsWithPrefix(prefix string, dc s for _, key := range keys { err = c.DeleteObjectFromDataCenter(key, dc) if err != nil { - log.Error(err) + log.WithFields(log.Fields{ + "object_key": key, + "dc": 
dc, + }).WithError(err).Error("Failed to delete object") return stacktrace.Propagate(err, "") } } diff --git a/server/pkg/repo/cast/repo.go b/server/pkg/repo/cast/repo.go index 2f4446c9d0..823b17b2ee 100644 --- a/server/pkg/repo/cast/repo.go +++ b/server/pkg/repo/cast/repo.go @@ -8,6 +8,7 @@ import ( "github.com/ente-io/stacktrace" "github.com/google/uuid" log "github.com/sirupsen/logrus" + "strings" ) type Repository struct { @@ -19,6 +20,7 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str if err != nil { return "", err } + codeValue = strings.ToUpper(codeValue) _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id, ip) VALUES ($1, $2, $3, $4)", codeValue, pubKey, uuid.New(), ip) if err != nil { return "", err @@ -28,11 +30,13 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str // InsertCastData insert collection_id, cast_user, token and encrypted_payload for given code if collection_id is not null func (r *Repository) InsertCastData(ctx context.Context, castUserID int64, code string, collectionID int64, castToken string, encryptedPayload string) error { + code = strings.ToUpper(code) _, err := r.DB.ExecContext(ctx, "UPDATE casting SET collection_id = $1, cast_user = $2, token = $3, encrypted_payload = $4 WHERE code = $5 and is_deleted=false", collectionID, castUserID, castToken, encryptedPayload, code) return err } func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, string, error) { + code = strings.ToUpper(code) var pubKey, ip string row := r.DB.QueryRowContext(ctx, "SELECT public_key, ip FROM casting WHERE code = $1 and is_deleted=false", code) err := row.Scan(&pubKey, &ip) @@ -46,6 +50,7 @@ func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, s } func (r *Repository) GetEncCastData(ctx context.Context, code string) (*string, error) { + code = strings.ToUpper(code) var payload sql.NullString row := r.DB.QueryRowContext(ctx, 
"SELECT encrypted_payload FROM casting WHERE code = $1 and is_deleted=false", code) err := row.Scan(&payload) diff --git a/server/pkg/repo/embedding/repository.go b/server/pkg/repo/embedding/repository.go index f21e3b4f19..5cfbd35c57 100644 --- a/server/pkg/repo/embedding/repository.go +++ b/server/pkg/repo/embedding/repository.go @@ -3,11 +3,11 @@ package embedding import ( "context" "database/sql" + "errors" "fmt" - "github.com/lib/pq" - "github.com/ente-io/museum/ente" "github.com/ente-io/stacktrace" + "github.com/lib/pq" "github.com/sirupsen/logrus" ) @@ -18,15 +18,26 @@ type Repository struct { } // Create inserts a new embedding - -func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry ente.InsertOrUpdateEmbeddingRequest, size int, version int) (ente.Embedding, error) { +func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry ente.InsertOrUpdateEmbeddingRequest, size int, version int, dc string) (ente.Embedding, error) { var updatedAt int64 - err := r.DB.QueryRowContext(ctx, `INSERT INTO embeddings - (file_id, owner_id, model, size, version) - VALUES ($1, $2, $3, $4, $5) - ON CONFLICT ON CONSTRAINT unique_embeddings_file_id_model - DO UPDATE SET updated_at = now_utc_micro_seconds(), size = $4, version = $5 - RETURNING updated_at`, entry.FileID, ownerID, entry.Model, size, version).Scan(&updatedAt) + err := r.DB.QueryRowContext(ctx, ` + INSERT INTO embeddings + (file_id, owner_id, model, size, version, datacenters) + VALUES + ($1, $2, $3, $4, $5, ARRAY[$6]::s3region[]) + ON CONFLICT ON CONSTRAINT unique_embeddings_file_id_model + DO UPDATE + SET + updated_at = now_utc_micro_seconds(), + size = $4, + version = $5, + datacenters = CASE + WHEN $6 = ANY(COALESCE(embeddings.datacenters, ARRAY['b2-eu-cen']::s3region[])) THEN embeddings.datacenters + ELSE array_append(COALESCE(embeddings.datacenters, ARRAY['b2-eu-cen']::s3region[]), $6::s3region) + END + RETURNING updated_at`, + entry.FileID, ownerID, entry.Model, size, 
version, dc).Scan(&updatedAt) + if err != nil { // check if error is due to model enum invalid value if err.Error() == fmt.Sprintf("pq: invalid input value for enum model: \"%s\"", entry.Model) { @@ -45,7 +56,7 @@ func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry en // GetDiff returns the embeddings that have been updated since the given time func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Model, sinceTime int64, limit int16) ([]ente.Embedding, error) { - rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version + rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size FROM embeddings WHERE owner_id = $1 AND model = $2 AND updated_at > $3 ORDER BY updated_at ASC @@ -57,7 +68,7 @@ func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Mode } func (r *Repository) GetFilesEmbedding(ctx context.Context, ownerID int64, model ente.Model, fileIDs []int64) ([]ente.Embedding, error) { - rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version + rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size FROM embeddings WHERE owner_id = $1 AND model = $2 AND file_id = ANY($3)`, ownerID, model, pq.Array(fileIDs)) if err != nil { @@ -82,6 +93,89 @@ func (r *Repository) Delete(fileID int64) error { return nil } +// GetDatacenters returns unique list of datacenters where derived embeddings are stored +func (r *Repository) GetDatacenters(ctx context.Context, fileID int64) ([]string, error) { + rows, err := r.DB.QueryContext(ctx, `SELECT datacenters FROM embeddings WHERE file_id = $1`, fileID) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + uniqueDatacenters := make(map[string]struct{}) + for rows.Next() { + var datacenters 
[]string + err = rows.Scan(pq.Array(&datacenters)) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + for _, dc := range datacenters { + uniqueDatacenters[dc] = struct{}{} + } + } + datacenters := make([]string, 0, len(uniqueDatacenters)) + for dc := range uniqueDatacenters { + datacenters = append(datacenters, dc) + } + return datacenters, nil +} + +// GetOtherDCsForFileAndModel returns the list of datacenters where the embeddings are stored for a given file and model, excluding the ignoredDC +func (r *Repository) GetOtherDCsForFileAndModel(ctx context.Context, fileID int64, model string, ignoredDC string) ([]string, error) { + rows, err := r.DB.QueryContext(ctx, `SELECT datacenters FROM embeddings WHERE file_id = $1 AND model = $2`, fileID, model) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + uniqueDatacenters := make(map[string]bool) + for rows.Next() { + var datacenters []string + err = rows.Scan(pq.Array(&datacenters)) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + for _, dc := range datacenters { + // add to uniqueDatacenters if it is not the ignoredDC + if dc != ignoredDC { + uniqueDatacenters[dc] = true + } + } + } + datacenters := make([]string, 0, len(uniqueDatacenters)) + for dc := range uniqueDatacenters { + datacenters = append(datacenters, dc) + } + return datacenters, nil +} + +// RemoveDatacenter removes the given datacenter from the list of datacenters +func (r *Repository) RemoveDatacenter(ctx context.Context, fileID int64, dc string) error { + _, err := r.DB.ExecContext(ctx, `UPDATE embeddings SET datacenters = array_remove(datacenters, $1) WHERE file_id = $2`, dc, fileID) + if err != nil { + return stacktrace.Propagate(err, "") + } + return nil +} + +// AddNewDC adds the dc name to the list of datacenters, if it doesn't exist already, for a given file, model and user. 
It also updates the size of the embedding +func (r *Repository) AddNewDC(ctx context.Context, fileID int64, model ente.Model, userID int64, size int, dc string) error { + res, err := r.DB.ExecContext(ctx, ` + UPDATE embeddings + SET size = $1, + datacenters = CASE + WHEN $2::s3region = ANY(datacenters) THEN datacenters + ELSE array_append(datacenters, $2::s3region) + END + WHERE file_id = $3 AND model = $4 AND owner_id = $5`, size, dc, fileID, model, userID) + if err != nil { + return stacktrace.Propagate(err, "") + } + rowsAffected, err := res.RowsAffected() + if err != nil { + return stacktrace.Propagate(err, "") + } + if rowsAffected == 0 { + return stacktrace.Propagate(errors.New("no row got updated"), "") + } + return nil +} + func convertRowsToEmbeddings(rows *sql.Rows) ([]ente.Embedding, error) { defer func() { if err := rows.Close(); err != nil { @@ -94,7 +188,7 @@ func convertRowsToEmbeddings(rows *sql.Rows) ([]ente.Embedding, error) { embedding := ente.Embedding{} var encryptedEmbedding, decryptionHeader sql.NullString var version sql.NullInt32 - err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, &version) + err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, &version, &embedding.Size) if encryptedEmbedding.Valid && len(encryptedEmbedding.String) > 0 { embedding.EncryptedEmbedding = encryptedEmbedding.String } diff --git a/server/pkg/repo/file.go b/server/pkg/repo/file.go index eafc7b570c..2ae4eafdca 100644 --- a/server/pkg/repo/file.go +++ b/server/pkg/repo/file.go @@ -311,7 +311,12 @@ func (repo *FileRepository) Update(file ente.File, fileSize int64, thumbnailSize // UpdateMagicAttributes updates the magic attributes for the list of files and update collection_files & collection // which have this file. 
-func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdates []ente.UpdateMagicMetadata, isPublicMetadata bool) error { +func (repo *FileRepository) UpdateMagicAttributes( + ctx context.Context, + fileUpdates []ente.UpdateMagicMetadata, + isPublicMetadata bool, + skipVersion *bool, +) error { updationTime := time.Microseconds() tx, err := repo.DB.BeginTx(ctx, nil) if err != nil { @@ -336,6 +341,9 @@ func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdat return stacktrace.Propagate(err, "") } } + if skipVersion != nil && *skipVersion { + return tx.Commit() + } // todo: full table scan, need to add index (for discussion: add user_id and idx {user_id, file_id}). updatedRows, err := tx.QueryContext(ctx, `UPDATE collection_files SET updation_time = $1 WHERE file_id = ANY($2) AND is_deleted= false RETURNING collection_id`, updationTime, diff --git a/server/pkg/repo/user.go b/server/pkg/repo/user.go index 596d24c64c..f35a47e1f9 100644 --- a/server/pkg/repo/user.go +++ b/server/pkg/repo/user.go @@ -194,8 +194,8 @@ func (repo *UserRepository) UpdateEmail(userID int64, encryptedEmail ente.Encryp // GetUserIDWithEmail returns the userID associated with a provided email func (repo *UserRepository) GetUserIDWithEmail(email string) (int64, error) { - trimmedEmail := strings.TrimSpace(email) - emailHash, err := crypto.GetHash(trimmedEmail, repo.HashingKey) + sanitizedEmail := strings.ToLower(strings.TrimSpace(email)) + emailHash, err := crypto.GetHash(sanitizedEmail, repo.HashingKey) if err != nil { return -1, stacktrace.Propagate(err, "") } diff --git a/server/pkg/utils/email/email.go b/server/pkg/utils/email/email.go index 46202313e7..a19987a1d8 100644 --- a/server/pkg/utils/email/email.go +++ b/server/pkg/utils/email/email.go @@ -38,6 +38,7 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s smtpPort := viper.GetString("smtp.port") smtpUsername := viper.GetString("smtp.username") smtpPassword := 
viper.GetString("smtp.password") + smtpEmail := viper.GetString("smtp.email") var emailMessage string @@ -50,6 +51,11 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s emailAddresses += email } + // If an sender email is provided use it instead of the fromEmail. + if smtpEmail != "" { + fromEmail = smtpEmail + } + header := "From: " + fromName + " <" + fromEmail + ">\n" + "To: " + emailAddresses + "\n" + "Subject: " + subject + "\n" + diff --git a/server/pkg/utils/s3config/s3config.go b/server/pkg/utils/s3config/s3config.go index 9b273bd612..a562e51815 100644 --- a/server/pkg/utils/s3config/s3config.go +++ b/server/pkg/utils/s3config/s3config.go @@ -28,6 +28,8 @@ type S3Config struct { hotDC string // Secondary (hot) data center secondaryHotDC string + //Derived data data center for derived files like ml embeddings & preview files + derivedStorageDC string // A map from data centers to S3 configurations s3Configs map[string]*aws.Config // A map from data centers to pre-created S3 clients @@ -71,6 +73,7 @@ var ( dcWasabiEuropeCentralDeprecated string = "wasabi-eu-central-2" dcWasabiEuropeCentral_v3 string = "wasabi-eu-central-2-v3" dcSCWEuropeFrance_v3 string = "scw-eu-fr-v3" + dcWasabiEuropeCentralDerived string = "wasabi-eu-central-2-derived" ) // Number of days that the wasabi bucket is configured to retain objects. 
@@ -86,9 +89,9 @@ func NewS3Config() *S3Config { } func (config *S3Config) initialize() { - dcs := [5]string{ + dcs := [6]string{ dcB2EuropeCentral, dcSCWEuropeFranceLockedDeprecated, dcWasabiEuropeCentralDeprecated, - dcWasabiEuropeCentral_v3, dcSCWEuropeFrance_v3} + dcWasabiEuropeCentral_v3, dcSCWEuropeFrance_v3, dcWasabiEuropeCentralDerived} config.hotDC = dcB2EuropeCentral config.secondaryHotDC = dcWasabiEuropeCentral_v3 @@ -99,6 +102,12 @@ func (config *S3Config) initialize() { config.secondaryHotDC = hs2 log.Infof("Hot storage: %s (secondary: %s)", hs1, hs2) } + config.derivedStorageDC = config.hotDC + embeddingsDC := viper.GetString("s3.derived-storage") + if embeddingsDC != "" && array.StringInList(embeddingsDC, dcs[:]) { + config.derivedStorageDC = embeddingsDC + log.Infof("Embeddings bucket: %s", embeddingsDC) + } config.buckets = make(map[string]string) config.s3Configs = make(map[string]*aws.Config) @@ -171,6 +180,18 @@ func (config *S3Config) GetHotS3Client() *s3.S3 { return &s3Client } +func (config *S3Config) GetDerivedStorageDataCenter() string { + return config.derivedStorageDC +} +func (config *S3Config) GetDerivedStorageBucket() *string { + return config.GetBucket(config.derivedStorageDC) +} + +func (config *S3Config) GetDerivedStorageS3Client() *s3.S3 { + s3Client := config.GetS3Client(config.derivedStorageDC) + return &s3Client +} + // Return the name of the hot Backblaze data center func (config *S3Config) GetHotBackblazeDC() string { return dcB2EuropeCentral @@ -181,6 +202,10 @@ func (config *S3Config) GetHotWasabiDC() string { return dcWasabiEuropeCentral_v3 } +func (config *S3Config) GetWasabiDerivedDC() string { + return dcWasabiEuropeCentralDerived +} + // Return the name of the cold Scaleway data center func (config *S3Config) GetColdScalewayDC() string { return dcSCWEuropeFrance_v3 diff --git a/web/README.md b/web/README.md index d33c039040..82ad85cad2 100644 --- a/web/README.md +++ b/web/README.md @@ -32,8 +32,11 @@ yarn dev That's it. 
The web app will automatically hot reload when you make changes. -If you're new to web development and unsure about how to get started, or are -facing some problems when running the above steps, see [docs/new](docs/new.md). +> [!TIP] +> +> If you're new to web development and unsure about how to get started, or are +> facing some problems when running the above steps, see +> [docs/new](docs/new.md). ## Other apps @@ -54,21 +57,22 @@ As a brief overview, this directory contains the following apps: your 2FA codes using this web app. For adding and editing your 2FA codes, please use the Ente Auth [mobile/desktop app](../auth/README.md) instead. -These two are the public facing apps. There are other part of the code which are +These are the public facing apps. There are other part of the code which are accessed as features within the main apps, but in terms of code are independently maintained and deployed: - `apps/accounts`: Passkey support (Coming soon) -- `apps/cast`: Chromecast support (Coming soon) +- `apps/cast`: Browser and Chromecast casting support. +- `apps/payments`: Handle subscription payments. > [!NOTE] > -> This folder is supposed to contain all our web related code. Most of it is -> already here, but some code which is being deployed from our other -> repositories like the family portal (https://github.com/ente-io/families) -> still needs to be brought here. Also, some of the Cloudflare workers we use -> for fixing headers etc too. Hang tight, we're on it, will bring in the -> remaining bits one by one. +> Some older code is being deployed from our other repositories like the family +> portal (https://github.com/ente-io/families) and still needs to be brought +> here. Likewise, some of the Cloudflare workers we use for fixing headers etc. +> We'll gradually bring all these into this monorepo one by one. + +The apps take use various `packages/` to share code amongst themselves. 
You might also find this [overview of dependencies](docs/dependencies.md) useful. diff --git a/web/apps/accounts/.env b/web/apps/accounts/.env new file mode 100644 index 0000000000..3f3b1cc9ae --- /dev/null +++ b/web/apps/accounts/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/accounts/src/pages/_app.tsx b/web/apps/accounts/src/pages/_app.tsx index 40a4a14588..a1927f52b2 100644 --- a/web/apps/accounts/src/pages/_app.tsx +++ b/web/apps/accounts/src/pages/_app.tsx @@ -1,6 +1,8 @@ import { CustomHead } from "@/next/components/Head"; import { setupI18n } from "@/next/i18n"; import { logUnhandledErrorsAndRejections } from "@/next/log-web"; +import { PAGES } from "@ente/accounts/constants/pages"; +import { accountLogout } from "@ente/accounts/services/logout"; import { APPS, APP_TITLES } from "@ente/shared/apps/constants"; import { Overlay } from "@ente/shared/components/Container"; import DialogBoxV2 from "@ente/shared/components/DialogBoxV2"; @@ -27,6 +29,7 @@ interface AppContextProps { isMobile: boolean; showNavBar: (show: boolean) => void; setDialogBoxAttributesV2: SetDialogBoxAttributesV2; + logout: () => void; } export const AppContext = createContext({} as AppContextProps); @@ -78,6 +81,10 @@ export default function App({ Component, pageProps }: AppProps) { const theme = getTheme(themeColor, APPS.PHOTOS); + const logout = () => { + void accountLogout().then(() => router.push(PAGES.ROOT)); + }; + const title = isI18nReady ? 
t("TITLE", { context: APPS.ACCOUNTS }) : APP_TITLES.get(APPS.ACCOUNTS); @@ -101,6 +108,7 @@ export default function App({ Component, pageProps }: AppProps) { showNavBar, setDialogBoxAttributesV2: setDialogBoxAttributesV2 as any, + logout, }} > {!isI18nReady && ( diff --git a/web/apps/auth/.env b/web/apps/auth/.env new file mode 100644 index 0000000000..3f3b1cc9ae --- /dev/null +++ b/web/apps/auth/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/auth/src/components/AuthFooter.tsx b/web/apps/auth/src/components/AuthFooter.tsx deleted file mode 100644 index 0291031254..0000000000 --- a/web/apps/auth/src/components/AuthFooter.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import { Button } from "@mui/material"; -import { t } from "i18next"; - -export const AuthFooter = () => { - return ( -
-

{t("AUTH_DOWNLOAD_MOBILE_APP")}

- - - -
- ); -}; diff --git a/web/apps/auth/src/components/Navbar.tsx b/web/apps/auth/src/components/Navbar.tsx deleted file mode 100644 index 293d7fc16a..0000000000 --- a/web/apps/auth/src/components/Navbar.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import { logoutUser } from "@ente/accounts/services/user"; -import { HorizontalFlex } from "@ente/shared/components/Container"; -import { EnteLogo } from "@ente/shared/components/EnteLogo"; -import NavbarBase from "@ente/shared/components/Navbar/base"; -import OverflowMenu from "@ente/shared/components/OverflowMenu/menu"; -import { OverflowMenuOption } from "@ente/shared/components/OverflowMenu/option"; -import LogoutOutlined from "@mui/icons-material/LogoutOutlined"; -import MoreHoriz from "@mui/icons-material/MoreHoriz"; -import { t } from "i18next"; -import { AppContext } from "pages/_app"; -import React from "react"; - -export default function AuthNavbar() { - const { isMobile } = React.useContext(AppContext); - return ( - - - - - - } - > - } - onClick={logoutUser} - > - {t("LOGOUT")} - - - - - ); -} diff --git a/web/apps/auth/src/components/OTPDisplay.tsx b/web/apps/auth/src/components/OTPDisplay.tsx deleted file mode 100644 index 38de665aa6..0000000000 --- a/web/apps/auth/src/components/OTPDisplay.tsx +++ /dev/null @@ -1,237 +0,0 @@ -import { ButtonBase, Snackbar } from "@mui/material"; -import { t } from "i18next"; -import { HOTP, TOTP } from "otpauth"; -import { useEffect, useState } from "react"; -import { Code } from "types/code"; -import TimerProgress from "./TimerProgress"; - -const TOTPDisplay = ({ issuer, account, code, nextCode, period }) => { - return ( -
- -
-
-

- {issuer} -

-

- {account} -

-

- {code} -

-
-
-
-

- {t("AUTH_NEXT")} -

-

- {nextCode} -

-
-
-
- ); -}; - -function BadCodeInfo({ codeInfo, codeErr }) { - const [showRawData, setShowRawData] = useState(false); - - return ( -
-
{codeInfo.title}
-
{codeErr}
-
- {showRawData ? ( -
setShowRawData(false)}> - {codeInfo.rawData ?? "no raw data"} -
- ) : ( -
setShowRawData(true)}>Show rawData
- )} -
-
- ); -} - -interface OTPDisplayProps { - codeInfo: Code; -} - -const OTPDisplay = (props: OTPDisplayProps) => { - const { codeInfo } = props; - const [code, setCode] = useState(""); - const [nextCode, setNextCode] = useState(""); - const [codeErr, setCodeErr] = useState(""); - const [hasCopied, setHasCopied] = useState(false); - - const generateCodes = () => { - try { - const currentTime = new Date().getTime(); - if (codeInfo.type.toLowerCase() === "totp") { - const totp = new TOTP({ - secret: codeInfo.secret, - algorithm: codeInfo.algorithm ?? Code.defaultAlgo, - period: codeInfo.period ?? Code.defaultPeriod, - digits: codeInfo.digits ?? Code.defaultDigits, - }); - setCode(totp.generate()); - setNextCode( - totp.generate({ - timestamp: currentTime + codeInfo.period * 1000, - }), - ); - } else if (codeInfo.type.toLowerCase() === "hotp") { - const hotp = new HOTP({ - secret: codeInfo.secret, - counter: 0, - algorithm: codeInfo.algorithm, - }); - setCode(hotp.generate()); - setNextCode(hotp.generate({ counter: 1 })); - } - } catch (err) { - setCodeErr(err.message); - } - }; - - const copyCode = () => { - navigator.clipboard.writeText(code); - setHasCopied(true); - setTimeout(() => { - setHasCopied(false); - }, 2000); - }; - - useEffect(() => { - // this is to set the initial code and nextCode on component mount - generateCodes(); - const codeType = codeInfo.type; - const codePeriodInMs = codeInfo.period * 1000; - const timeToNextCode = - codePeriodInMs - (new Date().getTime() % codePeriodInMs); - const intervalId = null; - // wait until we are at the start of the next code period, - // and then start the interval loop - setTimeout(() => { - // we need to call generateCodes() once before the interval loop - // to set the initial code and nextCode - generateCodes(); - codeType.toLowerCase() === "totp" || - codeType.toLowerCase() === "hotp" - ? 
setInterval(() => { - generateCodes(); - }, codePeriodInMs) - : null; - }, timeToNextCode); - - return () => { - if (intervalId) clearInterval(intervalId); - }; - }, [codeInfo]); - - return ( -
- {codeErr === "" ? ( - { - copyCode(); - }} - > - - - - ) : ( - - )} -
- ); -}; - -export default OTPDisplay; diff --git a/web/apps/auth/src/components/TimerProgress.tsx b/web/apps/auth/src/components/TimerProgress.tsx deleted file mode 100644 index d1f3726f66..0000000000 --- a/web/apps/auth/src/components/TimerProgress.tsx +++ /dev/null @@ -1,41 +0,0 @@ -import { useEffect, useState } from "react"; - -const TimerProgress = ({ period }) => { - const [progress, setProgress] = useState(0); - const [ticker, setTicker] = useState(null); - const microSecondsInPeriod = period * 1000000; - - const startTicker = () => { - const ticker = setInterval(() => { - updateTimeRemaining(); - }, 10); - setTicker(ticker); - }; - - const updateTimeRemaining = () => { - const timeRemaining = - microSecondsInPeriod - - ((new Date().getTime() * 1000) % microSecondsInPeriod); - setProgress(timeRemaining / microSecondsInPeriod); - }; - - useEffect(() => { - startTicker(); - return () => clearInterval(ticker); - }, []); - - const color = progress > 0.4 ? "green" : "orange"; - - return ( -
- ); -}; - -export default TimerProgress; diff --git a/web/apps/auth/src/pages/404.tsx b/web/apps/auth/src/pages/404.tsx index 6cca72b77e..dcd621c703 100644 --- a/web/apps/auth/src/pages/404.tsx +++ b/web/apps/auth/src/pages/404.tsx @@ -1,9 +1,3 @@ -import { APPS } from "@ente/shared/apps/constants"; -import NotFoundPage from "@ente/shared/next/pages/404"; -import { AppContext } from "pages/_app"; -import { useContext } from "react"; +import Page from "@ente/shared/next/pages/404"; -export default function NotFound() { - const appContext = useContext(AppContext); - return ; -} +export default Page; diff --git a/web/apps/auth/src/pages/_app.tsx b/web/apps/auth/src/pages/_app.tsx index a5aa55f98d..a0a579a80a 100644 --- a/web/apps/auth/src/pages/_app.tsx +++ b/web/apps/auth/src/pages/_app.tsx @@ -4,6 +4,7 @@ import { logStartupBanner, logUnhandledErrorsAndRejections, } from "@/next/log-web"; +import { accountLogout } from "@ente/accounts/services/logout"; import { APPS, APP_TITLES, @@ -44,6 +45,7 @@ type AppContextType = { setThemeColor: SetTheme; somethingWentWrong: () => void; setDialogBoxAttributesV2: SetDialogBoxAttributesV2; + logout: () => void; }; export const AppContext = createContext(null); @@ -128,6 +130,10 @@ export default function App({ Component, pageProps }: AppProps) { content: t("UNKNOWN_ERROR"), }); + const logout = () => { + void accountLogout().then(() => router.push(PAGES.ROOT)); + }; + const title = isI18nReady ? 
t("TITLE", { context: APPS.AUTH }) : APP_TITLES.get(APPS.AUTH); @@ -162,6 +168,7 @@ export default function App({ Component, pageProps }: AppProps) { setThemeColor, somethingWentWrong, setDialogBoxAttributesV2, + logout, }} > {(loading || !isI18nReady) && ( diff --git a/web/apps/auth/src/pages/auth.tsx b/web/apps/auth/src/pages/auth.tsx new file mode 100644 index 0000000000..e628050ea2 --- /dev/null +++ b/web/apps/auth/src/pages/auth.tsx @@ -0,0 +1,449 @@ +import { + HorizontalFlex, + VerticallyCentered, +} from "@ente/shared/components/Container"; +import { EnteLogo } from "@ente/shared/components/EnteLogo"; +import EnteSpinner from "@ente/shared/components/EnteSpinner"; +import NavbarBase from "@ente/shared/components/Navbar/base"; +import OverflowMenu from "@ente/shared/components/OverflowMenu/menu"; +import { OverflowMenuOption } from "@ente/shared/components/OverflowMenu/option"; +import { AUTH_PAGES as PAGES } from "@ente/shared/constants/pages"; +import { CustomError } from "@ente/shared/error"; +import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; +import LogoutOutlined from "@mui/icons-material/LogoutOutlined"; +import MoreHoriz from "@mui/icons-material/MoreHoriz"; +import { Button, ButtonBase, Snackbar, TextField } from "@mui/material"; +import { t } from "i18next"; +import { useRouter } from "next/router"; +import { HOTP, TOTP } from "otpauth"; +import { AppContext } from "pages/_app"; +import React, { useContext, useEffect, useState } from "react"; +import { Code } from "services/code"; +import { getAuthCodes } from "services/remote"; + +const AuthenticatorCodesPage = () => { + const appContext = useContext(AppContext); + const router = useRouter(); + const [codes, setCodes] = useState([]); + const [hasFetched, setHasFetched] = useState(false); + const [searchTerm, setSearchTerm] = useState(""); + + useEffect(() => { + const fetchCodes = async () => { + try { + const res = await getAuthCodes(); + setCodes(res); + } catch (err) { 
+ if (err.message === CustomError.KEY_MISSING) { + InMemoryStore.set(MS_KEYS.REDIRECT_URL, PAGES.AUTH); + router.push(PAGES.ROOT); + } else { + // do not log errors + } + } + setHasFetched(true); + }; + fetchCodes(); + appContext.showNavBar(false); + }, []); + + const filteredCodes = codes.filter( + (secret) => + (secret.issuer ?? "") + .toLowerCase() + .includes(searchTerm.toLowerCase()) || + (secret.account ?? "") + .toLowerCase() + .includes(searchTerm.toLowerCase()), + ); + + if (!hasFetched) { + return ( + <> + + + + + ); + } + + return ( + <> + +
+
+ {filteredCodes.length === 0 && searchTerm.length === 0 ? ( + <> + ) : ( + setSearchTerm(e.target.value)} + variant="filled" + style={{ width: "350px" }} + value={searchTerm} + autoFocus + /> + )} + +
+
+ {filteredCodes.length === 0 ? ( +
+ {searchTerm.length !== 0 ? ( +

{t("NO_RESULTS")}

+ ) : ( +
+ )} +
+ ) : ( + filteredCodes.map((code) => ( + + )) + )} +
+
+ +
+
+ + ); +}; + +export default AuthenticatorCodesPage; + +const AuthNavbar: React.FC = () => { + const { isMobile, logout } = useContext(AppContext); + + return ( + + + + + + } + > + } + onClick={logout} + > + {t("LOGOUT")} + + + + + ); +}; + +interface CodeDisplay { + codeInfo: Code; +} + +const CodeDisplay: React.FC = ({ codeInfo }) => { + const [otp, setOTP] = useState(""); + const [nextOTP, setNextOTP] = useState(""); + const [codeErr, setCodeErr] = useState(""); + const [hasCopied, setHasCopied] = useState(false); + + const generateCodes = () => { + try { + const currentTime = new Date().getTime(); + if (codeInfo.type === "totp") { + const totp = new TOTP({ + secret: codeInfo.secret, + algorithm: codeInfo.algorithm, + period: codeInfo.period, + digits: codeInfo.digits, + }); + setOTP(totp.generate()); + setNextOTP( + totp.generate({ + timestamp: currentTime + codeInfo.period * 1000, + }), + ); + } else if (codeInfo.type === "hotp") { + const hotp = new HOTP({ + secret: codeInfo.secret, + counter: 0, + algorithm: codeInfo.algorithm, + }); + setOTP(hotp.generate()); + setNextOTP(hotp.generate({ counter: 1 })); + } + } catch (err) { + setCodeErr(err.message); + } + }; + + const copyCode = () => { + navigator.clipboard.writeText(otp); + setHasCopied(true); + setTimeout(() => { + setHasCopied(false); + }, 2000); + }; + + useEffect(() => { + // this is to set the initial code and nextCode on component mount + generateCodes(); + const codeType = codeInfo.type; + const codePeriodInMs = codeInfo.period * 1000; + const timeToNextCode = + codePeriodInMs - (new Date().getTime() % codePeriodInMs); + const intervalId = null; + // wait until we are at the start of the next code period, + // and then start the interval loop + setTimeout(() => { + // we need to call generateCodes() once before the interval loop + // to set the initial code and nextCode + generateCodes(); + codeType.toLowerCase() === "totp" || + codeType.toLowerCase() === "hotp" + ? 
setInterval(() => { + generateCodes(); + }, codePeriodInMs) + : null; + }, timeToNextCode); + + return () => { + if (intervalId) clearInterval(intervalId); + }; + }, [codeInfo]); + + return ( +
+ {codeErr === "" ? ( + { + copyCode(); + }} + > + + + + ) : ( + + )} +
+ ); +}; + +interface OTPDisplayProps { + code: Code; + otp: string; + nextOTP: string; +} + +const OTPDisplay: React.FC = ({ code, otp, nextOTP }) => { + return ( +
+ +
+
+

+ {code.issuer} +

+

+ {code.account} +

+

+ {otp} +

+
+
+
+

+ {t("AUTH_NEXT")} +

+

+ {nextOTP} +

+
+
+
+ ); +}; + +interface TimerProgressProps { + period: number; +} + +const TimerProgress: React.FC = ({ period }) => { + const [progress, setProgress] = useState(0); + const microSecondsInPeriod = period * 1000000; + + useEffect(() => { + const updateTimeRemaining = () => { + const timeRemaining = + microSecondsInPeriod - + ((new Date().getTime() * 1000) % microSecondsInPeriod); + setProgress(timeRemaining / microSecondsInPeriod); + }; + + const ticker = setInterval(() => { + updateTimeRemaining(); + }, 10); + + return () => clearInterval(ticker); + }, []); + + const color = progress > 0.4 ? "green" : "orange"; + + return ( +
+ ); +}; + +function BadCodeInfo({ codeInfo, codeErr }) { + const [showRawData, setShowRawData] = useState(false); + + return ( +
+
{codeInfo.title}
+
{codeErr}
+
+ {showRawData ? ( +
setShowRawData(false)}> + {codeInfo.uriString ?? "(no raw data)"} +
+ ) : ( +
setShowRawData(true)}>Show rawData
+ )} +
+
+ ); +} + +const AuthFooter: React.FC = () => { + return ( +
+

{t("AUTH_DOWNLOAD_MOBILE_APP")}

+ + + +
+ ); +}; diff --git a/web/apps/auth/src/pages/auth/index.tsx b/web/apps/auth/src/pages/auth/index.tsx deleted file mode 100644 index 55dc33ce68..0000000000 --- a/web/apps/auth/src/pages/auth/index.tsx +++ /dev/null @@ -1,129 +0,0 @@ -import { VerticallyCentered } from "@ente/shared/components/Container"; -import EnteSpinner from "@ente/shared/components/EnteSpinner"; -import { AUTH_PAGES as PAGES } from "@ente/shared/constants/pages"; -import { CustomError } from "@ente/shared/error"; -import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; -import { TextField } from "@mui/material"; -import { AuthFooter } from "components/AuthFooter"; -import AuthNavbar from "components/Navbar"; -import OTPDisplay from "components/OTPDisplay"; -import { t } from "i18next"; -import { useRouter } from "next/router"; -import { AppContext } from "pages/_app"; -import { useContext, useEffect, useState } from "react"; -import { getAuthCodes } from "services"; - -const AuthenticatorCodesPage = () => { - const appContext = useContext(AppContext); - const router = useRouter(); - const [codes, setCodes] = useState([]); - const [hasFetched, setHasFetched] = useState(false); - const [searchTerm, setSearchTerm] = useState(""); - - useEffect(() => { - const fetchCodes = async () => { - try { - const res = await getAuthCodes(); - setCodes(res); - } catch (err) { - if (err.message === CustomError.KEY_MISSING) { - InMemoryStore.set(MS_KEYS.REDIRECT_URL, PAGES.AUTH); - router.push(PAGES.ROOT); - } else { - // do not log errors - } - } - setHasFetched(true); - }; - fetchCodes(); - appContext.showNavBar(false); - }, []); - - const filteredCodes = codes.filter( - (secret) => - (secret.issuer ?? "") - .toLowerCase() - .includes(searchTerm.toLowerCase()) || - (secret.account ?? "") - .toLowerCase() - .includes(searchTerm.toLowerCase()), - ); - - if (!hasFetched) { - return ( - <> - - - - - ); - } - - return ( - <> - -
-
- {filteredCodes.length === 0 && searchTerm.length === 0 ? ( - <> - ) : ( - setSearchTerm(e.target.value)} - variant="filled" - style={{ width: "350px" }} - value={searchTerm} - autoFocus - /> - )} - -
-
- {filteredCodes.length === 0 ? ( -
- {searchTerm.length !== 0 ? ( -

{t("NO_RESULTS")}

- ) : ( -
- )} -
- ) : ( - filteredCodes.map((code) => ( - - )) - )} -
-
- -
-
- - ); -}; - -export default AuthenticatorCodesPage; diff --git a/web/apps/auth/src/pages/change-email/index.tsx b/web/apps/auth/src/pages/change-email.tsx similarity index 100% rename from web/apps/auth/src/pages/change-email/index.tsx rename to web/apps/auth/src/pages/change-email.tsx diff --git a/web/apps/auth/src/pages/change-password/index.tsx b/web/apps/auth/src/pages/change-password.tsx similarity index 100% rename from web/apps/auth/src/pages/change-password/index.tsx rename to web/apps/auth/src/pages/change-password.tsx diff --git a/web/apps/auth/src/pages/credentials/index.tsx b/web/apps/auth/src/pages/credentials.tsx similarity index 100% rename from web/apps/auth/src/pages/credentials/index.tsx rename to web/apps/auth/src/pages/credentials.tsx diff --git a/web/apps/auth/src/pages/generate/index.tsx b/web/apps/auth/src/pages/generate.tsx similarity index 100% rename from web/apps/auth/src/pages/generate/index.tsx rename to web/apps/auth/src/pages/generate.tsx diff --git a/web/apps/auth/src/pages/login/index.tsx b/web/apps/auth/src/pages/login.tsx similarity index 100% rename from web/apps/auth/src/pages/login/index.tsx rename to web/apps/auth/src/pages/login.tsx diff --git a/web/apps/auth/src/pages/passkeys/finish/index.tsx b/web/apps/auth/src/pages/passkeys/finish.tsx similarity index 100% rename from web/apps/auth/src/pages/passkeys/finish/index.tsx rename to web/apps/auth/src/pages/passkeys/finish.tsx diff --git a/web/apps/auth/src/pages/recover/index.tsx b/web/apps/auth/src/pages/recover.tsx similarity index 100% rename from web/apps/auth/src/pages/recover/index.tsx rename to web/apps/auth/src/pages/recover.tsx diff --git a/web/apps/auth/src/pages/signup/index.tsx b/web/apps/auth/src/pages/signup.tsx similarity index 100% rename from web/apps/auth/src/pages/signup/index.tsx rename to web/apps/auth/src/pages/signup.tsx diff --git a/web/apps/auth/src/pages/two-factor/recover/index.tsx b/web/apps/auth/src/pages/two-factor/recover.tsx similarity index 
100% rename from web/apps/auth/src/pages/two-factor/recover/index.tsx rename to web/apps/auth/src/pages/two-factor/recover.tsx diff --git a/web/apps/auth/src/pages/two-factor/setup/index.tsx b/web/apps/auth/src/pages/two-factor/setup.tsx similarity index 100% rename from web/apps/auth/src/pages/two-factor/setup/index.tsx rename to web/apps/auth/src/pages/two-factor/setup.tsx diff --git a/web/apps/auth/src/pages/two-factor/verify/index.tsx b/web/apps/auth/src/pages/two-factor/verify.tsx similarity index 89% rename from web/apps/auth/src/pages/two-factor/verify/index.tsx rename to web/apps/auth/src/pages/two-factor/verify.tsx index 2243a43549..85eb7ff1b1 100644 --- a/web/apps/auth/src/pages/two-factor/verify/index.tsx +++ b/web/apps/auth/src/pages/two-factor/verify.tsx @@ -1,7 +1,7 @@ import TwoFactorVerifyPage from "@ente/accounts/pages/two-factor/verify"; import { APPS } from "@ente/shared/apps/constants"; import { useContext } from "react"; -import { AppContext } from "../../_app"; +import { AppContext } from "../_app"; export default function TwoFactorVerify() { const appContext = useContext(AppContext); diff --git a/web/apps/auth/src/pages/verify/index.tsx b/web/apps/auth/src/pages/verify.tsx similarity index 100% rename from web/apps/auth/src/pages/verify/index.tsx rename to web/apps/auth/src/pages/verify.tsx diff --git a/web/apps/auth/src/services/code.ts b/web/apps/auth/src/services/code.ts new file mode 100644 index 0000000000..ca9ba16427 --- /dev/null +++ b/web/apps/auth/src/services/code.ts @@ -0,0 +1,154 @@ +import { URI } from "vscode-uri"; + +/** + * A parsed representation of an xOTP code URI. + * + * This is all the data we need to drive a OTP generator. + */ +export interface Code { + /** The uniquue id for the corresponding auth entity. */ + id?: String; + /** The type of the code. */ + type: "totp" | "hotp"; + /** The user's account or email for which this code is used. */ + account: string; + /** The name of the entity that issued this code. 
*/ + issuer: string; + /** Number of digits in the code. */ + digits: number; + /** + * The time period (in seconds) for which a single OTP generated from this + * code remains valid. + */ + period: number; + /** The secret that is used to drive the OTP generator. */ + secret: string; + /** The (hashing) algorithim used by the OTP generator. */ + algorithm: "sha1" | "sha256" | "sha512"; + /** The original string from which this code was generated. */ + uriString?: string; +} + +/** + * Convert a OTP code URI into its parse representation, a {@link Code}. + * + * @param id A unique ID of this code within the auth app. + * + * @param uriString A string specifying how to generate a TOTP/HOTP/Steam OTP + * code. These strings are of the form: + * + * - (TOTP) + * otpauth://totp/account:user@example.org?algorithm=SHA1&digits=6&issuer=issuer&period=30&secret=ALPHANUM + */ +export const codeFromURIString = (id: string, uriString: string): Code => { + let santizedRawData = uriString + .replace(/\+/g, "%2B") + .replace(/:/g, "%3A") + .replaceAll("\r", ""); + if (santizedRawData.startsWith('"')) { + santizedRawData = santizedRawData.substring(1); + } + if (santizedRawData.endsWith('"')) { + santizedRawData = santizedRawData.substring( + 0, + santizedRawData.length - 1, + ); + } + + const uriParams = {}; + const searchParamsString = + decodeURIComponent(santizedRawData).split("?")[1]; + searchParamsString.split("&").forEach((pair) => { + const [key, value] = pair.split("="); + uriParams[key] = value; + }); + + const uri = URI.parse(santizedRawData); + let uriPath = decodeURIComponent(uri.path); + if (uriPath.startsWith("/otpauth://") || uriPath.startsWith("otpauth://")) { + uriPath = uriPath.split("otpauth://")[1]; + } else if (uriPath.startsWith("otpauth%3A//")) { + uriPath = uriPath.split("otpauth%3A//")[1]; + } + + return { + id, + type: _getType(uriPath), + account: _getAccount(uriPath), + issuer: _getIssuer(uriPath, uriParams), + digits: parseDigits(uriParams), + period: 
parsePeriod(uriParams), + secret: getSanitizedSecret(uriParams), + algorithm: parseAlgorithm(uriParams), + uriString, + }; +}; + +const _getAccount = (uriPath: string): string => { + try { + const path = decodeURIComponent(uriPath); + if (path.includes(":")) { + return path.split(":")[1]; + } else if (path.includes("/")) { + return path.split("/")[1]; + } + } catch (e) { + return ""; + } +}; + +const _getIssuer = (uriPath: string, uriParams: { get?: any }): string => { + try { + if (uriParams["issuer"] !== undefined) { + let issuer = uriParams["issuer"]; + // This is to handle bug in the ente auth app + if (issuer.endsWith("period")) { + issuer = issuer.substring(0, issuer.length - 6); + } + return issuer; + } + let path = decodeURIComponent(uriPath); + if (path.startsWith("totp/") || path.startsWith("hotp/")) { + path = path.substring(5); + } + if (path.includes(":")) { + return path.split(":")[0]; + } else if (path.includes("-")) { + return path.split("-")[0]; + } + return path; + } catch (e) { + return ""; + } +}; + +const parseDigits = (uriParams): number => + parseInt(uriParams["digits"] ?? "", 10) || 6; + +const parsePeriod = (uriParams): number => + parseInt(uriParams["period"] ?? 
"", 10) || 30; + +const parseAlgorithm = (uriParams): Code["algorithm"] => { + switch (uriParams["algorithm"]?.toLowerCase()) { + case "sha256": + return "sha256"; + case "sha512": + return "sha512"; + default: + return "sha1"; + } +}; + +const _getType = (uriPath: string): Code["type"] => { + const oauthType = uriPath.split("/")[0].substring(0); + if (oauthType.toLowerCase() === "totp") { + return "totp"; + } else if (oauthType.toLowerCase() === "hotp") { + return "hotp"; + } + throw new Error(`Unsupported format with host ${oauthType}`); +}; + +const getSanitizedSecret = (uriParams): string => { + return uriParams["secret"].replace(/ /g, "").toUpperCase(); +}; diff --git a/web/apps/auth/src/services/index.ts b/web/apps/auth/src/services/remote.ts similarity index 90% rename from web/apps/auth/src/services/index.ts rename to web/apps/auth/src/services/remote.ts index 5fd032215f..07b15d7d71 100644 --- a/web/apps/auth/src/services/index.ts +++ b/web/apps/auth/src/services/remote.ts @@ -6,10 +6,10 @@ import { getEndpoint } from "@ente/shared/network/api"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; import { getActualKey } from "@ente/shared/user"; import { HttpStatusCode } from "axios"; -import { AuthEntity, AuthKey } from "types/api"; -import { Code } from "types/code"; +import { codeFromURIString, type Code } from "services/code"; const ENDPOINT = getEndpoint(); + export const getAuthCodes = async (): Promise => { const masterKey = await getActualKey(); try { @@ -33,7 +33,7 @@ export const getAuthCodes = async (): Promise => { entity.header, authenticatorKey, ); - return Code.fromRawData(entity.id, decryptedCode); + return codeFromURIString(entity.id, decryptedCode); } catch (e) { log.error(`failed to parse codeId = ${entity.id}`); return null; @@ -65,6 +65,20 @@ export const getAuthCodes = async (): Promise => { } }; +interface AuthEntity { + id: string; + encryptedData: string | null; + header: string | null; + isDeleted: boolean; + 
createdAt: number; + updatedAt: number; +} + +interface AuthKey { + encryptedKey: string; + header: string; +} + export const getAuthKey = async (): Promise => { try { const resp = await HTTPService.get( diff --git a/web/apps/auth/src/types/api.ts b/web/apps/auth/src/types/api.ts deleted file mode 100644 index 569df81850..0000000000 --- a/web/apps/auth/src/types/api.ts +++ /dev/null @@ -1,13 +0,0 @@ -export interface AuthEntity { - id: string; - encryptedData: string | null; - header: string | null; - isDeleted: boolean; - createdAt: number; - updatedAt: number; -} - -export interface AuthKey { - encryptedKey: string; - header: string; -} diff --git a/web/apps/auth/src/types/code.ts b/web/apps/auth/src/types/code.ts deleted file mode 100644 index d61a2dcd68..0000000000 --- a/web/apps/auth/src/types/code.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { URI } from "vscode-uri"; - -type Type = "totp" | "TOTP" | "hotp" | "HOTP"; - -type AlgorithmType = - | "sha1" - | "SHA1" - | "sha256" - | "SHA256" - | "sha512" - | "SHA512"; - -export class Code { - static readonly defaultDigits = 6; - static readonly defaultAlgo = "sha1"; - static readonly defaultPeriod = 30; - - // id for the corresponding auth entity - id?: String; - account: string; - issuer: string; - digits?: number; - period: number; - secret: string; - algorithm: AlgorithmType; - type: Type; - rawData?: string; - - constructor( - account: string, - issuer: string, - digits: number | undefined, - period: number, - secret: string, - algorithm: AlgorithmType, - type: Type, - rawData?: string, - id?: string, - ) { - this.account = account; - this.issuer = issuer; - this.digits = digits; - this.period = period; - this.secret = secret; - this.algorithm = algorithm; - this.type = type; - this.rawData = rawData; - this.id = id; - } - - static fromRawData(id: string, rawData: string): Code { - let santizedRawData = rawData - .replace(/\+/g, "%2B") - .replace(/:/g, "%3A") - .replaceAll("\r", ""); - if 
(santizedRawData.startsWith('"')) { - santizedRawData = santizedRawData.substring(1); - } - if (santizedRawData.endsWith('"')) { - santizedRawData = santizedRawData.substring( - 0, - santizedRawData.length - 1, - ); - } - - const uriParams = {}; - const searchParamsString = - decodeURIComponent(santizedRawData).split("?")[1]; - searchParamsString.split("&").forEach((pair) => { - const [key, value] = pair.split("="); - uriParams[key] = value; - }); - - const uri = URI.parse(santizedRawData); - let uriPath = decodeURIComponent(uri.path); - if ( - uriPath.startsWith("/otpauth://") || - uriPath.startsWith("otpauth://") - ) { - uriPath = uriPath.split("otpauth://")[1]; - } else if (uriPath.startsWith("otpauth%3A//")) { - uriPath = uriPath.split("otpauth%3A//")[1]; - } - - return new Code( - Code._getAccount(uriPath), - Code._getIssuer(uriPath, uriParams), - Code._getDigits(uriParams), - Code._getPeriod(uriParams), - Code.getSanitizedSecret(uriParams), - Code._getAlgorithm(uriParams), - Code._getType(uriPath), - rawData, - id, - ); - } - - private static _getAccount(uriPath: string): string { - try { - const path = decodeURIComponent(uriPath); - if (path.includes(":")) { - return path.split(":")[1]; - } else if (path.includes("/")) { - return path.split("/")[1]; - } - } catch (e) { - return ""; - } - } - - private static _getIssuer( - uriPath: string, - uriParams: { get?: any }, - ): string { - try { - if (uriParams["issuer"] !== undefined) { - let issuer = uriParams["issuer"]; - // This is to handle bug in the ente auth app - if (issuer.endsWith("period")) { - issuer = issuer.substring(0, issuer.length - 6); - } - return issuer; - } - let path = decodeURIComponent(uriPath); - if (path.startsWith("totp/") || path.startsWith("hotp/")) { - path = path.substring(5); - } - if (path.includes(":")) { - return path.split(":")[0]; - } else if (path.includes("-")) { - return path.split("-")[0]; - } - return path; - } catch (e) { - return ""; - } - } - - private static 
_getDigits(uriParams): number { - try { - return parseInt(uriParams["digits"], 10) || Code.defaultDigits; - } catch (e) { - return Code.defaultDigits; - } - } - - private static _getPeriod(uriParams): number { - try { - return parseInt(uriParams["period"], 10) || Code.defaultPeriod; - } catch (e) { - return Code.defaultPeriod; - } - } - - private static _getAlgorithm(uriParams): AlgorithmType { - try { - const algorithm = uriParams["algorithm"].toLowerCase(); - if (algorithm === "sha256") { - return algorithm; - } else if (algorithm === "sha512") { - return algorithm; - } - } catch (e) { - // nothing - } - return "sha1"; - } - - private static _getType(uriPath: string): Type { - const oauthType = uriPath.split("/")[0].substring(0); - if (oauthType.toLowerCase() === "totp") { - return "totp"; - } else if (oauthType.toLowerCase() === "hotp") { - return "hotp"; - } - throw new Error(`Unsupported format with host ${oauthType}`); - } - - static getSanitizedSecret(uriParams): string { - return uriParams["secret"].replace(/ /g, "").toUpperCase(); - } -} diff --git a/web/apps/cast/.env b/web/apps/cast/.env new file mode 100644 index 0000000000..3f3b1cc9ae --- /dev/null +++ b/web/apps/cast/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx b/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx deleted file mode 100644 index 88f4d7c1fc..0000000000 --- a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import { FilledCircleCheck } from "./FilledCircleCheck"; - -export const PairedSuccessfullyOverlay: React.FC = () => { - return ( -
-
- -

- Pairing Complete -

-

- We're preparing your album. -
This should only take a few seconds. -

-
-
- ); -}; diff --git a/web/apps/cast/src/components/LargeType.tsx b/web/apps/cast/src/components/PairingCode.tsx similarity index 75% rename from web/apps/cast/src/components/LargeType.tsx rename to web/apps/cast/src/components/PairingCode.tsx index 42ccb65e9f..fa1474bafc 100644 --- a/web/apps/cast/src/components/LargeType.tsx +++ b/web/apps/cast/src/components/PairingCode.tsx @@ -1,6 +1,6 @@ import { styled } from "@mui/material"; -const colourPool = [ +const colors = [ "#87CEFA", // Light Blue "#90EE90", // Light Green "#F08080", // Light Coral @@ -23,27 +23,34 @@ const colourPool = [ "#808000", // Light Olive ]; -export const LargeType = ({ chars }: { chars: string[] }) => { +interface PairingCodeProps { + code: string; +} + +export const PairingCode: React.FC = ({ code }) => { return ( - - {chars.map((char, i) => ( + + {code.split("").map((char, i) => ( {char} ))} - + ); }; -const Container = styled("div")` +const PairingCode_ = styled("div")` + border-radius: 10px; + overflow: hidden; + font-size: 4rem; font-weight: bold; font-family: monospace; diff --git a/web/apps/cast/src/components/Slide.tsx b/web/apps/cast/src/components/Slide.tsx deleted file mode 100644 index 8309f8bc2c..0000000000 --- a/web/apps/cast/src/components/Slide.tsx +++ /dev/null @@ -1,56 +0,0 @@ -interface SlideViewProps { - /** The URL of the image to show. */ - url: string; - /** The URL of the next image that we will transition to. */ - nextURL: string; -} - -/** - * Show the image at {@link url} in a full screen view. - * - * Also show {@link nextURL} in an hidden image view to prepare the browser for - * an imminent transition to it. - */ -export const SlideView: React.FC = ({ url, nextURL }) => { - return ( -
-
- - -
-
- ); -}; diff --git a/web/apps/cast/src/pages/_app.tsx b/web/apps/cast/src/pages/_app.tsx index 99b047d416..d85ac05422 100644 --- a/web/apps/cast/src/pages/_app.tsx +++ b/web/apps/cast/src/pages/_app.tsx @@ -1,4 +1,5 @@ import { CustomHead } from "@/next/components/Head"; +import { disableDiskLogs } from "@/next/log"; import { logUnhandledErrorsAndRejections } from "@/next/log-web"; import { APPS, APP_TITLES } from "@ente/shared/apps/constants"; import { getTheme } from "@ente/shared/themes"; @@ -11,6 +12,7 @@ import "styles/global.css"; export default function App({ Component, pageProps }: AppProps) { useEffect(() => { + disableDiskLogs(); logUnhandledErrorsAndRejections(true); return () => logUnhandledErrorsAndRejections(false); }, []); diff --git a/web/apps/cast/src/pages/index.tsx b/web/apps/cast/src/pages/index.tsx index bc0f6253db..37fcf3d4be 100644 --- a/web/apps/cast/src/pages/index.tsx +++ b/web/apps/cast/src/pages/index.tsx @@ -1,11 +1,12 @@ import log from "@/next/log"; import EnteSpinner from "@ente/shared/components/EnteSpinner"; -import { LargeType } from "components/LargeType"; +import { styled } from "@mui/material"; +import { PairingCode } from "components/PairingCode"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -import { storeCastData } from "services/cast"; -import { advertiseCode, getCastData, register } from "services/pair"; -import { castReceiverLoadingIfNeeded } from "../utils/cast-receiver"; +import { readCastData, storeCastData } from "services/cast-data"; +import { getCastData, register } from "services/pair"; +import { advertiseOnChromecast } from "../services/chromecast"; export default function Index() { const [publicKeyB64, setPublicKeyB64] = useState(); @@ -15,22 +16,19 @@ export default function Index() { const router = useRouter(); useEffect(() => { - init(); - }, []); - - const init = () => { - register().then((r) => { - setPublicKeyB64(r.publicKeyB64); - setPrivateKeyB64(r.privateKeyB64); 
- setPairingCode(r.pairingCode); - }); - }; - - useEffect(() => { - castReceiverLoadingIfNeeded().then((cast) => - advertiseCode(cast, () => pairingCode), - ); - }, []); + if (!pairingCode) { + register().then((r) => { + setPublicKeyB64(r.publicKeyB64); + setPrivateKeyB64(r.privateKeyB64); + setPairingCode(r.pairingCode); + }); + } else { + advertiseOnChromecast( + () => pairingCode, + () => readCastData()?.collectionID, + ); + } + }, [pairingCode]); useEffect(() => { if (!publicKeyB64 || !privateKeyB64 || !pairingCode) return; @@ -48,77 +46,65 @@ export default function Index() { return; } - log.info("Pairing complete"); storeCastData(data); await router.push("/slideshow"); } catch (e) { - log.error("Failed to get cast data", e); - // Start again from the beginning. + // The pairing code becomes invalid after an hour, which will cause + // `getCastData` to fail. There might be other reasons this might + // fail too, but in all such cases, it is a reasonable idea to start + // again from the beginning. + log.warn("Failed to get cast data", e); setPairingCode(undefined); - init(); } }; return ( - <> -
-
- -

- Enter this code on Ente Photos to pair this - screen -

-
- {pairingCode ? ( - - ) : ( - - )} -
-

- Visit{" "} - - ente.io/cast - {" "} - for help -

-
-
- + + +

+ Enter this code on Ente Photos to pair this screen +

+ {pairingCode ? : } +

+ Visit{" "} + + ente.io/cast + {" "} + for help +

+
); } + +const Container = styled("div")` + height: 100%; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + text-align: center; + + h1 { + font-weight: normal; + } + + p { + font-size: 1.2rem; + } + a { + text-decoration: none; + color: #87cefa; + font-weight: bold; + } +`; + +const Spinner: React.FC = () => ( + + + +); + +const Spinner_ = styled("div")` + /* Roughly same height as the pairing code section to roduce layout shift */ + margin-block: 1.7rem; +`; diff --git a/web/apps/cast/src/pages/slideshow.tsx b/web/apps/cast/src/pages/slideshow.tsx index bd3339b42b..326b183d4a 100644 --- a/web/apps/cast/src/pages/slideshow.tsx +++ b/web/apps/cast/src/pages/slideshow.tsx @@ -1,14 +1,17 @@ import log from "@/next/log"; -import { PairedSuccessfullyOverlay } from "components/PairedSuccessfullyOverlay"; -import { SlideView } from "components/Slide"; +import { ensure } from "@/utils/ensure"; +import { styled } from "@mui/material"; +import { FilledCircleCheck } from "components/FilledCircleCheck"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -import { readCastData, renderableImageURLs } from "services/cast"; +import { readCastData } from "services/cast-data"; +import { isChromecast } from "services/chromecast"; +import { imageURLGenerator } from "services/render"; export default function Slideshow() { const [loading, setLoading] = useState(true); const [imageURL, setImageURL] = useState(); - const [nextImageURL, setNextImageURL] = useState(); + const [isEmpty, setIsEmpty] = useState(false); const router = useRouter(); @@ -20,17 +23,18 @@ export default function Slideshow() { const loop = async () => { try { - const urlGenerator = renderableImageURLs(readCastData()); + const urlGenerator = imageURLGenerator(ensure(readCastData())); while (!stop) { - const { value: urls, done } = await urlGenerator.next(); - if (done) { - log.warn("Empty collection"); - pair(); + const { value: url, done } 
= await urlGenerator.next(); + if (done || !url) { + // No items in this callection can be shown. + setIsEmpty(true); + // Go back to pairing screen after 5 seconds. + setTimeout(pair, 5000); return; } - setImageURL(urls[0]); - setNextImageURL(urls[1]); + setImageURL(url); setLoading(false); } } catch (e) { @@ -46,7 +50,143 @@ export default function Slideshow() { }; }, []); - if (loading) return ; + if (loading) return ; + if (isEmpty) return ; - return ; + return isChromecast() ? ( + + ) : ( + + ); } + +const PairingComplete: React.FC = () => { + return ( + + +

Pairing Complete

+

+ We're preparing your album. +
This should only take a few seconds. +

+
+ ); +}; + +const Message = styled("div")` + display: flex; + flex-direction: column; + height: 100%; + justify-content: center; + align-items: center; + text-align: center; + + line-height: 1.5rem; + + h2 { + margin-block-end: 0; + } +`; + +const NoItems: React.FC = () => { + return ( + +

Try another album

+

+ This album has no photos that can be shown here +
Please try another album +

+
+ ); +}; + +interface SlideViewProps { + /** The URL of the image to show. */ + url: string; +} + +const SlideView: React.FC = ({ url }) => { + return ( + + + + ); +}; + +const SlideView_ = styled("div")` + width: 100%; + height: 100%; + + background-size: cover; + background-position: center; + background-repeat: no-repeat; + background-blend-mode: multiply; + background-color: rgba(0, 0, 0, 0.5); + + /* Smooth out the transition a bit. + * + * For the img itself, we set decoding="sync" to have it switch seamlessly. + * But there does not seem to be a way of setting decoding sync for the + * background image, and for large (multi-MB) images the background image + * switch is still visually non-atomic. + * + * As a workaround, add a long transition so that the background image + * transitions in a more "fade-to" manner. This effect might or might not be + * visually the best though. + * + * Does not work in Firefox, but that's fine, this is only a slight tweak, + * not a functional requirement. + */ + transition: all 2s; + + img { + width: 100%; + height: 100%; + backdrop-filter: blur(10px); + object-fit: contain; + } +`; + +/** + * Variant of {@link SlideView} for use when we're running on Chromecast. + * + * Chromecast devices have trouble with + * + * backdrop-filter: blur(10px); + * + * So emulate a cheaper approximation for use on Chromecast. 
+ */ +const SlideViewChromecast: React.FC = ({ url }) => { + return ( + + + + + ); +}; + +const SlideViewChromecast_ = styled("div")` + width: 100%; + height: 100%; + + /* We can't set opacity of background-image, so use a wrapper */ + position: relative; + overflow: hidden; + + img.svc-bg { + position: absolute; + left: 0; + top: 0; + width: 100%; + height: 100%; + object-fit: cover; + opacity: 0.1; + } + + img.svc-content { + position: relative; + width: 100%; + height: 100%; + object-fit: contain; + } +`; diff --git a/web/apps/cast/src/services/cast-data.ts b/web/apps/cast/src/services/cast-data.ts new file mode 100644 index 0000000000..587d1db323 --- /dev/null +++ b/web/apps/cast/src/services/cast-data.ts @@ -0,0 +1,41 @@ +export interface CastData { + /** The ID of the callection we are casting. */ + collectionID: string; + /** A key to decrypt the collection we are casting. */ + collectionKey: string; + /** A credential to use for fetching media files for this cast session. */ + castToken: string; +} + +/** + * Save the data received after pairing with a sender into local storage. + * + * We will read in back when we start the slideshow. + */ +export const storeCastData = (payload: unknown) => { + if (!payload || typeof payload != "object") + throw new Error("Unexpected cast data"); + + // Iterate through all the keys of the payload object and save them to + // localStorage. We don't validate here, we'll validate when we read these + // values back in `readCastData`. + for (const key in payload) { + window.localStorage.setItem(key, payload[key]); + } +}; + +/** + * Read back the cast data we got after pairing. + * + * Sibling of {@link storeCastData}. It returns undefined if the expected data + * is not present in localStorage. 
+ */ +export const readCastData = (): CastData | undefined => { + const collectionID = localStorage.getItem("collectionID"); + const collectionKey = localStorage.getItem("collectionKey"); + const castToken = localStorage.getItem("castToken"); + + return collectionID && collectionKey && castToken + ? { collectionID, collectionKey, castToken } + : undefined; +}; diff --git a/web/apps/cast/src/services/chromecast.ts b/web/apps/cast/src/services/chromecast.ts new file mode 100644 index 0000000000..e7539e8c51 --- /dev/null +++ b/web/apps/cast/src/services/chromecast.ts @@ -0,0 +1,227 @@ +/// + +import log from "@/next/log"; + +export type Cast = typeof cast; + +/** + * A holder for the "cast" global object exposed by the Chromecast SDK, + * alongwith auxiliary state we need around it. + */ +class CastReceiver { + /** + * A reference to the `cast` global object that the Chromecast Web Receiver + * SDK attaches to the window. + * + * https://developers.google.com/cast/docs/web_receiver/basic + */ + cast: Cast | undefined; + /** + * A promise that allows us to ensure multiple requests to load are funneled + * through the same reified load. + */ + loader: Promise | undefined; + /** + * True if we have already attached listeners (i.e. if we have "started" the + * Chromecast SDK). + * + * Note that "stopping" the Chromecast SDK causes the Chromecast device to + * reload our tab, so this is a one way flag. The stop is something that'll + * only get triggered when we're actually running on a Chromecast since it + * always happens in response to a message handler. + */ + haveStarted = false; + /** + * Cached result of the isChromecast test. + */ + isChromecast: boolean | undefined; + /** + * A callback to invoke to get the pairing code when we get a new incoming + * pairing request. + */ + pairingCode: (() => string | undefined) | undefined; + /** + * A callback to invoke to get the ID of the collection that is currently + * being shown (if any). 
+ */ + collectionID: (() => string | undefined) | undefined; +} + +/** Singleton instance of {@link CastReceiver}. */ +const castReceiver = new CastReceiver(); + +/** + * Listen for incoming messages on the given {@link cast} receiver, replying to + * each of them with a pairing code obtained using the given {@link pairingCode} + * callback. Phase 2 of the pairing protocol. + * + * Calling this function multiple times is fine. The first time around, the + * Chromecast SDK will be loaded and will start listening. Subsequently, each + * time this is call, we'll update the callbacks, but otherwise just return + * immediately (letting the already attached listeners do their thing). + * + * @param pairingCode A callback to invoke to get the pairing code when we get a + * new incoming pairing request. + * + * @param collectionID A callback to invoke to get the ID of the collection that + * is currently being shown (if any). + * + * See: [Note: Pairing protocol]. + */ +export const advertiseOnChromecast = ( + pairingCode: () => string | undefined, + collectionID: () => string | undefined, +) => { + // Always update the callbacks. + castReceiver.pairingCode = pairingCode; + castReceiver.collectionID = collectionID; + + // No-op if we're already running. + if (castReceiver.haveStarted) return; + + void loadingChromecastSDKIfNeeded().then((cast) => advertiseCode(cast)); +}; + +/** + * Load the Chromecast Web Receiver SDK and return a reference to the `cast` + * global object that the SDK attaches to the window. + * + * Calling this function multiple times is fine, once the Chromecast SDK is + * loaded it'll thereafter return the reference to the same object always. 
+ */ +const loadingChromecastSDKIfNeeded = async (): Promise => { + if (castReceiver.cast) return castReceiver.cast; + if (castReceiver.loader) return await castReceiver.loader; + + castReceiver.loader = new Promise((resolve) => { + const script = document.createElement("script"); + script.src = + "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; + script.addEventListener("load", () => { + castReceiver.cast = cast; + resolve(cast); + }); + document.body.appendChild(script); + }); + + return await castReceiver.loader; +}; + +const advertiseCode = (cast: Cast) => { + if (castReceiver.haveStarted) { + // Multiple attempts raced to completion, ignore all but the first. + return; + } + + castReceiver.haveStarted = true; + + // Prepare the Chromecast "context". + const context = cast.framework.CastReceiverContext.getInstance(); + const namespace = "urn:x-cast:pair-request"; + + const options = new cast.framework.CastReceiverOptions(); + // We don't use the media features of the Cast SDK. + options.skipPlayersLoad = true; + // Do not stop the casting if the receiver is unreachable. A user should be + // able to start a cast on their phone and then put it away, leaving the + // cast running on their big screen. + options.disableIdleTimeout = true; + + type ListenerProps = { + senderId: string; + data: unknown; + }; + + // Reply with the code that we have if anyone asks over Chromecast. + const incomingMessageListener = ({ senderId, data }: ListenerProps) => { + // The collection ID with is currently paired (if any). + const pairedCollectionID = castReceiver.collectionID?.(); + + // The collection ID in the request (if any). + const collectionID = + data && + typeof data == "object" && + typeof data["collectionID"] == "string" + ? data["collectionID"] + : undefined; + + // If the request does not have a collectionID (or if we're not showing + // anything currently), forego this check. 
+ + if (collectionID && pairedCollectionID) { + // If we get another connection request for a _different_ collection + // ID, stop the app to allow the second device to reconnect using a + // freshly generated pairing code. + if (pairedCollectionID != collectionID) { + log.info(`request for a new collection ${collectionID}`); + context.stop(); + } else { + // Duplicate request for same collection that we're already + // showing. Ignore. + } + return; + } + + const code = castReceiver.pairingCode?.(); + if (!code) { + // No code, but if we're already showing a collection, then ignore. + if (pairedCollectionID) return; + + // Our caller waits until it has a pairing code before it calls + // `advertiseCode`, but there is still an edge case where we can + // find ourselves without a pairing code: + // + // 1. The current pairing code expires. We start the process to get + // a new one. + // + // 2. But before that happens, someone connects. + // + // The window where this can happen is short, so if we do find + // ourselves in this scenario, just shutdown. + log.error("got pairing request when refreshing pairing codes"); + context.stop(); + return; + } + + context.sendCustomMessage(namespace, senderId, { code }); + }; + + context.addCustomMessageListener( + namespace, + // We need to cast, the `senderId` is present in the message we get but + // not present in the TypeScript type. + incomingMessageListener as unknown as SystemEventHandler, + ); + + // Close the (chromecast) tab if the sender disconnects. + // + // Chromecast does a "shutdown" of our cast app when we call `context.stop`. + // This translates into it closing the tab where it is showing our app. + context.addEventListener( + cast.framework.system.EventType.SENDER_DISCONNECTED, + () => context.stop(), + ); + + // Start listening for Chromecast connections. + context.start(options); +}; + +/** + * Return true if we're running on a Chromecast device. 
+ * + * This allows changing our app's behaviour when we're running on Chromecast. + * Such checks are needed because during our testing we found that in practice, + * some processing is too heavy for Chromecast hardware (we tested with a 2nd + * gen device, this might not be true for newer variants). + * + * This variable is lazily updated when we enter {@link renderableImageURLs}. It + * is kept at the top level to avoid passing it around. + */ +export const isChromecast = () => { + let isCast = castReceiver.isChromecast; + if (isCast === undefined) { + isCast = window.navigator.userAgent.includes("CrKey"); + castReceiver.isChromecast = isCast; + } + return isCast; +}; diff --git a/web/apps/cast/src/services/detect-type.ts b/web/apps/cast/src/services/detect-type.ts index 187e19df84..c43529aaed 100644 --- a/web/apps/cast/src/services/detect-type.ts +++ b/web/apps/cast/src/services/detect-type.ts @@ -9,6 +9,9 @@ import FileType from "file-type"; * * It first peeks into the file's initial contents to detect the MIME type. If * that doesn't give any results, it tries to deduce it from the file's name. 
+ * + * For the list of returned extensions, see (for our installed version): + * https://github.com/sindresorhus/file-type/blob/main/core.d.ts */ export const detectMediaMIMEType = async (file: File): Promise => { const chunkSizeForTypeDetection = 4100; diff --git a/web/apps/cast/src/services/pair.ts b/web/apps/cast/src/services/pair.ts index 66f9feddd1..36b54cf759 100644 --- a/web/apps/cast/src/services/pair.ts +++ b/web/apps/cast/src/services/pair.ts @@ -1,9 +1,8 @@ import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium"; import castGateway from "@ente/shared/network/cast"; -import { wait } from "@ente/shared/utils"; import _sodium from "libsodium-wrappers"; -import { type Cast } from "../utils/cast-receiver"; export interface Registration { /** A pairing code shown on the screen. A client can use this to connect. */ @@ -100,64 +99,6 @@ export const register = async (): Promise => { return { pairingCode, publicKeyB64, privateKeyB64 }; }; -/** - * Listen for incoming messages on the given {@link cast} receiver, replying to - * each of them with a pairing code obtained using the given {@link pairingCode} - * callback. Phase 2 of the pairing protocol. - * - * See: [Note: Pairing protocol]. - */ -export const advertiseCode = ( - cast: Cast, - pairingCode: () => string | undefined, -) => { - // Prepare the Chromecast "context". - const context = cast.framework.CastReceiverContext.getInstance(); - const namespace = "urn:x-cast:pair-request"; - - const options = new cast.framework.CastReceiverOptions(); - // Do not automatically close the connection when the sender disconnects. - options.maxInactivity = 3600; /* 1 hour */ - // TODO:Is this required? The docs say "(The default type of a message bus - // is JSON; if not provided here)." 
- options.customNamespaces = Object.assign({}); - options.customNamespaces[namespace] = - cast.framework.system.MessageType.JSON; - // TODO: This looks like the only one needed, but a comment with the reason - // might be good. - options.disableIdleTimeout = true; - - // Reply with the code that we have if anyone asks over Chromecast. - const incomingMessageListener = ({ senderId }: { senderId: string }) => { - const code = pairingCode(); - if (!code) { - log.warn( - "Ignoring incoming Chromecast message because we do not yet have a pairing code", - ); - return; - } - - context.sendCustomMessage(namespace, senderId, { code }); - }; - - context.addCustomMessageListener( - namespace, - // We need to cast, the `senderId` is present in the message we get but - // not present in the TypeScript type. - incomingMessageListener as unknown as SystemEventHandler, - ); - - // Shutdown ourselves if the sender disconnects. - // TODO(MR): I assume the page reloads on shutdown. Is that correct? - context.addEventListener( - cast.framework.system.EventType.SENDER_DISCONNECTED, - () => context.stop(), - ); - - // Start listening for Chromecast connections. - context.start(options); -}; - /** * Ask museum if anyone has sent a (encrypted) payload corresponding to the * given pairing code. 
If so, decrypt it using our private key and return the diff --git a/web/apps/cast/src/services/cast.ts b/web/apps/cast/src/services/render.ts similarity index 56% rename from web/apps/cast/src/services/cast.ts rename to web/apps/cast/src/services/render.ts index 8ead8962ab..79065c2afc 100644 --- a/web/apps/cast/src/services/cast.ts +++ b/web/apps/cast/src/services/render.ts @@ -1,14 +1,23 @@ import { FILE_TYPE } from "@/media/file-type"; -import { isNonWebImageFileExtension } from "@/media/formats"; +import { isHEICExtension, isNonWebImageFileExtension } from "@/media/formats"; import { decodeLivePhoto } from "@/media/live-photo"; +import { createHEICConvertComlinkWorker } from "@/media/worker/heic-convert"; +import type { DedicatedHEICConvertWorker } from "@/media/worker/heic-convert.worker"; import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; +import type { ComlinkWorker } from "@/next/worker/comlink-worker"; import { shuffled } from "@/utils/array"; -import { ensure, ensureString } from "@/utils/ensure"; +import { wait } from "@/utils/promise"; import ComlinkCryptoWorker from "@ente/shared/crypto"; +import { ApiError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; -import { getCastFileURL, getEndpoint } from "@ente/shared/network/api"; -import { wait } from "@ente/shared/utils"; +import { + getCastFileURL, + getCastThumbnailURL, + getEndpoint, +} from "@ente/shared/network/api"; +import type { AxiosResponse } from "axios"; +import type { CastData } from "services/cast-data"; import { detectMediaMIMEType } from "services/detect-type"; import { EncryptedEnteFile, @@ -16,53 +25,20 @@ import { FileMagicMetadata, FilePublicMagicMetadata, } from "types/file"; +import { isChromecast } from "./chromecast"; /** - * Save the data received after pairing with a sender into local storage. - * - * We will read in back when we start the slideshow. 
+ * If we're using HEIC conversion, then this variable caches the comlink web + * worker we're using to perform the actual conversion. */ -export const storeCastData = (payload: unknown) => { - if (!payload || typeof payload != "object") - throw new Error("Unexpected cast data"); - - // Iterate through all the keys of the payload object and save them to - // localStorage. We don't validate here, we'll validate when we read these - // values back in `readCastData`. - for (const key in payload) { - window.localStorage.setItem(key, payload[key]); - } -}; - -interface CastData { - /** A key to decrypt the collection we are casting. */ - collectionKey: string; - /** A credential to use for fetching media files for this cast session. */ - castToken: string; -} - -/** - * Read back the cast data we got after pairing. - * - * Sibling of {@link storeCastData}. It throws an error if the expected data is - * not present in localStorage. - */ -export const readCastData = (): CastData => { - const collectionKey = ensureString(localStorage.getItem("collectionKey")); - const castToken = ensureString(localStorage.getItem("castToken")); - return { collectionKey, castToken }; -}; - -type RenderableImageURLPair = [url: string, nextURL: string]; +let heicWorker: ComlinkWorker | undefined; /** * An async generator function that loops through all the files in the - * collection, returning renderable URLs to each that can be displayed in a - * slideshow. + * collection, returning renderable image URLs to each that can be displayed in + * a slideshow. * - * Each time it resolves with a pair of URLs (a {@link RenderableImageURLPair}), - * one for the next slideshow image, and one for the slideshow image that will - * be displayed after that. It also pre-fetches the next to next URL each time. + * Each time it resolves with a (data) URL for the slideshow image to show next. * * If there are no renderable image in the collection, the sequence ends by * yielding `{done: true}`. 
@@ -73,37 +49,29 @@ type RenderableImageURLPair = [url: string, nextURL: string]; * * The generator ignores errors in the fetching and decoding of individual * images in the collection, skipping the erroneous ones and moving onward to - * the next one. It will however throw if there are errors when getting the - * collection itself. This can happen both the first time, or when we are about - * to loop around to the start of the collection. + * the next one. + * + * - It will however throw if there are errors when getting the collection + * itself. This can happen both the first time, or when we are about to loop + * around to the start of the collection. + * + * - It will also throw if three consecutive image fail. * * @param castData The collection to show and credentials to fetch the files * within it. */ -export const renderableImageURLs = async function* (castData: CastData) { +export const imageURLGenerator = async function* (castData: CastData) { const { collectionKey, castToken } = castData; /** - * We have a sliding window of four URLs, with the `urls[1]` being the one - * that is the one currently being shown in the slideshow. - * - * At each step, we shift the window towards the right by shifting out the - * leftmost (oldest) `urls[0]`, and adding a new one at the end. - * - * We can revoke url[0] when we shift it out because we know it is not being - * used anymore. - * - * We need to special case the first two renders to avoid revoking the - * initial URLs that are displayed the first two times. This results in a - * memory leak of the very first objectURL that we display. + * Keep a FIFO queue of the URLs that we've vended out recently so that we + * can revoke those that are not being shown anymore. */ - const urls: string[] = [""]; - let i = 0; + const previousURLs: string[] = []; + + /** Number of milliseconds to keep the slide on the screen. 
*/ + const slideDuration = 12000; /* 12 s */ - /** - * Number of milliseconds to keep the slide on the screen. - */ - const slideDuration = 10000; /* 10 s */ /** * Time when we last yielded. * @@ -112,10 +80,18 @@ export const renderableImageURLs = async function* (castData: CastData) { */ let lastYieldTime = Date.now(); - // The first time around advance the lastYieldTime into the future so that + // The first time around regress the lastYieldTime into the past so that // we don't wait around too long for the first slide (we do want to wait a // bit, for the user to see the checkmark animation as reassurance). - lastYieldTime += 7500; /* 7.5 s */ + lastYieldTime -= slideDuration - 2500; /* wait at most 2.5 s */ + + /** + * Number of time we have caught an exception while trying to generate an + * image URL for individual files. + * + * When this happens three times consecutively, we throw. + */ + let consecutiveFailures = 0; while (true) { const encryptedFiles = shuffled( @@ -127,33 +103,50 @@ export const renderableImageURLs = async function* (castData: CastData) { for (const encryptedFile of encryptedFiles) { const file = await decryptEnteFile(encryptedFile, collectionKey); - if (!isFileEligibleForCast(file)) continue; + if (!isFileEligible(file)) continue; + let url: string; try { - urls.push(await createRenderableURL(castToken, file)); + url = await createRenderableURL(castToken, file); + consecutiveFailures = 0; haveEligibleFiles = true; } catch (e) { + consecutiveFailures += 1; + // 1, 2, bang! + if (consecutiveFailures == 3) throw e; + + if (e instanceof ApiError && e.httpStatusCode == 401) { + // The token has expired. This can happen, e.g., if the user + // opens the dialog to cast again, causing the client to + // invalidate existing tokens. + // + // Rethrow the error, which will bring us back to the + // pairing page. 
+ throw e; + } + + // On all other errors (including temporary network issues), log.error("Skipping unrenderable file", e); + await wait(100); /* Breathe */ continue; } - if (urls.length < 4) continue; + // The last element of previousURLs is the URL that is currently + // being shown on screen. + // + // The last to last element is the one that was shown prior to that, + // and now can be safely revoked. + if (previousURLs.length > 1) + URL.revokeObjectURL(previousURLs.shift()); - const oldestURL = urls.shift(); - if (oldestURL && i !== 1) URL.revokeObjectURL(oldestURL); - i += 1; - - const urlPair: RenderableImageURLPair = [ - ensure(urls[0]), - ensure(urls[1]), - ]; + previousURLs.push(url); const elapsedTime = Date.now() - lastYieldTime; if (elapsedTime > 0 && elapsedTime < slideDuration) await wait(slideDuration - elapsedTime); lastYieldTime = Date.now(); - yield urlPair; + yield url; } // This collection does not have any files that we can show. @@ -172,7 +165,7 @@ const getEncryptedCollectionFiles = async ( ): Promise => { let files: EncryptedEnteFile[] = []; let sinceTime = 0; - let resp; + let resp: AxiosResponse; do { resp = await HTTPService.get( `${getEndpoint()}/cast/diff`, @@ -256,12 +249,19 @@ const decryptEnteFile = async ( return file; }; -const isFileEligibleForCast = (file: EnteFile) => { +const isFileEligible = (file: EnteFile) => { if (!isImageOrLivePhoto(file)) return false; if (file.info.fileSize > 100 * 1024 * 1024) return false; + // This check is fast but potentially incorrect because in practice we do + // encounter files that are incorrectly named and have a misleading + // extension. To detect the actual type, we need to sniff the MIME type, but + // that requires downloading and decrypting the file first. const [, extension] = nameAndExtension(file.metadata.title); - if (isNonWebImageFileExtension(extension)) return false; + if (isNonWebImageFileExtension(extension)) { + // Of the known non-web types, we support HEIC. 
+ return isHEICExtension(extension); + } return true; }; @@ -271,6 +271,12 @@ const isImageOrLivePhoto = (file: EnteFile) => { return fileType == FILE_TYPE.IMAGE || fileType == FILE_TYPE.LIVE_PHOTO; }; +export const heicToJPEG = async (heicBlob: Blob) => { + let worker = heicWorker; + if (!worker) heicWorker = worker = createHEICConvertComlinkWorker(); + return await (await worker.remote).heicToJPEG(heicBlob); +}; + /** * Create and return a new data URL that can be used to show the given * {@link file} in our slideshow image viewer. @@ -278,27 +284,50 @@ const isImageOrLivePhoto = (file: EnteFile) => { * Once we're done showing the file, the URL should be revoked using * {@link URL.revokeObjectURL} to free up browser resources. */ -const createRenderableURL = async (castToken: string, file: EnteFile) => - URL.createObjectURL(await renderableImageBlob(castToken, file)); +const createRenderableURL = async (castToken: string, file: EnteFile) => { + const imageBlob = await renderableImageBlob(castToken, file); + return URL.createObjectURL(imageBlob); +}; const renderableImageBlob = async (castToken: string, file: EnteFile) => { - const fileName = file.metadata.title; - let blob = await downloadFile(castToken, file); - if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - const { imageData } = await decodeLivePhoto(fileName, blob); + const shouldUseThumbnail = isChromecast(); + + let blob = await downloadFile(castToken, file, shouldUseThumbnail); + + let fileName = file.metadata.title; + if (!shouldUseThumbnail && file.metadata.fileType == FILE_TYPE.LIVE_PHOTO) { + const { imageData, imageFileName } = await decodeLivePhoto( + fileName, + blob, + ); + fileName = imageFileName; blob = new Blob([imageData]); } + + // We cannot rely on the file's extension to detect the file type, some + // files are incorrectly named. So use a MIME type sniffer first, but if + // that fails than fallback to the extension. 
const mimeType = await detectMediaMIMEType(new File([blob], fileName)); if (!mimeType) throw new Error(`Could not detect MIME type for file ${fileName}`); + + if (mimeType == "image/heif" || mimeType == "image/heic") + blob = await heicToJPEG(blob); + return new Blob([blob], { type: mimeType }); }; -const downloadFile = async (castToken: string, file: EnteFile) => { +const downloadFile = async ( + castToken: string, + file: EnteFile, + shouldUseThumbnail: boolean, +) => { if (!isImageOrLivePhoto(file)) throw new Error("Can only cast images and live photos"); - const url = getCastFileURL(file.id); + const url = shouldUseThumbnail + ? getCastThumbnailURL(file.id) + : getCastFileURL(file.id); const resp = await HTTPService.get( url, null, @@ -312,7 +341,11 @@ const downloadFile = async (castToken: string, file: EnteFile) => { const cryptoWorker = await ComlinkCryptoWorker.getInstance(); const decrypted = await cryptoWorker.decryptFile( new Uint8Array(resp.data), - await cryptoWorker.fromB64(file.file.decryptionHeader), + await cryptoWorker.fromB64( + shouldUseThumbnail + ? file.thumbnail.decryptionHeader + : file.file.decryptionHeader, + ), file.key, ); return new Response(decrypted).blob(); diff --git a/web/apps/cast/src/utils/cast-receiver.tsx b/web/apps/cast/src/utils/cast-receiver.tsx deleted file mode 100644 index 666a085edc..0000000000 --- a/web/apps/cast/src/utils/cast-receiver.tsx +++ /dev/null @@ -1,32 +0,0 @@ -/// - -export type Cast = typeof cast; - -let _cast: Cast | undefined; -let _loader: Promise | undefined; - -/** - * Load the Chromecast Web Receiver SDK and return a reference to the `cast` - * global object that the SDK attaches to the window. - * - * Calling this function multiple times is fine, once the Chromecast SDK is - * loaded it'll thereafter return the reference to the same object always. 
- * - * https://developers.google.com/cast/docs/web_receiver/basic - */ -export const castReceiverLoadingIfNeeded = async (): Promise => { - if (_cast) return _cast; - if (_loader) return await _loader; - - _loader = new Promise((resolve) => { - const script = document.createElement("script"); - script.src = - "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; - - script.addEventListener("load", () => resolve(cast)); - document.body.appendChild(script); - }); - const c = await _loader; - _cast = c; - return c; -}; diff --git a/web/apps/payments/.env b/web/apps/payments/.env new file mode 100644 index 0000000000..3f3b1cc9ae --- /dev/null +++ b/web/apps/payments/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/photos/.env b/web/apps/photos/.env index a039e91055..978c677769 100644 --- a/web/apps/photos/.env +++ b/web/apps/photos/.env @@ -88,3 +88,5 @@ # NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON=`cat path/to/expected.json` yarn dev # # NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON = {} + +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/photos/package.json b/web/apps/photos/package.json index 1196b4ddf7..8d515c07cd 100644 --- a/web/apps/photos/package.json +++ b/web/apps/photos/package.json @@ -16,28 +16,25 @@ "chrono-node": "^2.2.6", "date-fns": "^2", "debounce": "^2.0.0", - "density-clustering": "^1.3.0", "eventemitter3": "^4.0.7", "exifr": "^7.1.3", "fast-srp-hap": "^2.0.4", "ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm", "formik": "^2.1.5", "hdbscan": "0.0.1-alpha.5", - "heic-convert": "^2.0.0", "idb": "^7.1.1", "leaflet": "^1.9.4", "leaflet-defaulticon-compatibility": "^0.1.1", "localforage": "^1.9.0", "memoize-one": "^6.0.0", - "mime-types": "^2.1.35", - "ml-matrix": "^6.10.4", + "ml-matrix": "^6.11", "otpauth": "^9.0.2", "p-debounce": "^4.0.0", "p-queue": "^7.1.0", "photoswipe": "file:./thirdparty/photoswipe", "piexifjs": "^1.0.6", "pure-react-carousel": "^1.30.1", - "react-dropzone": "^11.2.4", + "react-dropzone": "^14.2", 
"react-otp-input": "^2.3.1", "react-select": "^4.3.1", "react-top-loading-bar": "^2.0.1", @@ -45,7 +42,7 @@ "react-window": "^1.8.6", "sanitize-filename": "^1.6.3", "similarity-transformation": "^0.0.1", - "transformation-matrix": "^2.15.0", + "transformation-matrix": "^2.16", "uuid": "^9.0.1", "vscode-uri": "^3.0.7", "xml-js": "^1.6.11", diff --git a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx index 3d9d061663..8b92f1cbb1 100644 --- a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx +++ b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx @@ -32,7 +32,11 @@ declare global { } } -export default function AlbumCastDialog(props: Props) { +export default function AlbumCastDialog({ + show, + onHide, + currentCollection, +}: Props) { const [view, setView] = useState< "choose" | "auto" | "pin" | "auto-cast-error" >("choose"); @@ -51,7 +55,7 @@ export default function AlbumCastDialog(props: Props) { ) => { try { await doCast(value.trim()); - props.onHide(); + onHide(); } catch (e) { const error = e as Error; let fieldError: string; @@ -80,8 +84,8 @@ export default function AlbumCastDialog(props: Props) { // ok, they exist. let's give them the good stuff. 
const payload = JSON.stringify({ castToken: castToken, - collectionID: props.currentCollection.id, - collectionKey: props.currentCollection.key, + collectionID: currentCollection.id, + collectionKey: currentCollection.key, }); const encryptedPayload = await boxSeal(btoa(payload), tvPublicKeyB64); @@ -89,7 +93,7 @@ export default function AlbumCastDialog(props: Props) { await castGateway.publishCastPayload( pin, encryptedPayload, - props.currentCollection.id, + currentCollection.id, castToken, ); }; @@ -119,7 +123,7 @@ export default function AlbumCastDialog(props: Props) { doCast(code) .then(() => { setView("choose"); - props.onHide(); + onHide(); }) .catch((e) => { setView("auto-cast-error"); @@ -129,8 +133,9 @@ export default function AlbumCastDialog(props: Props) { }, ); + const collectionID = currentCollection.id; session - .sendMessage("urn:x-cast:pair-request", {}) + .sendMessage("urn:x-cast:pair-request", { collectionID }) .then(() => { log.debug(() => "Message sent successfully"); }) @@ -142,16 +147,16 @@ export default function AlbumCastDialog(props: Props) { }, [view]); useEffect(() => { - if (props.show) { + if (show) { castGateway.revokeAllTokens(); } - }, [props.show]); + }, [show]); return ( {t("LEAVE_ALBUM")} + } + onClick={handleCollectionAction( + CollectionActions.SHOW_ALBUM_CAST_DIALOG, + false, + )} + > + {t("CAST_ALBUM_TO_TV")} + ); } diff --git a/web/apps/photos/src/components/DeleteAccountModal.tsx b/web/apps/photos/src/components/DeleteAccountModal.tsx index 744fbf3129..d6eb3a0373 100644 --- a/web/apps/photos/src/components/DeleteAccountModal.tsx +++ b/web/apps/photos/src/components/DeleteAccountModal.tsx @@ -1,5 +1,4 @@ import log from "@/next/log"; -import { logoutUser } from "@ente/accounts/services/user"; import DialogBoxV2 from "@ente/shared/components/DialogBoxV2"; import EnteButton from "@ente/shared/components/EnteButton"; import { DELETE_ACCOUNT_EMAIL } from "@ente/shared/constants/urls"; @@ -43,7 +42,8 @@ const getReasonOptions = 
(): DropdownOption[] => { }; const DeleteAccountModal = ({ open, onClose }: Iprops) => { - const { setDialogBoxAttributesV2, isMobile } = useContext(AppContext); + const { setDialogBoxAttributesV2, isMobile, logout } = + useContext(AppContext); const { authenticateUser } = useContext(GalleryContext); const [loading, setLoading] = useState(false); const deleteAccountChallenge = useRef(); @@ -145,7 +145,7 @@ const DeleteAccountModal = ({ open, onClose }: Iprops) => { ); const { reason, feedback } = reasonAndFeedbackRef.current; await deleteAccount(decryptedChallenge, reason, feedback); - logoutUser(); + logout(); } catch (e) { log.error("solveChallengeAndDeleteAccount failed", e); somethingWentWrong(); diff --git a/web/apps/photos/src/components/Directory/index.tsx b/web/apps/photos/src/components/Directory/index.tsx index a995811349..2fc4be58ea 100644 --- a/web/apps/photos/src/components/Directory/index.tsx +++ b/web/apps/photos/src/components/Directory/index.tsx @@ -1,8 +1,7 @@ import { ensureElectron } from "@/next/electron"; import log from "@/next/log"; import LinkButton from "@ente/shared/components/LinkButton"; -import { Tooltip } from "@mui/material"; -import { styled } from "@mui/material/styles"; +import { Tooltip, styled } from "@mui/material"; const DirectoryPathContainer = styled(LinkButton)( ({ width }) => ` diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx index 42edddbf11..c4e1f5854f 100644 --- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx @@ -1,24 +1,6 @@ +import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; -import { - Backdrop, - Box, - CircularProgress, - IconButton, - Tab, - Tabs, - Typography, -} from "@mui/material"; -import { - Dispatch, - MutableRefObject, - SetStateAction, - createContext, - useContext, - 
useEffect, - useRef, - useState, -} from "react"; - +import { ensure } from "@/utils/ensure"; import { CenteredFlex, HorizontalFlex, @@ -32,6 +14,15 @@ import CropIcon from "@mui/icons-material/Crop"; import CropOriginalIcon from "@mui/icons-material/CropOriginal"; import DownloadIcon from "@mui/icons-material/Download"; import MenuIcon from "@mui/icons-material/Menu"; +import { + Backdrop, + Box, + CircularProgress, + IconButton, + Tab, + Tabs, + Typography, +} from "@mui/material"; import { EnteDrawer } from "components/EnteDrawer"; import { EnteMenuItem } from "components/Menu/EnteMenuItem"; import MenuItemDivider from "components/Menu/MenuItemDivider"; @@ -39,10 +30,18 @@ import { MenuItemGroup } from "components/Menu/MenuItemGroup"; import MenuSectionTitle from "components/Menu/MenuSectionTitle"; import { CORNER_THRESHOLD, FILTER_DEFAULT_VALUES } from "constants/photoEditor"; import { t } from "i18next"; -import mime from "mime-types"; import { AppContext } from "pages/_app"; +import { + Dispatch, + MutableRefObject, + SetStateAction, + createContext, + useContext, + useEffect, + useRef, + useState, +} from "react"; import { getLocalCollections } from "services/collectionService"; -import { detectFileTypeInfo } from "services/detect-type"; import downloadManager from "services/download"; import uploadManager from "services/upload/uploadManager"; import { EnteFile } from "types/file"; @@ -72,13 +71,6 @@ export const ImageEditorOverlayContext = createContext( type OperationTab = "crop" | "transform" | "colours"; -const getEditedFileName = (fileName: string) => { - const fileNameParts = fileName.split("."); - const extension = fileNameParts.pop(); - const editedFileName = `${fileNameParts.join(".")}-edited.${extension}`; - return editedFileName; -}; - export interface CropBoxProps { x: number; y: number; @@ -94,6 +86,10 @@ const ImageEditorOverlay = (props: IProps) => { const parentRef = useRef(null); const [fileURL, setFileURL] = useState(""); + // The MIME type 
of the original file that we are editing. + // + // It _should_ generally be present, but it is not guaranteed to be. + const [mimeType, setMIMEType] = useState(); const [currentRotationAngle, setCurrentRotationAngle] = useState(0); @@ -372,6 +368,10 @@ const ImageEditorOverlay = (props: IProps) => { ); img.src = srcURLs.url as string; setFileURL(srcURLs.url as string); + // We're casting the srcURLs.url to string above, i.e. this code + // is not meant to run for the live photos scenario. For images, + // we usually will have the mime type. + setMIMEType(srcURLs.mimeType); } else { img.src = fileURL; } @@ -430,37 +430,6 @@ const ImageEditorOverlay = (props: IProps) => { loadCanvas(); }, [props.show, props.file]); - const exportCanvasToBlob = (): Promise => { - try { - const canvas = originalSizeCanvasRef.current; - if (!canvas) return; - - const mimeType = mime.lookup(props.file.metadata.title); - - const image = new Image(); - image.src = canvas.toDataURL(); - - const context = canvas.getContext("2d"); - if (!context) return; - return new Promise((resolve) => { - canvas.toBlob(resolve, mimeType); - }); - } catch (e) { - log.error("Error exporting canvas to blob", e); - throw e; - } - }; - - const getEditedFile = async () => { - const blob = await exportCanvasToBlob(); - if (!blob) { - throw Error("no blob"); - } - const editedFileName = getEditedFileName(props.file.metadata.title); - const editedFile = new File([blob], editedFileName); - return editedFile; - }; - const handleClose = () => { setFileURL(null); props.onClose(); @@ -480,25 +449,23 @@ const ImageEditorOverlay = (props: IProps) => { return <>; } - const downloadEditedPhoto = async () => { - try { - if (!canvasRef.current) return; + const getEditedFile = async () => { + const originalSizeCanvas = ensure(originalSizeCanvasRef.current); + const originalFileName = props.file.metadata.title; + return canvasToFile(originalSizeCanvas, originalFileName, mimeType); + }; - const editedFile = await 
getEditedFile(); - const fileType = await detectFileTypeInfo(editedFile); - const tempImgURL = URL.createObjectURL( - new Blob([editedFile], { type: fileType.mimeType }), - ); - downloadUsingAnchor(tempImgURL, editedFile.name); - } catch (e) { - log.error("Error downloading edited photo", e); - } + const downloadEditedPhoto = async () => { + if (!canvasRef.current) return; + + const f = await getEditedFile(); + // Revokes the URL after downloading. + downloadUsingAnchor(URL.createObjectURL(f), f.name); }; const saveCopyToEnte = async () => { + if (!canvasRef.current) return; try { - if (!canvasRef.current) return; - const collections = await getLocalCollections(); const collection = collections.find( @@ -678,7 +645,7 @@ const ImageEditorOverlay = (props: IProps) => { setCurrentTab(value); }} > - + { }; export default ImageEditorOverlay; + +/** + * Create a new {@link File} with the contents of the given canvas. + * + * @param canvas A {@link HTMLCanvasElement} whose contents we want to download + * as a file. + * + * @param originalFileName The name of the original file which was used to seed + * the canvas. This will be used as a base name for the generated file (with an + * "-edited" suffix). + * + * @param originalMIMEType The MIME type of the original file which was used to + * seed the canvas. When possible, we try to download a file in the same format, + * but this is not guaranteed and depends on browser support. If the original + * MIME type can not be preserved, a PNG file will be downloaded. + */ +const canvasToFile = async ( + canvas: HTMLCanvasElement, + originalFileName: string, + originalMIMEType?: string, +): Promise => { + const image = new Image(); + image.src = canvas.toDataURL(); + + // Browsers are required to support "image/png". They may also support + // "image/jpeg" and "image/webp". Potentially they may even support more + // formats, but to keep this scoped we limit to these three. 
+ let [mimeType, extension] = ["image/png", "png"]; + switch (originalMIMEType) { + case "image/jpeg": + mimeType = originalMIMEType; + extension = "jpeg"; + break; + case "image/webp": + mimeType = originalMIMEType; + extension = "webp"; + break; + default: + break; + } + + const blob = ensure( + await new Promise((resolve) => canvas.toBlob(resolve, mimeType)), + ); + + const [originalName] = nameAndExtension(originalFileName); + const fileName = `${originalName}-edited.${extension}`; + + log.debug(() => ({ a: "canvas => file", blob, type: blob.type, mimeType })); + + return new File([blob], fileName); +}; diff --git a/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx b/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx index 40de098f5e..00b8979d5a 100644 --- a/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx +++ b/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx @@ -1,5 +1,4 @@ -import { Paper } from "@mui/material"; -import { styled } from "@mui/material/styles"; +import { Paper, styled } from "@mui/material"; export const LivePhotoBtnContainer = styled(Paper)` border-radius: 4px; diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx index 6ebc0d9422..3b739520e2 100644 --- a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx @@ -5,7 +5,7 @@ import { t } from "i18next"; import { AppContext } from "pages/_app"; import { useContext } from "react"; import { components } from "react-select"; -import { IndexStatus } from "types/machineLearning/ui"; +import { IndexStatus } from "services/face/db"; import { Suggestion, SuggestionType } from "types/search"; const { Menu } = components; diff --git 
a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx index 3f737b3e0c..62d4a1f434 100644 --- a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx @@ -9,6 +9,7 @@ import { useCallback, useContext, useEffect, useRef, useState } from "react"; import { components } from "react-select"; import AsyncSelect from "react-select/async"; import { InputActionMeta } from "react-select/src/types"; +import type { Person } from "services/face/people"; import { City } from "services/locationSearchService"; import { getAutoCompleteSuggestions, @@ -17,7 +18,6 @@ import { import { Collection } from "types/collection"; import { LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; -import { Person } from "types/machineLearning"; import { ClipSearchScores, DateValue, diff --git a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx index 6dc9b851e9..ed03bc9175 100644 --- a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx +++ b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx @@ -1,18 +1,17 @@ +import { VerticallyCenteredFlex } from "@ente/shared/components/Container"; import ChevronRight from "@mui/icons-material/ChevronRight"; import ScienceIcon from "@mui/icons-material/Science"; import { Box, DialogProps, Stack, Typography } from "@mui/material"; import { EnteDrawer } from "components/EnteDrawer"; +import { EnteMenuItem } from "components/Menu/EnteMenuItem"; +import { MenuItemGroup } from "components/Menu/MenuItemGroup"; import MenuSectionTitle from "components/Menu/MenuSectionTitle"; import Titlebar from "components/Titlebar"; import { MLSearchSettings } from "components/ml/MLSearchSettings"; import { t } from "i18next"; -import { useContext, useEffect, useState } from "react"; - -import 
{ VerticallyCenteredFlex } from "@ente/shared/components/Container"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { MenuItemGroup } from "components/Menu/MenuItemGroup"; import isElectron from "is-electron"; import { AppContext } from "pages/_app"; +import { useContext, useEffect, useState } from "react"; import { CLIPIndexingStatus, clipService } from "services/clip-service"; import { formatNumber } from "utils/number/format"; diff --git a/web/apps/photos/src/components/Sidebar/DebugSection.tsx b/web/apps/photos/src/components/Sidebar/DebugSection.tsx deleted file mode 100644 index e336374030..0000000000 --- a/web/apps/photos/src/components/Sidebar/DebugSection.tsx +++ /dev/null @@ -1,69 +0,0 @@ -import log from "@/next/log"; -import { savedLogs } from "@/next/log-web"; -import { downloadAsFile } from "@ente/shared/utils"; -import Typography from "@mui/material/Typography"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { t } from "i18next"; -import { AppContext } from "pages/_app"; -import { useContext, useEffect, useState } from "react"; -import { Trans } from "react-i18next"; -import { isInternalUser } from "utils/user"; -import { testUpload } from "../../../tests/upload.test"; - -export default function DebugSection() { - const appContext = useContext(AppContext); - const [appVersion, setAppVersion] = useState(); - - const electron = globalThis.electron; - - useEffect(() => { - electron?.appVersion().then((v) => setAppVersion(v)); - }); - - const confirmLogDownload = () => - appContext.setDialogMessage({ - title: t("DOWNLOAD_LOGS"), - content: , - proceed: { - text: t("DOWNLOAD"), - variant: "accent", - action: downloadLogs, - }, - close: { - text: t("CANCEL"), - }, - }); - - const downloadLogs = () => { - log.info("Downloading logs"); - if (electron) electron.openLogDirectory(); - else downloadAsFile(`debug_logs_${Date.now()}.txt`, savedLogs()); - }; - - return ( - <> - - {appVersion && ( - - {appVersion} - - )} 
- {isInternalUser() && ( - - )} - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/DisableMap.tsx b/web/apps/photos/src/components/Sidebar/DisableMap.tsx deleted file mode 100644 index ef793166e7..0000000000 --- a/web/apps/photos/src/components/Sidebar/DisableMap.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import { Box, Button, Stack, Typography } from "@mui/material"; -import Titlebar from "components/Titlebar"; -import { t } from "i18next"; -import { Trans } from "react-i18next"; - -export default function EnableMap({ onClose, disableMap, onRootClose }) { - return ( - - - - - - - - - - - - - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/EnableMap.tsx b/web/apps/photos/src/components/Sidebar/EnableMap.tsx deleted file mode 100644 index 868485af0e..0000000000 --- a/web/apps/photos/src/components/Sidebar/EnableMap.tsx +++ /dev/null @@ -1,43 +0,0 @@ -import { Box, Button, Link, Stack, Typography } from "@mui/material"; -import Titlebar from "components/Titlebar"; -import { t } from "i18next"; -import { Trans } from "react-i18next"; - -export const OPEN_STREET_MAP_LINK = "https://www.openstreetmap.org/"; -export default function EnableMap({ onClose, enableMap, onRootClose }) { - return ( - - - - - {" "} - - - ), - }} - /> - - - - - - - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/ExitSection.tsx b/web/apps/photos/src/components/Sidebar/ExitSection.tsx deleted file mode 100644 index 6f9492b779..0000000000 --- a/web/apps/photos/src/components/Sidebar/ExitSection.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import { t } from "i18next"; -import { useContext, useState } from "react"; - -import { logoutUser } from "@ente/accounts/services/user"; -import DeleteAccountModal from "components/DeleteAccountModal"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { AppContext } from "pages/_app"; - -export default function ExitSection() { - const { setDialogMessage } = useContext(AppContext); - - const [deleteAccountModalView, 
setDeleteAccountModalView] = useState(false); - - const closeDeleteAccountModal = () => setDeleteAccountModalView(false); - const openDeleteAccountModal = () => setDeleteAccountModalView(true); - - const confirmLogout = () => { - setDialogMessage({ - title: t("LOGOUT_MESSAGE"), - proceed: { - text: t("LOGOUT"), - action: logoutUser, - variant: "critical", - }, - close: { text: t("CANCEL") }, - }); - }; - - return ( - <> - - - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/Header.tsx b/web/apps/photos/src/components/Sidebar/Header.tsx deleted file mode 100644 index 4adb12fe7c..0000000000 --- a/web/apps/photos/src/components/Sidebar/Header.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import { SpaceBetweenFlex } from "@ente/shared/components/Container"; -import { EnteLogo } from "@ente/shared/components/EnteLogo"; -import CloseIcon from "@mui/icons-material/Close"; -import { IconButton } from "@mui/material"; - -interface IProps { - closeSidebar: () => void; -} - -export default function HeaderSection({ closeSidebar }: IProps) { - return ( - - - - - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/HelpSection.tsx b/web/apps/photos/src/components/Sidebar/HelpSection.tsx deleted file mode 100644 index 4cc97c414a..0000000000 --- a/web/apps/photos/src/components/Sidebar/HelpSection.tsx +++ /dev/null @@ -1,62 +0,0 @@ -import { t } from "i18next"; -import { useContext } from "react"; - -import EnteSpinner from "@ente/shared/components/EnteSpinner"; -import { Typography } from "@mui/material"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { NoStyleAnchor } from "components/pages/sharedAlbum/GoToEnte"; -import isElectron from "is-electron"; -import { AppContext } from "pages/_app"; -import { GalleryContext } from "pages/gallery"; -import exportService from "services/export"; -import { openLink } from "utils/common"; -import { getDownloadAppMessage } from "utils/ui"; - -export default function HelpSection() { - const { 
setDialogMessage } = useContext(AppContext); - const { openExportModal } = useContext(GalleryContext); - - const openRoadmap = () => - openLink("https://github.com/ente-io/ente/discussions", true); - - const contactSupport = () => openLink("mailto:support@ente.io", true); - - function openExport() { - if (isElectron()) { - openExportModal(); - } else { - setDialogMessage(getDownloadAppMessage()); - } - } - - return ( - <> - - - - {t("SUPPORT")} - - - } - variant="secondary" - /> - - ) - } - variant="secondary" - /> - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/MapSetting.tsx b/web/apps/photos/src/components/Sidebar/MapSetting.tsx new file mode 100644 index 0000000000..430f7667f5 --- /dev/null +++ b/web/apps/photos/src/components/Sidebar/MapSetting.tsx @@ -0,0 +1,226 @@ +import log from "@/next/log"; +import { + Box, + Button, + DialogProps, + Link, + Stack, + Typography, +} from "@mui/material"; +import { EnteDrawer } from "components/EnteDrawer"; +import { EnteMenuItem } from "components/Menu/EnteMenuItem"; +import { MenuItemGroup } from "components/Menu/MenuItemGroup"; +import Titlebar from "components/Titlebar"; +import { t } from "i18next"; +import { AppContext } from "pages/_app"; +import { useContext, useEffect, useState } from "react"; +import { Trans } from "react-i18next"; +import { getMapEnabledStatus } from "services/userService"; + +export default function MapSettings({ open, onClose, onRootClose }) { + const { mapEnabled, updateMapEnabled } = useContext(AppContext); + const [modifyMapEnabledView, setModifyMapEnabledView] = useState(false); + + const openModifyMapEnabled = () => setModifyMapEnabledView(true); + const closeModifyMapEnabled = () => setModifyMapEnabledView(false); + + useEffect(() => { + if (!open) { + return; + } + const main = async () => { + const remoteMapValue = await getMapEnabledStatus(); + updateMapEnabled(remoteMapValue); + }; + main(); + }, [open]); + + const handleRootClose = () => { + onClose(); + onRootClose(); 
+ }; + + const handleDrawerClose: DialogProps["onClose"] = (_, reason) => { + if (reason === "backdropClick") { + handleRootClose(); + } else { + onClose(); + } + }; + + return ( + + + + + + + + + + + + + + + + + ); +} + +const ModifyMapEnabled = ({ open, onClose, onRootClose, mapEnabled }) => { + const { somethingWentWrong, updateMapEnabled } = useContext(AppContext); + + const disableMap = async () => { + try { + await updateMapEnabled(false); + onClose(); + } catch (e) { + log.error("Disable Map failed", e); + somethingWentWrong(); + } + }; + + const enableMap = async () => { + try { + await updateMapEnabled(true); + onClose(); + } catch (e) { + log.error("Enable Map failed", e); + somethingWentWrong(); + } + }; + + const handleRootClose = () => { + onClose(); + onRootClose(); + }; + + const handleDrawerClose: DialogProps["onClose"] = (_, reason) => { + if (reason === "backdropClick") { + handleRootClose(); + } else { + onClose(); + } + }; + + return ( + + + {mapEnabled ? ( + + ) : ( + + )} + + + ); +}; + +function EnableMap({ onClose, enableMap, onRootClose }) { + return ( + + + + + {" "} + + + ), + }} + /> + + + + + + + + + ); +} + +function DisableMap({ onClose, disableMap, onRootClose }) { + return ( + + + + + + + + + + + + + + + ); +} diff --git a/web/apps/photos/src/components/Sidebar/MapSetting/ModifyMapEnabled.tsx b/web/apps/photos/src/components/Sidebar/MapSetting/ModifyMapEnabled.tsx deleted file mode 100644 index 0a4c0b9dcc..0000000000 --- a/web/apps/photos/src/components/Sidebar/MapSetting/ModifyMapEnabled.tsx +++ /dev/null @@ -1,76 +0,0 @@ -import log from "@/next/log"; -import { Box, DialogProps } from "@mui/material"; -import { EnteDrawer } from "components/EnteDrawer"; -import { AppContext } from "pages/_app"; -import { useContext } from "react"; -import DisableMap from "../DisableMap"; -import EnableMap from "../EnableMap"; - -const ModifyMapEnabled = ({ open, onClose, onRootClose, mapEnabled }) => { - const { somethingWentWrong, 
updateMapEnabled } = useContext(AppContext); - - const disableMap = async () => { - try { - await updateMapEnabled(false); - onClose(); - } catch (e) { - log.error("Disable Map failed", e); - somethingWentWrong(); - } - }; - - const enableMap = async () => { - try { - await updateMapEnabled(true); - onClose(); - } catch (e) { - log.error("Enable Map failed", e); - somethingWentWrong(); - } - }; - - const handleRootClose = () => { - onClose(); - onRootClose(); - }; - - const handleDrawerClose: DialogProps["onClose"] = (_, reason) => { - if (reason === "backdropClick") { - handleRootClose(); - } else { - onClose(); - } - }; - - return ( - - - {mapEnabled ? ( - - ) : ( - - )} - - - ); -}; - -export default ModifyMapEnabled; diff --git a/web/apps/photos/src/components/Sidebar/MapSetting/index.tsx b/web/apps/photos/src/components/Sidebar/MapSetting/index.tsx deleted file mode 100644 index 5832baca55..0000000000 --- a/web/apps/photos/src/components/Sidebar/MapSetting/index.tsx +++ /dev/null @@ -1,82 +0,0 @@ -import { Box, DialogProps, Stack } from "@mui/material"; -import { EnteDrawer } from "components/EnteDrawer"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { MenuItemGroup } from "components/Menu/MenuItemGroup"; -import Titlebar from "components/Titlebar"; -import { t } from "i18next"; -import { AppContext } from "pages/_app"; -import { useContext, useEffect, useState } from "react"; -import { getMapEnabledStatus } from "services/userService"; -import ModifyMapEnabled from "./ModifyMapEnabled"; - -export default function MapSettings({ open, onClose, onRootClose }) { - const { mapEnabled, updateMapEnabled } = useContext(AppContext); - const [modifyMapEnabledView, setModifyMapEnabledView] = useState(false); - - const openModifyMapEnabled = () => setModifyMapEnabledView(true); - const closeModifyMapEnabled = () => setModifyMapEnabledView(false); - - useEffect(() => { - if (!open) { - return; - } - const main = async () => { - const remoteMapValue 
= await getMapEnabledStatus(); - updateMapEnabled(remoteMapValue); - }; - main(); - }, [open]); - - const handleRootClose = () => { - onClose(); - onRootClose(); - }; - - const handleDrawerClose: DialogProps["onClose"] = (_, reason) => { - if (reason === "backdropClick") { - handleRootClose(); - } else { - onClose(); - } - }; - - return ( - - - - - - - - - - - - - - - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/Preferences/index.tsx b/web/apps/photos/src/components/Sidebar/Preferences.tsx similarity index 62% rename from web/apps/photos/src/components/Sidebar/Preferences/index.tsx rename to web/apps/photos/src/components/Sidebar/Preferences.tsx index 04dc79a13c..8d4ae10588 100644 --- a/web/apps/photos/src/components/Sidebar/Preferences/index.tsx +++ b/web/apps/photos/src/components/Sidebar/Preferences.tsx @@ -1,13 +1,20 @@ +import { + getLocaleInUse, + setLocaleInUse, + supportedLocales, + type SupportedLocale, +} from "@/next/i18n"; import ChevronRight from "@mui/icons-material/ChevronRight"; import { Box, DialogProps, Stack } from "@mui/material"; +import DropdownInput from "components/DropdownInput"; import { EnteDrawer } from "components/EnteDrawer"; import { EnteMenuItem } from "components/Menu/EnteMenuItem"; import Titlebar from "components/Titlebar"; import { t } from "i18next"; +import { useRouter } from "next/router"; import { useState } from "react"; -import AdvancedSettings from "../AdvancedSettings"; -import MapSettings from "../MapSetting"; -import { LanguageSelector } from "./LanguageSelector"; +import AdvancedSettings from "./AdvancedSettings"; +import MapSettings from "./MapSetting"; export default function Preferences({ open, onClose, onRootClose }) { const [advancedSettingsView, setAdvancedSettingsView] = useState(false); @@ -76,3 +83,53 @@ export default function Preferences({ open, onClose, onRootClose }) { ); } + +const LanguageSelector = () => { + const locale = getLocaleInUse(); + // Enhancement: Is this full reload needed? 
+ const router = useRouter(); + + const updateCurrentLocale = (newLocale: SupportedLocale) => { + setLocaleInUse(newLocale); + router.reload(); + }; + + const options = supportedLocales.map((locale) => ({ + label: localeName(locale), + value: locale, + })); + + return ( + + ); +}; + +/** + * Human readable name for each supported locale. + */ +const localeName = (locale: SupportedLocale) => { + switch (locale) { + case "en-US": + return "English"; + case "fr-FR": + return "Français"; + case "de-DE": + return "Deutsch"; + case "zh-CN": + return "中文"; + case "nl-NL": + return "Nederlands"; + case "es-ES": + return "Español"; + case "pt-BR": + return "Brazilian Portuguese"; + case "ru-RU": + return "Russian"; + } +}; diff --git a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx deleted file mode 100644 index bdc0d5a84f..0000000000 --- a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx +++ /dev/null @@ -1,63 +0,0 @@ -import { - getLocaleInUse, - setLocaleInUse, - supportedLocales, - type SupportedLocale, -} from "@/next/i18n"; -import DropdownInput, { DropdownOption } from "components/DropdownInput"; -import { t } from "i18next"; -import { useRouter } from "next/router"; - -/** - * Human readable name for each supported locale - * - * TODO (MR): This names themselves should be localized. 
- */ -export const localeName = (locale: SupportedLocale) => { - switch (locale) { - case "en-US": - return "English"; - case "fr-FR": - return "Français"; - case "de-DE": - return "Deutsch"; - case "zh-CN": - return "中文"; - case "nl-NL": - return "Nederlands"; - case "es-ES": - return "Español"; - case "pt-BR": - return "Brazilian Portuguese"; - case "ru-RU": - return "Russian"; - } -}; - -const getLanguageOptions = (): DropdownOption[] => { - return supportedLocales.map((locale) => ({ - label: localeName(locale), - value: locale, - })); -}; - -export const LanguageSelector = () => { - const locale = getLocaleInUse(); - // Enhancement: Is this full reload needed? - const router = useRouter(); - - const updateCurrentLocale = (newLocale: SupportedLocale) => { - setLocaleInUse(newLocale); - router.reload(); - }; - - return ( - - ); -}; diff --git a/web/apps/photos/src/components/Sidebar/ShortcutSection.tsx b/web/apps/photos/src/components/Sidebar/ShortcutSection.tsx deleted file mode 100644 index dce298844e..0000000000 --- a/web/apps/photos/src/components/Sidebar/ShortcutSection.tsx +++ /dev/null @@ -1,102 +0,0 @@ -import { t } from "i18next"; -import { useContext, useEffect, useState } from "react"; - -import ArchiveOutlined from "@mui/icons-material/ArchiveOutlined"; -import CategoryIcon from "@mui/icons-material/Category"; -import DeleteOutline from "@mui/icons-material/DeleteOutline"; -import LockOutlined from "@mui/icons-material/LockOutlined"; -import VisibilityOff from "@mui/icons-material/VisibilityOff"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { - ARCHIVE_SECTION, - DUMMY_UNCATEGORIZED_COLLECTION, - TRASH_SECTION, -} from "constants/collection"; -import { GalleryContext } from "pages/gallery"; -import { getUncategorizedCollection } from "services/collectionService"; -import { CollectionSummaries } from "types/collection"; -interface Iprops { - closeSidebar: () => void; - collectionSummaries: CollectionSummaries; -} - -export 
default function ShortcutSection({ - closeSidebar, - collectionSummaries, -}: Iprops) { - const galleryContext = useContext(GalleryContext); - const [uncategorizedCollectionId, setUncategorizedCollectionID] = - useState(); - - useEffect(() => { - const main = async () => { - const unCategorizedCollection = await getUncategorizedCollection(); - if (unCategorizedCollection) { - setUncategorizedCollectionID(unCategorizedCollection.id); - } else { - setUncategorizedCollectionID(DUMMY_UNCATEGORIZED_COLLECTION); - } - }; - main(); - }, []); - - const openUncategorizedSection = () => { - galleryContext.setActiveCollectionID(uncategorizedCollectionId); - closeSidebar(); - }; - - const openTrashSection = () => { - galleryContext.setActiveCollectionID(TRASH_SECTION); - closeSidebar(); - }; - - const openArchiveSection = () => { - galleryContext.setActiveCollectionID(ARCHIVE_SECTION); - closeSidebar(); - }; - - const openHiddenSection = () => { - galleryContext.openHiddenSection(() => { - closeSidebar(); - }); - }; - - return ( - <> - } - onClick={openUncategorizedSection} - variant="captioned" - label={t("UNCATEGORIZED")} - subText={collectionSummaries - .get(uncategorizedCollectionId) - ?.fileCount.toString()} - /> - } - onClick={openArchiveSection} - variant="captioned" - label={t("ARCHIVE_SECTION_NAME")} - subText={collectionSummaries - .get(ARCHIVE_SECTION) - ?.fileCount.toString()} - /> - } - onClick={openHiddenSection} - variant="captioned" - label={t("HIDDEN")} - subIcon={} - /> - } - onClick={openTrashSection} - variant="captioned" - label={t("TRASH")} - subText={collectionSummaries - .get(TRASH_SECTION) - ?.fileCount.toString()} - /> - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/backgroundOverlay.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/backgroundOverlay.tsx deleted file mode 100644 index eb9c85f510..0000000000 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/backgroundOverlay.tsx +++ /dev/null @@ 
-1,11 +0,0 @@ -export function BackgroundOverlay() { - return ( - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/clickOverlay.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/clickOverlay.tsx deleted file mode 100644 index 7890558089..0000000000 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/clickOverlay.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import { FlexWrapper, Overlay } from "@ente/shared/components/Container"; -import ChevronRightIcon from "@mui/icons-material/ChevronRight"; -export function ClickOverlay({ onClick }) { - return ( - - - - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/index.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/index.tsx index 848792817a..514c43df81 100644 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/index.tsx +++ b/web/apps/photos/src/components/Sidebar/SubscriptionCard/index.tsx @@ -1,8 +1,7 @@ +import { FlexWrapper, Overlay } from "@ente/shared/components/Container"; +import ChevronRightIcon from "@mui/icons-material/ChevronRight"; import { Box, Skeleton } from "@mui/material"; import { UserDetails } from "types/user"; -import { BackgroundOverlay } from "./backgroundOverlay"; -import { ClickOverlay } from "./clickOverlay"; - import { SubscriptionCardContentOverlay } from "./contentOverlay"; const SUBSCRIPTION_CARD_SIZE = 152; @@ -32,3 +31,29 @@ export default function SubscriptionCard({ userDetails, onClick }: Iprops) { ); } + +function BackgroundOverlay() { + return ( + + ); +} + +function ClickOverlay({ onClick }) { + return ( + + + + + + ); +} diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/styledComponents.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/styledComponents.tsx index 4d0a15e9d6..90bea72ce7 100644 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/styledComponents.tsx +++ b/web/apps/photos/src/components/Sidebar/SubscriptionCard/styledComponents.tsx @@ 
-1,5 +1,5 @@ +import CircleIcon from "@mui/icons-material/Circle"; import { LinearProgress, styled } from "@mui/material"; -import { DotSeparator } from "../styledComponents"; export const Progressbar = styled(LinearProgress)(() => ({ ".MuiLinearProgress-bar": { @@ -13,6 +13,12 @@ Progressbar.defaultProps = { variant: "determinate", }; +const DotSeparator = styled(CircleIcon)` + font-size: 4px; + margin: 0 ${({ theme }) => theme.spacing(1)}; + color: inherit; +`; + export const LegendIndicator = styled(DotSeparator)` font-size: 8.71px; margin: 0; diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionStatus/index.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionStatus/index.tsx deleted file mode 100644 index 9ae19f640f..0000000000 --- a/web/apps/photos/src/components/Sidebar/SubscriptionStatus/index.tsx +++ /dev/null @@ -1,130 +0,0 @@ -import Box from "@mui/material/Box"; -import { t } from "i18next"; -import { GalleryContext } from "pages/gallery"; -import { MouseEventHandler, useContext, useMemo } from "react"; -import { Trans } from "react-i18next"; -import { UserDetails } from "types/user"; -import { - hasAddOnBonus, - hasExceededStorageQuota, - hasPaidSubscription, - hasStripeSubscription, - isOnFreePlan, - isSubscriptionActive, - isSubscriptionCancelled, - isSubscriptionPastDue, -} from "utils/billing"; - -import { Typography } from "@mui/material"; -import LinkButton from "components/pages/gallery/LinkButton"; -import billingService from "services/billingService"; -import { isFamilyAdmin, isPartOfFamily } from "utils/user/family"; - -export default function SubscriptionStatus({ - userDetails, -}: { - userDetails: UserDetails; -}) { - const { showPlanSelectorModal } = useContext(GalleryContext); - - const hasAMessage = useMemo(() => { - if (!userDetails) { - return false; - } - if ( - isPartOfFamily(userDetails.familyData) && - !isFamilyAdmin(userDetails.familyData) - ) { - return false; - } - if ( - 
hasPaidSubscription(userDetails.subscription) && - !isSubscriptionCancelled(userDetails.subscription) - ) { - return false; - } - return true; - }, [userDetails]); - - const handleClick = useMemo(() => { - const eventHandler: MouseEventHandler = (e) => { - e.stopPropagation(); - if (userDetails) { - if (isSubscriptionActive(userDetails.subscription)) { - if (hasExceededStorageQuota(userDetails)) { - showPlanSelectorModal(); - } - } else { - if ( - hasStripeSubscription(userDetails.subscription) && - isSubscriptionPastDue(userDetails.subscription) - ) { - billingService.redirectToCustomerPortal(); - } else { - showPlanSelectorModal(); - } - } - } - }; - return eventHandler; - }, [userDetails]); - - if (!hasAMessage) { - return <>; - } - - const messages = []; - if (!hasAddOnBonus(userDetails.bonusData)) { - if (isSubscriptionActive(userDetails.subscription)) { - if (isOnFreePlan(userDetails.subscription)) { - messages.push( - , - ); - } else if (isSubscriptionCancelled(userDetails.subscription)) { - messages.push( - t("RENEWAL_CANCELLED_SUBSCRIPTION_INFO", { - date: userDetails.subscription?.expiryTime, - }), - ); - } - } else { - messages.push( - , - }} - />, - ); - } - } - - if (hasExceededStorageQuota(userDetails) && messages.length === 0) { - messages.push( - , - }} - />, - ); - } - - return ( - - - {messages} - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx deleted file mode 100644 index 6b4a6f43d5..0000000000 --- a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx +++ /dev/null @@ -1,222 +0,0 @@ -import log from "@/next/log"; -import RecoveryKey from "@ente/shared/components/RecoveryKey"; -import { - ACCOUNTS_PAGES, - PHOTOS_PAGES as PAGES, -} from "@ente/shared/constants/pages"; -import TwoFactorModal from "components/TwoFactor/Modal"; -import { t } from "i18next"; -import { useRouter } from "next/router"; -import { AppContext } from "pages/_app"; -import { 
useContext, useState } from "react"; -// import mlIDbStorage from 'utils/storage/mlIDbStorage'; -import { - configurePasskeyRecovery, - isPasskeyRecoveryEnabled, -} from "@ente/accounts/services/passkey"; -import { APPS, CLIENT_PACKAGE_NAMES } from "@ente/shared/apps/constants"; -import ThemeSwitcher from "@ente/shared/components/ThemeSwitcher"; -import { getRecoveryKey } from "@ente/shared/crypto/helpers"; -import { - encryptToB64, - generateEncryptionKey, -} from "@ente/shared/crypto/internal/libsodium"; -import { getAccountsURL } from "@ente/shared/network/api"; -import { THEME_COLOR } from "@ente/shared/themes/constants"; -import { EnteMenuItem } from "components/Menu/EnteMenuItem"; -import { WatchFolder } from "components/WatchFolder"; -import isElectron from "is-electron"; -import { getAccountsToken } from "services/userService"; -import { getDownloadAppMessage } from "utils/ui"; -import { isInternalUser } from "utils/user"; -import Preferences from "./Preferences"; - -export default function UtilitySection({ closeSidebar }) { - const router = useRouter(); - const appContext = useContext(AppContext); - const { - setDialogMessage, - startLoading, - watchFolderView, - setWatchFolderView, - themeColor, - setThemeColor, - } = appContext; - - const [recoverModalView, setRecoveryModalView] = useState(false); - const [twoFactorModalView, setTwoFactorModalView] = useState(false); - const [preferencesView, setPreferencesView] = useState(false); - - const openPreferencesOptions = () => setPreferencesView(true); - const closePreferencesOptions = () => setPreferencesView(false); - - const openRecoveryKeyModal = () => setRecoveryModalView(true); - const closeRecoveryKeyModal = () => setRecoveryModalView(false); - - const openTwoFactorModal = () => setTwoFactorModalView(true); - const closeTwoFactorModal = () => setTwoFactorModalView(false); - - const openWatchFolder = () => { - if (isElectron()) { - setWatchFolderView(true); - } else { - 
setDialogMessage(getDownloadAppMessage()); - } - }; - const closeWatchFolder = () => setWatchFolderView(false); - - const redirectToChangePasswordPage = () => { - closeSidebar(); - router.push(PAGES.CHANGE_PASSWORD); - }; - - const redirectToChangeEmailPage = () => { - closeSidebar(); - router.push(PAGES.CHANGE_EMAIL); - }; - - const redirectToAccountsPage = async () => { - closeSidebar(); - - try { - // check if the user has passkey recovery enabled - const recoveryEnabled = await isPasskeyRecoveryEnabled(); - if (!recoveryEnabled) { - // let's create the necessary recovery information - const recoveryKey = await getRecoveryKey(); - - const resetSecret = await generateEncryptionKey(); - - const encryptionResult = await encryptToB64( - resetSecret, - recoveryKey, - ); - - await configurePasskeyRecovery( - resetSecret, - encryptionResult.encryptedData, - encryptionResult.nonce, - ); - } - - const accountsToken = await getAccountsToken(); - - window.open( - `${getAccountsURL()}${ - ACCOUNTS_PAGES.ACCOUNT_HANDOFF - }?package=${CLIENT_PACKAGE_NAMES.get( - APPS.PHOTOS, - )}&token=${accountsToken}`, - ); - } catch (e) { - log.error("failed to redirect to accounts page", e); - } - }; - - const redirectToDeduplicatePage = () => router.push(PAGES.DEDUPLICATE); - - const somethingWentWrong = () => - setDialogMessage({ - title: t("ERROR"), - content: t("RECOVER_KEY_GENERATION_FAILED"), - close: { variant: "critical" }, - }); - - const toggleTheme = () => { - setThemeColor((themeColor) => - themeColor === THEME_COLOR.DARK - ? 
THEME_COLOR.LIGHT - : THEME_COLOR.DARK, - ); - }; - - return ( - <> - {isElectron() && ( - - )} - - {isInternalUser() && ( - - } - /> - )} - - - {isInternalUser() && ( - - )} - - - - - - - - - - - {isElectron() && ( - - )} - - - ); -} diff --git a/web/apps/photos/src/components/Sidebar/index.tsx b/web/apps/photos/src/components/Sidebar/index.tsx index a93eb23876..300d06ed62 100644 --- a/web/apps/photos/src/components/Sidebar/index.tsx +++ b/web/apps/photos/src/components/Sidebar/index.tsx @@ -1,13 +1,93 @@ -import { Divider, Stack } from "@mui/material"; +import log from "@/next/log"; +import { savedLogs } from "@/next/log-web"; +import { + configurePasskeyRecovery, + isPasskeyRecoveryEnabled, +} from "@ente/accounts/services/passkey"; +import { APPS, CLIENT_PACKAGE_NAMES } from "@ente/shared/apps/constants"; +import { SpaceBetweenFlex } from "@ente/shared/components/Container"; +import { EnteLogo } from "@ente/shared/components/EnteLogo"; +import EnteSpinner from "@ente/shared/components/EnteSpinner"; +import RecoveryKey from "@ente/shared/components/RecoveryKey"; +import ThemeSwitcher from "@ente/shared/components/ThemeSwitcher"; +import { + ACCOUNTS_PAGES, + PHOTOS_PAGES as PAGES, +} from "@ente/shared/constants/pages"; +import { getRecoveryKey } from "@ente/shared/crypto/helpers"; +import { + encryptToB64, + generateEncryptionKey, +} from "@ente/shared/crypto/internal/libsodium"; +import { useLocalState } from "@ente/shared/hooks/useLocalState"; +import { getAccountsURL } from "@ente/shared/network/api"; +import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; +import { THEME_COLOR } from "@ente/shared/themes/constants"; +import { downloadAsFile } from "@ente/shared/utils"; +import ArchiveOutlined from "@mui/icons-material/ArchiveOutlined"; +import CategoryIcon from "@mui/icons-material/Category"; +import CloseIcon from "@mui/icons-material/Close"; +import DeleteOutline from "@mui/icons-material/DeleteOutline"; +import LockOutlined from 
"@mui/icons-material/LockOutlined"; +import VisibilityOff from "@mui/icons-material/VisibilityOff"; +import { + Box, + Divider, + IconButton, + Skeleton, + Stack, + styled, +} from "@mui/material"; +import Typography from "@mui/material/Typography"; +import DeleteAccountModal from "components/DeleteAccountModal"; +import { EnteDrawer } from "components/EnteDrawer"; +import { EnteMenuItem } from "components/Menu/EnteMenuItem"; +import TwoFactorModal from "components/TwoFactor/Modal"; +import { WatchFolder } from "components/WatchFolder"; +import LinkButton from "components/pages/gallery/LinkButton"; +import { NoStyleAnchor } from "components/pages/sharedAlbum/GoToEnte"; +import { + ARCHIVE_SECTION, + DUMMY_UNCATEGORIZED_COLLECTION, + TRASH_SECTION, +} from "constants/collection"; +import { t } from "i18next"; +import isElectron from "is-electron"; +import { useRouter } from "next/router"; +import { AppContext } from "pages/_app"; +import { GalleryContext } from "pages/gallery"; +import { + MouseEventHandler, + useContext, + useEffect, + useMemo, + useState, +} from "react"; +import { Trans } from "react-i18next"; +import billingService from "services/billingService"; +import { getUncategorizedCollection } from "services/collectionService"; +import exportService from "services/export"; +import { getAccountsToken, getUserDetailsV2 } from "services/userService"; import { CollectionSummaries } from "types/collection"; -import DebugSection from "./DebugSection"; -import ExitSection from "./ExitSection"; -import HeaderSection from "./Header"; -import HelpSection from "./HelpSection"; -import ShortcutSection from "./ShortcutSection"; -import UtilitySection from "./UtilitySection"; -import { DrawerSidebar } from "./styledComponents"; -import UserDetailsSection from "./userDetailsSection"; +import { UserDetails } from "types/user"; +import { + hasAddOnBonus, + hasExceededStorageQuota, + hasPaidSubscription, + hasStripeSubscription, + isOnFreePlan, + isSubscriptionActive, + 
isSubscriptionCancelled, + isSubscriptionPastDue, +} from "utils/billing"; +import { openLink } from "utils/common"; +import { getDownloadAppMessage } from "utils/ui"; +import { isInternalUser } from "utils/user"; +import { isFamilyAdmin, isPartOfFamily } from "utils/user/family"; +import { testUpload } from "../../../tests/upload.test"; +import { MemberSubscriptionManage } from "../MemberSubscriptionManage"; +import Preferences from "./Preferences"; +import SubscriptionCard from "./SubscriptionCard"; interface Iprops { collectionSummaries: CollectionSummaries; @@ -40,3 +120,658 @@ export default function Sidebar({ ); } + +const DrawerSidebar = styled(EnteDrawer)(({ theme }) => ({ + "& .MuiPaper-root": { + padding: theme.spacing(1.5), + }, +})); + +DrawerSidebar.defaultProps = { anchor: "left" }; + +interface HeaderSectionProps { + closeSidebar: () => void; +} + +const HeaderSection: React.FC = ({ closeSidebar }) => { + return ( + + + + + + + ); +}; + +interface UserDetailsSectionProps { + sidebarView: boolean; +} + +const UserDetailsSection: React.FC = ({ + sidebarView, +}) => { + const galleryContext = useContext(GalleryContext); + + const [userDetails, setUserDetails] = useLocalState( + LS_KEYS.USER_DETAILS, + ); + const [memberSubscriptionManageView, setMemberSubscriptionManageView] = + useState(false); + + const openMemberSubscriptionManage = () => + setMemberSubscriptionManageView(true); + const closeMemberSubscriptionManage = () => + setMemberSubscriptionManageView(false); + + useEffect(() => { + if (!sidebarView) { + return; + } + const main = async () => { + const userDetails = await getUserDetailsV2(); + setUserDetails(userDetails); + setData(LS_KEYS.SUBSCRIPTION, userDetails.subscription); + setData(LS_KEYS.FAMILY_DATA, userDetails.familyData); + setData(LS_KEYS.USER, { + ...getData(LS_KEYS.USER), + email: userDetails.email, + }); + }; + main(); + }, [sidebarView]); + + const isMemberSubscription = useMemo( + () => + userDetails && + 
isPartOfFamily(userDetails.familyData) && + !isFamilyAdmin(userDetails.familyData), + [userDetails], + ); + + const handleSubscriptionCardClick = () => { + if (isMemberSubscription) { + openMemberSubscriptionManage(); + } else { + if ( + hasStripeSubscription(userDetails.subscription) && + isSubscriptionPastDue(userDetails.subscription) + ) { + billingService.redirectToCustomerPortal(); + } else { + galleryContext.showPlanSelectorModal(); + } + } + }; + + return ( + <> + + + {userDetails ? ( + userDetails.email + ) : ( + + )} + + + + + + {isMemberSubscription && ( + + )} + + ); +}; + +interface SubscriptionStatusProps { + userDetails: UserDetails; +} + +const SubscriptionStatus: React.FC = ({ + userDetails, +}) => { + const { showPlanSelectorModal } = useContext(GalleryContext); + + const hasAMessage = useMemo(() => { + if (!userDetails) { + return false; + } + if ( + isPartOfFamily(userDetails.familyData) && + !isFamilyAdmin(userDetails.familyData) + ) { + return false; + } + if ( + hasPaidSubscription(userDetails.subscription) && + !isSubscriptionCancelled(userDetails.subscription) + ) { + return false; + } + return true; + }, [userDetails]); + + const handleClick = useMemo(() => { + const eventHandler: MouseEventHandler = (e) => { + e.stopPropagation(); + if (userDetails) { + if (isSubscriptionActive(userDetails.subscription)) { + if (hasExceededStorageQuota(userDetails)) { + showPlanSelectorModal(); + } + } else { + if ( + hasStripeSubscription(userDetails.subscription) && + isSubscriptionPastDue(userDetails.subscription) + ) { + billingService.redirectToCustomerPortal(); + } else { + showPlanSelectorModal(); + } + } + } + }; + return eventHandler; + }, [userDetails]); + + if (!hasAMessage) { + return <>; + } + + let message: React.ReactNode; + if (!hasAddOnBonus(userDetails.bonusData)) { + if (isSubscriptionActive(userDetails.subscription)) { + if (isOnFreePlan(userDetails.subscription)) { + message = ( + + ); + } else if 
(isSubscriptionCancelled(userDetails.subscription)) { + message = t("RENEWAL_CANCELLED_SUBSCRIPTION_INFO", { + date: userDetails.subscription?.expiryTime, + }); + } + } else { + message = ( + , + }} + /> + ); + } + } + + if (!message && hasExceededStorageQuota(userDetails)) { + message = ( + , + }} + /> + ); + } + + if (!message) return <>; + + return ( + + + {message} + + + ); +}; + +interface ShortcutSectionProps { + closeSidebar: () => void; + collectionSummaries: CollectionSummaries; +} + +const ShortcutSection: React.FC = ({ + closeSidebar, + collectionSummaries, +}) => { + const galleryContext = useContext(GalleryContext); + const [uncategorizedCollectionId, setUncategorizedCollectionID] = + useState(); + + useEffect(() => { + const main = async () => { + const unCategorizedCollection = await getUncategorizedCollection(); + if (unCategorizedCollection) { + setUncategorizedCollectionID(unCategorizedCollection.id); + } else { + setUncategorizedCollectionID(DUMMY_UNCATEGORIZED_COLLECTION); + } + }; + main(); + }, []); + + const openUncategorizedSection = () => { + galleryContext.setActiveCollectionID(uncategorizedCollectionId); + closeSidebar(); + }; + + const openTrashSection = () => { + galleryContext.setActiveCollectionID(TRASH_SECTION); + closeSidebar(); + }; + + const openArchiveSection = () => { + galleryContext.setActiveCollectionID(ARCHIVE_SECTION); + closeSidebar(); + }; + + const openHiddenSection = () => { + galleryContext.openHiddenSection(() => { + closeSidebar(); + }); + }; + + return ( + <> + } + onClick={openUncategorizedSection} + variant="captioned" + label={t("UNCATEGORIZED")} + subText={collectionSummaries + .get(uncategorizedCollectionId) + ?.fileCount.toString()} + /> + } + onClick={openArchiveSection} + variant="captioned" + label={t("ARCHIVE_SECTION_NAME")} + subText={collectionSummaries + .get(ARCHIVE_SECTION) + ?.fileCount.toString()} + /> + } + onClick={openHiddenSection} + variant="captioned" + label={t("HIDDEN")} + subIcon={} + /> + 
} + onClick={openTrashSection} + variant="captioned" + label={t("TRASH")} + subText={collectionSummaries + .get(TRASH_SECTION) + ?.fileCount.toString()} + /> + + ); +}; + +interface UtilitySectionProps { + closeSidebar: () => void; +} + +const UtilitySection: React.FC = ({ closeSidebar }) => { + const router = useRouter(); + const appContext = useContext(AppContext); + const { + setDialogMessage, + startLoading, + watchFolderView, + setWatchFolderView, + themeColor, + setThemeColor, + } = appContext; + + const [recoverModalView, setRecoveryModalView] = useState(false); + const [twoFactorModalView, setTwoFactorModalView] = useState(false); + const [preferencesView, setPreferencesView] = useState(false); + + const openPreferencesOptions = () => setPreferencesView(true); + const closePreferencesOptions = () => setPreferencesView(false); + + const openRecoveryKeyModal = () => setRecoveryModalView(true); + const closeRecoveryKeyModal = () => setRecoveryModalView(false); + + const openTwoFactorModal = () => setTwoFactorModalView(true); + const closeTwoFactorModal = () => setTwoFactorModalView(false); + + const openWatchFolder = () => { + if (isElectron()) { + setWatchFolderView(true); + } else { + setDialogMessage(getDownloadAppMessage()); + } + }; + const closeWatchFolder = () => setWatchFolderView(false); + + const redirectToChangePasswordPage = () => { + closeSidebar(); + router.push(PAGES.CHANGE_PASSWORD); + }; + + const redirectToChangeEmailPage = () => { + closeSidebar(); + router.push(PAGES.CHANGE_EMAIL); + }; + + const redirectToAccountsPage = async () => { + closeSidebar(); + + try { + // check if the user has passkey recovery enabled + const recoveryEnabled = await isPasskeyRecoveryEnabled(); + if (!recoveryEnabled) { + // let's create the necessary recovery information + const recoveryKey = await getRecoveryKey(); + + const resetSecret = await generateEncryptionKey(); + + const encryptionResult = await encryptToB64( + resetSecret, + recoveryKey, + ); + + await 
configurePasskeyRecovery( + resetSecret, + encryptionResult.encryptedData, + encryptionResult.nonce, + ); + } + + const accountsToken = await getAccountsToken(); + + window.open( + `${getAccountsURL()}${ + ACCOUNTS_PAGES.ACCOUNT_HANDOFF + }?package=${CLIENT_PACKAGE_NAMES.get( + APPS.PHOTOS, + )}&token=${accountsToken}`, + ); + } catch (e) { + log.error("failed to redirect to accounts page", e); + } + }; + + const redirectToDeduplicatePage = () => router.push(PAGES.DEDUPLICATE); + + const somethingWentWrong = () => + setDialogMessage({ + title: t("ERROR"), + content: t("RECOVER_KEY_GENERATION_FAILED"), + close: { variant: "critical" }, + }); + + const toggleTheme = () => { + setThemeColor((themeColor) => + themeColor === THEME_COLOR.DARK + ? THEME_COLOR.LIGHT + : THEME_COLOR.DARK, + ); + }; + + return ( + <> + {isElectron() && ( + + )} + + {isInternalUser() && ( + + } + /> + )} + + + {isInternalUser() && ( + + )} + + + + + + + + + + + {isElectron() && ( + + )} + + + ); +}; + +const HelpSection: React.FC = () => { + const { setDialogMessage } = useContext(AppContext); + const { openExportModal } = useContext(GalleryContext); + + const openRoadmap = () => + openLink("https://github.com/ente-io/ente/discussions", true); + + const contactSupport = () => openLink("mailto:support@ente.io", true); + + function openExport() { + if (isElectron()) { + openExportModal(); + } else { + setDialogMessage(getDownloadAppMessage()); + } + } + + return ( + <> + + + + {t("SUPPORT")} + + + } + variant="secondary" + /> + + ) + } + variant="secondary" + /> + + ); +}; + +const ExitSection: React.FC = () => { + const { setDialogMessage, logout } = useContext(AppContext); + + const [deleteAccountModalView, setDeleteAccountModalView] = useState(false); + + const closeDeleteAccountModal = () => setDeleteAccountModalView(false); + const openDeleteAccountModal = () => setDeleteAccountModalView(true); + + const confirmLogout = () => { + setDialogMessage({ + title: t("LOGOUT_MESSAGE"), + proceed: 
{ + text: t("LOGOUT"), + action: logout, + variant: "critical", + }, + close: { text: t("CANCEL") }, + }); + }; + + return ( + <> + + + + + ); +}; + +const DebugSection: React.FC = () => { + const appContext = useContext(AppContext); + const [appVersion, setAppVersion] = useState(); + + const electron = globalThis.electron; + + useEffect(() => { + electron?.appVersion().then((v) => setAppVersion(v)); + }); + + const confirmLogDownload = () => + appContext.setDialogMessage({ + title: t("DOWNLOAD_LOGS"), + content: , + proceed: { + text: t("DOWNLOAD"), + variant: "accent", + action: downloadLogs, + }, + close: { + text: t("CANCEL"), + }, + }); + + const downloadLogs = () => { + log.info("Downloading logs"); + if (electron) electron.openLogDirectory(); + else downloadAsFile(`debug_logs_${Date.now()}.txt`, savedLogs()); + }; + + return ( + <> + + {appVersion && ( + + {appVersion} + + )} + {isInternalUser() && ( + + )} + + ); +}; diff --git a/web/apps/photos/src/components/Sidebar/styledComponents.tsx b/web/apps/photos/src/components/Sidebar/styledComponents.tsx deleted file mode 100644 index d2b2f6b2b9..0000000000 --- a/web/apps/photos/src/components/Sidebar/styledComponents.tsx +++ /dev/null @@ -1,17 +0,0 @@ -import CircleIcon from "@mui/icons-material/Circle"; -import { styled } from "@mui/material"; -import { EnteDrawer } from "components/EnteDrawer"; - -export const DrawerSidebar = styled(EnteDrawer)(({ theme }) => ({ - "& .MuiPaper-root": { - padding: theme.spacing(1.5), - }, -})); - -DrawerSidebar.defaultProps = { anchor: "left" }; - -export const DotSeparator = styled(CircleIcon)` - font-size: 4px; - margin: 0 ${({ theme }) => theme.spacing(1)}; - color: inherit; -`; diff --git a/web/apps/photos/src/components/Sidebar/userDetailsSection.tsx b/web/apps/photos/src/components/Sidebar/userDetailsSection.tsx deleted file mode 100644 index 4d1bf3cb15..0000000000 --- a/web/apps/photos/src/components/Sidebar/userDetailsSection.tsx +++ /dev/null @@ -1,96 +0,0 @@ -import 
{ useLocalState } from "@ente/shared/hooks/useLocalState"; -import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; -import { Box, Skeleton } from "@mui/material"; -import Typography from "@mui/material/Typography"; -import { GalleryContext } from "pages/gallery"; -import { useContext, useEffect, useMemo, useState } from "react"; -import billingService from "services/billingService"; -import { getUserDetailsV2 } from "services/userService"; -import { UserDetails } from "types/user"; -import { hasStripeSubscription, isSubscriptionPastDue } from "utils/billing"; -import { isFamilyAdmin, isPartOfFamily } from "utils/user/family"; -import { MemberSubscriptionManage } from "../MemberSubscriptionManage"; -import SubscriptionCard from "./SubscriptionCard"; -import SubscriptionStatus from "./SubscriptionStatus"; - -export default function UserDetailsSection({ sidebarView }) { - const galleryContext = useContext(GalleryContext); - - const [userDetails, setUserDetails] = useLocalState( - LS_KEYS.USER_DETAILS, - ); - const [memberSubscriptionManageView, setMemberSubscriptionManageView] = - useState(false); - - const openMemberSubscriptionManage = () => - setMemberSubscriptionManageView(true); - const closeMemberSubscriptionManage = () => - setMemberSubscriptionManageView(false); - - useEffect(() => { - if (!sidebarView) { - return; - } - const main = async () => { - const userDetails = await getUserDetailsV2(); - setUserDetails(userDetails); - setData(LS_KEYS.SUBSCRIPTION, userDetails.subscription); - setData(LS_KEYS.FAMILY_DATA, userDetails.familyData); - setData(LS_KEYS.USER, { - ...getData(LS_KEYS.USER), - email: userDetails.email, - }); - }; - main(); - }, [sidebarView]); - - const isMemberSubscription = useMemo( - () => - userDetails && - isPartOfFamily(userDetails.familyData) && - !isFamilyAdmin(userDetails.familyData), - [userDetails], - ); - - const handleSubscriptionCardClick = () => { - if (isMemberSubscription) { - 
openMemberSubscriptionManage(); - } else { - if ( - hasStripeSubscription(userDetails.subscription) && - isSubscriptionPastDue(userDetails.subscription) - ) { - billingService.redirectToCustomerPortal(); - } else { - galleryContext.showPlanSelectorModal(); - } - } - }; - - return ( - <> - - - {userDetails ? ( - userDetails.email - ) : ( - - )} - - - - - - {isMemberSubscription && ( - - )} - - ); -} diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx index 7174306556..bea54c645b 100644 --- a/web/apps/photos/src/components/Upload/Uploader.tsx +++ b/web/apps/photos/src/components/Upload/Uploader.tsx @@ -1,6 +1,8 @@ import { basename } from "@/next/file"; import log from "@/next/log"; import type { CollectionMapping, Electron, ZipItem } from "@/next/types/ipc"; +import { firstNonEmpty } from "@/utils/array"; +import { ensure } from "@/utils/ensure"; import { CustomError } from "@ente/shared/error"; import { isPromise } from "@ente/shared/utils"; import DiscFullIcon from "@mui/icons-material/DiscFull"; @@ -324,17 +326,17 @@ export default function Uploader({ // Trigger an upload when any of the dependencies change. useEffect(() => { - // Re the paths: + // About the paths: // // - These are not necessarily the full paths. In particular, when // running on the browser they'll be the relative paths (at best) or // just the file-name otherwise. // // - All the paths use POSIX separators. See inline comments. + // const allItemAndPaths = [ - // See: [Note: webkitRelativePath]. In particular, they use POSIX - // separators. - webFiles.map((f) => [f, f.webkitRelativePath ?? f.name]), + // Relative path (using POSIX separators) or the file's name. + webFiles.map((f) => [f, pathLikeForWebFile(f)]), // The paths we get from the desktop app all eventually come either // from electron.selectDirectory or electron.pathForFile, both of // which return POSIX paths. 
@@ -822,6 +824,37 @@ const desktopFilesAndZipItems = async (electron: Electron, files: File[]) => { return { fileAndPaths, zipItems }; }; +/** + * Return the relative path or name of a File object selected or + * drag-and-dropped on the web. + * + * There are three cases here: + * + * 1. If the user selects individual file(s), then the returned File objects + * will only have a `name`. + * + * 2. If the user selects directory(ies), then the returned File objects will + * have a `webkitRelativePath`. For more details, see [Note: + * webkitRelativePath]. In particular, these will POSIX separators. + * + * 3. If the user drags-and-drops, then the react-dropzone library that we use + * will internally convert `webkitRelativePath` to `path`, but otherwise it + * behaves same as case 2. + * https://github.com/react-dropzone/file-selector/blob/master/src/file.ts#L1214 + */ +const pathLikeForWebFile = (file: File): string => + ensure( + firstNonEmpty([ + // We need to check first, since path is not a property of + // the standard File objects. + "path" in file && typeof file.path == "string" + ? 
file.path + : undefined, + file.webkitRelativePath, + file.name, + ]), + ); + // This is used to prompt the user the make upload strategy choice interface ImportSuggestion { rootFolderName: string; diff --git a/web/apps/photos/src/components/UploadSelectorInputs.tsx b/web/apps/photos/src/components/UploadSelectorInputs.tsx index 13e33fc6d3..e22e2f541a 100644 --- a/web/apps/photos/src/components/UploadSelectorInputs.tsx +++ b/web/apps/photos/src/components/UploadSelectorInputs.tsx @@ -1,9 +1,24 @@ -export default function UploadSelectorInputs({ +type GetInputProps = () => React.HTMLAttributes; + +interface UploadSelectorInputsProps { + getDragAndDropInputProps: GetInputProps; + getFileSelectorInputProps: GetInputProps; + getFolderSelectorInputProps: GetInputProps; + getZipFileSelectorInputProps?: GetInputProps; +} + +/** + * Create a bunch of HTML inputs elements, one each for the given props. + * + * These hidden input element serve as the way for us to show various file / + * folder Selector dialogs and handle drag and drop inputs. 
+ */ +export const UploadSelectorInputs: React.FC = ({ getDragAndDropInputProps, getFileSelectorInputProps, getFolderSelectorInputProps, getZipFileSelectorInputProps, -}) { +}) => { return ( <> @@ -14,4 +29,4 @@ export default function UploadSelectorInputs({ )} ); -} +}; diff --git a/web/apps/photos/src/components/WatchFolder.tsx b/web/apps/photos/src/components/WatchFolder.tsx index 710a541683..4d2144e0ce 100644 --- a/web/apps/photos/src/components/WatchFolder.tsx +++ b/web/apps/photos/src/components/WatchFolder.tsx @@ -25,8 +25,8 @@ import { Stack, Tooltip, Typography, + styled, } from "@mui/material"; -import { styled } from "@mui/material/styles"; import { CollectionMappingChoiceModal } from "components/Upload/CollectionMappingChoiceModal"; import { t } from "i18next"; import { AppContext } from "pages/_app"; diff --git a/web/apps/photos/src/components/ml/MLSearchSettings.tsx b/web/apps/photos/src/components/ml/MLSearchSettings.tsx index 409df4fc6f..d71dffab7e 100644 --- a/web/apps/photos/src/components/ml/MLSearchSettings.tsx +++ b/web/apps/photos/src/components/ml/MLSearchSettings.tsx @@ -270,14 +270,7 @@ function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) { {" "} {/* */} -

- We're putting finishing touches, coming back soon! -

-

- - Existing indexed faces will continue to show. - -

+ We're putting finishing touches, coming back soon!
{isInternalUserForML() && ( diff --git a/web/apps/photos/src/components/ml/PeopleList.tsx b/web/apps/photos/src/components/ml/PeopleList.tsx index 4691d4b650..da003d97d5 100644 --- a/web/apps/photos/src/components/ml/PeopleList.tsx +++ b/web/apps/photos/src/components/ml/PeopleList.tsx @@ -1,11 +1,12 @@ +import { blobCache } from "@/next/blob-cache"; import log from "@/next/log"; import { Skeleton, styled } from "@mui/material"; import { Legend } from "components/PhotoViewer/styledComponents/Legend"; import { t } from "i18next"; import React, { useEffect, useState } from "react"; +import mlIDbStorage from "services/face/db"; +import type { Person } from "services/face/people"; import { EnteFile } from "types/file"; -import { Face, Person } from "types/machineLearning"; -import { getPeopleList, getUnidentifiedFaces } from "utils/machineLearning"; const FaceChipContainer = styled("div")` display: flex; @@ -57,10 +58,7 @@ export const PeopleList = React.memo((props: PeopleListProps) => { props.onSelect && props.onSelect(person, index) } > - + ))} @@ -108,7 +106,7 @@ export function UnidentifiedFaces(props: { file: EnteFile; updateMLDataIndex: number; }) { - const [faces, setFaces] = useState>([]); + const [faces, setFaces] = useState<{ id: string }[]>([]); useEffect(() => { let didCancel = false; @@ -136,10 +134,7 @@ export function UnidentifiedFaces(props: { {faces && faces.map((face, index) => ( - + ))} @@ -149,32 +144,22 @@ export function UnidentifiedFaces(props: { interface FaceCropImageViewProps { faceID: string; - cacheKey?: string; } -const FaceCropImageView: React.FC = ({ - faceID, - cacheKey, -}) => { +const FaceCropImageView: React.FC = ({ faceID }) => { const [objectURL, setObjectURL] = useState(); useEffect(() => { let didCancel = false; - const electron = globalThis.electron; - - if (faceID && electron) { - electron - .legacyFaceCrop(faceID) - /* - cachedOrNew("face-crops", cacheKey, async () => { - const user = await ensureLocalUser(); - return 
machineLearningService.regenerateFaceCrop( - user.token, - user.id, - faceId, - ); - })*/ + if (faceID) { + blobCache("face-crops") + .then((cache) => cache.get(faceID)) .then((data) => { + /* + TODO(MR): regen if needed and get this to work on web too. + cachedOrNew("face-crops", cacheKey, async () => { + return regenerateFaceCrop(faceId); + })*/ if (data) { const blob = new Blob([data]); if (!didCancel) setObjectURL(URL.createObjectURL(blob)); @@ -186,7 +171,7 @@ const FaceCropImageView: React.FC = ({ didCancel = true; if (objectURL) URL.revokeObjectURL(objectURL); }; - }, [faceID, cacheKey]); + }, [faceID]); return objectURL ? ( @@ -194,3 +179,45 @@ const FaceCropImageView: React.FC = ({ ); }; + +async function getPeopleList(file: EnteFile): Promise { + let startTime = Date.now(); + const mlFileData = await mlIDbStorage.getFile(file.id); + log.info( + "getPeopleList:mlFilesStore:getItem", + Date.now() - startTime, + "ms", + ); + if (!mlFileData?.faces || mlFileData.faces.length < 1) { + return []; + } + + const peopleIds = mlFileData.faces + .filter((f) => f.personId !== null && f.personId !== undefined) + .map((f) => f.personId); + if (!peopleIds || peopleIds.length < 1) { + return []; + } + // log.info("peopleIds: ", peopleIds); + startTime = Date.now(); + const peoplePromises = peopleIds.map( + (p) => mlIDbStorage.getPerson(p) as Promise, + ); + const peopleList = await Promise.all(peoplePromises); + log.info( + "getPeopleList:mlPeopleStore:getItems", + Date.now() - startTime, + "ms", + ); + // log.info("peopleList: ", peopleList); + + return peopleList; +} + +async function getUnidentifiedFaces(file: EnteFile): Promise<{ id: string }[]> { + const mlFileData = await mlIDbStorage.getFile(file.id); + + return mlFileData?.faces?.filter( + (f) => f.personId === null || f.personId === undefined, + ); +} diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card.tsx new file mode 
100644 index 0000000000..6fe86769e1 --- /dev/null +++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/card.tsx @@ -0,0 +1,356 @@ +import log from "@/next/log"; +import { SpaceBetweenFlex } from "@ente/shared/components/Container"; +import { SUPPORT_EMAIL } from "@ente/shared/constants/urls"; +import Close from "@mui/icons-material/Close"; +import { IconButton, Link, Stack } from "@mui/material"; +import Box from "@mui/material/Box"; +import Typography from "@mui/material/Typography"; +import { PLAN_PERIOD } from "constants/gallery"; +import { t } from "i18next"; +import { AppContext } from "pages/_app"; +import { GalleryContext } from "pages/gallery"; +import { useContext, useEffect, useMemo, useState } from "react"; +import { Trans } from "react-i18next"; +import billingService, { type PlansResponse } from "services/billingService"; +import { Plan } from "types/billing"; +import { SetLoading } from "types/gallery"; +import { + getLocalUserSubscription, + hasAddOnBonus, + hasMobileSubscription, + hasPaidSubscription, + hasStripeSubscription, + isOnFreePlan, + isSubscriptionActive, + isSubscriptionCancelled, + isUserSubscribedPlan, + planForSubscription, + updateSubscription, +} from "utils/billing"; +import { bytesInGB } from "utils/units"; +import { getLocalUserDetails } from "utils/user"; +import { getTotalFamilyUsage, isPartOfFamily } from "utils/user/family"; +import { ManageSubscription } from "./manageSubscription"; +import { PeriodToggler } from "./periodToggler"; +import Plans from "./plans"; +import { BFAddOnRow } from "./plans/BfAddOnRow"; + +interface Props { + closeModal: any; + setLoading: SetLoading; +} + +function PlanSelectorCard(props: Props) { + const subscription = useMemo(() => getLocalUserSubscription(), []); + const [plansResponse, setPlansResponse] = useState< + PlansResponse | undefined + >(); + + const [planPeriod, setPlanPeriod] = useState( + subscription?.period || PLAN_PERIOD.MONTH, + ); + const galleryContext = 
useContext(GalleryContext); + const appContext = useContext(AppContext); + const bonusData = useMemo(() => { + const userDetails = getLocalUserDetails(); + if (!userDetails) { + return null; + } + return userDetails.bonusData; + }, []); + + const usage = useMemo(() => { + const userDetails = getLocalUserDetails(); + if (!userDetails) { + return 0; + } + return isPartOfFamily(userDetails.familyData) + ? getTotalFamilyUsage(userDetails.familyData) + : userDetails.usage; + }, []); + + const togglePeriod = () => { + setPlanPeriod((prevPeriod) => + prevPeriod === PLAN_PERIOD.MONTH + ? PLAN_PERIOD.YEAR + : PLAN_PERIOD.MONTH, + ); + }; + function onReopenClick() { + appContext.closeMessageDialog(); + galleryContext.showPlanSelectorModal(); + } + useEffect(() => { + const main = async () => { + try { + props.setLoading(true); + const response = await billingService.getPlans(); + const { plans } = response; + if (isSubscriptionActive(subscription)) { + const planNotListed = + plans.filter((plan) => + isUserSubscribedPlan(plan, subscription), + ).length === 0; + if ( + subscription && + !isOnFreePlan(subscription) && + planNotListed + ) { + plans.push(planForSubscription(subscription)); + } + } + setPlansResponse(response); + } catch (e) { + log.error("plan selector modal open failed", e); + props.closeModal(); + appContext.setDialogMessage({ + title: t("OPEN_PLAN_SELECTOR_MODAL_FAILED"), + content: t("UNKNOWN_ERROR"), + close: { text: t("CLOSE"), variant: "secondary" }, + proceed: { + text: t("REOPEN_PLAN_SELECTOR_MODAL"), + variant: "accent", + action: onReopenClick, + }, + }); + } finally { + props.setLoading(false); + } + }; + main(); + }, []); + + async function onPlanSelect(plan: Plan) { + if ( + !hasPaidSubscription(subscription) || + isSubscriptionCancelled(subscription) + ) { + try { + props.setLoading(true); + await billingService.buySubscription(plan.stripeID); + } catch (e) { + props.setLoading(false); + appContext.setDialogMessage({ + title: t("ERROR"), + 
content: t("SUBSCRIPTION_PURCHASE_FAILED"), + close: { variant: "critical" }, + }); + } + } else if (hasStripeSubscription(subscription)) { + appContext.setDialogMessage({ + title: t("update_subscription_title"), + content: t("UPDATE_SUBSCRIPTION_MESSAGE"), + proceed: { + text: t("UPDATE_SUBSCRIPTION"), + action: updateSubscription.bind( + null, + plan, + appContext.setDialogMessage, + props.setLoading, + props.closeModal, + ), + variant: "accent", + }, + close: { text: t("CANCEL") }, + }); + } else if (hasMobileSubscription(subscription)) { + appContext.setDialogMessage({ + title: t("CANCEL_SUBSCRIPTION_ON_MOBILE"), + content: t("CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE"), + close: { variant: "secondary" }, + }); + } else { + appContext.setDialogMessage({ + title: t("MANAGE_PLAN"), + content: ( + , + }} + values={{ emailID: SUPPORT_EMAIL }} + /> + ), + close: { variant: "secondary" }, + }); + } + } + + const { closeModal, setLoading } = props; + + const commonCardData = { + subscription, + bonusData, + closeModal, + planPeriod, + togglePeriod, + setLoading, + }; + + const plansList = ( + + ); + + return ( + <> + + {hasPaidSubscription(subscription) ? 
( + + {plansList} + + ) : ( + + {plansList} + + )} + + + ); +} + +export default PlanSelectorCard; + +function FreeSubscriptionPlanSelectorCard({ + children, + subscription, + bonusData, + closeModal, + setLoading, + planPeriod, + togglePeriod, +}) { + return ( + <> + + {t("CHOOSE_PLAN")} + + + + + + + + {t("TWO_MONTHS_FREE")} + + + {children} + {hasAddOnBonus(bonusData) && ( + + )} + {hasAddOnBonus(bonusData) && ( + + )} + + + + ); +} + +function PaidSubscriptionPlanSelectorCard({ + children, + subscription, + bonusData, + closeModal, + usage, + planPeriod, + togglePeriod, + setLoading, +}) { + return ( + <> + + + + + {t("SUBSCRIPTION")} + + + {bytesInGB(subscription.storage, 2)}{" "} + {t("storage_unit.gb")} + + + + + + + + + + + + + + + + `1px solid ${theme.palette.divider}`} + p={1.5} + borderRadius={(theme) => `${theme.shape.borderRadius}px`} + > + + + + {t("TWO_MONTHS_FREE")} + + + {children} + + + + + {!isSubscriptionCancelled(subscription) + ? t("RENEWAL_ACTIVE_SUBSCRIPTION_STATUS", { + date: subscription.expiryTime, + }) + : t("RENEWAL_CANCELLED_SUBSCRIPTION_STATUS", { + date: subscription.expiryTime, + })} + + {hasAddOnBonus(bonusData) && ( + + )} + + + + + + ); +} diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/free.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/free.tsx deleted file mode 100644 index a2ac1090b7..0000000000 --- a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/free.tsx +++ /dev/null @@ -1,64 +0,0 @@ -import { Stack } from "@mui/material"; -import Box from "@mui/material/Box"; -import Typography from "@mui/material/Typography"; -import { t } from "i18next"; -import { hasAddOnBonus } from "utils/billing"; -import { ManageSubscription } from "../manageSubscription"; -import { PeriodToggler } from "../periodToggler"; -import Plans from "../plans"; -import { BFAddOnRow } from "../plans/BfAddOnRow"; - -export default function FreeSubscriptionPlanSelectorCard({ - plans, - 
subscription, - bonusData, - closeModal, - setLoading, - planPeriod, - togglePeriod, - onPlanSelect, -}) { - return ( - <> - - {t("CHOOSE_PLAN")} - - - - - - - - {t("TWO_MONTHS_FREE")} - - - - {hasAddOnBonus(bonusData) && ( - - )} - {hasAddOnBonus(bonusData) && ( - - )} - - - - ); -} diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/index.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/index.tsx deleted file mode 100644 index 2ef3c361fd..0000000000 --- a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/index.tsx +++ /dev/null @@ -1,202 +0,0 @@ -import log from "@/next/log"; -import { SUPPORT_EMAIL } from "@ente/shared/constants/urls"; -import { useLocalState } from "@ente/shared/hooks/useLocalState"; -import { LS_KEYS } from "@ente/shared/storage/localStorage"; -import { Link, Stack } from "@mui/material"; -import { PLAN_PERIOD } from "constants/gallery"; -import { t } from "i18next"; -import { AppContext } from "pages/_app"; -import { GalleryContext } from "pages/gallery"; -import { useContext, useEffect, useMemo, useState } from "react"; -import { Trans } from "react-i18next"; -import billingService from "services/billingService"; -import { Plan } from "types/billing"; -import { SetLoading } from "types/gallery"; -import { - getLocalUserSubscription, - hasMobileSubscription, - hasPaidSubscription, - hasStripeSubscription, - isOnFreePlan, - isSubscriptionActive, - isSubscriptionCancelled, - isUserSubscribedPlan, - planForSubscription, - updateSubscription, -} from "utils/billing"; -import { getLocalUserDetails } from "utils/user"; -import { getTotalFamilyUsage, isPartOfFamily } from "utils/user/family"; -import FreeSubscriptionPlanSelectorCard from "./free"; -import PaidSubscriptionPlanSelectorCard from "./paid"; - -interface Props { - closeModal: any; - setLoading: SetLoading; -} - -function PlanSelectorCard(props: Props) { - const subscription = useMemo(() => getLocalUserSubscription(), []); - const 
[plans, setPlans] = useLocalState(LS_KEYS.PLANS); - - const [planPeriod, setPlanPeriod] = useState( - subscription?.period || PLAN_PERIOD.MONTH, - ); - const galleryContext = useContext(GalleryContext); - const appContext = useContext(AppContext); - const bonusData = useMemo(() => { - const userDetails = getLocalUserDetails(); - if (!userDetails) { - return null; - } - return userDetails.bonusData; - }, []); - - const usage = useMemo(() => { - const userDetails = getLocalUserDetails(); - if (!userDetails) { - return 0; - } - return isPartOfFamily(userDetails.familyData) - ? getTotalFamilyUsage(userDetails.familyData) - : userDetails.usage; - }, []); - - const togglePeriod = () => { - setPlanPeriod((prevPeriod) => - prevPeriod === PLAN_PERIOD.MONTH - ? PLAN_PERIOD.YEAR - : PLAN_PERIOD.MONTH, - ); - }; - function onReopenClick() { - appContext.closeMessageDialog(); - galleryContext.showPlanSelectorModal(); - } - useEffect(() => { - const main = async () => { - try { - props.setLoading(true); - const plans = await billingService.getPlans(); - if (isSubscriptionActive(subscription)) { - const planNotListed = - plans.filter((plan) => - isUserSubscribedPlan(plan, subscription), - ).length === 0; - if ( - subscription && - !isOnFreePlan(subscription) && - planNotListed - ) { - plans.push(planForSubscription(subscription)); - } - } - setPlans(plans); - } catch (e) { - log.error("plan selector modal open failed", e); - props.closeModal(); - appContext.setDialogMessage({ - title: t("OPEN_PLAN_SELECTOR_MODAL_FAILED"), - content: t("UNKNOWN_ERROR"), - close: { text: t("CLOSE"), variant: "secondary" }, - proceed: { - text: t("REOPEN_PLAN_SELECTOR_MODAL"), - variant: "accent", - action: onReopenClick, - }, - }); - } finally { - props.setLoading(false); - } - }; - main(); - }, []); - - async function onPlanSelect(plan: Plan) { - if ( - !hasPaidSubscription(subscription) || - isSubscriptionCancelled(subscription) - ) { - try { - props.setLoading(true); - await 
billingService.buySubscription(plan.stripeID); - } catch (e) { - props.setLoading(false); - appContext.setDialogMessage({ - title: t("ERROR"), - content: t("SUBSCRIPTION_PURCHASE_FAILED"), - close: { variant: "critical" }, - }); - } - } else if (hasStripeSubscription(subscription)) { - appContext.setDialogMessage({ - title: t("update_subscription_title"), - content: t("UPDATE_SUBSCRIPTION_MESSAGE"), - proceed: { - text: t("UPDATE_SUBSCRIPTION"), - action: updateSubscription.bind( - null, - plan, - appContext.setDialogMessage, - props.setLoading, - props.closeModal, - ), - variant: "accent", - }, - close: { text: t("CANCEL") }, - }); - } else if (hasMobileSubscription(subscription)) { - appContext.setDialogMessage({ - title: t("CANCEL_SUBSCRIPTION_ON_MOBILE"), - content: t("CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE"), - close: { variant: "secondary" }, - }); - } else { - appContext.setDialogMessage({ - title: t("MANAGE_PLAN"), - content: ( - , - }} - values={{ emailID: SUPPORT_EMAIL }} - /> - ), - close: { variant: "secondary" }, - }); - } - } - - return ( - <> - - {hasPaidSubscription(subscription) ? 
( - - ) : ( - - )} - - - ); -} - -export default PlanSelectorCard; diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx deleted file mode 100644 index ba318330ec..0000000000 --- a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx +++ /dev/null @@ -1,109 +0,0 @@ -import { SpaceBetweenFlex } from "@ente/shared/components/Container"; -import Close from "@mui/icons-material/Close"; -import { IconButton, Stack } from "@mui/material"; -import Box from "@mui/material/Box"; -import Typography from "@mui/material/Typography"; -import { t } from "i18next"; -import { Trans } from "react-i18next"; -import { hasAddOnBonus, isSubscriptionCancelled } from "utils/billing"; -import { bytesInGB } from "utils/units"; -import { ManageSubscription } from "../manageSubscription"; -import { PeriodToggler } from "../periodToggler"; -import Plans from "../plans"; -import { BFAddOnRow } from "../plans/BfAddOnRow"; - -export default function PaidSubscriptionPlanSelectorCard({ - plans, - subscription, - bonusData, - closeModal, - usage, - planPeriod, - togglePeriod, - onPlanSelect, - setLoading, -}) { - return ( - <> - - - - - {t("SUBSCRIPTION")} - - - {bytesInGB(subscription.storage, 2)}{" "} - {t("storage_unit.gb")} - - - - - - - - - - - - - - - - `1px solid ${theme.palette.divider}`} - p={1.5} - borderRadius={(theme) => `${theme.shape.borderRadius}px`} - > - - - - {t("TWO_MONTHS_FREE")} - - - - - - - - {!isSubscriptionCancelled(subscription) - ? 
t("RENEWAL_ACTIVE_SUBSCRIPTION_STATUS", { - date: subscription.expiryTime, - }) - : t("RENEWAL_CANCELLED_SUBSCRIPTION_STATUS", { - date: subscription.expiryTime, - })} - - {hasAddOnBonus(bonusData) && ( - - )} - - - - - - ); -} diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/FreePlanRow.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/FreePlanRow.tsx deleted file mode 100644 index f3651e12d4..0000000000 --- a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/FreePlanRow.tsx +++ /dev/null @@ -1,28 +0,0 @@ -import { SpaceBetweenFlex } from "@ente/shared/components/Container"; -import ArrowForward from "@mui/icons-material/ArrowForward"; -import { Box, IconButton, styled, Typography } from "@mui/material"; -import { t } from "i18next"; - -const RowContainer = styled(SpaceBetweenFlex)(({ theme }) => ({ - gap: theme.spacing(1.5), - padding: theme.spacing(1.5, 1), - cursor: "pointer", - "&:hover .endIcon": { - backgroundColor: "rgba(255,255,255,0.08)", - }, -})); -export function FreePlanRow({ closeModal }) { - return ( - - - {t("FREE_PLAN_OPTION_LABEL")} - - {t("FREE_PLAN_DESCRIPTION")} - - - - - - - ); -} diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx index ed1a666edb..31e97c68e6 100644 --- a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx +++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx @@ -1,5 +1,9 @@ -import { Stack } from "@mui/material"; +import { SpaceBetweenFlex } from "@ente/shared/components/Container"; +import ArrowForward from "@mui/icons-material/ArrowForward"; +import { Box, IconButton, Stack, Typography, styled } from "@mui/material"; import { PLAN_PERIOD } from "constants/gallery"; +import { t } from "i18next"; +import type { PlansResponse } from "services/billingService"; import { Plan, Subscription } from 
"types/billing"; import { BonusData } from "types/user"; import { @@ -8,11 +12,11 @@ import { isPopularPlan, isUserSubscribedPlan, } from "utils/billing"; -import { FreePlanRow } from "./FreePlanRow"; +import { formattedStorageByteSize } from "utils/units"; import { PlanRow } from "./planRow"; interface Iprops { - plans: Plan[]; + plansResponse: PlansResponse | undefined; planPeriod: PLAN_PERIOD; subscription: Subscription; bonusData?: BonusData; @@ -21,30 +25,70 @@ interface Iprops { } const Plans = ({ - plans, + plansResponse, planPeriod, subscription, bonusData, onPlanSelect, closeModal, -}: Iprops) => ( - - {plans - ?.filter((plan) => plan.period === planPeriod) - ?.map((plan) => ( - - ))} - {!hasPaidSubscription(subscription) && !hasAddOnBonus(bonusData) && ( - - )} - -); +}: Iprops) => { + const { freePlan, plans } = plansResponse ?? {}; + return ( + + {plans + ?.filter((plan) => plan.period === planPeriod) + ?.map((plan) => ( + + ))} + {!hasPaidSubscription(subscription) && + !hasAddOnBonus(bonusData) && + freePlan && ( + + )} + + ); +}; export default Plans; + +interface FreePlanRowProps { + storage: number; + closeModal: () => void; +} + +const FreePlanRow: React.FC = ({ closeModal, storage }) => { + return ( + + + {t("FREE_PLAN_OPTION_LABEL")} + + {t("free_plan_description", { + storage: formattedStorageByteSize(storage), + })} + + + + + + + ); +}; + +const FreePlanRow_ = styled(SpaceBetweenFlex)(({ theme }) => ({ + gap: theme.spacing(1.5), + padding: theme.spacing(1.5, 1), + cursor: "pointer", + "&:hover .endIcon": { + backgroundColor: "rgba(255,255,255,0.08)", + }, +})); diff --git a/web/apps/photos/src/constants/mlConfig.ts b/web/apps/photos/src/constants/mlConfig.ts deleted file mode 100644 index 929594e1c1..0000000000 --- a/web/apps/photos/src/constants/mlConfig.ts +++ /dev/null @@ -1,56 +0,0 @@ -import { JobConfig } from "types/common/job"; -import { MLSearchConfig, MLSyncConfig } from "types/machineLearning"; - -export const 
DEFAULT_ML_SYNC_JOB_CONFIG: JobConfig = { - intervalSec: 5, - // TODO: finalize this after seeing effects on and from machine sleep - maxItervalSec: 960, - backoffMultiplier: 2, -}; - -export const DEFAULT_ML_SYNC_CONFIG: MLSyncConfig = { - batchSize: 200, - imageSource: "Original", - faceDetection: { - method: "YoloFace", - }, - faceCrop: { - enabled: true, - method: "ArcFace", - padding: 0.25, - maxSize: 256, - blobOptions: { - type: "image/jpeg", - quality: 0.8, - }, - }, - faceAlignment: { - method: "ArcFace", - }, - blurDetection: { - method: "Laplacian", - threshold: 15, - }, - faceEmbedding: { - method: "MobileFaceNet", - faceSize: 112, - generateTsne: true, - }, - faceClustering: { - method: "Hdbscan", - minClusterSize: 3, - minSamples: 5, - clusterSelectionEpsilon: 0.6, - clusterSelectionMethod: "leaf", - minInputSize: 50, - // maxDistanceInsideCluster: 0.4, - generateDebugInfo: true, - }, - mlVersion: 3, -}; - -export const DEFAULT_ML_SEARCH_CONFIG: MLSearchConfig = { - enabled: false, -}; - -export const MAX_ML_SYNC_ERROR_COUNT = 1; diff --git a/web/apps/photos/src/pages/404.tsx b/web/apps/photos/src/pages/404.tsx index 6cca72b77e..dcd621c703 100644 --- a/web/apps/photos/src/pages/404.tsx +++ b/web/apps/photos/src/pages/404.tsx @@ -1,9 +1,3 @@ -import { APPS } from "@ente/shared/apps/constants"; -import NotFoundPage from "@ente/shared/next/pages/404"; -import { AppContext } from "pages/_app"; -import { useContext } from "react"; +import Page from "@ente/shared/next/pages/404"; -export default function NotFound() { - const appContext = useContext(AppContext); - return ; -} +export default Page; diff --git a/web/apps/photos/src/pages/_app.tsx b/web/apps/photos/src/pages/_app.tsx index 0e80d0df9f..7d82f7cc37 100644 --- a/web/apps/photos/src/pages/_app.tsx +++ b/web/apps/photos/src/pages/_app.tsx @@ -26,7 +26,6 @@ import EnteSpinner from "@ente/shared/components/EnteSpinner"; import { MessageContainer } from "@ente/shared/components/MessageContainer"; import 
AppNavbar from "@ente/shared/components/Navbar/app"; import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages"; -import { Events, eventBus } from "@ente/shared/events"; import { useLocalState } from "@ente/shared/hooks/useLocalState"; import HTTPService from "@ente/shared/network/HTTPService"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; @@ -52,7 +51,12 @@ import "photoswipe/dist/photoswipe.css"; import { createContext, useEffect, useRef, useState } from "react"; import LoadingBar from "react-top-loading-bar"; import DownloadManager from "services/download"; -import exportService, { resumeExportsIfNeeded } from "services/export"; +import { resumeExportsIfNeeded } from "services/export"; +import { photosLogout } from "services/logout"; +import { + getMLSearchConfig, + updateMLSearchConfig, +} from "services/machineLearning/machineLearningService"; import mlWorkManager from "services/machineLearning/mlWorkManager"; import { getFamilyPortalRedirectURL, @@ -64,10 +68,6 @@ import { NotificationAttributes, SetNotificationAttributes, } from "types/Notification"; -import { - getMLSearchConfig, - updateMLSearchConfig, -} from "utils/machineLearning/config"; import { getUpdateAvailableForDownloadMessage, getUpdateReadyToInstallMessage, @@ -100,6 +100,7 @@ type AppContextType = { setDialogBoxAttributesV2: SetDialogBoxAttributesV2; isCFProxyDisabled: boolean; setIsCFProxyDisabled: (disabled: boolean) => void; + logout: () => void; }; export const AppContext = createContext(null); @@ -188,14 +189,6 @@ export default function App({ Component, pageProps }: AppProps) { } }; loadMlSearchState(); - try { - eventBus.on(Events.LOGOUT, () => { - setMlSearchEnabled(false); - mlWorkManager.setMlSearchEnabled(false); - }); - } catch (e) { - log.error("Error while subscribing to logout event", e); - } }, []); useEffect(() => { @@ -213,13 +206,6 @@ export default function App({ Component, pageProps }: AppProps) { await resumeExportsIfNeeded(); }; 
initExport(); - try { - eventBus.on(Events.LOGOUT, () => { - exportService.disableContinuousExport(); - }); - } catch (e) { - log.error("Error while subscribing to logout event", e); - } }, []); const setUserOnline = () => setOffline(false); @@ -336,6 +322,11 @@ export default function App({ Component, pageProps }: AppProps) { content: t("UNKNOWN_ERROR"), }); + const logout = () => { + setMlSearchEnabled(false); + void photosLogout().then(() => router.push(PAGES.ROOT)); + }; + const title = isI18nReady ? t("TITLE", { context: APPS.PHOTOS }) : APP_TITLES.get(APPS.PHOTOS); @@ -394,6 +385,7 @@ export default function App({ Component, pageProps }: AppProps) { updateMapEnabled, isCFProxyDisabled, setIsCFProxyDisabled, + logout, }} > {(loading || !isI18nReady) && ( diff --git a/web/apps/photos/src/pages/gallery/index.tsx b/web/apps/photos/src/pages/gallery/index.tsx index ba0d53d604..cb0ae1bf15 100644 --- a/web/apps/photos/src/pages/gallery/index.tsx +++ b/web/apps/photos/src/pages/gallery/index.tsx @@ -1,82 +1,37 @@ -import { - SESSION_KEYS, - clearKeys, - getKey, -} from "@ente/shared/storage/sessionStorage"; -import { Typography, styled } from "@mui/material"; -import { t } from "i18next"; -import { useRouter } from "next/router"; -import { - createContext, - useContext, - useEffect, - useMemo, - useRef, - useState, -} from "react"; -import { - constructEmailList, - createAlbum, - getAllLatestCollections, - getAllLocalCollections, - getCollectionSummaries, - getFavItemIds, - getHiddenItemsSummary, - getSectionSummaries, -} from "services/collectionService"; -import { getLocalFiles, syncFiles } from "services/fileService"; - -import { checkSubscriptionPurchase } from "utils/billing"; - +import log from "@/next/log"; +import { APPS } from "@ente/shared/apps/constants"; +import { CenteredFlex } from "@ente/shared/components/Container"; import EnteSpinner from "@ente/shared/components/EnteSpinner"; +import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages"; 
+import { getRecoveryKey } from "@ente/shared/crypto/helpers"; +import { CustomError } from "@ente/shared/error"; +import { useFileInput } from "@ente/shared/hooks/useFileInput"; +import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded"; +import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; +import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { + getToken, isFirstLogin, justSignedUp, setIsFirstLogin, setJustSignedUp, } from "@ente/shared/storage/localStorage/helpers"; -import CollectionSelector, { - CollectionSelectorAttributes, -} from "components/Collections/CollectionSelector"; -import FullScreenDropZone from "components/FullScreenDropZone"; -import { LoadingOverlay } from "components/LoadingOverlay"; -import PhotoFrame from "components/PhotoFrame"; -import Sidebar from "components/Sidebar"; -import SelectedFileOptions from "components/pages/gallery/SelectedFileOptions"; -import { useDropzone } from "react-dropzone"; import { - isTokenValid, - syncMapEnabled, - validateKey, -} from "services/userService"; -import { preloadImage } from "utils/common"; -import { - FILE_OPS_TYPE, - constructFileToCollectionMap, - getSelectedFiles, - getUniqueFiles, - handleFileOps, - mergeMetadata, - sortFiles, -} from "utils/file"; - -import log from "@/next/log"; -import { APPS } from "@ente/shared/apps/constants"; -import { CenteredFlex } from "@ente/shared/components/Container"; -import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages"; -import { CustomError } from "@ente/shared/error"; -import useFileInput from "@ente/shared/hooks/useFileInput"; -import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded"; -import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; -import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; -import { getToken } from "@ente/shared/storage/localStorage/helpers"; + SESSION_KEYS, + clearKeys, + getKey, +} from 
"@ente/shared/storage/sessionStorage"; import { User } from "@ente/shared/user/types"; import { isPromise } from "@ente/shared/utils"; +import { Typography, styled } from "@mui/material"; import AuthenticateUserModal from "components/AuthenticateUserModal"; import Collections from "components/Collections"; import CollectionNamer, { CollectionNamerAttributes, } from "components/Collections/CollectionNamer"; +import CollectionSelector, { + CollectionSelectorAttributes, +} from "components/Collections/CollectionSelector"; import ExportModal from "components/ExportModal"; import { FilesDownloadProgress, @@ -85,13 +40,18 @@ import { import FixCreationTime, { FixCreationTimeAttributes, } from "components/FixCreationTime"; +import FullScreenDropZone from "components/FullScreenDropZone"; import GalleryEmptyState from "components/GalleryEmptyState"; +import { LoadingOverlay } from "components/LoadingOverlay"; +import PhotoFrame from "components/PhotoFrame"; import { ITEM_TYPE, TimeStampListItem } from "components/PhotoList"; import SearchResultInfo from "components/Search/SearchResultInfo"; +import Sidebar from "components/Sidebar"; import Uploader from "components/Upload/Uploader"; -import UploadInputs from "components/UploadSelectorInputs"; +import { UploadSelectorInputs } from "components/UploadSelectorInputs"; import { GalleryNavbar } from "components/pages/gallery/Navbar"; import PlanSelector from "components/pages/gallery/PlanSelector"; +import SelectedFileOptions from "components/pages/gallery/SelectedFileOptions"; import { ALL_SECTION, ARCHIVE_SECTION, @@ -100,15 +60,41 @@ import { TRASH_SECTION, } from "constants/collection"; import { SYNC_INTERVAL_IN_MICROSECONDS } from "constants/gallery"; +import { t } from "i18next"; +import { useRouter } from "next/router"; import { AppContext } from "pages/_app"; +import { + createContext, + useContext, + useEffect, + useMemo, + useRef, + useState, +} from "react"; +import { useDropzone } from "react-dropzone"; import { 
clipService } from "services/clip-service"; -import { constructUserIDToEmailMap } from "services/collectionService"; +import { + constructEmailList, + constructUserIDToEmailMap, + createAlbum, + getAllLatestCollections, + getAllLocalCollections, + getCollectionSummaries, + getFavItemIds, + getHiddenItemsSummary, + getSectionSummaries, +} from "services/collectionService"; import downloadManager from "services/download"; -import { syncEmbeddings, syncFileEmbeddings } from "services/embeddingService"; +import { + syncCLIPEmbeddings, + syncFaceEmbeddings, +} from "services/embeddingService"; import { syncEntities } from "services/entityService"; +import { getLocalFiles, syncFiles } from "services/fileService"; import locationSearchService from "services/locationSearchService"; import { getLocalTrashedFiles, syncTrash } from "services/trashService"; import uploadManager from "services/upload/uploadManager"; +import { isTokenValid, syncMapEnabled } from "services/userService"; import { Collection, CollectionSummaries } from "types/collection"; import { EnteFile } from "types/file"; import { @@ -120,6 +106,7 @@ import { } from "types/gallery"; import { Search, SearchResultSummary, UpdateSearch } from "types/search"; import { FamilyData } from "types/user"; +import { checkSubscriptionPurchase } from "utils/billing"; import { COLLECTION_OPS_TYPE, constructCollectionNameMap, @@ -131,8 +118,19 @@ import { splitNormalAndHiddenCollections, } from "utils/collection"; import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker"; +import { preloadImage } from "utils/common"; +import { + FILE_OPS_TYPE, + constructFileToCollectionMap, + getSelectedFiles, + getUniqueFiles, + handleFileOps, + mergeMetadata, + sortFiles, +} from "utils/file"; import { isArchivedFile } from "utils/magicMetadata"; import { getSessionExpiredMessage } from "utils/ui"; +import { isInternalUserForML } from "utils/user"; import { getLocalFamilyData } from "utils/user/family"; export const DeadCenter = 
styled("div")` @@ -201,8 +199,11 @@ export default function Gallery() { const [isPhotoSwipeOpen, setIsPhotoSwipeOpen] = useState(false); const { + // A function to call to get the props we should apply to the container, getRootProps: getDragAndDropRootProps, + // ... the props we should apply to the element, getInputProps: getDragAndDropInputProps, + // ... and the files that we got. acceptedFiles: dragAndDropFiles, } = useDropzone({ noClick: true, @@ -210,23 +211,23 @@ export default function Gallery() { disabled: shouldDisableDropzone, }); const { - selectedFiles: fileSelectorFiles, - open: openFileSelector, getInputProps: getFileSelectorInputProps, + openSelector: openFileSelector, + selectedFiles: fileSelectorFiles, } = useFileInput({ directory: false, }); const { - selectedFiles: folderSelectorFiles, - open: openFolderSelector, getInputProps: getFolderSelectorInputProps, + openSelector: openFolderSelector, + selectedFiles: folderSelectorFiles, } = useFileInput({ directory: true, }); const { - selectedFiles: fileSelectorZipFiles, - open: openZipFileSelector, getInputProps: getZipFileSelectorInputProps, + openSelector: openZipFileSelector, + selectedFiles: fileSelectorZipFiles, } = useFileInput({ directory: false, accept: ".zip", @@ -245,8 +246,13 @@ export default function Gallery() { const [tempHiddenFileIds, setTempHiddenFileIds] = useState>( new Set(), ); - const { startLoading, finishLoading, setDialogMessage, ...appContext } = - useContext(AppContext); + const { + startLoading, + finishLoading, + setDialogMessage, + logout, + ...appContext + } = useContext(AppContext); const [collectionSummaries, setCollectionSummaries] = useState(); const [hiddenCollectionSummaries, setHiddenCollectionSummaries] = @@ -315,6 +321,19 @@ export default function Gallery() { const [isClipSearchResult, setIsClipSearchResult] = useState(false); + // Ensure that the keys in local storage are not malformed by verifying that + // the recoveryKey can be decrypted with the masterKey. 
+ // Note: This is not bullet-proof. + const validateKey = async () => { + try { + await getRecoveryKey(); + return true; + } catch (e) { + logout(); + return false; + } + }; + useEffect(() => { appContext.showNavBar(true); const key = getKey(SESSION_KEYS.ENCRYPTION_KEY); @@ -668,7 +687,7 @@ export default function Gallery() { }, [collections, hiddenCollections]); const showSessionExpiredMessage = () => { - setDialogMessage(getSessionExpiredMessage()); + setDialogMessage(getSessionExpiredMessage(logout)); }; const syncWithRemote = async (force = false, silent = false) => { @@ -698,10 +717,10 @@ export default function Gallery() { await syncTrash(collections, setTrashedFiles); await syncEntities(); await syncMapEnabled(); - await syncEmbeddings(); const electron = globalThis.electron; if (electron) { - await syncFileEmbeddings(); + await syncCLIPEmbeddings(); + if (isInternalUserForML()) await syncFaceEmbeddings(); } if (clipService.isPlatformSupported()) { void clipService.scheduleImageEmbeddingExtraction(); @@ -1013,14 +1032,14 @@ export default function Gallery() { setSelectedFiles: setSelected, }} > - - + {blockingLoad && ( diff --git a/web/apps/photos/src/pages/shared-albums/index.tsx b/web/apps/photos/src/pages/shared-albums/index.tsx index ee6284d4a2..d26e93eade 100644 --- a/web/apps/photos/src/pages/shared-albums/index.tsx +++ b/web/apps/photos/src/pages/shared-albums/index.tsx @@ -1,15 +1,50 @@ import log from "@/next/log"; +import { APPS } from "@ente/shared/apps/constants"; import { CenteredFlex, SpaceBetweenFlex, VerticallyCentered, } from "@ente/shared/components/Container"; +import EnteSpinner from "@ente/shared/components/EnteSpinner"; +import FormPaper from "@ente/shared/components/Form/FormPaper"; +import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title"; +import OverflowMenu from "@ente/shared/components/OverflowMenu/menu"; +import { OverflowMenuOption } from "@ente/shared/components/OverflowMenu/option"; +import SingleInputForm, { 
+ SingleInputFormProps, +} from "@ente/shared/components/SingleInputForm"; +import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages"; +import { ENTE_WEBSITE_LINK } from "@ente/shared/constants/urls"; +import ComlinkCryptoWorker from "@ente/shared/crypto"; import { CustomError, parseSharingErrorCodes } from "@ente/shared/error"; +import { useFileInput } from "@ente/shared/hooks/useFileInput"; +import AddPhotoAlternateOutlined from "@mui/icons-material/AddPhotoAlternateOutlined"; +import FileDownloadOutlinedIcon from "@mui/icons-material/FileDownloadOutlined"; +import MoreHoriz from "@mui/icons-material/MoreHoriz"; +import Typography from "@mui/material/Typography"; +import bs58 from "bs58"; +import { CollectionInfo } from "components/Collections/CollectionInfo"; +import { CollectionInfoBarWrapper } from "components/Collections/styledComponents"; +import { + FilesDownloadProgress, + FilesDownloadProgressAttributes, +} from "components/FilesDownloadProgress"; +import FullScreenDropZone from "components/FullScreenDropZone"; +import { LoadingOverlay } from "components/LoadingOverlay"; import PhotoFrame from "components/PhotoFrame"; +import { ITEM_TYPE, TimeStampListItem } from "components/PhotoList"; +import UploadButton from "components/Upload/UploadButton"; +import Uploader from "components/Upload/Uploader"; +import { UploadSelectorInputs } from "components/UploadSelectorInputs"; +import SharedAlbumNavbar from "components/pages/sharedAlbum/Navbar"; +import SelectedFileOptions from "components/pages/sharedAlbum/SelectedFileOptions"; import { ALL_SECTION } from "constants/collection"; import { t } from "i18next"; +import { useRouter } from "next/router"; import { AppContext } from "pages/_app"; import { useContext, useEffect, useMemo, useRef, useState } from "react"; +import { useDropzone } from "react-dropzone"; +import downloadManager from "services/download"; import { getLocalPublicCollection, getLocalPublicCollectionPassword, @@ -25,50 +60,6 @@ import { 
} from "services/publicCollectionService"; import { Collection } from "types/collection"; import { EnteFile } from "types/file"; -import { - downloadSelectedFiles, - getSelectedFiles, - mergeMetadata, - sortFiles, -} from "utils/file"; -import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; - -import { logoutUser } from "@ente/accounts/services/user"; -import { APPS } from "@ente/shared/apps/constants"; -import EnteSpinner from "@ente/shared/components/EnteSpinner"; -import FormPaper from "@ente/shared/components/Form/FormPaper"; -import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title"; -import OverflowMenu from "@ente/shared/components/OverflowMenu/menu"; -import { OverflowMenuOption } from "@ente/shared/components/OverflowMenu/option"; -import SingleInputForm, { - SingleInputFormProps, -} from "@ente/shared/components/SingleInputForm"; -import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages"; -import { ENTE_WEBSITE_LINK } from "@ente/shared/constants/urls"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; -import useFileInput from "@ente/shared/hooks/useFileInput"; -import AddPhotoAlternateOutlined from "@mui/icons-material/AddPhotoAlternateOutlined"; -import FileDownloadOutlinedIcon from "@mui/icons-material/FileDownloadOutlined"; -import MoreHoriz from "@mui/icons-material/MoreHoriz"; -import Typography from "@mui/material/Typography"; -import bs58 from "bs58"; -import { CollectionInfo } from "components/Collections/CollectionInfo"; -import { CollectionInfoBarWrapper } from "components/Collections/styledComponents"; -import { - FilesDownloadProgress, - FilesDownloadProgressAttributes, -} from "components/FilesDownloadProgress"; -import FullScreenDropZone from "components/FullScreenDropZone"; -import { LoadingOverlay } from "components/LoadingOverlay"; -import { ITEM_TYPE, TimeStampListItem } from "components/PhotoList"; -import UploadButton from "components/Upload/UploadButton"; -import Uploader 
from "components/Upload/Uploader"; -import UploadSelectorInputs from "components/UploadSelectorInputs"; -import SharedAlbumNavbar from "components/pages/sharedAlbum/Navbar"; -import SelectedFileOptions from "components/pages/sharedAlbum/SelectedFileOptions"; -import { useRouter } from "next/router"; -import { useDropzone } from "react-dropzone"; -import downloadManager from "services/download"; import { SelectedState, SetFilesDownloadProgressAttributes, @@ -76,6 +67,13 @@ import { UploadTypeSelectorIntent, } from "types/gallery"; import { downloadCollectionFiles, isHiddenCollection } from "utils/collection"; +import { + downloadSelectedFiles, + getSelectedFiles, + mergeMetadata, + sortFiles, +} from "utils/file"; +import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; export default function PublicCollectionGallery() { const token = useRef(null); @@ -118,16 +116,16 @@ export default function PublicCollectionGallery() { disabled: shouldDisableDropzone, }); const { - selectedFiles: fileSelectorFiles, - open: openFileSelector, getInputProps: getFileSelectorInputProps, + openSelector: openFileSelector, + selectedFiles: fileSelectorFiles, } = useFileInput({ directory: false, }); const { - selectedFiles: folderSelectorFiles, - open: openFolderSelector, getInputProps: getFolderSelectorInputProps, + openSelector: openFolderSelector, + selectedFiles: folderSelectorFiles, } = useFileInput({ directory: true, }); @@ -186,7 +184,7 @@ export default function PublicCollectionGallery() { nonClosable: true, proceed: { text: t("LOGIN"), - action: logoutUser, + action: () => router.push(PAGES.ROOT), variant: "accent", }, }); @@ -543,14 +541,13 @@ export default function PublicCollectionGallery() { photoListFooter, }} > - + { + public async getPlans(): Promise { const token = getToken(); try { let response; @@ -37,8 +47,7 @@ class billingService { }, ); } - const { plans } = response.data; - return plans; + return response.data; } catch (e) { log.error("failed 
to get plans", e); } diff --git a/web/apps/photos/src/services/clip-service.ts b/web/apps/photos/src/services/clip-service.ts index 703c89cf4b..915f9ae03e 100644 --- a/web/apps/photos/src/services/clip-service.ts +++ b/web/apps/photos/src/services/clip-service.ts @@ -11,7 +11,7 @@ import { Embedding } from "types/embedding"; import { EnteFile } from "types/file"; import { getPersonalFiles } from "utils/file"; import downloadManager from "./download"; -import { getLocalEmbeddings, putEmbedding } from "./embeddingService"; +import { localCLIPEmbeddings, putEmbedding } from "./embeddingService"; import { getAllLocalFiles, getLocalFiles } from "./fileService"; /** Status of CLIP indexing on the images in the user's local library. */ @@ -80,21 +80,20 @@ class CLIPService { this.liveEmbeddingExtractionQueue = new PQueue({ concurrency: 1, }); - eventBus.on(Events.LOGOUT, this.logoutHandler, this); } isPlatformSupported = () => { return isElectron(); }; - private logoutHandler = async () => { + async logout() { if (this.embeddingExtractionInProgress) { this.embeddingExtractionInProgress.abort(); } if (this.onFileUploadedHandler) { await this.removeOnFileUploadListener(); } - }; + } setupOnFileUploadListener = async () => { try { @@ -185,7 +184,7 @@ class CLIPService { }; getTextEmbeddingIfAvailable = async (text: string) => { - return ensureElectron().clipTextEmbeddingIfAvailable(text); + return ensureElectron().computeCLIPTextEmbeddingIfAvailable(text); }; private runClipEmbeddingExtraction = async (canceller: AbortController) => { @@ -195,7 +194,7 @@ class CLIPService { return; } const localFiles = getPersonalFiles(await getAllLocalFiles(), user); - const existingEmbeddings = await getLocalEmbeddings(); + const existingEmbeddings = await localCLIPEmbeddings(); const pendingFiles = await getNonClipEmbeddingExtractedFiles( localFiles, existingEmbeddings, @@ -295,7 +294,7 @@ class CLIPService { const file = await localFile .arrayBuffer() .then((buffer) => new 
Uint8Array(buffer)); - return await ensureElectron().clipImageEmbedding(file); + return await ensureElectron().computeCLIPImageEmbedding(file); }; private encryptAndUploadEmbedding = async ( @@ -329,7 +328,8 @@ class CLIPService { private extractFileClipImageEmbedding = async (file: EnteFile) => { const thumb = await downloadManager.getThumbnail(file); - const embedding = await ensureElectron().clipImageEmbedding(thumb); + const embedding = + await ensureElectron().computeCLIPImageEmbedding(thumb); return embedding; }; @@ -394,7 +394,7 @@ export const computeClipMatchScore = async ( const initialIndexingStatus = async (): Promise => { const user = getData(LS_KEYS.USER); if (!user) throw new Error("Orphan CLIP indexing without a login"); - const allEmbeddings = await getLocalEmbeddings(); + const allEmbeddings = await localCLIPEmbeddings(); const localFiles = getPersonalFiles(await getLocalFiles(), user); const pendingFiles = await getNonClipEmbeddingExtractedFiles( localFiles, diff --git a/web/apps/photos/src/services/download/index.ts b/web/apps/photos/src/services/download/index.ts index 7b0171da11..eb979af875 100644 --- a/web/apps/photos/src/services/download/index.ts +++ b/web/apps/photos/src/services/download/index.ts @@ -1,18 +1,17 @@ import { FILE_TYPE } from "@/media/file-type"; import { decodeLivePhoto } from "@/media/live-photo"; -import { openCache, type BlobCache } from "@/next/blob-cache"; +import { blobCache, type BlobCache } from "@/next/blob-cache"; import log from "@/next/log"; import { APPS } from "@ente/shared/apps/constants"; import ComlinkCryptoWorker from "@ente/shared/crypto"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { CustomError } from "@ente/shared/error"; -import { Events, eventBus } from "@ente/shared/events"; import { isPlaybackPossible } from "@ente/shared/media/video-playback"; import { Remote } from "comlink"; import isElectron from "is-electron"; import * as ffmpeg from 
"services/ffmpeg"; import { EnteFile } from "types/file"; -import { generateStreamFromArrayBuffer, getRenderableImage } from "utils/file"; +import { getRenderableImage } from "utils/file"; import { PhotosDownloadClient } from "./clients/photos"; import { PublicAlbumsDownloadClient } from "./clients/publicAlbums"; @@ -31,6 +30,16 @@ export type SourceURLs = { isOriginal: boolean; isRenderable: boolean; type: "normal" | "livePhoto"; + /** + * Best effort attempt at obtaining the MIME type. + * + * Known cases where it is missing: + * + * - Live photos (these have a different code path for obtaining the URL). + * - A video that is passes the isPlayable test in the browser. + * + */ + mimeType?: string; }; export type OnDownloadProgress = (event: { @@ -82,7 +91,7 @@ class DownloadManagerImpl { } this.downloadClient = createDownloadClient(app, tokens); try { - this.thumbnailCache = await openCache("thumbs"); + this.thumbnailCache = await blobCache("thumbs"); } catch (e) { log.error( "Failed to open thumbnail cache, will continue without it", @@ -91,13 +100,12 @@ class DownloadManagerImpl { } // TODO (MR): Revisit full file caching cf disk space usage // try { - // if (isElectron()) this.fileCache = await openCache("files"); + // if (isElectron()) this.fileCache = await cache("files"); // } catch (e) { // log.error("Failed to open file cache, will continue without it", e); // } this.cryptoWorker = await ComlinkCryptoWorker.getInstance(); this.ready = true; - eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this); } private ensureInitialized() { @@ -107,21 +115,15 @@ class DownloadManagerImpl { ); } - private async logoutHandler() { - try { - log.info("downloadManger logoutHandler started"); - this.ready = false; - this.cryptoWorker = null; - this.downloadClient = null; - this.fileObjectURLPromises.clear(); - this.fileConversionPromises.clear(); - this.thumbnailObjectURLPromises.clear(); - this.fileDownloadProgress.clear(); - this.progressUpdater = () => {}; - 
log.info("downloadManager logoutHandler completed"); - } catch (e) { - log.error("downloadManager logoutHandler failed", e); - } + async logout() { + this.ready = false; + this.cryptoWorker = null; + this.downloadClient = null; + this.fileObjectURLPromises.clear(); + this.fileConversionPromises.clear(); + this.thumbnailObjectURLPromises.clear(); + this.fileDownloadProgress.clear(); + this.progressUpdater = () => {}; } updateToken(token: string, passwordToken?: string) { @@ -287,7 +289,7 @@ class DownloadManagerImpl { await this.cryptoWorker.fromB64(file.file.decryptionHeader), file.key, ); - return generateStreamFromArrayBuffer(decrypted); + return new Response(decrypted).body; } catch (e) { if (e.message === CustomError.PROCESSING_FAILED) { log.error( @@ -304,119 +306,90 @@ class DownloadManagerImpl { if (cachedBlob) res = new Response(cachedBlob); else { res = await this.downloadClient.downloadFileStream(file); - this.fileCache?.put(cacheKey, await res.blob()); + // We don't have a files cache currently, so this was already a + // no-op. But even if we had a cache, this seems sus, because + // res.blob() will read the stream and I'd think then trying to do + // the subsequent read of the stream again below won't work. + + // this.fileCache?.put(cacheKey, await res.blob()); } const reader = res.body.getReader(); const contentLength = +res.headers.get("Content-Length") ?? 
0; let downloadedBytes = 0; - const stream = new ReadableStream({ - start: async (controller) => { - try { - const decryptionHeader = await this.cryptoWorker.fromB64( - file.file.decryptionHeader, - ); - const fileKey = await this.cryptoWorker.fromB64(file.key); - const { pullState, decryptionChunkSize } = - await this.cryptoWorker.initChunkDecryption( - decryptionHeader, - fileKey, - ); - let data = new Uint8Array(); - // The following function handles each data chunk - const push = () => { - // "done" is a Boolean and value a "Uint8Array" - reader.read().then(async ({ done, value }) => { - try { - // Is there more data to read? - if (!done) { - downloadedBytes += value.byteLength; - onDownloadProgress({ - loaded: downloadedBytes, - total: contentLength, - }); - const buffer = new Uint8Array( - data.byteLength + value.byteLength, - ); - buffer.set(new Uint8Array(data), 0); - buffer.set( - new Uint8Array(value), - data.byteLength, - ); - if (buffer.length > decryptionChunkSize) { - const fileData = buffer.slice( - 0, - decryptionChunkSize, - ); - try { - const { decryptedData } = - await this.cryptoWorker.decryptFileChunk( - fileData, - pullState, - ); - controller.enqueue(decryptedData); - data = - buffer.slice( - decryptionChunkSize, - ); - } catch (e) { - if ( - e.message === - CustomError.PROCESSING_FAILED - ) { - log.error( - `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`, - e, - ); - } - throw e; - } - } else { - data = buffer; - } - push(); - } else { - if (data) { - try { - const { decryptedData } = - await this.cryptoWorker.decryptFileChunk( - data, - pullState, - ); - controller.enqueue(decryptedData); - data = null; - } catch (e) { - if ( - e.message === - CustomError.PROCESSING_FAILED - ) { - log.error( - `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} 
deviceFolder:${file.metadata.deviceFolder}`, - e, - ); - } - throw e; - } - } - controller.close(); - } - } catch (e) { - log.error("Failed to process file chunk", e); - controller.error(e); - } - }); - }; + const decryptionHeader = await this.cryptoWorker.fromB64( + file.file.decryptionHeader, + ); + const fileKey = await this.cryptoWorker.fromB64(file.key); + const { pullState, decryptionChunkSize } = + await this.cryptoWorker.initChunkDecryption( + decryptionHeader, + fileKey, + ); - push(); - } catch (e) { - log.error("Failed to process file stream", e); - controller.error(e); - } + let leftoverBytes = new Uint8Array(); + + return new ReadableStream({ + pull: async (controller) => { + // Each time pull is called, we want to enqueue at least once. + let didEnqueue = false; + do { + // done is a boolean and value is an Uint8Array. When done + // is true value will be empty. + const { done, value } = await reader.read(); + + let data: Uint8Array; + if (done) { + data = leftoverBytes; + } else { + downloadedBytes += value.length; + onDownloadProgress({ + loaded: downloadedBytes, + total: contentLength, + }); + + data = new Uint8Array( + leftoverBytes.length + value.length, + ); + data.set(new Uint8Array(leftoverBytes), 0); + data.set(new Uint8Array(value), leftoverBytes.length); + } + + // data.length might be a multiple of decryptionChunkSize, + // and we might need multiple iterations to drain it all. + while (data.length >= decryptionChunkSize) { + const { decryptedData } = + await this.cryptoWorker.decryptFileChunk( + data.slice(0, decryptionChunkSize), + pullState, + ); + controller.enqueue(decryptedData); + didEnqueue = true; + data = data.slice(decryptionChunkSize); + } + + if (done) { + // Send off the remaining bytes without waiting for a + // full chunk, no more bytes are going to come. 
+ if (data.length) { + const { decryptedData } = + await this.cryptoWorker.decryptFileChunk( + data, + pullState, + ); + controller.enqueue(decryptedData); + } + // Don't loop again even if we didn't enqueue. + didEnqueue = true; + controller.close(); + } else { + // Save it for the next pull. + leftoverBytes = data; + } + } while (!didEnqueue); }, }); - - return stream; } trackDownloadProgress = (fileID: number, fileSize: number) => { @@ -475,27 +448,37 @@ async function getRenderableFileURL( originalFileURL: string, forceConvert: boolean, ): Promise { - let srcURLs: SourceURLs["url"]; + const existingOrNewObjectURL = (convertedBlob: Blob) => + convertedBlob + ? convertedBlob === fileBlob + ? originalFileURL + : URL.createObjectURL(convertedBlob) + : undefined; + + let url: SourceURLs["url"]; + let isOriginal: boolean; + let isRenderable: boolean; + let type: SourceURLs["type"] = "normal"; + let mimeType: string | undefined; + switch (file.metadata.fileType) { case FILE_TYPE.IMAGE: { const convertedBlob = await getRenderableImage( file.metadata.title, fileBlob, ); - const convertedURL = getFileObjectURL( - originalFileURL, - fileBlob, - convertedBlob, - ); - srcURLs = convertedURL; + const convertedURL = existingOrNewObjectURL(convertedBlob); + url = convertedURL; + isOriginal = convertedURL === originalFileURL; + isRenderable = !!convertedURL; + mimeType = convertedBlob?.type; break; } case FILE_TYPE.LIVE_PHOTO: { - srcURLs = await getRenderableLivePhotoURL( - file, - fileBlob, - forceConvert, - ); + url = await getRenderableLivePhotoURL(file, fileBlob, forceConvert); + isOriginal = false; + isRenderable = false; + type = "livePhoto"; break; } case FILE_TYPE.VIDEO: { @@ -504,52 +487,24 @@ async function getRenderableFileURL( fileBlob, forceConvert, ); - const convertedURL = getFileObjectURL( - originalFileURL, - fileBlob, - convertedBlob, - ); - srcURLs = convertedURL; + const convertedURL = existingOrNewObjectURL(convertedBlob); + url = convertedURL; + 
isOriginal = convertedURL === originalFileURL; + isRenderable = !!convertedURL; + mimeType = convertedBlob?.type; break; } default: { - srcURLs = originalFileURL; + url = originalFileURL; + isOriginal = true; + isRenderable = false; break; } } - let isOriginal: boolean; - if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - isOriginal = false; - } else { - isOriginal = (srcURLs as string) === (originalFileURL as string); - } - - return { - url: srcURLs, - isOriginal, - isRenderable: - file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs, - type: - file.metadata.fileType === FILE_TYPE.LIVE_PHOTO - ? "livePhoto" - : "normal", - }; + return { url, isOriginal, isRenderable, type, mimeType }; } -const getFileObjectURL = ( - originalFileURL: string, - originalBlob: Blob, - convertedBlob: Blob, -) => { - const convertedURL = convertedBlob - ? convertedBlob === originalBlob - ? originalFileURL - : URL.createObjectURL(convertedBlob) - : null; - return convertedURL; -}; - async function getRenderableLivePhotoURL( file: EnteFile, fileBlob: Blob, @@ -610,10 +565,9 @@ async function getPlayableVideo( if (!forceConvert && !runOnWeb && !isElectron()) { return null; } - // TODO(MR): This might not work for very large (~ GB) videos. Test. 
log.info(`Converting video ${videoNameTitle} to mp4`); const convertedVideoData = await ffmpeg.convertToMP4(videoBlob); - return new Blob([convertedVideoData]); + return new Blob([convertedVideoData], { type: "video/mp4" }); } } catch (e) { log.error("Video conversion failed", e); diff --git a/web/apps/photos/src/services/embeddingService.ts b/web/apps/photos/src/services/embeddingService.ts index 36af848424..56cebe5a03 100644 --- a/web/apps/photos/src/services/embeddingService.ts +++ b/web/apps/photos/src/services/embeddingService.ts @@ -7,6 +7,7 @@ import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint } from "@ente/shared/network/api"; import localForage from "@ente/shared/storage/localForage"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; +import { FileML } from "services/face/remote"; import type { Embedding, EmbeddingModel, @@ -15,31 +16,30 @@ import type { PutEmbeddingRequest, } from "types/embedding"; import { EnteFile } from "types/file"; -import { - getLatestVersionEmbeddings, - getLatestVersionFileEmbeddings, -} from "utils/embedding"; -import { FileML } from "utils/machineLearning/mldataMappers"; import { getLocalCollections } from "./collectionService"; import { getAllLocalFiles } from "./fileService"; import { getLocalTrashedFiles } from "./trashService"; -const ENDPOINT = getEndpoint(); - const DIFF_LIMIT = 500; -const EMBEDDINGS_TABLE_V1 = "embeddings"; -const EMBEDDINGS_TABLE = "embeddings_v2"; +/** Local storage key suffix for embedding sync times */ +const embeddingSyncTimeLSKeySuffix = "embedding_sync_time"; +/** Local storage key for CLIP embeddings. */ +const clipEmbeddingsLSKey = "embeddings_v2"; const FILE_EMBEDING_TABLE = "file_embeddings"; -const EMBEDDING_SYNC_TIME_TABLE = "embedding_sync_time"; -export const getAllLocalEmbeddings = async () => { +/** Return all CLIP embeddings that we have available locally. 
*/ +export const localCLIPEmbeddings = async () => + (await storedCLIPEmbeddings()).filter(({ model }) => model === "onnx-clip"); + +const storedCLIPEmbeddings = async () => { const embeddings: Array = - await localForage.getItem(EMBEDDINGS_TABLE); + await localForage.getItem(clipEmbeddingsLSKey); if (!embeddings) { - await localForage.removeItem(EMBEDDINGS_TABLE_V1); - await localForage.removeItem(EMBEDDING_SYNC_TIME_TABLE); - await localForage.setItem(EMBEDDINGS_TABLE, []); + // Migrate + await localForage.removeItem("embeddings"); + await localForage.removeItem("embedding_sync_time"); + await localForage.setItem(clipEmbeddingsLSKey, []); return []; } return embeddings; @@ -54,15 +54,10 @@ export const getFileMLEmbeddings = async (): Promise => { return embeddings; }; -export const getLocalEmbeddings = async () => { - const embeddings = await getAllLocalEmbeddings(); - return embeddings.filter((embedding) => embedding.model === "onnx-clip"); -}; - const getModelEmbeddingSyncTime = async (model: EmbeddingModel) => { return ( (await localForage.getItem( - `${model}-${EMBEDDING_SYNC_TIME_TABLE}`, + `${model}-${embeddingSyncTimeLSKeySuffix}`, )) ?? 0 ); }; @@ -71,13 +66,17 @@ const setModelEmbeddingSyncTime = async ( model: EmbeddingModel, time: number, ) => { - await localForage.setItem(`${model}-${EMBEDDING_SYNC_TIME_TABLE}`, time); + await localForage.setItem(`${model}-${embeddingSyncTimeLSKeySuffix}`, time); }; -export const syncEmbeddings = async () => { - const models: EmbeddingModel[] = ["onnx-clip"]; +/** + * Fetch new CLIP embeddings with the server and save them locally. Also prune + * local embeddings for any files no longer exist locally. 
+ */ +export const syncCLIPEmbeddings = async () => { + const model: EmbeddingModel = "onnx-clip"; try { - let allEmbeddings = await getAllLocalEmbeddings(); + let allEmbeddings = await storedCLIPEmbeddings(); const localFiles = await getAllLocalFiles(); const hiddenAlbums = await getLocalCollections("hidden"); const localTrashFiles = await getLocalTrashedFiles(); @@ -89,79 +88,80 @@ export const syncEmbeddings = async () => { await cleanupDeletedEmbeddings( allLocalFiles, allEmbeddings, - EMBEDDINGS_TABLE, + clipEmbeddingsLSKey, ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); - for (const model of models) { - let modelLastSinceTime = await getModelEmbeddingSyncTime(model); - log.info( - `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, - ); - let response: GetEmbeddingDiffResponse; - do { - response = await getEmbeddingsDiff(modelLastSinceTime, model); - if (!response.diff?.length) { - return; - } - const newEmbeddings = await Promise.all( - response.diff.map(async (embedding) => { - try { - const { - encryptedEmbedding, - decryptionHeader, - ...rest - } = embedding; - const worker = - await ComlinkCryptoWorker.getInstance(); - const fileKey = fileIdToKeyMap.get( - embedding.fileID, - ); - if (!fileKey) { - throw Error(CustomError.FILE_NOT_FOUND); - } - const decryptedData = await worker.decryptEmbedding( - encryptedEmbedding, - decryptionHeader, - fileIdToKeyMap.get(embedding.fileID), - ); - return { - ...rest, - embedding: decryptedData, - } as Embedding; - } catch (e) { - let hasHiddenAlbums = false; - if (e.message === CustomError.FILE_NOT_FOUND) { - hasHiddenAlbums = hiddenAlbums?.length > 0; - } - log.error( - `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, - e, - ); + let modelLastSinceTime = await getModelEmbeddingSyncTime(model); + log.info( + `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, + ); + let response: GetEmbeddingDiffResponse; + do { + response = await 
getEmbeddingsDiff(modelLastSinceTime, model); + if (!response.diff?.length) { + return; + } + // Note: in rare cases we might get a diff entry for an embedding + // corresponding to a file which has been deleted (but whose + // embedding is enqueued for deletion). Client should expect such a + // scenario (all it has to do is just ignore them). + const newEmbeddings = await Promise.all( + response.diff.map(async (embedding) => { + try { + const { + encryptedEmbedding, + decryptionHeader, + ...rest + } = embedding; + const worker = await ComlinkCryptoWorker.getInstance(); + const fileKey = fileIdToKeyMap.get(embedding.fileID); + if (!fileKey) { + throw Error(CustomError.FILE_NOT_FOUND); } - }), - ); - allEmbeddings = getLatestVersionEmbeddings([ - ...allEmbeddings, - ...newEmbeddings, - ]); - if (response.diff.length) { - modelLastSinceTime = response.diff.slice(-1)[0].updatedAt; - } - await localForage.setItem(EMBEDDINGS_TABLE, allEmbeddings); - await setModelEmbeddingSyncTime(model, modelLastSinceTime); - log.info( - `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, - ); - } while (response.diff.length === DIFF_LIMIT); - } + const decryptedData = await worker.decryptEmbedding( + encryptedEmbedding, + decryptionHeader, + fileIdToKeyMap.get(embedding.fileID), + ); + + return { + ...rest, + embedding: decryptedData, + } as Embedding; + } catch (e) { + let hasHiddenAlbums = false; + if (e.message === CustomError.FILE_NOT_FOUND) { + hasHiddenAlbums = hiddenAlbums?.length > 0; + } + log.error( + `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, + e, + ); + } + }), + ); + allEmbeddings = getLatestVersionEmbeddings([ + ...allEmbeddings, + ...newEmbeddings, + ]); + modelLastSinceTime = response.diff.reduce( + (max, { updatedAt }) => Math.max(max, updatedAt), + modelLastSinceTime, + ); + await localForage.setItem(clipEmbeddingsLSKey, allEmbeddings); + await setModelEmbeddingSyncTime(model, modelLastSinceTime); + log.info( + 
`Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, + ); + } while (response.diff.length > 0); } catch (e) { log.error("Sync embeddings failed", e); } }; -export const syncFileEmbeddings = async () => { - const models: EmbeddingModel[] = ["file-ml-clip-face"]; +export const syncFaceEmbeddings = async () => { + const model: EmbeddingModel = "file-ml-clip-face"; try { let allEmbeddings: FileML[] = await getFileMLEmbeddings(); const localFiles = await getAllLocalFiles(); @@ -178,69 +178,100 @@ export const syncFileEmbeddings = async () => { FILE_EMBEDING_TABLE, ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); - for (const model of models) { - let modelLastSinceTime = await getModelEmbeddingSyncTime(model); - log.info( - `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, - ); - let response: GetEmbeddingDiffResponse; - do { - response = await getEmbeddingsDiff(modelLastSinceTime, model); - if (!response.diff?.length) { - return; - } - const newEmbeddings = await Promise.all( - response.diff.map(async (embedding) => { - try { - const worker = - await ComlinkCryptoWorker.getInstance(); - const fileKey = fileIdToKeyMap.get( - embedding.fileID, - ); - if (!fileKey) { - throw Error(CustomError.FILE_NOT_FOUND); - } - const decryptedData = await worker.decryptMetadata( - embedding.encryptedEmbedding, - embedding.decryptionHeader, - fileIdToKeyMap.get(embedding.fileID), - ); - return { - ...decryptedData, - updatedAt: embedding.updatedAt, - } as unknown as FileML; - } catch (e) { - let hasHiddenAlbums = false; - if (e.message === CustomError.FILE_NOT_FOUND) { - hasHiddenAlbums = hiddenAlbums?.length > 0; - } - log.error( - `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, - e, - ); + let modelLastSinceTime = await getModelEmbeddingSyncTime(model); + log.info( + `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`, + ); + let response: GetEmbeddingDiffResponse; + do { + 
response = await getEmbeddingsDiff(modelLastSinceTime, model); + if (!response.diff?.length) { + return; + } + const newEmbeddings = await Promise.all( + response.diff.map(async (embedding) => { + try { + const worker = await ComlinkCryptoWorker.getInstance(); + const fileKey = fileIdToKeyMap.get(embedding.fileID); + if (!fileKey) { + throw Error(CustomError.FILE_NOT_FOUND); } - }), - ); - allEmbeddings = getLatestVersionFileEmbeddings([ - ...allEmbeddings, - ...newEmbeddings, - ]); - if (response.diff.length) { - modelLastSinceTime = response.diff.slice(-1)[0].updatedAt; - } - await localForage.setItem(FILE_EMBEDING_TABLE, allEmbeddings); - await setModelEmbeddingSyncTime(model, modelLastSinceTime); - log.info( - `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, - ); - } while (response.diff.length === DIFF_LIMIT); - } + const decryptedData = await worker.decryptMetadata( + embedding.encryptedEmbedding, + embedding.decryptionHeader, + fileIdToKeyMap.get(embedding.fileID), + ); + + return { + ...decryptedData, + updatedAt: embedding.updatedAt, + } as unknown as FileML; + } catch (e) { + let hasHiddenAlbums = false; + if (e.message === CustomError.FILE_NOT_FOUND) { + hasHiddenAlbums = hiddenAlbums?.length > 0; + } + log.error( + `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`, + e, + ); + } + }), + ); + allEmbeddings = getLatestVersionFileEmbeddings([ + ...allEmbeddings, + ...newEmbeddings, + ]); + modelLastSinceTime = response.diff.reduce( + (max, { updatedAt }) => Math.max(max, updatedAt), + modelLastSinceTime, + ); + await localForage.setItem(FILE_EMBEDING_TABLE, allEmbeddings); + await setModelEmbeddingSyncTime(model, modelLastSinceTime); + log.info( + `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`, + ); + } while (response.diff.length > 0); } catch (e) { log.error("Sync embeddings failed", e); } }; +const getLatestVersionEmbeddings = (embeddings: Embedding[]) => { + const latestVersionEntities = 
new Map(); + embeddings.forEach((embedding) => { + if (!embedding?.fileID) { + return; + } + const existingEmbeddings = latestVersionEntities.get(embedding.fileID); + if ( + !existingEmbeddings || + existingEmbeddings.updatedAt < embedding.updatedAt + ) { + latestVersionEntities.set(embedding.fileID, embedding); + } + }); + return Array.from(latestVersionEntities.values()); +}; + +const getLatestVersionFileEmbeddings = (embeddings: FileML[]) => { + const latestVersionEntities = new Map(); + embeddings.forEach((embedding) => { + if (!embedding?.fileID) { + return; + } + const existingEmbeddings = latestVersionEntities.get(embedding.fileID); + if ( + !existingEmbeddings || + existingEmbeddings.updatedAt < embedding.updatedAt + ) { + latestVersionEntities.set(embedding.fileID, embedding); + } + }); + return Array.from(latestVersionEntities.values()); +}; + export const getEmbeddingsDiff = async ( sinceTime: number, model: EmbeddingModel, @@ -251,7 +282,7 @@ export const getEmbeddingsDiff = async ( return; } const response = await HTTPService.get( - `${ENDPOINT}/embeddings/diff`, + `${getEndpoint()}/embeddings/diff`, { sinceTime, limit: DIFF_LIMIT, @@ -280,7 +311,7 @@ export const putEmbedding = async ( throw Error(CustomError.TOKEN_MISSING); } const resp = await HTTPService.put( - `${ENDPOINT}/embeddings`, + `${getEndpoint()}/embeddings`, putEmbeddingReq, null, { diff --git a/web/apps/photos/src/services/export/index.ts b/web/apps/photos/src/services/export/index.ts index b02e05a428..16472b3b4f 100644 --- a/web/apps/photos/src/services/export/index.ts +++ b/web/apps/photos/src/services/export/index.ts @@ -3,12 +3,12 @@ import { decodeLivePhoto } from "@/media/live-photo"; import type { Metadata } from "@/media/types/file"; import { ensureElectron } from "@/next/electron"; import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { CustomError } from "@ente/shared/error"; import { Events, eventBus } from "@ente/shared/events"; import { LS_KEYS, 
getData, setData } from "@ente/shared/storage/localStorage"; import { formatDateTimeShort } from "@ente/shared/time/format"; import { User } from "@ente/shared/user/types"; -import { wait } from "@ente/shared/utils"; import QueueProcessor, { CancellationStatus, RequestCanceller, @@ -29,7 +29,6 @@ import { getNonEmptyPersonalCollections, } from "utils/collection"; import { - generateStreamFromArrayBuffer, getPersonalFiles, getUpdatedEXIFFileForDownload, mergeMetadata, @@ -734,38 +733,31 @@ class ExportService { const collectionExportName = collectionIDExportNameMap.get(collectionID); - await this.removeFileExportedRecord(exportDir, fileUID); - try { - if (isLivePhotoExportName(fileExportName)) { - const { image, video } = - parseLivePhotoExportName(fileExportName); + if (isLivePhotoExportName(fileExportName)) { + const { image, video } = + parseLivePhotoExportName(fileExportName); - await moveToTrash( - exportDir, - collectionExportName, - image, - ); - - await moveToTrash( - exportDir, - collectionExportName, - video, - ); - } else { - await moveToTrash( - exportDir, - collectionExportName, - fileExportName, - ); - } - } catch (e) { - await this.addFileExportedRecord( + await moveToTrash( exportDir, - fileUID, + collectionExportName, + image, + ); + + await moveToTrash( + exportDir, + collectionExportName, + video, + ); + } else { + await moveToTrash( + exportDir, + collectionExportName, fileExportName, ); - throw e; } + + await this.removeFileExportedRecord(exportDir, fileUID); + log.info(`Moved file id ${fileUID} to Trash`); } catch (e) { log.error("trashing failed for a file", e); @@ -985,26 +977,21 @@ class ExportService { file.metadata.title, electron.fs.exists, ); + await this.saveMetadataFile( + collectionExportPath, + fileExportName, + file, + ); + await writeStream( + electron, + `${collectionExportPath}/${fileExportName}`, + updatedFileStream, + ); await this.addFileExportedRecord( exportDir, fileUID, fileExportName, ); - try { - await 
this.saveMetadataFile( - collectionExportPath, - fileExportName, - file, - ); - await writeStream( - electron, - `${collectionExportPath}/${fileExportName}`, - updatedFileStream, - ); - } catch (e) { - await this.removeFileExportedRecord(exportDir, fileUID); - throw e; - } } } catch (e) { log.error("download and save failed", e); @@ -1032,52 +1019,44 @@ class ExportService { livePhoto.videoFileName, fs.exists, ); + const livePhotoExportName = getLivePhotoExportName( imageExportName, videoExportName, ); + + await this.saveMetadataFile( + collectionExportPath, + imageExportName, + file, + ); + await writeStream( + electron, + `${collectionExportPath}/${imageExportName}`, + new Response(livePhoto.imageData).body, + ); + + await this.saveMetadataFile( + collectionExportPath, + videoExportName, + file, + ); + try { + await writeStream( + electron, + `${collectionExportPath}/${videoExportName}`, + new Response(livePhoto.videoData).body, + ); + } catch (e) { + await fs.rm(`${collectionExportPath}/${imageExportName}`); + throw e; + } + await this.addFileExportedRecord( exportDir, fileUID, livePhotoExportName, ); - try { - const imageStream = generateStreamFromArrayBuffer( - livePhoto.imageData, - ); - await this.saveMetadataFile( - collectionExportPath, - imageExportName, - file, - ); - await writeStream( - electron, - `${collectionExportPath}/${imageExportName}`, - imageStream, - ); - - const videoStream = generateStreamFromArrayBuffer( - livePhoto.videoData, - ); - await this.saveMetadataFile( - collectionExportPath, - videoExportName, - file, - ); - try { - await writeStream( - electron, - `${collectionExportPath}/${videoExportName}`, - videoStream, - ); - } catch (e) { - await fs.rm(`${collectionExportPath}/${imageExportName}`); - throw e; - } - } catch (e) { - await this.removeFileExportedRecord(exportDir, fileUID); - throw e; - } } private async saveMetadataFile( diff --git a/web/apps/photos/src/services/export/migration.ts 
b/web/apps/photos/src/services/export/migration.ts index 9404ddde5b..0c8de03e63 100644 --- a/web/apps/photos/src/services/export/migration.ts +++ b/web/apps/photos/src/services/export/migration.ts @@ -3,9 +3,9 @@ import { decodeLivePhoto } from "@/media/live-photo"; import { ensureElectron } from "@/next/electron"; import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { User } from "@ente/shared/user/types"; -import { wait } from "@ente/shared/utils"; import { getLocalCollections } from "services/collectionService"; import downloadManager from "services/download"; import { getAllLocalFiles } from "services/fileService"; diff --git a/web/apps/photos/src/services/face/cluster.ts b/web/apps/photos/src/services/face/cluster.ts new file mode 100644 index 0000000000..41ba76504b --- /dev/null +++ b/web/apps/photos/src/services/face/cluster.ts @@ -0,0 +1,35 @@ +import { Hdbscan, type DebugInfo } from "hdbscan"; + +export type Cluster = number[]; + +export interface ClusterFacesResult { + clusters: Cluster[]; + noise: Cluster; + debugInfo?: DebugInfo; +} + +/** + * Cluster the given {@link faceEmbeddings}. + * + * @param faceEmbeddings An array of embeddings produced by our face indexing + * pipeline. Each embedding is for a face detected in an image (a single image + * may have multiple faces detected within it). 
+ */ +export const clusterFaces = async ( + faceEmbeddings: Array>, +): Promise => { + const hdbscan = new Hdbscan({ + input: faceEmbeddings, + minClusterSize: 3, + minSamples: 5, + clusterSelectionEpsilon: 0.6, + clusterSelectionMethod: "leaf", + debug: true, + }); + + return { + clusters: hdbscan.getClusters(), + noise: hdbscan.getNoise(), + debugInfo: hdbscan.getDebugInfo(), + }; +}; diff --git a/web/apps/photos/src/services/face/crop.ts b/web/apps/photos/src/services/face/crop.ts new file mode 100644 index 0000000000..369dfc654a --- /dev/null +++ b/web/apps/photos/src/services/face/crop.ts @@ -0,0 +1,94 @@ +import { blobCache } from "@/next/blob-cache"; +import type { Box, Face, FaceAlignment } from "./types"; + +export const saveFaceCrop = async (imageBitmap: ImageBitmap, face: Face) => { + const faceCrop = extractFaceCrop(imageBitmap, face.alignment); + const blob = await imageBitmapToBlob(faceCrop); + faceCrop.close(); + + const cache = await blobCache("face-crops"); + await cache.put(face.id, blob); + + return blob; +}; + +const imageBitmapToBlob = (imageBitmap: ImageBitmap) => { + const canvas = new OffscreenCanvas(imageBitmap.width, imageBitmap.height); + canvas.getContext("2d").drawImage(imageBitmap, 0, 0); + return canvas.convertToBlob({ type: "image/jpeg", quality: 0.8 }); +}; + +const extractFaceCrop = ( + imageBitmap: ImageBitmap, + alignment: FaceAlignment, +): ImageBitmap => { + // TODO-ML: This algorithm is different from what is used by the mobile app. + // Also, it needs to be something that can work fully using the embedding we + // receive from remote - the `alignment.boundingBox` will not be available + // to us in such cases. 
+ const paddedBox = roundBox(enlargeBox(alignment.boundingBox, 1.5)); + const outputSize = { width: paddedBox.width, height: paddedBox.height }; + + const maxDimension = 256; + const scale = Math.min( + maxDimension / paddedBox.width, + maxDimension / paddedBox.height, + ); + + if (scale < 1) { + outputSize.width = Math.round(scale * paddedBox.width); + outputSize.height = Math.round(scale * paddedBox.height); + } + + const offscreen = new OffscreenCanvas(outputSize.width, outputSize.height); + const offscreenCtx = offscreen.getContext("2d"); + offscreenCtx.imageSmoothingQuality = "high"; + + offscreenCtx.translate(outputSize.width / 2, outputSize.height / 2); + + const outputBox = { + x: -outputSize.width / 2, + y: -outputSize.height / 2, + width: outputSize.width, + height: outputSize.height, + }; + + const enlargedBox = enlargeBox(paddedBox, 1.5); + const enlargedOutputBox = enlargeBox(outputBox, 1.5); + + offscreenCtx.drawImage( + imageBitmap, + enlargedBox.x, + enlargedBox.y, + enlargedBox.width, + enlargedBox.height, + enlargedOutputBox.x, + enlargedOutputBox.y, + enlargedOutputBox.width, + enlargedOutputBox.height, + ); + + return offscreen.transferToImageBitmap(); +}; + +/** Round all the components of the box. */ +const roundBox = (box: Box): Box => { + const [x, y, width, height] = [box.x, box.y, box.width, box.height].map( + (val) => Math.round(val), + ); + return { x, y, width, height }; +}; + +/** Increase the size of the given {@link box} by {@link factor}. 
*/ +const enlargeBox = (box: Box, factor: number): Box => { + const center = { x: box.x + box.width / 2, y: box.y + box.height / 2 }; + const newWidth = factor * box.width; + const newHeight = factor * box.height; + + return { + x: center.x - newWidth / 2, + y: center.y - newHeight / 2, + width: newWidth, + height: newHeight, + }; +}; diff --git a/web/apps/photos/src/utils/storage/mlIDbStorage.ts b/web/apps/photos/src/services/face/db.ts similarity index 65% rename from web/apps/photos/src/utils/storage/mlIDbStorage.ts rename to web/apps/photos/src/services/face/db.ts index 766c3ac9a9..4742dd9d73 100644 --- a/web/apps/photos/src/utils/storage/mlIDbStorage.ts +++ b/web/apps/photos/src/services/face/db.ts @@ -1,11 +1,5 @@ import { haveWindow } from "@/next/env"; import log from "@/next/log"; -import { - DEFAULT_ML_SEARCH_CONFIG, - DEFAULT_ML_SYNC_CONFIG, - DEFAULT_ML_SYNC_JOB_CONFIG, - MAX_ML_SYNC_ERROR_COUNT, -} from "constants/mlConfig"; import { DBSchema, IDBPDatabase, @@ -15,20 +9,42 @@ import { openDB, } from "idb"; import isElectron from "is-electron"; -import { Face, MLLibraryData, MlFileData, Person } from "types/machineLearning"; -import { IndexStatus } from "types/machineLearning/ui"; +import type { Person } from "services/face/people"; +import type { MlFileData } from "services/face/types"; +import { + DEFAULT_ML_SEARCH_CONFIG, + MAX_ML_SYNC_ERROR_COUNT, +} from "services/machineLearning/machineLearningService"; + +export interface IndexStatus { + outOfSyncFilesExists: boolean; + nSyncedFiles: number; + nTotalFiles: number; + localFilesSynced: boolean; + peopleIndexSynced: boolean; +} + +/** + * TODO(MR): Transient type with an intersection of values that both existing + * and new types during the migration will have. Eventually we'll store the the + * server ML data shape here exactly. 
+ */ +export interface MinimalPersistedFileData { + fileId: number; + mlVersion: number; + errorCount: number; + faces?: { personId?: number; id: string }[]; +} interface Config {} -export const ML_SYNC_JOB_CONFIG_NAME = "ml-sync-job"; -export const ML_SYNC_CONFIG_NAME = "ml-sync"; export const ML_SEARCH_CONFIG_NAME = "ml-search"; const MLDATA_DB_NAME = "mldata"; interface MLDb extends DBSchema { files: { key: number; - value: MlFileData; + value: MinimalPersistedFileData; indexes: { mlVersion: [number, number] }; }; people: { @@ -47,7 +63,7 @@ interface MLDb extends DBSchema { }; library: { key: string; - value: MLLibraryData; + value: unknown; }; configs: { key: string; @@ -129,15 +145,18 @@ class MLIDbStorage { // TODO: update configs if version is updated in defaults db.createObjectStore("configs"); + /* await tx .objectStore("configs") .add( DEFAULT_ML_SYNC_JOB_CONFIG, - ML_SYNC_JOB_CONFIG_NAME, + "ml-sync-job", ); + await tx .objectStore("configs") .add(DEFAULT_ML_SYNC_CONFIG, ML_SYNC_CONFIG_NAME); + */ } if (oldVersion < 3) { await tx @@ -156,6 +175,14 @@ class MLIDbStorage { .objectStore("configs") .delete(ML_SEARCH_CONFIG_NAME); + await tx + .objectStore("configs") + .delete(""ml-sync""); + + await tx + .objectStore("configs") + .delete("ml-sync-job"); + await tx .objectStore("configs") .add( @@ -163,6 +190,7 @@ class MLIDbStorage { ML_SEARCH_CONFIG_NAME, ); + db.deleteObjectStore("library"); db.deleteObjectStore("things"); } catch { // TODO: ignore for now as we finalize the new version @@ -196,38 +224,6 @@ class MLIDbStorage { await this.db; } - public async getAllFileIds() { - const db = await this.db; - return db.getAllKeys("files"); - } - - public async putAllFilesInTx(mlFiles: Array) { - const db = await this.db; - const tx = db.transaction("files", "readwrite"); - await Promise.all(mlFiles.map((mlFile) => tx.store.put(mlFile))); - await tx.done; - } - - public async removeAllFilesInTx(fileIds: Array) { - const db = await this.db; - const tx = 
db.transaction("files", "readwrite"); - - await Promise.all(fileIds.map((fileId) => tx.store.delete(fileId))); - await tx.done; - } - - public async newTransaction< - Name extends StoreNames, - Mode extends IDBTransactionMode = "readonly", - >(storeNames: Name, mode?: Mode) { - const db = await this.db; - return db.transaction(storeNames, mode); - } - - public async commit(tx: IDBPTransaction) { - return tx.done; - } - public async getAllFileIdsForUpdate( tx: IDBPTransaction, ) { @@ -261,16 +257,11 @@ class MLIDbStorage { return fileIds; } - public async getFile(fileId: number) { + public async getFile(fileId: number): Promise { const db = await this.db; return db.get("files", fileId); } - public async getAllFiles() { - const db = await this.db; - return db.getAll("files"); - } - public async putFile(mlFile: MlFileData) { const db = await this.db; return db.put("files", mlFile); @@ -278,7 +269,7 @@ class MLIDbStorage { public async upsertFileInTx( fileId: number, - upsert: (mlFile: MlFileData) => MlFileData, + upsert: (mlFile: MinimalPersistedFileData) => MinimalPersistedFileData, ) { const db = await this.db; const tx = db.transaction("files", "readwrite"); @@ -291,7 +282,7 @@ class MLIDbStorage { } public async putAllFiles( - mlFiles: Array, + mlFiles: MinimalPersistedFileData[], tx: IDBPTransaction, ) { await Promise.all(mlFiles.map((mlFile) => tx.store.put(mlFile))); @@ -304,44 +295,6 @@ class MLIDbStorage { await Promise.all(fileIds.map((fileId) => tx.store.delete(fileId))); } - public async getFace(fileID: number, faceId: string) { - const file = await this.getFile(fileID); - const face = file.faces.filter((f) => f.id === faceId); - return face[0]; - } - - public async getAllFacesMap() { - const startTime = Date.now(); - const db = await this.db; - const allFiles = await db.getAll("files"); - const allFacesMap = new Map>(); - allFiles.forEach( - (mlFileData) => - mlFileData.faces && - allFacesMap.set(mlFileData.fileId, mlFileData.faces), - ); - 
log.info("getAllFacesMap", Date.now() - startTime, "ms"); - - return allFacesMap; - } - - public async updateFaces(allFacesMap: Map) { - const startTime = Date.now(); - const db = await this.db; - const tx = db.transaction("files", "readwrite"); - let cursor = await tx.store.openCursor(); - while (cursor) { - if (allFacesMap.has(cursor.key)) { - const mlFileData = { ...cursor.value }; - mlFileData.faces = allFacesMap.get(cursor.key); - cursor.update(mlFileData); - } - cursor = await cursor.continue(); - } - await tx.done; - log.info("updateFaces", Date.now() - startTime, "ms"); - } - public async getPerson(id: number) { const db = await this.db; return db.get("people", id); @@ -352,21 +305,6 @@ class MLIDbStorage { return db.getAll("people"); } - public async putPerson(person: Person) { - const db = await this.db; - return db.put("people", person); - } - - public async clearAllPeople() { - const db = await this.db; - return db.clear("people"); - } - - public async getIndexVersion(index: string) { - const db = await this.db; - return db.get("versions", index); - } - public async incrementIndexVersion(index: StoreNames) { if (index === "versions") { throw new Error("versions store can not be versioned"); @@ -381,21 +319,6 @@ class MLIDbStorage { return version; } - public async setIndexVersion(index: string, version: number) { - const db = await this.db; - return db.put("versions", version, index); - } - - public async getLibraryData() { - const db = await this.db; - return db.get("library", "data"); - } - - public async putLibraryData(data: MLLibraryData) { - const db = await this.db; - return db.put("library", data, "data"); - } - public async getConfig(name: string, def: T) { const db = await this.db; const tx = db.transaction("configs", "readwrite"); @@ -459,66 +382,6 @@ class MLIDbStorage { peopleIndexVersion === filesIndexVersion, }; } - - // for debug purpose - public async getAllMLData() { - const db = await this.db; - const tx = 
db.transaction(db.objectStoreNames, "readonly"); - const allMLData: any = {}; - for (const store of tx.objectStoreNames) { - const keys = await tx.objectStore(store).getAllKeys(); - const data = await tx.objectStore(store).getAll(); - - allMLData[store] = {}; - for (let i = 0; i < keys.length; i++) { - allMLData[store][keys[i]] = data[i]; - } - } - await tx.done; - - const files = allMLData["files"]; - for (const fileId of Object.keys(files)) { - const fileData = files[fileId]; - fileData.faces?.forEach( - (f) => (f.embedding = Array.from(f.embedding)), - ); - } - - return allMLData; - } - - // for debug purpose, this will overwrite all data - public async putAllMLData(allMLData: Map) { - const db = await this.db; - const tx = db.transaction(db.objectStoreNames, "readwrite"); - for (const store of tx.objectStoreNames) { - const records = allMLData[store]; - if (!records) { - continue; - } - const txStore = tx.objectStore(store); - - if (store === "files") { - const files = records; - for (const fileId of Object.keys(files)) { - const fileData = files[fileId]; - fileData.faces?.forEach( - (f) => (f.embedding = Float32Array.from(f.embedding)), - ); - } - } - - await txStore.clear(); - for (const key of Object.keys(records)) { - if (txStore.keyPath) { - txStore.put(records[key]); - } else { - txStore.put(records[key], key); - } - } - } - await tx.done; - } } export default new MLIDbStorage(); diff --git a/web/apps/photos/src/services/face/f-index.ts b/web/apps/photos/src/services/face/f-index.ts new file mode 100644 index 0000000000..5197214b24 --- /dev/null +++ b/web/apps/photos/src/services/face/f-index.ts @@ -0,0 +1,715 @@ +import { FILE_TYPE } from "@/media/file-type"; +import log from "@/next/log"; +import { workerBridge } from "@/next/worker/worker-bridge"; +import { Matrix } from "ml-matrix"; +import type { + Box, + Dimensions, + Face, + FaceAlignment, + FaceDetection, + MlFileData, +} from "services/face/types"; +import { defaultMLVersion } from 
"services/machineLearning/machineLearningService"; +import { getSimilarityTransformation } from "similarity-transformation"; +import { + Matrix as TransformationMatrix, + applyToPoint, + compose, + scale, + translate, +} from "transformation-matrix"; +import type { EnteFile } from "types/file"; +import { saveFaceCrop } from "./crop"; +import { fetchImageBitmap, getLocalFileImageBitmap } from "./file"; +import { + clamp, + grayscaleIntMatrixFromNormalized2List, + pixelRGBBilinear, + warpAffineFloat32List, +} from "./image"; + +/** + * Index faces in the given file. + * + * This function is the entry point to the indexing pipeline. The file goes + * through various stages: + * + * 1. Downloading the original if needed. + * 2. Detect faces using ONNX/YOLO + * 3. Align the face rectangles, compute blur. + * 4. Compute embeddings for the detected face (crops). + * + * Once all of it is done, it returns the face rectangles and embeddings so that + * they can be saved locally for offline use, and encrypts and uploads them to + * the user's remote storage so that their other devices can download them + * instead of needing to reindex. + */ +export const indexFaces = async (enteFile: EnteFile, localFile?: File) => { + const startTime = Date.now(); + + const imageBitmap = await fetchOrCreateImageBitmap(enteFile, localFile); + let mlFile: MlFileData; + try { + mlFile = await indexFaces_(enteFile, imageBitmap); + } finally { + imageBitmap.close(); + } + + log.debug(() => { + const nf = mlFile.faces?.length ?? 0; + const ms = Date.now() - startTime; + return `Indexed ${nf} faces in file ${enteFile.id} (${ms} ms)`; + }); + return mlFile; +}; + +/** + * Return a {@link ImageBitmap}, using {@link localFile} if present otherwise + * downloading the source image corresponding to {@link enteFile} from remote. 
+ */ +const fetchOrCreateImageBitmap = async ( + enteFile: EnteFile, + localFile: File, +) => { + const fileType = enteFile.metadata.fileType; + if (localFile) { + // TODO-ML(MR): Could also be image part of live photo? + if (fileType !== FILE_TYPE.IMAGE) + throw new Error("Local file of only image type is supported"); + + return await getLocalFileImageBitmap(enteFile, localFile); + } else if ([FILE_TYPE.IMAGE, FILE_TYPE.LIVE_PHOTO].includes(fileType)) { + return await fetchImageBitmap(enteFile); + } else { + throw new Error(`Cannot index unsupported file type ${fileType}`); + } +}; + +const indexFaces_ = async (enteFile: EnteFile, imageBitmap: ImageBitmap) => { + const fileID = enteFile.id; + const { width, height } = imageBitmap; + const imageDimensions = { width, height }; + const mlFile: MlFileData = { + fileId: fileID, + mlVersion: defaultMLVersion, + imageDimensions, + errorCount: 0, + }; + + const faceDetections = await detectFaces(imageBitmap); + const detectedFaces = faceDetections.map((detection) => ({ + id: makeFaceID(fileID, detection, imageDimensions), + fileId: fileID, + detection, + })); + mlFile.faces = detectedFaces; + + if (detectedFaces.length > 0) { + const alignments: FaceAlignment[] = []; + + for (const face of mlFile.faces) { + const alignment = faceAlignment(face.detection); + face.alignment = alignment; + alignments.push(alignment); + + await saveFaceCrop(imageBitmap, face); + } + + const alignedFacesData = convertToMobileFaceNetInput( + imageBitmap, + alignments, + ); + + const blurValues = detectBlur(alignedFacesData, mlFile.faces); + mlFile.faces.forEach((f, i) => (f.blurValue = blurValues[i])); + + const embeddings = await computeEmbeddings(alignedFacesData); + mlFile.faces.forEach((f, i) => (f.embedding = embeddings[i])); + + mlFile.faces.forEach((face) => { + face.detection = relativeDetection(face.detection, imageDimensions); + }); + } + + return mlFile; +}; + +/** + * Detect faces in the given {@link imageBitmap}. 
+ * + * The model used is YOLOv5Face, running in an ONNX runtime. + */ +const detectFaces = async ( + imageBitmap: ImageBitmap, +): Promise => { + const rect = ({ width, height }) => ({ x: 0, y: 0, width, height }); + + const { yoloInput, yoloSize } = + convertToYOLOInputFloat32ChannelsFirst(imageBitmap); + const yoloOutput = await workerBridge.detectFaces(yoloInput); + const faces = filterExtractDetectionsFromYOLOOutput(yoloOutput); + const faceDetections = transformFaceDetections( + faces, + rect(yoloSize), + rect(imageBitmap), + ); + + return naiveNonMaxSuppression(faceDetections, 0.4); +}; + +/** + * Convert {@link imageBitmap} into the format that the YOLO face detection + * model expects. + */ +const convertToYOLOInputFloat32ChannelsFirst = (imageBitmap: ImageBitmap) => { + const requiredWidth = 640; + const requiredHeight = 640; + + const { width, height } = imageBitmap; + + // Create an OffscreenCanvas and set its size. + const offscreenCanvas = new OffscreenCanvas(width, height); + const ctx = offscreenCanvas.getContext("2d"); + ctx.drawImage(imageBitmap, 0, 0, width, height); + const imageData = ctx.getImageData(0, 0, width, height); + const pixelData = imageData.data; + + // Maintain aspect ratio. + const scale = Math.min(requiredWidth / width, requiredHeight / height); + + const scaledWidth = clamp(Math.round(width * scale), 0, requiredWidth); + const scaledHeight = clamp(Math.round(height * scale), 0, requiredHeight); + + const yoloInput = new Float32Array(1 * 3 * requiredWidth * requiredHeight); + const yoloSize = { width: scaledWidth, height: scaledHeight }; + + // Populate the Float32Array with normalized pixel values. + let pi = 0; + const channelOffsetGreen = requiredHeight * requiredWidth; + const channelOffsetBlue = 2 * requiredHeight * requiredWidth; + for (let h = 0; h < requiredHeight; h++) { + for (let w = 0; w < requiredWidth; w++) { + const { r, g, b } = + w >= scaledWidth || h >= scaledHeight + ? 
{ r: 114, g: 114, b: 114 } + : pixelRGBBilinear( + w / scale, + h / scale, + pixelData, + width, + height, + ); + yoloInput[pi] = r / 255.0; + yoloInput[pi + channelOffsetGreen] = g / 255.0; + yoloInput[pi + channelOffsetBlue] = b / 255.0; + pi++; + } + } + + return { yoloInput, yoloSize }; +}; + +/** + * Extract detected faces from the YOLOv5Face's output. + * + * Only detections that exceed a minimum score are returned. + * + * @param rows A Float32Array of shape [25200, 16], where each row represents a + * face detection. + * + * YOLO detects a fixed number of faces, 25200, always from the input it is + * given. Each detection is a "row" of 16 bytes, containing the bounding box, + * score, and landmarks of the detection. + * + * We prune out detections with a score lower than our threshold. However, we + * will still be left with some overlapping detections of the same face: these + * we will deduplicate in {@link removeDuplicateDetections}. + */ +const filterExtractDetectionsFromYOLOOutput = ( + rows: Float32Array, +): FaceDetection[] => { + const faces: FaceDetection[] = []; + // Iterate over each row. 
+ for (let i = 0; i < rows.length; i += 16) { + const score = rows[i + 4]; + if (score < 0.7) continue; + + const xCenter = rows[i]; + const yCenter = rows[i + 1]; + const width = rows[i + 2]; + const height = rows[i + 3]; + const x = xCenter - width / 2.0; // topLeft + const y = yCenter - height / 2.0; // topLeft + + const leftEyeX = rows[i + 5]; + const leftEyeY = rows[i + 6]; + const rightEyeX = rows[i + 7]; + const rightEyeY = rows[i + 8]; + const noseX = rows[i + 9]; + const noseY = rows[i + 10]; + const leftMouthX = rows[i + 11]; + const leftMouthY = rows[i + 12]; + const rightMouthX = rows[i + 13]; + const rightMouthY = rows[i + 14]; + + const box = { x, y, width, height }; + const probability = score as number; + const landmarks = [ + { x: leftEyeX, y: leftEyeY }, + { x: rightEyeX, y: rightEyeY }, + { x: noseX, y: noseY }, + { x: leftMouthX, y: leftMouthY }, + { x: rightMouthX, y: rightMouthY }, + ]; + faces.push({ box, landmarks, probability }); + } + return faces; +}; + +/** + * Transform the given {@link faceDetections} from their coordinate system in + * which they were detected ({@link inBox}) back to the coordinate system of the + * original image ({@link toBox}). 
+ */ +const transformFaceDetections = ( + faceDetections: FaceDetection[], + inBox: Box, + toBox: Box, +): FaceDetection[] => { + const transform = boxTransformationMatrix(inBox, toBox); + return faceDetections.map((f) => ({ + box: transformBox(f.box, transform), + landmarks: f.landmarks.map((p) => applyToPoint(transform, p)), + probability: f.probability, + })); +}; + +const boxTransformationMatrix = ( + inBox: Box, + toBox: Box, +): TransformationMatrix => + compose( + translate(toBox.x, toBox.y), + scale(toBox.width / inBox.width, toBox.height / inBox.height), + ); + +const transformBox = (box: Box, transform: TransformationMatrix): Box => { + const topLeft = applyToPoint(transform, { x: box.x, y: box.y }); + const bottomRight = applyToPoint(transform, { + x: box.x + box.width, + y: box.y + box.height, + }); + + return { + x: topLeft.x, + y: topLeft.y, + width: bottomRight.x - topLeft.x, + height: bottomRight.y - topLeft.y, + }; +}; + +/** + * Remove overlapping faces from an array of face detections through non-maximum + * suppression algorithm. + * + * This function sorts the detections by their probability in descending order, + * then iterates over them. + * + * For each detection, it calculates the Intersection over Union (IoU) with all + * other detections. + * + * If the IoU is greater than or equal to the specified threshold + * (`iouThreshold`), the other detection is considered overlapping and is + * removed. + * + * @param detections - An array of face detections to remove overlapping faces + * from. + * + * @param iouThreshold - The minimum IoU between two detections for them to be + * considered overlapping. + * + * @returns An array of face detections with overlapping faces removed + */ +const naiveNonMaxSuppression = ( + detections: FaceDetection[], + iouThreshold: number, +): FaceDetection[] => { + // Sort the detections by score, the highest first. 
+ detections.sort((a, b) => b.probability - a.probability); + + // Loop through the detections and calculate the IOU. + for (let i = 0; i < detections.length - 1; i++) { + for (let j = i + 1; j < detections.length; j++) { + const iou = intersectionOverUnion(detections[i], detections[j]); + if (iou >= iouThreshold) { + detections.splice(j, 1); + j--; + } + } + } + + return detections; +}; + +const intersectionOverUnion = (a: FaceDetection, b: FaceDetection): number => { + const intersectionMinX = Math.max(a.box.x, b.box.x); + const intersectionMinY = Math.max(a.box.y, b.box.y); + const intersectionMaxX = Math.min( + a.box.x + a.box.width, + b.box.x + b.box.width, + ); + const intersectionMaxY = Math.min( + a.box.y + a.box.height, + b.box.y + b.box.height, + ); + + const intersectionWidth = intersectionMaxX - intersectionMinX; + const intersectionHeight = intersectionMaxY - intersectionMinY; + + if (intersectionWidth < 0 || intersectionHeight < 0) { + return 0.0; // If boxes do not overlap, IoU is 0 + } + + const areaA = a.box.width * a.box.height; + const areaB = b.box.width * b.box.height; + + const intersectionArea = intersectionWidth * intersectionHeight; + const unionArea = areaA + areaB - intersectionArea; + + return intersectionArea / unionArea; +}; + +const makeFaceID = ( + fileID: number, + { box }: FaceDetection, + image: Dimensions, +) => { + const part = (v: number) => clamp(v, 0.0, 0.999999).toFixed(5).substring(2); + const xMin = part(box.x / image.width); + const yMin = part(box.y / image.height); + const xMax = part((box.x + box.width) / image.width); + const yMax = part((box.y + box.height) / image.height); + return [`${fileID}`, xMin, yMin, xMax, yMax].join("_"); +}; + +/** + * Compute and return an {@link FaceAlignment} for the given face detection. + * + * @param faceDetection A geometry indicating a face detected in an image. 
+ */ +const faceAlignment = (faceDetection: FaceDetection): FaceAlignment => + faceAlignmentUsingSimilarityTransform( + faceDetection, + normalizeLandmarks(idealMobileFaceNetLandmarks, mobileFaceNetFaceSize), + ); + +/** + * The ideal location of the landmarks (eye etc) that the MobileFaceNet + * embedding model expects. + */ +const idealMobileFaceNetLandmarks: [number, number][] = [ + [38.2946, 51.6963], + [73.5318, 51.5014], + [56.0252, 71.7366], + [41.5493, 92.3655], + [70.7299, 92.2041], +]; + +const normalizeLandmarks = ( + landmarks: [number, number][], + faceSize: number, +): [number, number][] => + landmarks.map(([x, y]) => [x / faceSize, y / faceSize]); + +const faceAlignmentUsingSimilarityTransform = ( + faceDetection: FaceDetection, + alignedLandmarks: [number, number][], +): FaceAlignment => { + const landmarksMat = new Matrix( + faceDetection.landmarks + .map((p) => [p.x, p.y]) + .slice(0, alignedLandmarks.length), + ).transpose(); + const alignedLandmarksMat = new Matrix(alignedLandmarks).transpose(); + + const simTransform = getSimilarityTransformation( + landmarksMat, + alignedLandmarksMat, + ); + + const RS = Matrix.mul(simTransform.rotation, simTransform.scale); + const TR = simTransform.translation; + + const affineMatrix = [ + [RS.get(0, 0), RS.get(0, 1), TR.get(0, 0)], + [RS.get(1, 0), RS.get(1, 1), TR.get(1, 0)], + [0, 0, 1], + ]; + + const size = 1 / simTransform.scale; + const meanTranslation = simTransform.toMean.sub(0.5).mul(size); + const centerMat = simTransform.fromMean.sub(meanTranslation); + const center = { x: centerMat.get(0, 0), y: centerMat.get(1, 0) }; + + const boundingBox = { + x: center.x - size / 2, + y: center.y - size / 2, + width: size, + height: size, + }; + + return { affineMatrix, boundingBox }; +}; + +const convertToMobileFaceNetInput = ( + imageBitmap: ImageBitmap, + faceAlignments: FaceAlignment[], +): Float32Array => { + const faceSize = mobileFaceNetFaceSize; + const faceData = new Float32Array( + 
faceAlignments.length * faceSize * faceSize * 3, + ); + for (let i = 0; i < faceAlignments.length; i++) { + const { affineMatrix } = faceAlignments[i]; + const faceDataOffset = i * faceSize * faceSize * 3; + warpAffineFloat32List( + imageBitmap, + affineMatrix, + faceSize, + faceData, + faceDataOffset, + ); + } + return faceData; +}; + +/** + * Laplacian blur detection. + * + * Return an array of detected blur values, one for each face in {@link faces}. + * The face data is taken from the slice of {@link alignedFacesData} + * corresponding to each face of {@link faces}. + */ +const detectBlur = (alignedFacesData: Float32Array, faces: Face[]): number[] => + faces.map((face, i) => { + const faceImage = grayscaleIntMatrixFromNormalized2List( + alignedFacesData, + i, + mobileFaceNetFaceSize, + mobileFaceNetFaceSize, + ); + return matrixVariance(applyLaplacian(faceImage, faceDirection(face))); + }); + +type FaceDirection = "left" | "right" | "straight"; + +const faceDirection = (face: Face): FaceDirection => { + const landmarks = face.detection.landmarks; + const leftEye = landmarks[0]; + const rightEye = landmarks[1]; + const nose = landmarks[2]; + const leftMouth = landmarks[3]; + const rightMouth = landmarks[4]; + + const eyeDistanceX = Math.abs(rightEye.x - leftEye.x); + const eyeDistanceY = Math.abs(rightEye.y - leftEye.y); + const mouthDistanceY = Math.abs(rightMouth.y - leftMouth.y); + + const faceIsUpright = + Math.max(leftEye.y, rightEye.y) + 0.5 * eyeDistanceY < nose.y && + nose.y + 0.5 * mouthDistanceY < Math.min(leftMouth.y, rightMouth.y); + + const noseStickingOutLeft = + nose.x < Math.min(leftEye.x, rightEye.x) && + nose.x < Math.min(leftMouth.x, rightMouth.x); + + const noseStickingOutRight = + nose.x > Math.max(leftEye.x, rightEye.x) && + nose.x > Math.max(leftMouth.x, rightMouth.x); + + const noseCloseToLeftEye = + Math.abs(nose.x - leftEye.x) < 0.2 * eyeDistanceX; + const noseCloseToRightEye = + Math.abs(nose.x - rightEye.x) < 0.2 * eyeDistanceX; + + 
if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) { + return "left"; + } else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) { + return "right"; + } + + return "straight"; +}; + +/** + * Return a new image by applying a Laplacian blur kernel to each pixel. + */ +const applyLaplacian = ( + image: number[][], + direction: FaceDirection, +): number[][] => { + const paddedImage = padImage(image, direction); + const numRows = paddedImage.length - 2; + const numCols = paddedImage[0].length - 2; + + // Create an output image initialized to 0. + const outputImage: number[][] = Array.from({ length: numRows }, () => + new Array(numCols).fill(0), + ); + + // Define the Laplacian kernel. + const kernel = [ + [0, 1, 0], + [1, -4, 1], + [0, 1, 0], + ]; + + // Apply the kernel to each pixel + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < numCols; j++) { + let sum = 0; + for (let ki = 0; ki < 3; ki++) { + for (let kj = 0; kj < 3; kj++) { + sum += paddedImage[i + ki][j + kj] * kernel[ki][kj]; + } + } + // Adjust the output value if necessary (e.g., clipping). + outputImage[i][j] = sum; + } + } + + return outputImage; +}; + +const padImage = (image: number[][], direction: FaceDirection): number[][] => { + const removeSideColumns = 56; /* must be even */ + + const numRows = image.length; + const numCols = image[0].length; + const paddedNumCols = numCols + 2 - removeSideColumns; + const paddedNumRows = numRows + 2; + + // Create a new matrix with extra padding. + const paddedImage: number[][] = Array.from({ length: paddedNumRows }, () => + new Array(paddedNumCols).fill(0), + ); + + if (direction === "straight") { + // Copy original image into the center of the padded image. 
+ for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = + image[i][j + Math.round(removeSideColumns / 2)]; + } + } + } else if (direction === "left") { + // If the face is facing left, we only take the right side of the face + // image. + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = image[i][j + removeSideColumns]; + } + } + } else if (direction === "right") { + // If the face is facing right, we only take the left side of the face + // image. + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = image[i][j]; + } + } + } + + // Reflect padding + // - Top and bottom rows + for (let j = 1; j <= paddedNumCols - 2; j++) { + // Top row + paddedImage[0][j] = paddedImage[2][j]; + // Bottom row + paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; + } + // - Left and right columns + for (let i = 0; i < numRows + 2; i++) { + // Left column + paddedImage[i][0] = paddedImage[i][2]; + // Right column + paddedImage[i][paddedNumCols - 1] = paddedImage[i][paddedNumCols - 3]; + } + + return paddedImage; +}; + +const matrixVariance = (matrix: number[][]): number => { + const numRows = matrix.length; + const numCols = matrix[0].length; + const totalElements = numRows * numCols; + + // Calculate the mean. + let mean: number = 0; + matrix.forEach((row) => { + row.forEach((value) => { + mean += value; + }); + }); + mean /= totalElements; + + // Calculate the variance. + let variance: number = 0; + matrix.forEach((row) => { + row.forEach((value) => { + const diff: number = value - mean; + variance += diff * diff; + }); + }); + variance /= totalElements; + + return variance; +}; + +const mobileFaceNetFaceSize = 112; +const mobileFaceNetEmbeddingSize = 192; + +/** + * Compute embeddings for the given {@link faceData}. + * + * The model used is MobileFaceNet, running in an ONNX runtime. 
/**
 * Compute embeddings for the given {@link faceData}.
 *
 * The model used is MobileFaceNet, running in an ONNX runtime.
 */
const computeEmbeddings = async (
    faceData: Float32Array,
): Promise<Float32Array[]> => {
    const outputData = await workerBridge.computeFaceEmbeddings(faceData);

    const embeddingSize = mobileFaceNetEmbeddingSize;
    const embeddings = new Array<Float32Array>(
        outputData.length / embeddingSize,
    );
    for (let i = 0; i < embeddings.length; i++) {
        embeddings[i] = new Float32Array(
            outputData.slice(i * embeddingSize, (i + 1) * embeddingSize),
        );
    }
    return embeddings;
};

/**
 * Convert the coordinates to between 0-1, normalized by the image's dimensions.
 */
const relativeDetection = (
    faceDetection: FaceDetection,
    { width, height }: Dimensions,
): FaceDetection => {
    const oldBox: Box = faceDetection.box;
    const box = {
        x: oldBox.x / width,
        y: oldBox.y / height,
        width: oldBox.width / width,
        height: oldBox.height / height,
    };
    const landmarks = faceDetection.landmarks.map((l) => ({
        x: l.x / width,
        y: l.y / height,
    }));
    const probability = faceDetection.probability;
    return { box, landmarks, probability };
};
diff --git a/web/apps/photos/src/services/face/face.worker.ts b/web/apps/photos/src/services/face/face.worker.ts
new file mode 100644
index 0000000000..0ba2233e70
--- /dev/null
+++ b/web/apps/photos/src/services/face/face.worker.ts
@@ -0,0 +1,28 @@
import { APPS } from "@ente/shared/apps/constants";
import { expose } from "comlink";
import downloadManager from "services/download";
import mlService from "services/machineLearning/machineLearningService";
import { EnteFile } from "types/file";

export class DedicatedMLWorker {
    public async closeLocalSyncContext() {
        return mlService.closeLocalSyncContext();
    }

    public async syncLocalFile(
        token: string,
        userID: number,
        userAgent: string,
        enteFile: EnteFile,
        localFile: globalThis.File,
    ) {
        mlService.syncLocalFile(token, userID, userAgent, enteFile, localFile);
    }

    public async sync(token: string, userID: number, userAgent: string) {
        await downloadManager.init(APPS.PHOTOS, { token });
        return mlService.sync(token, userID, userAgent);
    }
}

expose(DedicatedMLWorker, self);
diff --git a/web/apps/photos/src/services/face/file.ts b/web/apps/photos/src/services/face/file.ts
new file mode 100644
index 0000000000..b482af3fb5
--- /dev/null
+++ b/web/apps/photos/src/services/face/file.ts
@@ -0,0 +1,37 @@
import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import DownloadManager from "services/download";
import { getLocalFiles } from "services/fileService";
import { EnteFile } from "types/file";
import { getRenderableImage } from "utils/file";

export async function getLocalFile(fileId: number) {
    const localFiles = await getLocalFiles();
    return localFiles.find((f) => f.id === fileId);
}

export const fetchImageBitmap = async (file: EnteFile) =>
    fetchRenderableBlob(file).then(createImageBitmap);

async function fetchRenderableBlob(file: EnteFile) {
    const fileStream = await DownloadManager.getFile(file);
    const fileBlob = await new Response(fileStream).blob();
    if (file.metadata.fileType === FILE_TYPE.IMAGE) {
        return await getRenderableImage(file.metadata.title, fileBlob);
    } else {
        const { imageFileName, imageData } = await decodeLivePhoto(
            file.metadata.title,
            fileBlob,
        );
        return await getRenderableImage(imageFileName, new Blob([imageData]));
    }
}

export async function getLocalFileImageBitmap(
    enteFile: EnteFile,
    localFile: globalThis.File,
) {
    let fileBlob = localFile as Blob;
    fileBlob = await getRenderableImage(enteFile.metadata.title, fileBlob);
    return createImageBitmap(fileBlob);
}
diff --git a/web/apps/photos/src/services/face/image.ts b/web/apps/photos/src/services/face/image.ts
new file mode 100644
index 0000000000..12f49db541
--- /dev/null
+++ b/web/apps/photos/src/services/face/image.ts
@@ -0,0 +1,295 @@
import { Matrix, inverse } from "ml-matrix";
{@link min} and {@link max}, inclusive. + */ +export const clamp = (value: number, min: number, max: number) => + Math.min(max, Math.max(min, value)); + +/** + * Returns the pixel value (RGB) at the given coordinates ({@link fx}, + * {@link fy}) using bilinear interpolation. + */ +export function pixelRGBBilinear( + fx: number, + fy: number, + imageData: Uint8ClampedArray, + imageWidth: number, + imageHeight: number, +) { + // Clamp to image boundaries. + fx = clamp(fx, 0, imageWidth - 1); + fy = clamp(fy, 0, imageHeight - 1); + + // Get the surrounding coordinates and their weights. + const x0 = Math.floor(fx); + const x1 = Math.ceil(fx); + const y0 = Math.floor(fy); + const y1 = Math.ceil(fy); + const dx = fx - x0; + const dy = fy - y0; + const dx1 = 1.0 - dx; + const dy1 = 1.0 - dy; + + // Get the original pixels. + const pixel1 = pixelRGBA(imageData, imageWidth, imageHeight, x0, y0); + const pixel2 = pixelRGBA(imageData, imageWidth, imageHeight, x1, y0); + const pixel3 = pixelRGBA(imageData, imageWidth, imageHeight, x0, y1); + const pixel4 = pixelRGBA(imageData, imageWidth, imageHeight, x1, y1); + + const bilinear = (val1: number, val2: number, val3: number, val4: number) => + Math.round( + val1 * dx1 * dy1 + + val2 * dx * dy1 + + val3 * dx1 * dy + + val4 * dx * dy, + ); + + // Return interpolated pixel colors. 
+ return { + r: bilinear(pixel1.r, pixel2.r, pixel3.r, pixel4.r), + g: bilinear(pixel1.g, pixel2.g, pixel3.g, pixel4.g), + b: bilinear(pixel1.b, pixel2.b, pixel3.b, pixel4.b), + }; +} + +const pixelRGBA = ( + imageData: Uint8ClampedArray, + width: number, + height: number, + x: number, + y: number, +) => { + if (x < 0 || x >= width || y < 0 || y >= height) { + return { r: 0, g: 0, b: 0, a: 0 }; + } + const index = (y * width + x) * 4; + return { + r: imageData[index], + g: imageData[index + 1], + b: imageData[index + 2], + a: imageData[index + 3], + }; +}; + +/** + * Returns the pixel value (RGB) at the given coordinates ({@link fx}, + * {@link fy}) using bicubic interpolation. + */ +const pixelRGBBicubic = ( + fx: number, + fy: number, + imageData: Uint8ClampedArray, + imageWidth: number, + imageHeight: number, +) => { + // Clamp to image boundaries. + fx = clamp(fx, 0, imageWidth - 1); + fy = clamp(fy, 0, imageHeight - 1); + + const x = Math.trunc(fx) - (fx >= 0.0 ? 0 : 1); + const px = x - 1; + const nx = x + 1; + const ax = x + 2; + const y = Math.trunc(fy) - (fy >= 0.0 ? 0 : 1); + const py = y - 1; + const ny = y + 1; + const ay = y + 2; + const dx = fx - x; + const dy = fy - y; + + const cubic = ( + dx: number, + ipp: number, + icp: number, + inp: number, + iap: number, + ) => + icp + + 0.5 * + (dx * (-ipp + inp) + + dx * dx * (2 * ipp - 5 * icp + 4 * inp - iap) + + dx * dx * dx * (-ipp + 3 * icp - 3 * inp + iap)); + + const icc = pixelRGBA(imageData, imageWidth, imageHeight, x, y); + + const ipp = + px < 0 || py < 0 + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, px, py); + const icp = + px < 0 ? icc : pixelRGBA(imageData, imageWidth, imageHeight, x, py); + const inp = + py < 0 || nx >= imageWidth + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, nx, py); + const iap = + ax >= imageWidth || py < 0 + ? 
icc + : pixelRGBA(imageData, imageWidth, imageHeight, ax, py); + + const ip0 = cubic(dx, ipp.r, icp.r, inp.r, iap.r); + const ip1 = cubic(dx, ipp.g, icp.g, inp.g, iap.g); + const ip2 = cubic(dx, ipp.b, icp.b, inp.b, iap.b); + // const ip3 = cubic(dx, ipp.a, icp.a, inp.a, iap.a); + + const ipc = + px < 0 ? icc : pixelRGBA(imageData, imageWidth, imageHeight, px, y); + const inc = + nx >= imageWidth + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, nx, y); + const iac = + ax >= imageWidth + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, ax, y); + + const ic0 = cubic(dx, ipc.r, icc.r, inc.r, iac.r); + const ic1 = cubic(dx, ipc.g, icc.g, inc.g, iac.g); + const ic2 = cubic(dx, ipc.b, icc.b, inc.b, iac.b); + // const ic3 = cubic(dx, ipc.a, icc.a, inc.a, iac.a); + + const ipn = + px < 0 || ny >= imageHeight + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, px, ny); + const icn = + ny >= imageHeight + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, x, ny); + const inn = + nx >= imageWidth || ny >= imageHeight + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, nx, ny); + const ian = + ax >= imageWidth || ny >= imageHeight + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, ax, ny); + + const in0 = cubic(dx, ipn.r, icn.r, inn.r, ian.r); + const in1 = cubic(dx, ipn.g, icn.g, inn.g, ian.g); + const in2 = cubic(dx, ipn.b, icn.b, inn.b, ian.b); + // const in3 = cubic(dx, ipn.a, icn.a, inn.a, ian.a); + + const ipa = + px < 0 || ay >= imageHeight + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, px, ay); + const ica = + ay >= imageHeight + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, x, ay); + const ina = + nx >= imageWidth || ay >= imageHeight + ? icc + : pixelRGBA(imageData, imageWidth, imageHeight, nx, ay); + const iaa = + ax >= imageWidth || ay >= imageHeight + ? 
icc + : pixelRGBA(imageData, imageWidth, imageHeight, ax, ay); + + const ia0 = cubic(dx, ipa.r, ica.r, ina.r, iaa.r); + const ia1 = cubic(dx, ipa.g, ica.g, ina.g, iaa.g); + const ia2 = cubic(dx, ipa.b, ica.b, ina.b, iaa.b); + // const ia3 = cubic(dx, ipa.a, ica.a, ina.a, iaa.a); + + const c0 = Math.trunc(clamp(cubic(dy, ip0, ic0, in0, ia0), 0, 255)); + const c1 = Math.trunc(clamp(cubic(dy, ip1, ic1, in1, ia1), 0, 255)); + const c2 = Math.trunc(clamp(cubic(dy, ip2, ic2, in2, ia2), 0, 255)); + // const c3 = cubic(dy, ip3, ic3, in3, ia3); + + return { r: c0, g: c1, b: c2 }; +}; + +/** + * Transform {@link inputData} starting at {@link inputStartIndex}. + */ +export const warpAffineFloat32List = ( + imageBitmap: ImageBitmap, + faceAlignmentAffineMatrix: number[][], + faceSize: number, + inputData: Float32Array, + inputStartIndex: number, +): void => { + const { width, height } = imageBitmap; + + // Get the pixel data. + const offscreenCanvas = new OffscreenCanvas(width, height); + const ctx = offscreenCanvas.getContext("2d"); + ctx.drawImage(imageBitmap, 0, 0, width, height); + const imageData = ctx.getImageData(0, 0, width, height); + const pixelData = imageData.data; + + const transformationMatrix = faceAlignmentAffineMatrix.map((row) => + row.map((val) => (val != 1.0 ? val * faceSize : 1.0)), + ); // 3x3 + + const A: Matrix = new Matrix([ + [transformationMatrix[0][0], transformationMatrix[0][1]], + [transformationMatrix[1][0], transformationMatrix[1][1]], + ]); + const Ainverse = inverse(A); + + const b00 = transformationMatrix[0][2]; + const b10 = transformationMatrix[1][2]; + const a00Prime = Ainverse.get(0, 0); + const a01Prime = Ainverse.get(0, 1); + const a10Prime = Ainverse.get(1, 0); + const a11Prime = Ainverse.get(1, 1); + + for (let yTrans = 0; yTrans < faceSize; ++yTrans) { + for (let xTrans = 0; xTrans < faceSize; ++xTrans) { + // Perform inverse affine transformation. 
+ const xOrigin = + a00Prime * (xTrans - b00) + a01Prime * (yTrans - b10); + const yOrigin = + a10Prime * (xTrans - b00) + a11Prime * (yTrans - b10); + + // Get the pixel RGB using bicubic interpolation. + const { r, g, b } = pixelRGBBicubic( + xOrigin, + yOrigin, + pixelData, + width, + height, + ); + + // Set the pixel in the input data. + const index = (yTrans * faceSize + xTrans) * 3; + inputData[inputStartIndex + index] = rgbToBipolarFloat(r); + inputData[inputStartIndex + index + 1] = rgbToBipolarFloat(g); + inputData[inputStartIndex + index + 2] = rgbToBipolarFloat(b); + } + } +}; + +/** Convert a RGB component 0-255 to a floating point value between -1 and 1. */ +const rgbToBipolarFloat = (pixelValue: number) => pixelValue / 127.5 - 1.0; + +/** Convert a floating point value between -1 and 1 to a RGB component 0-255. */ +const bipolarFloatToRGB = (pixelValue: number) => + clamp(Math.round((pixelValue + 1.0) * 127.5), 0, 255); + +export const grayscaleIntMatrixFromNormalized2List = ( + imageList: Float32Array, + faceNumber: number, + width: number, + height: number, +): number[][] => { + const startIndex = faceNumber * width * height * 3; + return Array.from({ length: height }, (_, y) => + Array.from({ length: width }, (_, x) => { + // 0.299 ∙ Red + 0.587 ∙ Green + 0.114 ∙ Blue + const pixelIndex = startIndex + 3 * (y * width + x); + return clamp( + Math.round( + 0.299 * bipolarFloatToRGB(imageList[pixelIndex]) + + 0.587 * bipolarFloatToRGB(imageList[pixelIndex + 1]) + + 0.114 * bipolarFloatToRGB(imageList[pixelIndex + 2]), + ), + 0, + 255, + ); + }), + ); +}; diff --git a/web/apps/photos/src/services/face/index.ts b/web/apps/photos/src/services/face/index.ts new file mode 100644 index 0000000000..86fa9ab20b --- /dev/null +++ b/web/apps/photos/src/services/face/index.ts @@ -0,0 +1,8 @@ +import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import type { DedicatedMLWorker } from "services/face/face.worker"; + +const createFaceWebWorker = () => + new 
Worker(new URL("face.worker.ts", import.meta.url)); + +export const createFaceComlinkWorker = (name: string) => + new ComlinkWorker(name, createFaceWebWorker()); diff --git a/web/apps/photos/src/services/face/people.ts b/web/apps/photos/src/services/face/people.ts new file mode 100644 index 0000000000..d118cb4f90 --- /dev/null +++ b/web/apps/photos/src/services/face/people.ts @@ -0,0 +1,130 @@ +export interface Person { + id: number; + name?: string; + files: Array; + displayFaceId?: string; +} + +// TODO-ML(MR): Forced disable clustering. It doesn't currently work, +// need to finalize it before we move out of beta. +// +// > Error: Failed to execute 'transferToImageBitmap' on +// > 'OffscreenCanvas': ImageBitmap construction failed + +/* +export const syncPeopleIndex = async () => { + + if ( + syncContext.outOfSyncFiles.length <= 0 || + (syncContext.nSyncedFiles === batchSize && Math.random() < 0) + ) { + await this.syncIndex(syncContext); + } + + public async syncIndex(syncContext: MLSyncContext) { + await this.getMLLibraryData(syncContext); + + await syncPeopleIndex(syncContext); + + await this.persistMLLibraryData(syncContext); + } + + const filesVersion = await mlIDbStorage.getIndexVersion("files"); + if (filesVersion <= (await mlIDbStorage.getIndexVersion("people"))) { + return; + } + + + // TODO: have faces addresable through fileId + faceId + // to avoid index based addressing, which is prone to wrong results + // one way could be to match nearest face within threshold in the file + + const allFacesMap = + syncContext.allSyncedFacesMap ?? 
+ (syncContext.allSyncedFacesMap = await mlIDbStorage.getAllFacesMap()); + + + // await this.init(); + + const allFacesMap = await mlIDbStorage.getAllFacesMap(); + const allFaces = [...allFacesMap.values()].flat(); + + if (!allFaces || allFaces.length < 50) { + log.info( + `Skipping clustering since number of faces (${allFaces.length}) is less than the clustering threshold (50)`, + ); + return; + } + + log.info("Running clustering allFaces: ", allFaces.length); + const faceClusteringResults = await clusterFaces( + allFaces.map((f) => Array.from(f.embedding)), + ); + log.info( + "[MLService] Got face clustering results: ", + JSON.stringify(faceClusteringResults), + ); + + const clusters = faceClusteringResults?.clusters; + if (!clusters || clusters.length < 1) { + return; + } + + for (const face of allFaces) { + face.personId = undefined; + } + await mlIDbStorage.clearAllPeople(); + for (const [index, cluster] of clusters.entries()) { + const faces = cluster.map((f) => allFaces[f]).filter((f) => f); + + // TODO: take default display face from last leaves of hdbscan clusters + const personFace = faces.reduce((best, face) => + face.detection.probability > best.detection.probability + ? 
face + : best, + ); + + + if (personFace && !personFace.crop?.cacheKey) { + const file = await getLocalFile(personFace.fileId); + const imageBitmap = await fetchImageBitmap(file); + await saveFaceCrop(imageBitmap, personFace); + } + + + const person: Person = { + id: index, + files: faces.map((f) => f.fileId), + displayFaceId: personFace?.id, + }; + + await mlIDbStorage.putPerson(person); + + faces.forEach((face) => { + face.personId = person.id; + }); + // log.info("Creating person: ", person, faces); + } + + await mlIDbStorage.updateFaces(allFacesMap); + + // await mlIDbStorage.setIndexVersion("people", filesVersion); +}; + + public async regenerateFaceCrop(token: string, faceID: string) { + await downloadManager.init(APPS.PHOTOS, { token }); + return mlService.regenerateFaceCrop(faceID); + } + +export const regenerateFaceCrop = async (faceID: string) => { + const fileID = Number(faceID.split("-")[0]); + const personFace = await mlIDbStorage.getFace(fileID, faceID); + if (!personFace) { + throw Error("Face not found"); + } + + const file = await getLocalFile(personFace.fileId); + const imageBitmap = await fetchImageBitmap(file); + return await saveFaceCrop(imageBitmap, personFace); +}; +*/ diff --git a/web/apps/photos/src/services/face/remote.ts b/web/apps/photos/src/services/face/remote.ts new file mode 100644 index 0000000000..3c64ca30cc --- /dev/null +++ b/web/apps/photos/src/services/face/remote.ts @@ -0,0 +1,148 @@ +import log from "@/next/log"; +import ComlinkCryptoWorker from "@ente/shared/crypto"; +import { putEmbedding } from "services/embeddingService"; +import type { EnteFile } from "types/file"; +import type { Face, FaceDetection, MlFileData, Point } from "./types"; + +export const putFaceEmbedding = async ( + enteFile: EnteFile, + mlFileData: MlFileData, + userAgent: string, +) => { + const serverMl = LocalFileMlDataToServerFileMl(mlFileData, userAgent); + log.debug(() => ({ t: "Local ML file data", mlFileData })); + log.debug(() => ({ + t: "Uploaded 
ML file data", + d: JSON.stringify(serverMl), + })); + + const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance(); + const { file: encryptedEmbeddingData } = + await comlinkCryptoWorker.encryptMetadata(serverMl, enteFile.key); + await putEmbedding({ + fileID: enteFile.id, + encryptedEmbedding: encryptedEmbeddingData.encryptedData, + decryptionHeader: encryptedEmbeddingData.decryptionHeader, + model: "file-ml-clip-face", + }); +}; + +export interface FileML extends ServerFileMl { + updatedAt: number; +} + +class ServerFileMl { + public fileID: number; + public height?: number; + public width?: number; + public faceEmbedding: ServerFaceEmbeddings; + + public constructor( + fileID: number, + faceEmbedding: ServerFaceEmbeddings, + height?: number, + width?: number, + ) { + this.fileID = fileID; + this.height = height; + this.width = width; + this.faceEmbedding = faceEmbedding; + } +} + +class ServerFaceEmbeddings { + public faces: ServerFace[]; + public version: number; + public client: string; + + public constructor(faces: ServerFace[], client: string, version: number) { + this.faces = faces; + this.client = client; + this.version = version; + } +} + +class ServerFace { + public faceID: string; + public embedding: number[]; + public detection: ServerDetection; + public score: number; + public blur: number; + + public constructor( + faceID: string, + embedding: number[], + detection: ServerDetection, + score: number, + blur: number, + ) { + this.faceID = faceID; + this.embedding = embedding; + this.detection = detection; + this.score = score; + this.blur = blur; + } +} + +class ServerDetection { + public box: ServerFaceBox; + public landmarks: Point[]; + + public constructor(box: ServerFaceBox, landmarks: Point[]) { + this.box = box; + this.landmarks = landmarks; + } +} + +class ServerFaceBox { + public x: number; + public y: number; + public width: number; + public height: number; + + public constructor(x: number, y: number, width: number, height: number) { 
+ this.x = x; + this.y = y; + this.width = width; + this.height = height; + } +} + +function LocalFileMlDataToServerFileMl( + localFileMlData: MlFileData, + userAgent: string, +): ServerFileMl { + if (localFileMlData.errorCount > 0) { + return null; + } + const imageDimensions = localFileMlData.imageDimensions; + + const faces: ServerFace[] = []; + for (let i = 0; i < localFileMlData.faces.length; i++) { + const face: Face = localFileMlData.faces[i]; + const faceID = face.id; + const embedding = face.embedding; + const score = face.detection.probability; + const blur = face.blurValue; + const detection: FaceDetection = face.detection; + const box = detection.box; + const landmarks = detection.landmarks; + const newBox = new ServerFaceBox(box.x, box.y, box.width, box.height); + + const newFaceObject = new ServerFace( + faceID, + Array.from(embedding), + new ServerDetection(newBox, landmarks), + score, + blur, + ); + faces.push(newFaceObject); + } + const faceEmbeddings = new ServerFaceEmbeddings(faces, userAgent, 1); + return new ServerFileMl( + localFileMlData.fileId, + faceEmbeddings, + imageDimensions.height, + imageDimensions.width, + ); +} diff --git a/web/apps/photos/src/services/face/types.ts b/web/apps/photos/src/services/face/types.ts new file mode 100644 index 0000000000..0b1b2f9757 --- /dev/null +++ b/web/apps/photos/src/services/face/types.ts @@ -0,0 +1,68 @@ +/** The x and y coordinates of a point. */ +export interface Point { + x: number; + y: number; +} + +/** The dimensions of something, say an image. */ +export interface Dimensions { + width: number; + height: number; +} + +/** A rectangle given by its top left coordinates and dimensions. */ +export interface Box { + /** The x coordinate of the top left (xMin). */ + x: number; + /** The y coordinate of the top left (yMin). */ + y: number; + /** The width of the box. */ + width: number; + /** The height of the box. 
*/ + height: number; +} + +export interface FaceDetection { + // box and landmarks are relative to the image dimensions stored at mlFileData + box: Box; + landmarks?: Point[]; + probability?: number; +} + +export interface FaceAlignment { + /** + * An affine transformation matrix (rotation, translation, scaling) to align + * the face extracted from the image. + */ + affineMatrix: number[][]; + /** + * The bounding box of the transformed box. + * + * The affine transformation shifts the original detection box to a new, + * transformed, box (possibly rotated). This property is the bounding box + * of that transformed box. It is in the coordinate system of the original, + * full, image on which the detection occurred. + */ + boundingBox: Box; +} + +export interface Face { + fileId: number; + detection: FaceDetection; + id: string; + + alignment?: FaceAlignment; + blurValue?: number; + + embedding?: Float32Array; + + personId?: number; +} + +export interface MlFileData { + fileId: number; + faces?: Face[]; + imageDimensions?: Dimensions; + mlVersion: number; + errorCount: number; +} diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts index 4dfdb3f641..85dd5db397 100644 --- a/web/apps/photos/src/services/ffmpeg.ts +++ b/web/apps/photos/src/services/ffmpeg.ts @@ -9,6 +9,11 @@ import { } from "constants/ffmpeg"; import { NULL_LOCATION } from "constants/upload"; import type { ParsedExtractedMetadata } from "types/metadata"; +import { + readConvertToMP4Done, + readConvertToMP4Stream, + writeConvertToMP4Stream, +} from "utils/native-stream"; import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; import { toDataOrPathOrZipEntry, @@ -31,7 +36,7 @@ import { */ export const generateVideoThumbnailWeb = async (blob: Blob) => _generateVideoThumbnail((seekTime: number) => - ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg", 0), + ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg"), ); const _generateVideoThumbnail 
= async ( @@ -70,7 +75,6 @@ export const generateVideoThumbnailNative = async ( makeGenThumbnailCommand(seekTime), toDataOrPathOrZipEntry(desktopUploadItem), "jpeg", - 0, ), ); @@ -98,8 +102,8 @@ const makeGenThumbnailCommand = (seekTime: number) => [ * of videos that the user is uploading. * * @param uploadItem A {@link File}, or the absolute path to a file on the - * user's local filesytem. A path can only be provided when we're running in the - * context of our desktop app. + * user's local file system. A path can only be provided when we're running in + * the context of our desktop app. */ export const extractVideoMetadata = async ( uploadItem: UploadItem, @@ -107,12 +111,11 @@ const command = extractVideoMetadataCommand; const outputData = uploadItem instanceof File - ? await ffmpegExecWeb(command, uploadItem, "txt", 0) + ? await ffmpegExecWeb(command, uploadItem, "txt") : await electron.ffmpegExec( command, toDataOrPathOrZipEntry(uploadItem), "txt", - 0, ); return parseFFmpegExtractedMetadata(outputData); @@ -219,10 +222,9 @@ const ffmpegExecWeb = async ( command: string[], blob: Blob, outputFileExtension: string, - timeoutMs: number, ) => { const worker = await workerFactory.lazy(); - return await worker.exec(command, blob, outputFileExtension, timeoutMs); + return await worker.exec(command, blob, outputFileExtension); }; /** @@ -234,61 +236,46 @@ const ffmpegExecWeb = async ( * * @param blob The video blob. * - * @returns The mp4 video data. + * @returns The mp4 video blob. 
*/ -export const convertToMP4 = async (blob: Blob) => - ffmpegExecNativeOrWeb( - [ +export const convertToMP4 = async (blob: Blob): Promise => { + const electron = globalThis.electron; + if (electron) { + return convertToMP4Native(electron, blob); + } else { + const command = [ ffmpegPathPlaceholder, "-i", inputPathPlaceholder, "-preset", "ultrafast", outputPathPlaceholder, - ], - blob, - "mp4", - 30 * 1000, - ); + ]; + return ffmpegExecWeb(command, blob, "mp4"); + } +}; -/** - * Run the given FFmpeg command using a native FFmpeg binary when we're running - * in the context of our desktop app, otherwise using the browser based wasm - * FFmpeg implemenation. - * - * See also: {@link ffmpegExecWeb}. - */ -const ffmpegExecNativeOrWeb = async ( - command: string[], - blob: Blob, - outputFileExtension: string, - timeoutMs: number, -) => { - const electron = globalThis.electron; - if (electron) - return electron.ffmpegExec( - command, - new Uint8Array(await blob.arrayBuffer()), - outputFileExtension, - timeoutMs, - ); - else return ffmpegExecWeb(command, blob, outputFileExtension, timeoutMs); +const convertToMP4Native = async (electron: Electron, blob: Blob) => { + const token = await writeConvertToMP4Stream(electron, blob); + const mp4Blob = await readConvertToMP4Stream(electron, token); + readConvertToMP4Done(electron, token); + return mp4Blob; }; /** Lazily create a singleton instance of our worker */ class WorkerFactory { private instance: Promise>; + private createComlinkWorker = () => + new ComlinkWorker( + "ffmpeg-worker", + new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)), + ); + async lazy() { - if (!this.instance) this.instance = createComlinkWorker().remote; + if (!this.instance) this.instance = this.createComlinkWorker().remote; return this.instance; } } const workerFactory = new WorkerFactory(); - -const createComlinkWorker = () => - new ComlinkWorker( - "ffmpeg-worker", - new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)), - ); 
diff --git a/web/apps/photos/src/services/heic-convert.ts b/web/apps/photos/src/services/heic-convert.ts index c2ea198391..d2e05d9ec9 100644 --- a/web/apps/photos/src/services/heic-convert.ts +++ b/web/apps/photos/src/services/heic-convert.ts @@ -1,9 +1,10 @@ +import { createHEICConvertComlinkWorker } from "@/media/worker/heic-convert"; +import type { DedicatedHEICConvertWorker } from "@/media/worker/heic-convert.worker"; import log from "@/next/log"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { CustomError } from "@ente/shared/error"; import { retryAsyncFunction } from "@ente/shared/utils"; import QueueProcessor from "@ente/shared/utils/queueProcessor"; -import { type DedicatedHEICConvertWorker } from "worker/heic-convert.worker"; /** * Convert a HEIC image to a JPEG. @@ -29,7 +30,7 @@ class HEICConverter { if (this.workerPool.length > 0) return; this.workerPool = []; for (let i = 0; i < WORKER_POOL_SIZE; i++) - this.workerPool.push(createComlinkWorker()); + this.workerPool.push(createHEICConvertComlinkWorker()); } async convert(fileBlob: Blob): Promise { @@ -50,9 +51,7 @@ class HEICConverter { const startTime = Date.now(); const convertedHEIC = await worker.heicToJPEG(fileBlob); - const ms = Math.round( - Date.now() - startTime, - ); + const ms = Date.now() - startTime; log.debug(() => `heic => jpeg (${ms} ms)`); clearTimeout(timeout); resolve(convertedHEIC); @@ -79,7 +78,7 @@ class HEICConverter { } catch (e) { log.error("HEIC conversion failed", e); convertWorker.terminate(); - this.workerPool.push(createComlinkWorker()); + this.workerPool.push(createHEICConvertComlinkWorker()); throw e; } }, WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS), @@ -99,9 +98,3 @@ class HEICConverter { /** The singleton instance of {@link HEICConverter}. 
*/ const converter = new HEICConverter(); - -const createComlinkWorker = () => - new ComlinkWorker( - "heic-convert-worker", - new Worker(new URL("worker/heic-convert.worker.ts", import.meta.url)), - ); diff --git a/web/apps/photos/src/services/logout.ts b/web/apps/photos/src/services/logout.ts new file mode 100644 index 0000000000..a6b155c8c2 --- /dev/null +++ b/web/apps/photos/src/services/logout.ts @@ -0,0 +1,50 @@ +import log from "@/next/log"; +import { accountLogout } from "@ente/accounts/services/logout"; +import { clipService } from "services/clip-service"; +import DownloadManager from "./download"; +import exportService from "./export"; +import mlWorkManager from "./machineLearning/mlWorkManager"; + +/** + * Logout sequence for the photos app. + * + * This function is guaranteed not to throw any errors. + * + * See: [Note: Do not throw during logout]. + */ +export const photosLogout = async () => { + await accountLogout(); + + try { + await DownloadManager.logout(); + } catch (e) { + log.error("Ignoring error during logout (download)", e); + } + + try { + await clipService.logout(); + } catch (e) { + log.error("Ignoring error during logout (CLIP)", e); + } + + const electron = globalThis.electron; + if (electron) { + try { + await mlWorkManager.logout(); + } catch (e) { + log.error("Ignoring error during logout (ML)", e); + } + + try { + exportService.disableContinuousExport(); + } catch (e) { + log.error("Ignoring error during logout (export)", e); + } + + try { + await electron?.logout(); + } catch (e) { + log.error("Ignoring error during logout (electron)", e); + } + } +}; diff --git a/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts b/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts deleted file mode 100644 index 99063b3f25..0000000000 --- a/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { - FaceAlignment, - FaceAlignmentMethod, - 
FaceAlignmentService, - FaceDetection, - Versioned, -} from "types/machineLearning"; -import { getArcfaceAlignment } from "utils/machineLearning/faceAlign"; - -class ArcfaceAlignmentService implements FaceAlignmentService { - public method: Versioned; - - constructor() { - this.method = { - value: "ArcFace", - version: 1, - }; - } - - public getFaceAlignment(faceDetection: FaceDetection): FaceAlignment { - return getArcfaceAlignment(faceDetection); - } -} - -export default new ArcfaceAlignmentService(); diff --git a/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts b/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts deleted file mode 100644 index cb6ccd0298..0000000000 --- a/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { - FaceCrop, - FaceCropConfig, - FaceCropMethod, - FaceCropService, - FaceDetection, - Versioned, -} from "types/machineLearning"; -import { getArcfaceAlignment } from "utils/machineLearning/faceAlign"; -import { getFaceCrop } from "utils/machineLearning/faceCrop"; - -class ArcFaceCropService implements FaceCropService { - public method: Versioned; - - constructor() { - this.method = { - value: "ArcFace", - version: 1, - }; - } - - public async getFaceCrop( - imageBitmap: ImageBitmap, - faceDetection: FaceDetection, - config: FaceCropConfig, - ): Promise { - const alignedFace = getArcfaceAlignment(faceDetection); - const faceCrop = getFaceCrop(imageBitmap, alignedFace, config); - - return faceCrop; - } -} - -export default new ArcFaceCropService(); diff --git a/web/apps/photos/src/services/machineLearning/clusteringService.ts b/web/apps/photos/src/services/machineLearning/clusteringService.ts deleted file mode 100644 index 03931b63b8..0000000000 --- a/web/apps/photos/src/services/machineLearning/clusteringService.ts +++ /dev/null @@ -1,88 +0,0 @@ -import { DBSCAN, KMEANS, OPTICS } from "density-clustering"; -import { Hdbscan } from "hdbscan"; -import { 
HdbscanInput } from "hdbscan/dist/types"; -import { - ClusteringConfig, - ClusteringInput, - ClusteringMethod, - ClusteringResults, - HdbscanResults, - Versioned, -} from "types/machineLearning"; - -class ClusteringService { - private dbscan: DBSCAN; - private optics: OPTICS; - private kmeans: KMEANS; - - constructor() { - this.dbscan = new DBSCAN(); - this.optics = new OPTICS(); - this.kmeans = new KMEANS(); - } - - public clusterUsingDBSCAN( - dataset: Array>, - epsilon: number = 1.0, - minPts: number = 2, - ): ClusteringResults { - // log.info("distanceFunction", DBSCAN._); - const clusters = this.dbscan.run(dataset, epsilon, minPts); - const noise = this.dbscan.noise; - return { clusters, noise }; - } - - public clusterUsingOPTICS( - dataset: Array>, - epsilon: number = 1.0, - minPts: number = 2, - ) { - const clusters = this.optics.run(dataset, epsilon, minPts); - return { clusters, noise: [] }; - } - - public clusterUsingKMEANS( - dataset: Array>, - numClusters: number = 5, - ) { - const clusters = this.kmeans.run(dataset, numClusters); - return { clusters, noise: [] }; - } - - public clusterUsingHdbscan(hdbscanInput: HdbscanInput): HdbscanResults { - if (hdbscanInput.input.length < 10) { - throw Error("too few samples to run Hdbscan"); - } - - const hdbscan = new Hdbscan(hdbscanInput); - const clusters = hdbscan.getClusters(); - const noise = hdbscan.getNoise(); - const debugInfo = hdbscan.getDebugInfo(); - - return { clusters, noise, debugInfo }; - } - - public cluster( - method: Versioned, - input: ClusteringInput, - config: ClusteringConfig, - ) { - if (method.value === "Hdbscan") { - return this.clusterUsingHdbscan({ - input, - minClusterSize: config.minClusterSize, - debug: config.generateDebugInfo, - }); - } else if (method.value === "Dbscan") { - return this.clusterUsingDBSCAN( - input, - config.maxDistanceInsideCluster, - config.minClusterSize, - ); - } else { - throw Error("Unknown clustering method: " + method.value); - } - } -} - -export default 
ClusteringService; diff --git a/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts b/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts deleted file mode 100644 index 33298eef3c..0000000000 --- a/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { DBSCAN } from "density-clustering"; -import { - ClusteringConfig, - ClusteringInput, - ClusteringMethod, - ClusteringService, - HdbscanResults, - Versioned, -} from "types/machineLearning"; - -class DbscanClusteringService implements ClusteringService { - public method: Versioned; - - constructor() { - this.method = { - value: "Dbscan", - version: 1, - }; - } - - public async cluster( - input: ClusteringInput, - config: ClusteringConfig, - ): Promise { - // log.info('Clustering input: ', input); - const dbscan = new DBSCAN(); - const clusters = dbscan.run( - input, - config.clusterSelectionEpsilon, - config.minClusterSize, - ); - const noise = dbscan.noise; - return { clusters, noise }; - } -} - -export default new DbscanClusteringService(); diff --git a/web/apps/photos/src/services/machineLearning/faceService.ts b/web/apps/photos/src/services/machineLearning/faceService.ts deleted file mode 100644 index 1dedadf151..0000000000 --- a/web/apps/photos/src/services/machineLearning/faceService.ts +++ /dev/null @@ -1,306 +0,0 @@ -import { openCache } from "@/next/blob-cache"; -import log from "@/next/log"; -import { - DetectedFace, - Face, - MLSyncContext, - MLSyncFileContext, -} from "types/machineLearning"; -import { imageBitmapToBlob } from "utils/image"; -import { - areFaceIdsSame, - extractFaceImagesToFloat32, - getFaceId, - getLocalFile, - getOriginalImageBitmap, - isDifferentOrOld, -} from "utils/machineLearning"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import ReaderService from "./readerService"; - -class FaceService { - async syncFileFaceDetections( - syncContext: MLSyncContext, - fileContext: 
MLSyncFileContext, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - !isDifferentOrOld( - oldMlFile?.faceDetectionMethod, - syncContext.faceDetectionService.method, - ) && - oldMlFile?.imageSource === syncContext.config.imageSource - ) { - newMlFile.faces = oldMlFile?.faces?.map((existingFace) => ({ - id: existingFace.id, - fileId: existingFace.fileId, - detection: existingFace.detection, - })); - - newMlFile.imageSource = oldMlFile.imageSource; - newMlFile.imageDimensions = oldMlFile.imageDimensions; - newMlFile.faceDetectionMethod = oldMlFile.faceDetectionMethod; - return; - } - - newMlFile.faceDetectionMethod = syncContext.faceDetectionService.method; - fileContext.newDetection = true; - const imageBitmap = await ReaderService.getImageBitmap( - syncContext, - fileContext, - ); - const timerId = `faceDetection-${fileContext.enteFile.id}`; - console.time(timerId); - const faceDetections = - await syncContext.faceDetectionService.detectFaces(imageBitmap); - console.timeEnd(timerId); - console.log("faceDetections: ", faceDetections?.length); - - // TODO: reenable faces filtering based on width - const detectedFaces = faceDetections?.map((detection) => { - return { - fileId: fileContext.enteFile.id, - detection, - } as DetectedFace; - }); - newMlFile.faces = detectedFaces?.map((detectedFace) => ({ - ...detectedFace, - id: getFaceId(detectedFace, newMlFile.imageDimensions), - })); - // ?.filter((f) => - // f.box.width > syncContext.config.faceDetection.minFaceSize - // ); - log.info("[MLService] Detected Faces: ", newMlFile.faces?.length); - } - - async syncFileFaceCrops( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - // !syncContext.config.faceCrop.enabled || - !fileContext.newDetection && - !isDifferentOrOld( - oldMlFile?.faceCropMethod, - syncContext.faceCropService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - for (const [index, face] of 
newMlFile.faces.entries()) { - face.crop = oldMlFile.faces[index].crop; - } - newMlFile.faceCropMethod = oldMlFile.faceCropMethod; - return; - } - - const imageBitmap = await ReaderService.getImageBitmap( - syncContext, - fileContext, - ); - newMlFile.faceCropMethod = syncContext.faceCropService.method; - - for (const face of newMlFile.faces) { - await this.saveFaceCrop(imageBitmap, face, syncContext); - } - } - - async syncFileFaceAlignments( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ): Promise { - const { oldMlFile, newMlFile } = fileContext; - if ( - !fileContext.newDetection && - !isDifferentOrOld( - oldMlFile?.faceAlignmentMethod, - syncContext.faceAlignmentService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - for (const [index, face] of newMlFile.faces.entries()) { - face.alignment = oldMlFile.faces[index].alignment; - } - newMlFile.faceAlignmentMethod = oldMlFile.faceAlignmentMethod; - return; - } - - newMlFile.faceAlignmentMethod = syncContext.faceAlignmentService.method; - fileContext.newAlignment = true; - const imageBitmap = - fileContext.imageBitmap || - (await ReaderService.getImageBitmap(syncContext, fileContext)); - - // Execute the face alignment calculations - for (const face of newMlFile.faces) { - face.alignment = syncContext.faceAlignmentService.getFaceAlignment( - face.detection, - ); - } - // Extract face images and convert to Float32Array - const faceAlignments = newMlFile.faces.map((f) => f.alignment); - const faceImages = await extractFaceImagesToFloat32( - faceAlignments, - syncContext.faceEmbeddingService.faceSize, - imageBitmap, - ); - const blurValues = syncContext.blurDetectionService.detectBlur( - faceImages, - newMlFile.faces, - ); - newMlFile.faces.forEach((f, i) => (f.blurValue = blurValues[i])); - - imageBitmap.close(); - log.info("[MLService] alignedFaces: ", newMlFile.faces?.length); - - return faceImages; - } - - async syncFileFaceEmbeddings( - syncContext: MLSyncContext, - 
fileContext: MLSyncFileContext, - alignedFacesInput: Float32Array, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - !fileContext.newAlignment && - !isDifferentOrOld( - oldMlFile?.faceEmbeddingMethod, - syncContext.faceEmbeddingService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - for (const [index, face] of newMlFile.faces.entries()) { - face.embedding = oldMlFile.faces[index].embedding; - } - newMlFile.faceEmbeddingMethod = oldMlFile.faceEmbeddingMethod; - return; - } - - newMlFile.faceEmbeddingMethod = syncContext.faceEmbeddingService.method; - // TODO: when not storing face crops, image will be needed to extract faces - // fileContext.imageBitmap || - // (await this.getImageBitmap(syncContext, fileContext)); - - const embeddings = - await syncContext.faceEmbeddingService.getFaceEmbeddings( - alignedFacesInput, - ); - newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i])); - - log.info("[MLService] facesWithEmbeddings: ", newMlFile.faces.length); - } - - async syncFileFaceMakeRelativeDetections( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - const { oldMlFile, newMlFile } = fileContext; - if ( - !fileContext.newAlignment && - !isDifferentOrOld( - oldMlFile?.faceEmbeddingMethod, - syncContext.faceEmbeddingService.method, - ) && - areFaceIdsSame(newMlFile.faces, oldMlFile?.faces) - ) { - return; - } - for (let i = 0; i < newMlFile.faces.length; i++) { - const face = newMlFile.faces[i]; - if (face.detection.box.x + face.detection.box.width < 2) continue; // Skip if somehow already relative - face.detection = - syncContext.faceDetectionService.getRelativeDetection( - face.detection, - newMlFile.imageDimensions, - ); - } - } - - async saveFaceCrop( - imageBitmap: ImageBitmap, - face: Face, - syncContext: MLSyncContext, - ) { - const faceCrop = await syncContext.faceCropService.getFaceCrop( - imageBitmap, - face.detection, - syncContext.config.faceCrop, - ); - - const blobOptions = 
syncContext.config.faceCrop.blobOptions; - const blob = await imageBitmapToBlob(faceCrop.image, blobOptions); - - const cache = await openCache("face-crops"); - await cache.put(face.id, blob); - - faceCrop.image.close(); - - return blob; - } - - async getAllSyncedFacesMap(syncContext: MLSyncContext) { - if (syncContext.allSyncedFacesMap) { - return syncContext.allSyncedFacesMap; - } - - syncContext.allSyncedFacesMap = await mlIDbStorage.getAllFacesMap(); - return syncContext.allSyncedFacesMap; - } - - public async runFaceClustering( - syncContext: MLSyncContext, - allFaces: Array, - ) { - // await this.init(); - - const clusteringConfig = syncContext.config.faceClustering; - - if (!allFaces || allFaces.length < clusteringConfig.minInputSize) { - log.info( - "[MLService] Too few faces to cluster, not running clustering: ", - allFaces.length, - ); - return; - } - - log.info("Running clustering allFaces: ", allFaces.length); - syncContext.mlLibraryData.faceClusteringResults = - await syncContext.faceClusteringService.cluster( - allFaces.map((f) => Array.from(f.embedding)), - syncContext.config.faceClustering, - ); - syncContext.mlLibraryData.faceClusteringMethod = - syncContext.faceClusteringService.method; - log.info( - "[MLService] Got face clustering results: ", - JSON.stringify(syncContext.mlLibraryData.faceClusteringResults), - ); - - // syncContext.faceClustersWithNoise = { - // clusters: syncContext.faceClusteringResults.clusters.map( - // (faces) => ({ - // faces, - // }) - // ), - // noise: syncContext.faceClusteringResults.noise, - // }; - } - - public async regenerateFaceCrop( - syncContext: MLSyncContext, - faceID: string, - ) { - const fileID = Number(faceID.split("-")[0]); - const personFace = await mlIDbStorage.getFace(fileID, faceID); - if (!personFace) { - throw Error("Face not found"); - } - - const file = await getLocalFile(personFace.fileId); - const imageBitmap = await getOriginalImageBitmap(file); - return await this.saveFaceCrop(imageBitmap, 
personFace, syncContext); - } -} - -export default new FaceService(); diff --git a/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts b/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts deleted file mode 100644 index 21e2118252..0000000000 --- a/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { Hdbscan } from "hdbscan"; -import { - ClusteringConfig, - ClusteringInput, - ClusteringMethod, - ClusteringService, - HdbscanResults, - Versioned, -} from "types/machineLearning"; - -class HdbscanClusteringService implements ClusteringService { - public method: Versioned; - - constructor() { - this.method = { - value: "Hdbscan", - version: 1, - }; - } - - public async cluster( - input: ClusteringInput, - config: ClusteringConfig, - ): Promise { - // log.info('Clustering input: ', input); - const hdbscan = new Hdbscan({ - input, - - minClusterSize: config.minClusterSize, - minSamples: config.minSamples, - clusterSelectionEpsilon: config.clusterSelectionEpsilon, - clusterSelectionMethod: config.clusterSelectionMethod, - debug: config.generateDebugInfo, - }); - - return { - clusters: hdbscan.getClusters(), - noise: hdbscan.getNoise(), - debugInfo: hdbscan.getDebugInfo(), - }; - } -} - -export default new HdbscanClusteringService(); diff --git a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts b/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts deleted file mode 100644 index 3357e21ccd..0000000000 --- a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { - BlurDetectionMethod, - BlurDetectionService, - Face, - Versioned, -} from "types/machineLearning"; -import { createGrayscaleIntMatrixFromNormalized2List } from "utils/image"; -import { mobileFaceNetFaceSize } from "./mobileFaceNetEmbeddingService"; - -class LaplacianBlurDetectionService implements 
BlurDetectionService { - public method: Versioned; - - public constructor() { - this.method = { - value: "Laplacian", - version: 1, - }; - } - - public detectBlur(alignedFaces: Float32Array, faces: Face[]): number[] { - const numFaces = Math.round( - alignedFaces.length / - (mobileFaceNetFaceSize * mobileFaceNetFaceSize * 3), - ); - const blurValues: number[] = []; - for (let i = 0; i < numFaces; i++) { - const face = faces[i]; - const direction = getFaceDirection(face); - const faceImage = createGrayscaleIntMatrixFromNormalized2List( - alignedFaces, - i, - ); - const laplacian = this.applyLaplacian(faceImage, direction); - const variance = this.calculateVariance(laplacian); - blurValues.push(variance); - } - return blurValues; - } - - private calculateVariance(matrix: number[][]): number { - const numRows = matrix.length; - const numCols = matrix[0].length; - const totalElements = numRows * numCols; - - // Calculate the mean - let mean: number = 0; - matrix.forEach((row) => { - row.forEach((value) => { - mean += value; - }); - }); - mean /= totalElements; - - // Calculate the variance - let variance: number = 0; - matrix.forEach((row) => { - row.forEach((value) => { - const diff: number = value - mean; - variance += diff * diff; - }); - }); - variance /= totalElements; - - return variance; - } - - private padImage( - image: number[][], - removeSideColumns: number = 56, - direction: FaceDirection = "straight", - ): number[][] { - // Exception is removeSideColumns is not even - if (removeSideColumns % 2 != 0) { - throw new Error("removeSideColumns must be even"); - } - const numRows = image.length; - const numCols = image[0].length; - const paddedNumCols = numCols + 2 - removeSideColumns; - const paddedNumRows = numRows + 2; - - // Create a new matrix with extra padding - const paddedImage: number[][] = Array.from( - { length: paddedNumRows }, - () => new Array(paddedNumCols).fill(0), - ); - - // Copy original image into the center of the padded image - if 
(direction === "straight") { - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < paddedNumCols - 2; j++) { - paddedImage[i + 1][j + 1] = - image[i][j + Math.round(removeSideColumns / 2)]; - } - } - } // If the face is facing left, we only take the right side of the face image - else if (direction === "left") { - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < paddedNumCols - 2; j++) { - paddedImage[i + 1][j + 1] = image[i][j + removeSideColumns]; - } - } - } // If the face is facing right, we only take the left side of the face image - else if (direction === "right") { - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < paddedNumCols - 2; j++) { - paddedImage[i + 1][j + 1] = image[i][j]; - } - } - } - - // Reflect padding - // Top and bottom rows - for (let j = 1; j <= paddedNumCols - 2; j++) { - paddedImage[0][j] = paddedImage[2][j]; // Top row - paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; // Bottom row - } - // Left and right columns - for (let i = 0; i < numRows + 2; i++) { - paddedImage[i][0] = paddedImage[i][2]; // Left column - paddedImage[i][paddedNumCols - 1] = - paddedImage[i][paddedNumCols - 3]; // Right column - } - - return paddedImage; - } - - private applyLaplacian( - image: number[][], - direction: FaceDirection = "straight", - ): number[][] { - const paddedImage: number[][] = this.padImage( - image, - undefined, - direction, - ); - const numRows = paddedImage.length - 2; - const numCols = paddedImage[0].length - 2; - - // Create an output image initialized to 0 - const outputImage: number[][] = Array.from({ length: numRows }, () => - new Array(numCols).fill(0), - ); - - // Define the Laplacian kernel - const kernel: number[][] = [ - [0, 1, 0], - [1, -4, 1], - [0, 1, 0], - ]; - - // Apply the kernel to each pixel - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < numCols; j++) { - let sum = 0; - for (let ki = 0; ki < 3; ki++) { - for (let kj = 0; kj < 3; kj++) { - sum += paddedImage[i + ki][j + kj] * 
kernel[ki][kj]; - } - } - // Adjust the output value if necessary (e.g., clipping) - outputImage[i][j] = sum; - } - } - - return outputImage; - } -} - -export default new LaplacianBlurDetectionService(); - -type FaceDirection = "left" | "right" | "straight"; - -const getFaceDirection = (face: Face): FaceDirection => { - const landmarks = face.detection.landmarks; - const leftEye = landmarks[0]; - const rightEye = landmarks[1]; - const nose = landmarks[2]; - const leftMouth = landmarks[3]; - const rightMouth = landmarks[4]; - - const eyeDistanceX = Math.abs(rightEye.x - leftEye.x); - const eyeDistanceY = Math.abs(rightEye.y - leftEye.y); - const mouthDistanceY = Math.abs(rightMouth.y - leftMouth.y); - - const faceIsUpright = - Math.max(leftEye.y, rightEye.y) + 0.5 * eyeDistanceY < nose.y && - nose.y + 0.5 * mouthDistanceY < Math.min(leftMouth.y, rightMouth.y); - - const noseStickingOutLeft = - nose.x < Math.min(leftEye.x, rightEye.x) && - nose.x < Math.min(leftMouth.x, rightMouth.x); - - const noseStickingOutRight = - nose.x > Math.max(leftEye.x, rightEye.x) && - nose.x > Math.max(leftMouth.x, rightMouth.x); - - const noseCloseToLeftEye = - Math.abs(nose.x - leftEye.x) < 0.2 * eyeDistanceX; - const noseCloseToRightEye = - Math.abs(nose.x - rightEye.x) < 0.2 * eyeDistanceX; - - // if (faceIsUpright && (noseStickingOutLeft || noseCloseToLeftEye)) { - if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) { - return "left"; - // } else if (faceIsUpright && (noseStickingOutRight || noseCloseToRightEye)) { - } else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) { - return "right"; - } - - return "straight"; -}; diff --git a/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts b/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts deleted file mode 100644 index 991ae68087..0000000000 --- a/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { 
haveWindow } from "@/next/env"; -import log from "@/next/log"; -import { ComlinkWorker } from "@/next/worker/comlink-worker"; -import { getDedicatedCryptoWorker } from "@ente/shared/crypto"; -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import PQueue from "p-queue"; -import { EnteFile } from "types/file"; -import { - BlurDetectionMethod, - BlurDetectionService, - ClusteringMethod, - ClusteringService, - Face, - FaceAlignmentMethod, - FaceAlignmentService, - FaceCropMethod, - FaceCropService, - FaceDetectionMethod, - FaceDetectionService, - FaceEmbeddingMethod, - FaceEmbeddingService, - MLLibraryData, - MLSyncConfig, - MLSyncContext, -} from "types/machineLearning"; -import { logQueueStats } from "utils/machineLearning"; -import arcfaceAlignmentService from "./arcfaceAlignmentService"; -import arcfaceCropService from "./arcfaceCropService"; -import dbscanClusteringService from "./dbscanClusteringService"; -import hdbscanClusteringService from "./hdbscanClusteringService"; -import laplacianBlurDetectionService from "./laplacianBlurDetectionService"; -import mobileFaceNetEmbeddingService from "./mobileFaceNetEmbeddingService"; -import yoloFaceDetectionService from "./yoloFaceDetectionService"; - -export class MLFactory { - public static getFaceDetectionService( - method: FaceDetectionMethod, - ): FaceDetectionService { - if (method === "YoloFace") { - return yoloFaceDetectionService; - } - - throw Error("Unknon face detection method: " + method); - } - - public static getFaceCropService(method: FaceCropMethod) { - if (method === "ArcFace") { - return arcfaceCropService; - } - - throw Error("Unknon face crop method: " + method); - } - - public static getFaceAlignmentService( - method: FaceAlignmentMethod, - ): FaceAlignmentService { - if (method === "ArcFace") { - return arcfaceAlignmentService; - } - - throw Error("Unknon face alignment method: " + method); - } - - public static getBlurDetectionService( - method: 
BlurDetectionMethod, - ): BlurDetectionService { - if (method === "Laplacian") { - return laplacianBlurDetectionService; - } - - throw Error("Unknon blur detection method: " + method); - } - - public static getFaceEmbeddingService( - method: FaceEmbeddingMethod, - ): FaceEmbeddingService { - if (method === "MobileFaceNet") { - return mobileFaceNetEmbeddingService; - } - - throw Error("Unknon face embedding method: " + method); - } - - public static getClusteringService( - method: ClusteringMethod, - ): ClusteringService { - if (method === "Hdbscan") { - return hdbscanClusteringService; - } - if (method === "Dbscan") { - return dbscanClusteringService; - } - - throw Error("Unknon clustering method: " + method); - } - - public static getMLSyncContext( - token: string, - userID: number, - config: MLSyncConfig, - shouldUpdateMLVersion: boolean = true, - ) { - return new LocalMLSyncContext( - token, - userID, - config, - shouldUpdateMLVersion, - ); - } -} - -export class LocalMLSyncContext implements MLSyncContext { - public token: string; - public userID: number; - public config: MLSyncConfig; - public shouldUpdateMLVersion: boolean; - - public faceDetectionService: FaceDetectionService; - public faceCropService: FaceCropService; - public faceAlignmentService: FaceAlignmentService; - public blurDetectionService: BlurDetectionService; - public faceEmbeddingService: FaceEmbeddingService; - public faceClusteringService: ClusteringService; - - public localFilesMap: Map; - public outOfSyncFiles: EnteFile[]; - public nSyncedFiles: number; - public nSyncedFaces: number; - public allSyncedFacesMap?: Map>; - - public error?: Error; - - public mlLibraryData: MLLibraryData; - - public syncQueue: PQueue; - // TODO: wheather to limit concurrent downloads - // private downloadQueue: PQueue; - - private concurrency: number; - private comlinkCryptoWorker: Array< - ComlinkWorker - >; - private enteWorkers: Array; - - constructor( - token: string, - userID: number, - config: 
MLSyncConfig, - shouldUpdateMLVersion: boolean = true, - concurrency?: number, - ) { - this.token = token; - this.userID = userID; - this.config = config; - this.shouldUpdateMLVersion = shouldUpdateMLVersion; - - this.faceDetectionService = MLFactory.getFaceDetectionService( - this.config.faceDetection.method, - ); - this.faceCropService = MLFactory.getFaceCropService( - this.config.faceCrop.method, - ); - this.faceAlignmentService = MLFactory.getFaceAlignmentService( - this.config.faceAlignment.method, - ); - this.blurDetectionService = MLFactory.getBlurDetectionService( - this.config.blurDetection.method, - ); - this.faceEmbeddingService = MLFactory.getFaceEmbeddingService( - this.config.faceEmbedding.method, - ); - this.faceClusteringService = MLFactory.getClusteringService( - this.config.faceClustering.method, - ); - - this.outOfSyncFiles = []; - this.nSyncedFiles = 0; - this.nSyncedFaces = 0; - - this.concurrency = concurrency ?? getConcurrency(); - - log.info("Using concurrency: ", this.concurrency); - // timeout is added on downloads - // timeout on queue will keep the operation open till worker is terminated - this.syncQueue = new PQueue({ concurrency: this.concurrency }); - logQueueStats(this.syncQueue, "sync"); - // this.downloadQueue = new PQueue({ concurrency: 1 }); - // logQueueStats(this.downloadQueue, 'download'); - - this.comlinkCryptoWorker = new Array(this.concurrency); - this.enteWorkers = new Array(this.concurrency); - } - - public async getEnteWorker(id: number): Promise { - const wid = id % this.enteWorkers.length; - console.log("getEnteWorker: ", id, wid); - if (!this.enteWorkers[wid]) { - this.comlinkCryptoWorker[wid] = getDedicatedCryptoWorker(); - this.enteWorkers[wid] = await this.comlinkCryptoWorker[wid].remote; - } - - return this.enteWorkers[wid]; - } - - public async dispose() { - this.localFilesMap = undefined; - await this.syncQueue.onIdle(); - this.syncQueue.removeAllListeners(); - for (const enteComlinkWorker of 
this.comlinkCryptoWorker) { - enteComlinkWorker?.terminate(); - } - } -} - -export const getConcurrency = () => - haveWindow() && Math.max(2, Math.ceil(navigator.hardwareConcurrency / 2)); diff --git a/web/apps/photos/src/services/machineLearning/machineLearningService.ts b/web/apps/photos/src/services/machineLearning/machineLearningService.ts index 03a3b7e2c7..954a88c66d 100644 --- a/web/apps/photos/src/services/machineLearning/machineLearningService.ts +++ b/web/apps/photos/src/services/machineLearning/machineLearningService.ts @@ -1,38 +1,94 @@ import log from "@/next/log"; -import { APPS } from "@ente/shared/apps/constants"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; import { CustomError, parseUploadErrorCodes } from "@ente/shared/error"; -import { MAX_ML_SYNC_ERROR_COUNT } from "constants/mlConfig"; -import downloadManager from "services/download"; -import { putEmbedding } from "services/embeddingService"; +import PQueue from "p-queue"; +import mlIDbStorage, { + ML_SEARCH_CONFIG_NAME, + type MinimalPersistedFileData, +} from "services/face/db"; +import { putFaceEmbedding } from "services/face/remote"; import { getLocalFiles } from "services/fileService"; import { EnteFile } from "types/file"; -import { - MLSyncContext, - MLSyncFileContext, - MLSyncResult, - MlFileData, -} from "types/machineLearning"; -import { getMLSyncConfig } from "utils/machineLearning/config"; -import { LocalFileMlDataToServerFileMl } from "utils/machineLearning/mldataMappers"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import FaceService from "./faceService"; -import { MLFactory } from "./machineLearningFactory"; -import PeopleService from "./peopleService"; -import ReaderService from "./readerService"; +import { isInternalUserForML } from "utils/user"; +import { indexFaces } from "../face/f-index"; + +export const defaultMLVersion = 1; + +const batchSize = 200; + +export const MAX_ML_SYNC_ERROR_COUNT = 1; + +export interface MLSearchConfig { + enabled: 
boolean; +} + +export const DEFAULT_ML_SEARCH_CONFIG: MLSearchConfig = { + enabled: false, +}; + +export async function getMLSearchConfig() { + if (isInternalUserForML()) { + return mlIDbStorage.getConfig( + ML_SEARCH_CONFIG_NAME, + DEFAULT_ML_SEARCH_CONFIG, + ); + } + // Force disabled for everyone else while we finalize it to avoid redundant + // reindexing for users. + return DEFAULT_ML_SEARCH_CONFIG; +} + +export async function updateMLSearchConfig(newConfig: MLSearchConfig) { + return mlIDbStorage.putConfig(ML_SEARCH_CONFIG_NAME, newConfig); +} + +class MLSyncContext { + public token: string; + public userID: number; + public userAgent: string; + + public localFilesMap: Map; + public outOfSyncFiles: EnteFile[]; + public nSyncedFiles: number; + public error?: Error; + + public syncQueue: PQueue; + + constructor(token: string, userID: number, userAgent: string) { + this.token = token; + this.userID = userID; + this.userAgent = userAgent; + + this.outOfSyncFiles = []; + this.nSyncedFiles = 0; + + const concurrency = getConcurrency(); + this.syncQueue = new PQueue({ concurrency }); + } + + public async dispose() { + this.localFilesMap = undefined; + await this.syncQueue.onIdle(); + this.syncQueue.removeAllListeners(); + } +} + +const getConcurrency = () => + Math.max(2, Math.ceil(navigator.hardwareConcurrency / 2)); class MachineLearningService { private localSyncContext: Promise; private syncContext: Promise; - public async sync(token: string, userID: number): Promise { + public async sync( + token: string, + userID: number, + userAgent: string, + ): Promise { if (!token) { throw Error("Token needed by ml service to sync file"); } - await downloadManager.init(APPS.PHOTOS, { token }); - - const syncContext = await this.getSyncContext(token, userID); + const syncContext = await this.getSyncContext(token, userID, userAgent); await this.syncLocalFiles(syncContext); @@ -42,40 +98,9 @@ class MachineLearningService { await this.syncFiles(syncContext); } - // TODO: 
running index before all files are on latest ml version - // may be need to just take synced files on latest ml version for indexing - if ( - syncContext.outOfSyncFiles.length <= 0 || - (syncContext.nSyncedFiles === syncContext.config.batchSize && - Math.random() < 0.2) - ) { - await this.syncIndex(syncContext); - } - - const mlSyncResult: MLSyncResult = { - nOutOfSyncFiles: syncContext.outOfSyncFiles.length, - nSyncedFiles: syncContext.nSyncedFiles, - nSyncedFaces: syncContext.nSyncedFaces, - nFaceClusters: - syncContext.mlLibraryData?.faceClusteringResults?.clusters - .length, - nFaceNoise: - syncContext.mlLibraryData?.faceClusteringResults?.noise.length, - error: syncContext.error, - }; - // log.info('[MLService] sync results: ', mlSyncResult); - - return mlSyncResult; - } - - public async regenerateFaceCrop( - token: string, - userID: number, - faceID: string, - ) { - await downloadManager.init(APPS.PHOTOS, { token }); - const syncContext = await this.getSyncContext(token, userID); - return FaceService.regenerateFaceCrop(syncContext, faceID); + const error = syncContext.error; + const nOutOfSyncFiles = syncContext.outOfSyncFiles.length; + return !error && nOutOfSyncFiles > 0; } private newMlData(fileId: number) { @@ -83,7 +108,7 @@ class MachineLearningService { fileId, mlVersion: 0, errorCount: 0, - } as MlFileData; + } as MinimalPersistedFileData; } private async getLocalFilesMap(syncContext: MLSyncContext) { @@ -153,8 +178,8 @@ class MachineLearningService { private async getOutOfSyncFiles(syncContext: MLSyncContext) { const startTime = Date.now(); const fileIds = await mlIDbStorage.getFileIds( - syncContext.config.batchSize, - syncContext.config.mlVersion, + batchSize, + defaultMLVersion, MAX_ML_SYNC_ERROR_COUNT, ); @@ -187,7 +212,6 @@ class MachineLearningService { syncContext.error = error; } await syncContext.syncQueue.onIdle(); - log.info("allFaces: ", syncContext.nSyncedFaces); // TODO: In case syncJob has to use multiple ml workers // do in same 
transaction with each file update @@ -196,25 +220,36 @@ class MachineLearningService { // await this.disposeMLModels(); } - private async getSyncContext(token: string, userID: number) { + private async getSyncContext( + token: string, + userID: number, + userAgent: string, + ) { if (!this.syncContext) { log.info("Creating syncContext"); - this.syncContext = getMLSyncConfig().then((mlSyncConfig) => - MLFactory.getMLSyncContext(token, userID, mlSyncConfig, true), - ); + // TODO-ML(MR): Keep as promise for now. + this.syncContext = new Promise((resolve) => { + resolve(new MLSyncContext(token, userID, userAgent)); + }); } else { log.info("reusing existing syncContext"); } return this.syncContext; } - private async getLocalSyncContext(token: string, userID: number) { + private async getLocalSyncContext( + token: string, + userID: number, + userAgent: string, + ) { + // TODO-ML(MR): This is updating the file ML version. verify. if (!this.localSyncContext) { log.info("Creating localSyncContext"); - this.localSyncContext = getMLSyncConfig().then((mlSyncConfig) => - MLFactory.getMLSyncContext(token, userID, mlSyncConfig, false), - ); + // TODO-ML(MR): + this.localSyncContext = new Promise((resolve) => { + resolve(new MLSyncContext(token, userID, userAgent)); + }); } else { log.info("reusing existing localSyncContext"); } @@ -233,26 +268,29 @@ class MachineLearningService { public async syncLocalFile( token: string, userID: number, + userAgent: string, enteFile: EnteFile, localFile?: globalThis.File, - ): Promise { - const syncContext = await this.getLocalSyncContext(token, userID); + ) { + const syncContext = await this.getLocalSyncContext( + token, + userID, + userAgent, + ); try { - const mlFileData = await this.syncFileWithErrorHandler( + await this.syncFileWithErrorHandler( syncContext, enteFile, localFile, ); - if (syncContext.nSyncedFiles >= syncContext.config.batchSize) { + if (syncContext.nSyncedFiles >= batchSize) { await this.closeLocalSyncContext(); } // await 
syncContext.dispose(); - return mlFileData; } catch (e) { console.error("Error while syncing local file: ", enteFile.id, e); - return e; } } @@ -260,17 +298,13 @@ class MachineLearningService { syncContext: MLSyncContext, enteFile: EnteFile, localFile?: globalThis.File, - ): Promise { + ) { try { - console.log( - `Indexing ${enteFile.title ?? ""} ${enteFile.id}`, - ); const mlFileData = await this.syncFile( - syncContext, enteFile, localFile, + syncContext.userAgent, ); - syncContext.nSyncedFaces += mlFileData.faces?.length || 0; syncContext.nSyncedFiles += 1; return mlFileData; } catch (e) { @@ -303,72 +337,21 @@ class MachineLearningService { } private async syncFile( - syncContext: MLSyncContext, enteFile: EnteFile, - localFile?: globalThis.File, + localFile: globalThis.File | undefined, + userAgent: string, ) { - console.log("Syncing for file" + enteFile.title); - const fileContext: MLSyncFileContext = { enteFile, localFile }; - const oldMlFile = - (fileContext.oldMlFile = await this.getMLFileData(enteFile.id)) ?? 
- this.newMlData(enteFile.id); - if ( - fileContext.oldMlFile?.mlVersion === syncContext.config.mlVersion - // TODO: reset mlversion of all files when user changes image source - ) { - return fileContext.oldMlFile; - } - const newMlFile = (fileContext.newMlFile = this.newMlData(enteFile.id)); - - if (syncContext.shouldUpdateMLVersion) { - newMlFile.mlVersion = syncContext.config.mlVersion; - } else if (fileContext.oldMlFile?.mlVersion) { - newMlFile.mlVersion = fileContext.oldMlFile.mlVersion; - } - - try { - await ReaderService.getImageBitmap(syncContext, fileContext); - await Promise.all([ - this.syncFileAnalyzeFaces(syncContext, fileContext), - ]); - newMlFile.errorCount = 0; - newMlFile.lastErrorMessage = undefined; - await this.persistOnServer(newMlFile, enteFile); - await mlIDbStorage.putFile(newMlFile); - } catch (e) { - log.error("ml detection failed", e); - newMlFile.mlVersion = oldMlFile.mlVersion; - throw e; - } finally { - fileContext.imageBitmap && fileContext.imageBitmap.close(); + const oldMlFile = await mlIDbStorage.getFile(enteFile.id); + if (oldMlFile && oldMlFile.mlVersion) { + return oldMlFile; } + const newMlFile = await indexFaces(enteFile, localFile); + await putFaceEmbedding(enteFile, newMlFile, userAgent); + await mlIDbStorage.putFile(newMlFile); return newMlFile; } - private async persistOnServer(mlFileData: MlFileData, enteFile: EnteFile) { - const serverMl = LocalFileMlDataToServerFileMl(mlFileData); - log.info(mlFileData); - - const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance(); - const { file: encryptedEmbeddingData } = - await comlinkCryptoWorker.encryptMetadata(serverMl, enteFile.key); - log.info( - `putEmbedding embedding to server for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`, - ); - const res = await putEmbedding({ - fileID: enteFile.id, - encryptedEmbedding: encryptedEmbeddingData.encryptedData, - decryptionHeader: encryptedEmbeddingData.decryptionHeader, - model: "file-ml-clip-face", - }); - 
log.info("putEmbedding response: ", res); - } - - private async getMLFileData(fileId: number) { - return mlIDbStorage.getFile(fileId); - } - private async persistMLFileSyncError(enteFile: EnteFile, e: Error) { try { await mlIDbStorage.upsertFileInTx(enteFile.id, (mlFileData) => { @@ -376,7 +359,7 @@ class MachineLearningService { mlFileData = this.newMlData(enteFile.id); } mlFileData.errorCount = (mlFileData.errorCount || 0) + 1; - mlFileData.lastErrorMessage = e.message; + console.error(`lastError for ${enteFile.id}`, e); return mlFileData; }); @@ -385,59 +368,6 @@ class MachineLearningService { console.error("Error while storing ml sync error", e); } } - - private async getMLLibraryData(syncContext: MLSyncContext) { - syncContext.mlLibraryData = await mlIDbStorage.getLibraryData(); - if (!syncContext.mlLibraryData) { - syncContext.mlLibraryData = {}; - } - } - - private async persistMLLibraryData(syncContext: MLSyncContext) { - return mlIDbStorage.putLibraryData(syncContext.mlLibraryData); - } - - public async syncIndex(syncContext: MLSyncContext) { - await this.getMLLibraryData(syncContext); - - await PeopleService.syncPeopleIndex(syncContext); - - await this.persistMLLibraryData(syncContext); - } - - private async syncFileAnalyzeFaces( - syncContext: MLSyncContext, - fileContext: MLSyncFileContext, - ) { - const { newMlFile } = fileContext; - const startTime = Date.now(); - await FaceService.syncFileFaceDetections(syncContext, fileContext); - - if (newMlFile.faces && newMlFile.faces.length > 0) { - await FaceService.syncFileFaceCrops(syncContext, fileContext); - - const alignedFacesData = await FaceService.syncFileFaceAlignments( - syncContext, - fileContext, - ); - - await FaceService.syncFileFaceEmbeddings( - syncContext, - fileContext, - alignedFacesData, - ); - - await FaceService.syncFileFaceMakeRelativeDetections( - syncContext, - fileContext, - ); - } - log.info( - `face detection time taken ${fileContext.enteFile.id}`, - Date.now() - startTime, - "ms", 
- ); - } } export default new MachineLearningService(); diff --git a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts index d1c5e9db5e..c1b2ef6a70 100644 --- a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts +++ b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts @@ -1,30 +1,92 @@ import { FILE_TYPE } from "@/media/file-type"; +import { ensureElectron } from "@/next/electron"; import log from "@/next/log"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import { clientPackageNamePhotosDesktop } from "@ente/shared/apps/constants"; import { eventBus, Events } from "@ente/shared/events"; import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers"; import debounce from "debounce"; import PQueue from "p-queue"; -import { JobResult } from "types/common/job"; +import { createFaceComlinkWorker } from "services/face"; +import mlIDbStorage from "services/face/db"; +import type { DedicatedMLWorker } from "services/face/face.worker"; import { EnteFile } from "types/file"; -import { MLSyncResult } from "types/machineLearning"; -import { getDedicatedMLWorker } from "utils/comlink/ComlinkMLWorker"; -import { SimpleJob } from "utils/common/job"; -import { logQueueStats } from "utils/machineLearning"; -import { getMLSyncJobConfig } from "utils/machineLearning/config"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import { DedicatedMLWorker } from "worker/ml.worker"; -const LIVE_SYNC_IDLE_DEBOUNCE_SEC = 30; -const LIVE_SYNC_QUEUE_TIMEOUT_SEC = 300; -const LOCAL_FILES_UPDATED_DEBOUNCE_SEC = 30; +export type JobState = "Scheduled" | "Running" | "NotScheduled"; -export interface MLSyncJobResult extends JobResult { - mlSyncResult: MLSyncResult; +export class MLSyncJob { + private runCallback: () => Promise; + private state: JobState; + private stopped: boolean; + private intervalSec: number; + private nextTimeoutId: ReturnType; + + 
constructor(runCallback: () => Promise) { + this.runCallback = runCallback; + this.state = "NotScheduled"; + this.stopped = true; + this.resetInterval(); + } + + public resetInterval() { + this.intervalSec = 5; + } + + public start() { + this.stopped = false; + this.resetInterval(); + if (this.state !== "Running") { + this.scheduleNext(); + } else { + log.info("Job already running, not scheduling"); + } + } + + private scheduleNext() { + if (this.state === "Scheduled" || this.nextTimeoutId) { + this.clearScheduled(); + } + + this.nextTimeoutId = setTimeout( + () => this.run(), + this.intervalSec * 1000, + ); + this.state = "Scheduled"; + log.info("Scheduled next job after: ", this.intervalSec); + } + + async run() { + this.nextTimeoutId = undefined; + this.state = "Running"; + + try { + if (await this.runCallback()) { + this.resetInterval(); + } else { + this.intervalSec = Math.min(960, this.intervalSec * 2); + } + } catch (e) { + console.error("Error while running Job: ", e); + } finally { + this.state = "NotScheduled"; + !this.stopped && this.scheduleNext(); + } + } + + // currently client is responsible to terminate running job + public stop() { + this.stopped = true; + this.clearScheduled(); + } + + private clearScheduled() { + clearTimeout(this.nextTimeoutId); + this.nextTimeoutId = undefined; + this.state = "NotScheduled"; + log.info("Cleared next job"); + } } -export class MLSyncJob extends SimpleJob {} - class MLWorkManager { private mlSyncJob: MLSyncJob; private syncJobWorker: ComlinkWorker; @@ -40,19 +102,18 @@ class MLWorkManager { this.liveSyncQueue = new PQueue({ concurrency: 1, // TODO: temp, remove - timeout: LIVE_SYNC_QUEUE_TIMEOUT_SEC * 1000, + timeout: 300 * 1000, throwOnTimeout: true, }); this.mlSearchEnabled = false; - eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this); this.debouncedLiveSyncIdle = debounce( () => this.onLiveSyncIdle(), - LIVE_SYNC_IDLE_DEBOUNCE_SEC * 1000, + 30 * 1000, ); this.debouncedFilesUpdated = debounce( () 
=> this.mlSearchEnabled && this.localFilesUpdatedHandler(), - LOCAL_FILES_UPDATED_DEBOUNCE_SEC * 1000, + 30 * 1000, ); } @@ -97,26 +158,12 @@ class MLWorkManager { } } - // Handlers - private async appStartHandler() { - log.info("appStartHandler"); - try { - this.startSyncJob(); - } catch (e) { - log.error("Failed in ML appStart Handler", e); - } - } - - private async logoutHandler() { - log.info("logoutHandler"); - try { - this.stopSyncJob(); - this.mlSyncJob = undefined; - await this.terminateLiveSyncWorker(); - await mlIDbStorage.clearMLDB(); - } catch (e) { - log.error("Failed in ML logout Handler", e); - } + async logout() { + this.setMlSearchEnabled(false); + this.stopSyncJob(); + this.mlSyncJob = undefined; + await this.terminateLiveSyncWorker(); + await mlIDbStorage.clearMLDB(); } private async fileUploadedHandler(arg: { @@ -148,7 +195,7 @@ class MLWorkManager { // Live Sync private async getLiveSyncWorker() { if (!this.liveSyncWorker) { - this.liveSyncWorker = getDedicatedMLWorker("ml-sync-live"); + this.liveSyncWorker = createFaceComlinkWorker("ml-sync-live"); } return await this.liveSyncWorker.remote; @@ -178,25 +225,26 @@ class MLWorkManager { } public async syncLocalFile(enteFile: EnteFile, localFile: globalThis.File) { - const result = await this.liveSyncQueue.add(async () => { + await this.liveSyncQueue.add(async () => { this.stopSyncJob(); const token = getToken(); const userID = getUserID(); + const userAgent = await getUserAgent(); const mlWorker = await this.getLiveSyncWorker(); - return mlWorker.syncLocalFile(token, userID, enteFile, localFile); + return mlWorker.syncLocalFile( + token, + userID, + userAgent, + enteFile, + localFile, + ); }); - - if (result instanceof Error) { - // TODO: redirect/refresh to gallery in case of session_expired - // may not be required as uploader should anyways take care of this - console.error("Error while syncing local file: ", result); - } } // Sync Job private async getSyncJobWorker() { if 
(!this.syncJobWorker) { - this.syncJobWorker = getDedicatedMLWorker("ml-sync-job"); + this.syncJobWorker = createFaceComlinkWorker("ml-sync-job"); } return await this.syncJobWorker.remote; @@ -207,7 +255,14 @@ class MLWorkManager { this.syncJobWorker = undefined; } - private async runMLSyncJob(): Promise { + /** + * Returns `false` to indicate that either an error occurred, or there are + * not more files to process, or that we cannot currently process files. + * + * Which means that when it returns true, all is well and there are more + * things pending to process, so we should chug along at full speed. + */ + private async runMLSyncJob(): Promise { try { // TODO: skipping is not required if we are caching chunks through service worker // currently worker chunk itself is not loaded when network is not there @@ -215,29 +270,17 @@ class MLWorkManager { log.info( "Skipping ml-sync job run as not connected to internet.", ); - return { - shouldBackoff: true, - mlSyncResult: undefined, - }; + return false; } const token = getToken(); const userID = getUserID(); + const userAgent = await getUserAgent(); const jobWorkerProxy = await this.getSyncJobWorker(); - const mlSyncResult = await jobWorkerProxy.sync(token, userID); - + return await jobWorkerProxy.sync(token, userID, userAgent); // this.terminateSyncJobWorker(); - const jobResult: MLSyncJobResult = { - shouldBackoff: - !!mlSyncResult.error || mlSyncResult.nOutOfSyncFiles < 1, - mlSyncResult, - }; - log.info("ML Sync Job result: ", JSON.stringify(jobResult)); - // TODO: redirect/refresh to gallery in case of session_expired, stop ml sync job - - return jobResult; } catch (e) { log.error("Failed to run MLSync Job", e); } @@ -254,11 +297,8 @@ class MLWorkManager { log.info("User not logged in, not starting ml sync job"); return; } - const mlSyncJobConfig = await getMLSyncJobConfig(); if (!this.mlSyncJob) { - this.mlSyncJob = new MLSyncJob(mlSyncJobConfig, () => - this.runMLSyncJob(), - ); + this.mlSyncJob = new 
MLSyncJob(() => this.runMLSyncJob()); } this.mlSyncJob.start(); } catch (e) { @@ -266,11 +306,11 @@ class MLWorkManager { } } - public stopSyncJob(terminateWorker: boolean = true) { + public stopSyncJob() { try { log.info("MLWorkManager.stopSyncJob"); this.mlSyncJob?.stop(); - terminateWorker && this.terminateSyncJobWorker(); + this.terminateSyncJobWorker(); } catch (e) { log.error("Failed to stop MLSync Job", e); } @@ -278,3 +318,22 @@ class MLWorkManager { } export default new MLWorkManager(); + +export function logQueueStats(queue: PQueue, name: string) { + queue.on("active", () => + log.info( + `queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`, + ), + ); + queue.on("idle", () => log.info(`queuestats: ${name}: Idle`)); + queue.on("error", (error) => + console.error(`queuestats: ${name}: Error, `, error), + ); +} + +const getUserAgent = async () => { + const electron = ensureElectron(); + const name = clientPackageNamePhotosDesktop; + const version = await electron.appVersion(); + return `${name}/${version}`; +}; diff --git a/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts b/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts deleted file mode 100644 index 818b8a5d12..0000000000 --- a/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { workerBridge } from "@/next/worker/worker-bridge"; -import { - FaceEmbedding, - FaceEmbeddingMethod, - FaceEmbeddingService, - Versioned, -} from "types/machineLearning"; - -export const mobileFaceNetFaceSize = 112; - -class MobileFaceNetEmbeddingService implements FaceEmbeddingService { - public method: Versioned; - public faceSize: number; - - public constructor() { - this.method = { - value: "MobileFaceNet", - version: 2, - }; - this.faceSize = mobileFaceNetFaceSize; - } - - public async getFaceEmbeddings( - faceData: Float32Array, - ): Promise> { - const outputData = await 
workerBridge.faceEmbedding(faceData); - - const embeddingSize = 192; - const embeddings = new Array( - outputData.length / embeddingSize, - ); - for (let i = 0; i < embeddings.length; i++) { - embeddings[i] = new Float32Array( - outputData.slice(i * embeddingSize, (i + 1) * embeddingSize), - ); - } - return embeddings; - } -} - -export default new MobileFaceNetEmbeddingService(); diff --git a/web/apps/photos/src/services/machineLearning/peopleService.ts b/web/apps/photos/src/services/machineLearning/peopleService.ts deleted file mode 100644 index ad7d7bcec3..0000000000 --- a/web/apps/photos/src/services/machineLearning/peopleService.ts +++ /dev/null @@ -1,94 +0,0 @@ -import log from "@/next/log"; -import { Face, MLSyncContext, Person } from "types/machineLearning"; -import { - findFirstIfSorted, - getAllFacesFromMap, - getLocalFile, - getOriginalImageBitmap, - isDifferentOrOld, -} from "utils/machineLearning"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import FaceService from "./faceService"; - -class PeopleService { - async syncPeopleIndex(syncContext: MLSyncContext) { - const filesVersion = await mlIDbStorage.getIndexVersion("files"); - if ( - filesVersion <= (await mlIDbStorage.getIndexVersion("people")) && - !isDifferentOrOld( - syncContext.mlLibraryData?.faceClusteringMethod, - syncContext.faceClusteringService.method, - ) - ) { - log.info( - "[MLService] Skipping people index as already synced to latest version", - ); - return; - } - - // TODO: have faces addresable through fileId + faceId - // to avoid index based addressing, which is prone to wrong results - // one way could be to match nearest face within threshold in the file - const allFacesMap = await FaceService.getAllSyncedFacesMap(syncContext); - const allFaces = getAllFacesFromMap(allFacesMap); - - await FaceService.runFaceClustering(syncContext, allFaces); - await this.syncPeopleFromClusters(syncContext, allFacesMap, allFaces); - - await mlIDbStorage.setIndexVersion("people", 
filesVersion); - } - - private async syncPeopleFromClusters( - syncContext: MLSyncContext, - allFacesMap: Map>, - allFaces: Array, - ) { - const clusters = - syncContext.mlLibraryData.faceClusteringResults?.clusters; - if (!clusters || clusters.length < 1) { - return; - } - - for (const face of allFaces) { - face.personId = undefined; - } - await mlIDbStorage.clearAllPeople(); - for (const [index, cluster] of clusters.entries()) { - const faces = cluster.map((f) => allFaces[f]).filter((f) => f); - - // TODO: take default display face from last leaves of hdbscan clusters - const personFace = findFirstIfSorted( - faces, - (a, b) => b.detection.probability - a.detection.probability, - ); - - if (personFace && !personFace.crop?.cacheKey) { - const file = await getLocalFile(personFace.fileId); - const imageBitmap = await getOriginalImageBitmap(file); - await FaceService.saveFaceCrop( - imageBitmap, - personFace, - syncContext, - ); - } - - const person: Person = { - id: index, - files: faces.map((f) => f.fileId), - displayFaceId: personFace?.id, - faceCropCacheKey: personFace?.crop?.cacheKey, - }; - - await mlIDbStorage.putPerson(person); - - faces.forEach((face) => { - face.personId = person.id; - }); - // log.info("Creating person: ", person, faces); - } - - await mlIDbStorage.updateFaces(allFacesMap); - } -} - -export default new PeopleService(); diff --git a/web/apps/photos/src/services/machineLearning/readerService.ts b/web/apps/photos/src/services/machineLearning/readerService.ts deleted file mode 100644 index 62aebdbd1f..0000000000 --- a/web/apps/photos/src/services/machineLearning/readerService.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { FILE_TYPE } from "@/media/file-type"; -import log from "@/next/log"; -import { MLSyncContext, MLSyncFileContext } from "types/machineLearning"; -import { - getLocalFileImageBitmap, - getOriginalImageBitmap, - getThumbnailImageBitmap, -} from "utils/machineLearning"; - -class ReaderService { - async getImageBitmap( - syncContext: 
MLSyncContext, - fileContext: MLSyncFileContext, - ) { - try { - if (fileContext.imageBitmap) { - return fileContext.imageBitmap; - } - if (fileContext.localFile) { - if ( - fileContext.enteFile.metadata.fileType !== FILE_TYPE.IMAGE - ) { - throw new Error( - "Local file of only image type is supported", - ); - } - fileContext.imageBitmap = await getLocalFileImageBitmap( - fileContext.enteFile, - fileContext.localFile, - ); - } else if ( - syncContext.config.imageSource === "Original" && - [FILE_TYPE.IMAGE, FILE_TYPE.LIVE_PHOTO].includes( - fileContext.enteFile.metadata.fileType, - ) - ) { - fileContext.imageBitmap = await getOriginalImageBitmap( - fileContext.enteFile, - ); - } else { - fileContext.imageBitmap = await getThumbnailImageBitmap( - fileContext.enteFile, - ); - } - - fileContext.newMlFile.imageSource = syncContext.config.imageSource; - const { width, height } = fileContext.imageBitmap; - fileContext.newMlFile.imageDimensions = { width, height }; - - return fileContext.imageBitmap; - } catch (e) { - log.error("failed to create image bitmap", e); - throw e; - } - } -} -export default new ReaderService(); diff --git a/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts b/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts deleted file mode 100644 index 4fa840749d..0000000000 --- a/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts +++ /dev/null @@ -1,332 +0,0 @@ -import { workerBridge } from "@/next/worker/worker-bridge"; -import { euclidean } from "hdbscan"; -import { - Matrix, - applyToPoint, - compose, - scale, - translate, -} from "transformation-matrix"; -import { Dimensions } from "types/image"; -import { - FaceDetection, - FaceDetectionMethod, - FaceDetectionService, - Versioned, -} from "types/machineLearning"; -import { - clamp, - getPixelBilinear, - normalizePixelBetween0And1, -} from "utils/image"; -import { newBox } from "utils/machineLearning"; -import { Box, Point } from 
"../../../thirdparty/face-api/classes"; - -class YoloFaceDetectionService implements FaceDetectionService { - public method: Versioned; - - public constructor() { - this.method = { - value: "YoloFace", - version: 1, - }; - } - - public async detectFaces( - imageBitmap: ImageBitmap, - ): Promise> { - const maxFaceDistancePercent = Math.sqrt(2) / 100; - const maxFaceDistance = imageBitmap.width * maxFaceDistancePercent; - const preprocessResult = - this.preprocessImageBitmapToFloat32ChannelsFirst( - imageBitmap, - 640, - 640, - ); - const data = preprocessResult.data; - const resized = preprocessResult.newSize; - const outputData = await workerBridge.detectFaces(data); - const faces = this.getFacesFromYoloOutput( - outputData as Float32Array, - 0.7, - ); - const inBox = newBox(0, 0, resized.width, resized.height); - const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height); - const transform = computeTransformToBox(inBox, toBox); - const faceDetections: Array = faces?.map((f) => { - const box = transformBox(f.box, transform); - const normLandmarks = f.landmarks; - const landmarks = transformPoints(normLandmarks, transform); - return { - box, - landmarks, - probability: f.probability as number, - } as FaceDetection; - }); - return removeDuplicateDetections(faceDetections, maxFaceDistance); - } - - private preprocessImageBitmapToFloat32ChannelsFirst( - imageBitmap: ImageBitmap, - requiredWidth: number, - requiredHeight: number, - maintainAspectRatio: boolean = true, - normFunction: ( - pixelValue: number, - ) => number = normalizePixelBetween0And1, - ) { - // Create an OffscreenCanvas and set its size - const offscreenCanvas = new OffscreenCanvas( - imageBitmap.width, - imageBitmap.height, - ); - const ctx = offscreenCanvas.getContext("2d"); - ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height); - const imageData = ctx.getImageData( - 0, - 0, - imageBitmap.width, - imageBitmap.height, - ); - const pixelData = imageData.data; - - let scaleW = 
requiredWidth / imageBitmap.width; - let scaleH = requiredHeight / imageBitmap.height; - if (maintainAspectRatio) { - const scale = Math.min( - requiredWidth / imageBitmap.width, - requiredHeight / imageBitmap.height, - ); - scaleW = scale; - scaleH = scale; - } - const scaledWidth = clamp( - Math.round(imageBitmap.width * scaleW), - 0, - requiredWidth, - ); - const scaledHeight = clamp( - Math.round(imageBitmap.height * scaleH), - 0, - requiredHeight, - ); - - const processedImage = new Float32Array( - 1 * 3 * requiredWidth * requiredHeight, - ); - - // Populate the Float32Array with normalized pixel values - let pixelIndex = 0; - const channelOffsetGreen = requiredHeight * requiredWidth; - const channelOffsetBlue = 2 * requiredHeight * requiredWidth; - for (let h = 0; h < requiredHeight; h++) { - for (let w = 0; w < requiredWidth; w++) { - let pixel: { - r: number; - g: number; - b: number; - }; - if (w >= scaledWidth || h >= scaledHeight) { - pixel = { r: 114, g: 114, b: 114 }; - } else { - pixel = getPixelBilinear( - w / scaleW, - h / scaleH, - pixelData, - imageBitmap.width, - imageBitmap.height, - ); - } - processedImage[pixelIndex] = normFunction(pixel.r); - processedImage[pixelIndex + channelOffsetGreen] = normFunction( - pixel.g, - ); - processedImage[pixelIndex + channelOffsetBlue] = normFunction( - pixel.b, - ); - pixelIndex++; - } - } - - return { - data: processedImage, - originalSize: { - width: imageBitmap.width, - height: imageBitmap.height, - }, - newSize: { width: scaledWidth, height: scaledHeight }, - }; - } - - // The rowOutput is a Float32Array of shape [25200, 16], where each row represents a bounding box. 
- private getFacesFromYoloOutput( - rowOutput: Float32Array, - minScore: number, - ): Array { - const faces: Array = []; - // iterate over each row - for (let i = 0; i < rowOutput.length; i += 16) { - const score = rowOutput[i + 4]; - if (score < minScore) { - continue; - } - // The first 4 values represent the bounding box's coordinates (x1, y1, x2, y2) - const xCenter = rowOutput[i]; - const yCenter = rowOutput[i + 1]; - const width = rowOutput[i + 2]; - const height = rowOutput[i + 3]; - const xMin = xCenter - width / 2.0; // topLeft - const yMin = yCenter - height / 2.0; // topLeft - - const leftEyeX = rowOutput[i + 5]; - const leftEyeY = rowOutput[i + 6]; - const rightEyeX = rowOutput[i + 7]; - const rightEyeY = rowOutput[i + 8]; - const noseX = rowOutput[i + 9]; - const noseY = rowOutput[i + 10]; - const leftMouthX = rowOutput[i + 11]; - const leftMouthY = rowOutput[i + 12]; - const rightMouthX = rowOutput[i + 13]; - const rightMouthY = rowOutput[i + 14]; - - const box = new Box({ - x: xMin, - y: yMin, - width: width, - height: height, - }); - const probability = score as number; - const landmarks = [ - new Point(leftEyeX, leftEyeY), - new Point(rightEyeX, rightEyeY), - new Point(noseX, noseY), - new Point(leftMouthX, leftMouthY), - new Point(rightMouthX, rightMouthY), - ]; - const face: FaceDetection = { - box, - landmarks, - probability, - // detectionMethod: this.method, - }; - faces.push(face); - } - return faces; - } - - public getRelativeDetection( - faceDetection: FaceDetection, - dimensions: Dimensions, - ): FaceDetection { - const oldBox: Box = faceDetection.box; - const box = new Box({ - x: oldBox.x / dimensions.width, - y: oldBox.y / dimensions.height, - width: oldBox.width / dimensions.width, - height: oldBox.height / dimensions.height, - }); - const oldLandmarks: Point[] = faceDetection.landmarks; - const landmarks = oldLandmarks.map((l) => { - return new Point(l.x / dimensions.width, l.y / dimensions.height); - }); - return { - box, - landmarks, 
- probability: faceDetection.probability, - }; - } -} - -export default new YoloFaceDetectionService(); - -/** - * Removes duplicate face detections from an array of detections. - * - * This function sorts the detections by their probability in descending order, then iterates over them. - * For each detection, it calculates the Euclidean distance to all other detections. - * If the distance is less than or equal to the specified threshold (`withinDistance`), the other detection is considered a duplicate and is removed. - * - * @param detections - An array of face detections to remove duplicates from. - * @param withinDistance - The maximum Euclidean distance between two detections for them to be considered duplicates. - * - * @returns An array of face detections with duplicates removed. - */ -function removeDuplicateDetections( - detections: Array, - withinDistance: number, -) { - // console.time('removeDuplicates'); - detections.sort((a, b) => b.probability - a.probability); - const isSelected = new Map(); - for (let i = 0; i < detections.length; i++) { - if (isSelected.get(i) === false) { - continue; - } - isSelected.set(i, true); - for (let j = i + 1; j < detections.length; j++) { - if (isSelected.get(j) === false) { - continue; - } - const centeri = getDetectionCenter(detections[i]); - const centerj = getDetectionCenter(detections[j]); - const dist = euclidean( - [centeri.x, centeri.y], - [centerj.x, centerj.y], - ); - if (dist <= withinDistance) { - isSelected.set(j, false); - } - } - } - - const uniques: Array = []; - for (let i = 0; i < detections.length; i++) { - isSelected.get(i) && uniques.push(detections[i]); - } - // console.timeEnd('removeDuplicates'); - return uniques; -} - -function getDetectionCenter(detection: FaceDetection) { - const center = new Point(0, 0); - // TODO: first 4 landmarks is applicable to blazeface only - // this needs to consider eyes, nose and mouth landmarks to take center - detection.landmarks?.slice(0, 4).forEach((p) => { - 
center.x += p.x; - center.y += p.y; - }); - - return center.div({ x: 4, y: 4 }); -} - -function computeTransformToBox(inBox: Box, toBox: Box): Matrix { - return compose( - translate(toBox.x, toBox.y), - scale(toBox.width / inBox.width, toBox.height / inBox.height), - ); -} - -function transformPoint(point: Point, transform: Matrix) { - const txdPoint = applyToPoint(transform, point); - return new Point(txdPoint.x, txdPoint.y); -} - -function transformPoints(points: Point[], transform: Matrix) { - return points?.map((p) => transformPoint(p, transform)); -} - -function transformBox(box: Box, transform: Matrix) { - const topLeft = transformPoint(box.topLeft, transform); - const bottomRight = transformPoint(box.bottomRight, transform); - - return newBoxFromPoints(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y); -} - -function newBoxFromPoints( - left: number, - top: number, - right: number, - bottom: number, -) { - return new Box({ left, top, right, bottom }); -} diff --git a/web/apps/photos/src/services/searchService.ts b/web/apps/photos/src/services/searchService.ts index 96c574b9dd..4bbab115c3 100644 --- a/web/apps/photos/src/services/searchService.ts +++ b/web/apps/photos/src/services/searchService.ts @@ -2,10 +2,12 @@ import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; import * as chrono from "chrono-node"; import { t } from "i18next"; +import mlIDbStorage from "services/face/db"; +import type { Person } from "services/face/people"; +import { defaultMLVersion } from "services/machineLearning/machineLearningService"; import { Collection } from "types/collection"; import { EntityType, LocationTag, LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; -import { Person } from "types/machineLearning"; import { ClipSearchScores, DateValue, @@ -16,12 +18,9 @@ import { } from "types/search"; import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker"; import { getUniqueFiles } from "utils/file"; -import { 
getAllPeople } from "utils/machineLearning"; -import { getMLSyncConfig } from "utils/machineLearning/config"; import { getFormattedDate } from "utils/search"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; import { clipService, computeClipMatchScore } from "./clip-service"; -import { getLocalEmbeddings } from "./embeddingService"; +import { localCLIPEmbeddings } from "./embeddingService"; import { getLatestEntities } from "./entityService"; import locationSearchService, { City } from "./locationSearchService"; @@ -176,8 +175,7 @@ export async function getAllPeopleSuggestion(): Promise> { export async function getIndexStatusSuggestion(): Promise { try { - const config = await getMLSyncConfig(); - const indexStatus = await mlIDbStorage.getIndexStatus(config.mlVersion); + const indexStatus = await mlIDbStorage.getIndexStatus(defaultMLVersion); let label; if (!indexStatus.localFilesSynced) { @@ -376,7 +374,7 @@ const searchClip = async ( await clipService.getTextEmbeddingIfAvailable(searchPhrase); if (!textEmbedding) return undefined; - const imageEmbeddings = await getLocalEmbeddings(); + const imageEmbeddings = await localCLIPEmbeddings(); const clipSearchResult = new Map( ( await Promise.all( @@ -430,3 +428,14 @@ function convertSuggestionToSearchQuery(option: Suggestion): Search { return { clip: option.value as ClipSearchScores }; } } + +async function getAllPeople(limit: number = undefined) { + let people: Array = await mlIDbStorage.getAllPeople(); + // await mlPeopleStore.iterate((person) => { + // people.push(person); + // }); + people = people ?? 
[]; + return people + .sort((p1, p2) => p2.files.length - p1.files.length) + .slice(0, limit); +} diff --git a/web/apps/photos/src/services/upload/thumbnail.ts b/web/apps/photos/src/services/upload/thumbnail.ts index 1dd448376e..10da88a650 100644 --- a/web/apps/photos/src/services/upload/thumbnail.ts +++ b/web/apps/photos/src/services/upload/thumbnail.ts @@ -1,7 +1,9 @@ import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type"; +import { scaledImageDimensions } from "@/media/image"; import log from "@/next/log"; import { type Electron } from "@/next/types/ipc"; -import { withTimeout } from "@ente/shared/utils"; +import { ensure } from "@/utils/ensure"; +import { withTimeout } from "@/utils/promise"; import * as ffmpeg from "services/ffmpeg"; import { heicToJPEG } from "services/heic-convert"; import { toDataOrPathOrZipEntry, type DesktopUploadItem } from "./types"; @@ -30,10 +32,10 @@ export const generateThumbnailWeb = async ( fileTypeInfo: FileTypeInfo, ): Promise => fileTypeInfo.fileType === FILE_TYPE.IMAGE - ? await generateImageThumbnailUsingCanvas(blob, fileTypeInfo) + ? 
await generateImageThumbnailWeb(blob, fileTypeInfo) : await generateVideoThumbnailWeb(blob); -const generateImageThumbnailUsingCanvas = async ( +const generateImageThumbnailWeb = async ( blob: Blob, { extension }: FileTypeInfo, ) => { @@ -42,8 +44,12 @@ const generateImageThumbnailUsingCanvas = async ( blob = await heicToJPEG(blob); } + return generateImageThumbnailUsingCanvas(blob); +}; + +const generateImageThumbnailUsingCanvas = async (blob: Blob) => { const canvas = document.createElement("canvas"); - const canvasCtx = canvas.getContext("2d"); + const canvasCtx = ensure(canvas.getContext("2d")); const imageURL = URL.createObjectURL(blob); await withTimeout( @@ -53,7 +59,7 @@ const generateImageThumbnailUsingCanvas = async ( image.onload = () => { try { URL.revokeObjectURL(imageURL); - const { width, height } = scaledThumbnailDimensions( + const { width, height } = scaledImageDimensions( image.width, image.height, maxThumbnailDimension, @@ -62,7 +68,7 @@ const generateImageThumbnailUsingCanvas = async ( canvas.height = height; canvasCtx.drawImage(image, 0, 0, width, height); resolve(undefined); - } catch (e) { + } catch (e: unknown) { reject(e); } }; @@ -73,6 +79,32 @@ const generateImageThumbnailUsingCanvas = async ( return await compressedJPEGData(canvas); }; +const compressedJPEGData = async (canvas: HTMLCanvasElement) => { + let blob: Blob | undefined | null; + let prevSize = Number.MAX_SAFE_INTEGER; + let quality = 0.7; + + do { + if (blob) prevSize = blob.size; + blob = await new Promise((resolve) => { + canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality); + }); + quality -= 0.1; + } while ( + quality >= 0.5 && + blob && + blob.size > maxThumbnailSize && + percentageSizeDiff(blob.size, prevSize) >= 10 + ); + + return new Uint8Array(await ensure(blob).arrayBuffer()); +}; + +const percentageSizeDiff = ( + newThumbnailSize: number, + oldThumbnailSize: number, +) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize; + const 
generateVideoThumbnailWeb = async (blob: Blob) => { try { return await ffmpeg.generateVideoThumbnailWeb(blob); @@ -85,9 +117,9 @@ const generateVideoThumbnailWeb = async (blob: Blob) => { } }; -const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { +export const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { const canvas = document.createElement("canvas"); - const canvasCtx = canvas.getContext("2d"); + const canvasCtx = ensure(canvas.getContext("2d")); const videoURL = URL.createObjectURL(blob); await withTimeout( @@ -98,7 +130,7 @@ const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { video.addEventListener("loadeddata", () => { try { URL.revokeObjectURL(videoURL); - const { width, height } = scaledThumbnailDimensions( + const { width, height } = scaledImageDimensions( video.videoWidth, video.videoHeight, maxThumbnailDimension, @@ -118,59 +150,6 @@ const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { return await compressedJPEGData(canvas); }; -/** - * Compute the size of the thumbnail to create for an image with the given - * {@link width} and {@link height}. - * - * This function calculates a new size of an image for limiting it to maximum - * width and height (both specified by {@link maxDimension}), while maintaining - * aspect ratio. - * - * It returns `{0, 0}` for invalid inputs. 
- */ -const scaledThumbnailDimensions = ( - width: number, - height: number, - maxDimension: number, -): { width: number; height: number } => { - if (width === 0 || height === 0) return { width: 0, height: 0 }; - const widthScaleFactor = maxDimension / width; - const heightScaleFactor = maxDimension / height; - const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor); - const thumbnailDimensions = { - width: Math.round(width * scaleFactor), - height: Math.round(height * scaleFactor), - }; - if (thumbnailDimensions.width === 0 || thumbnailDimensions.height === 0) - return { width: 0, height: 0 }; - return thumbnailDimensions; -}; - -const compressedJPEGData = async (canvas: HTMLCanvasElement) => { - let blob: Blob; - let prevSize = Number.MAX_SAFE_INTEGER; - let quality = 0.7; - - do { - if (blob) prevSize = blob.size; - blob = await new Promise((resolve) => { - canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality); - }); - quality -= 0.1; - } while ( - quality >= 0.5 && - blob.size > maxThumbnailSize && - percentageSizeDiff(blob.size, prevSize) >= 10 - ); - - return new Uint8Array(await blob.arrayBuffer()); -}; - -const percentageSizeDiff = ( - newThumbnailSize: number, - oldThumbnailSize: number, -) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize; - /** * Generate a JPEG thumbnail for the given file or path using native tools. 
* diff --git a/web/apps/photos/src/services/upload/uploadHttpClient.ts b/web/apps/photos/src/services/upload/uploadHttpClient.ts index e8ae6de977..c23a58b520 100644 --- a/web/apps/photos/src/services/upload/uploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/uploadHttpClient.ts @@ -1,9 +1,9 @@ import log from "@/next/log"; +import { wait } from "@/utils/promise"; import { CustomError, handleUploadError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; -import { wait } from "@ente/shared/utils"; import { EnteFile } from "types/file"; import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts index 38fd7037be..0ab9ecff0f 100644 --- a/web/apps/photos/src/services/upload/uploadManager.ts +++ b/web/apps/photos/src/services/upload/uploadManager.ts @@ -6,11 +6,11 @@ import log from "@/next/log"; import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { ensure } from "@/utils/ensure"; +import { wait } from "@/utils/promise"; import { getDedicatedCryptoWorker } from "@ente/shared/crypto"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { CustomError } from "@ente/shared/error"; import { Events, eventBus } from "@ente/shared/events"; -import { wait } from "@ente/shared/utils"; import { Canceler } from "axios"; import { Remote } from "comlink"; import { diff --git a/web/apps/photos/src/services/userService.ts b/web/apps/photos/src/services/userService.ts index 95b1b95c92..47bda4f0a1 100644 --- a/web/apps/photos/src/services/userService.ts +++ b/web/apps/photos/src/services/userService.ts @@ -1,11 +1,8 @@ import log from "@/next/log"; import { 
putAttributes } from "@ente/accounts/api/user"; -import { logoutUser } from "@ente/accounts/services/user"; -import { getRecoveryKey } from "@ente/shared/crypto/helpers"; import { ApiError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint, getFamilyPortalURL } from "@ente/shared/network/api"; -import localForage from "@ente/shared/storage/localForage"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { getToken, @@ -104,10 +101,6 @@ export const getRoadmapRedirectURL = async () => { } }; -export const clearFiles = async () => { - await localForage.clear(); -}; - export const isTokenValid = async (token: string) => { try { const resp = await HTTPService.get( @@ -233,19 +226,6 @@ export const deleteAccount = async ( } }; -// Ensure that the keys in local storage are not malformed by verifying that the -// recoveryKey can be decrypted with the masterKey. -// Note: This is not bullet-proof. -export const validateKey = async () => { - try { - await getRecoveryKey(); - return true; - } catch (e) { - await logoutUser(); - return false; - } -}; - export const getFaceSearchEnabledStatus = async () => { try { const token = getToken(); diff --git a/web/apps/photos/src/types/billing/index.ts b/web/apps/photos/src/types/billing/index.ts index b2058948bc..ef203d49fe 100644 --- a/web/apps/photos/src/types/billing/index.ts +++ b/web/apps/photos/src/types/billing/index.ts @@ -14,6 +14,7 @@ export interface Subscription { price: string; period: PLAN_PERIOD; } + export interface Plan { id: string; androidID: string; diff --git a/web/apps/photos/src/types/common/job.ts b/web/apps/photos/src/types/common/job.ts deleted file mode 100644 index fe42e4aaf2..0000000000 --- a/web/apps/photos/src/types/common/job.ts +++ /dev/null @@ -1,11 +0,0 @@ -export type JobState = "Scheduled" | "Running" | "NotScheduled"; - -export interface JobConfig { - intervalSec: number; - maxItervalSec: number; - 
backoffMultiplier: number; -} - -export interface JobResult { - shouldBackoff: boolean; -} diff --git a/web/apps/photos/src/types/embedding.tsx b/web/apps/photos/src/types/embedding.tsx index d4719986bc..161244c159 100644 --- a/web/apps/photos/src/types/embedding.tsx +++ b/web/apps/photos/src/types/embedding.tsx @@ -1,9 +1,9 @@ /** - * The embeddings models that we support. + * The embeddings that we (the current client) knows how to handle. * * This is an exhaustive set of values we pass when PUT-ting encrypted * embeddings on the server. However, we should be prepared to receive an - * {@link EncryptedEmbedding} with a model value distinct from one of these. + * {@link EncryptedEmbedding} with a model value different from these. */ export type EmbeddingModel = "onnx-clip" | "file-ml-clip-face"; diff --git a/web/apps/photos/src/types/image/index.ts b/web/apps/photos/src/types/image/index.ts index 8c9619e2eb..e69de29bb2 100644 --- a/web/apps/photos/src/types/image/index.ts +++ b/web/apps/photos/src/types/image/index.ts @@ -1,9 +0,0 @@ -export interface Dimensions { - width: number; - height: number; -} - -export interface BlobOptions { - type?: string; - quality?: number; -} diff --git a/web/apps/photos/src/types/machineLearning/data/clip.ts b/web/apps/photos/src/types/machineLearning/data/clip.ts deleted file mode 100644 index 0181e89e57..0000000000 --- a/web/apps/photos/src/types/machineLearning/data/clip.ts +++ /dev/null @@ -1,4 +0,0 @@ -export interface ClipEmbedding { - embedding: Float32Array; - model: "ggml-clip" | "onnx-clip"; -} diff --git a/web/apps/photos/src/types/machineLearning/data/face.ts b/web/apps/photos/src/types/machineLearning/data/face.ts deleted file mode 100644 index cac391994f..0000000000 --- a/web/apps/photos/src/types/machineLearning/data/face.ts +++ /dev/null @@ -1,27 +0,0 @@ -/// [`x`] and [y] are the coordinates of the top left corner of the box, so the minimim values -/// [width] and [height] are the width and height of the box. 
-/// All values are in absolute pixels relative to the original image size. -export interface CenterBox { - x: number; - y: number; - height: number; - width: number; -} - -export interface Point { - x: number; - y: number; -} - -export interface Detection { - box: CenterBox; - landmarks: Point[]; -} - -export interface Face { - id: string; - confidence: number; - blur: number; - embedding: Float32Array; - detection: Detection; -} diff --git a/web/apps/photos/src/types/machineLearning/data/fileML.ts b/web/apps/photos/src/types/machineLearning/data/fileML.ts deleted file mode 100644 index 7835450e77..0000000000 --- a/web/apps/photos/src/types/machineLearning/data/fileML.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { ClipEmbedding } from "./clip"; -import { Face } from "./face"; - -export interface FileML { - fileID: number; - clip?: ClipEmbedding; - faces: Face[]; - height: number; - width: number; - version: number; - error?: string; -} diff --git a/web/apps/photos/src/types/machineLearning/index.ts b/web/apps/photos/src/types/machineLearning/index.ts deleted file mode 100644 index 2c3961cdf6..0000000000 --- a/web/apps/photos/src/types/machineLearning/index.ts +++ /dev/null @@ -1,331 +0,0 @@ -import { DebugInfo } from "hdbscan"; -import PQueue from "p-queue"; -import { EnteFile } from "types/file"; -import { Dimensions } from "types/image"; -import { Box, Point } from "../../../thirdparty/face-api/classes"; - -export interface MLSyncResult { - nOutOfSyncFiles: number; - nSyncedFiles: number; - nSyncedFaces: number; - nFaceClusters: number; - nFaceNoise: number; - error?: Error; -} - -export declare type FaceDescriptor = Float32Array; - -export declare type Cluster = Array; - -export interface ClusteringResults { - clusters: Array; - noise: Cluster; -} - -export interface HdbscanResults extends ClusteringResults { - debugInfo?: DebugInfo; -} - -export interface FacesCluster { - faces: Cluster; - summary?: FaceDescriptor; -} - -export interface FacesClustersWithNoise { 
- clusters: Array; - noise: Cluster; -} - -export interface NearestCluster { - cluster: FacesCluster; - distance: number; -} - -export declare type Landmark = Point; - -export declare type ImageType = "Original" | "Preview"; - -export declare type FaceDetectionMethod = "YoloFace"; - -export declare type FaceCropMethod = "ArcFace"; - -export declare type FaceAlignmentMethod = "ArcFace"; - -export declare type FaceEmbeddingMethod = "MobileFaceNet"; - -export declare type BlurDetectionMethod = "Laplacian"; - -export declare type ClusteringMethod = "Hdbscan" | "Dbscan"; - -export class AlignedBox { - box: Box; - rotation: number; -} - -export interface Versioned { - value: T; - version: number; -} - -export interface FaceDetection { - // box and landmarks is relative to image dimentions stored at mlFileData - box: Box; - landmarks?: Array; - probability?: number; -} - -export interface DetectedFace { - fileId: number; - detection: FaceDetection; -} - -export interface DetectedFaceWithId extends DetectedFace { - id: string; -} - -export interface FaceCrop { - image: ImageBitmap; - // imageBox is relative to image dimentions stored at mlFileData - imageBox: Box; -} - -export interface StoredFaceCrop { - cacheKey: string; - imageBox: Box; -} - -export interface CroppedFace extends DetectedFaceWithId { - crop?: StoredFaceCrop; -} - -export interface FaceAlignment { - // TODO: remove affine matrix as rotation, size and center - // are simple to store and use, affine matrix adds complexity while getting crop - affineMatrix: Array>; - rotation: number; - // size and center is relative to image dimentions stored at mlFileData - size: number; - center: Point; -} - -export interface AlignedFace extends CroppedFace { - alignment?: FaceAlignment; - blurValue?: number; -} - -export declare type FaceEmbedding = Float32Array; - -export interface FaceWithEmbedding extends AlignedFace { - embedding?: FaceEmbedding; -} - -export interface Face extends FaceWithEmbedding { - personId?: 
number; -} - -export interface Person { - id: number; - name?: string; - files: Array; - displayFaceId?: string; - faceCropCacheKey?: string; -} - -export interface MlFileData { - fileId: number; - faces?: Face[]; - imageSource?: ImageType; - imageDimensions?: Dimensions; - faceDetectionMethod?: Versioned; - faceCropMethod?: Versioned; - faceAlignmentMethod?: Versioned; - faceEmbeddingMethod?: Versioned; - mlVersion: number; - errorCount: number; - lastErrorMessage?: string; -} - -export interface FaceDetectionConfig { - method: FaceDetectionMethod; -} - -export interface FaceCropConfig { - enabled: boolean; - method: FaceCropMethod; - padding: number; - maxSize: number; - blobOptions: { - type: string; - quality: number; - }; -} - -export interface FaceAlignmentConfig { - method: FaceAlignmentMethod; -} - -export interface BlurDetectionConfig { - method: BlurDetectionMethod; - threshold: number; -} - -export interface FaceEmbeddingConfig { - method: FaceEmbeddingMethod; - faceSize: number; - generateTsne?: boolean; -} - -export interface FaceClusteringConfig extends ClusteringConfig {} - -export declare type TSNEMetric = "euclidean" | "manhattan"; - -export interface TSNEConfig { - samples: number; - dim: number; - perplexity?: number; - earlyExaggeration?: number; - learningRate?: number; - nIter?: number; - metric?: TSNEMetric; -} - -export interface MLSyncConfig { - batchSize: number; - imageSource: ImageType; - faceDetection: FaceDetectionConfig; - faceCrop: FaceCropConfig; - faceAlignment: FaceAlignmentConfig; - blurDetection: BlurDetectionConfig; - faceEmbedding: FaceEmbeddingConfig; - faceClustering: FaceClusteringConfig; - mlVersion: number; -} - -export interface MLSearchConfig { - enabled: boolean; -} - -export interface MLSyncContext { - token: string; - userID: number; - config: MLSyncConfig; - shouldUpdateMLVersion: boolean; - - faceDetectionService: FaceDetectionService; - faceCropService: FaceCropService; - faceAlignmentService: 
FaceAlignmentService; - faceEmbeddingService: FaceEmbeddingService; - blurDetectionService: BlurDetectionService; - faceClusteringService: ClusteringService; - - localFilesMap: Map; - outOfSyncFiles: EnteFile[]; - nSyncedFiles: number; - nSyncedFaces: number; - allSyncedFacesMap?: Map>; - - error?: Error; - - // oldMLLibraryData: MLLibraryData; - mlLibraryData: MLLibraryData; - - syncQueue: PQueue; - - getEnteWorker(id: number): Promise; - dispose(): Promise; -} - -export interface MLSyncFileContext { - enteFile: EnteFile; - localFile?: globalThis.File; - - oldMlFile?: MlFileData; - newMlFile?: MlFileData; - - imageBitmap?: ImageBitmap; - - newDetection?: boolean; - newAlignment?: boolean; -} - -export interface MLLibraryData { - faceClusteringMethod?: Versioned; - faceClusteringResults?: ClusteringResults; - faceClustersWithNoise?: FacesClustersWithNoise; -} - -export declare type MLIndex = "files" | "people"; - -export interface FaceDetectionService { - method: Versioned; - - detectFaces(image: ImageBitmap): Promise>; - getRelativeDetection( - faceDetection: FaceDetection, - imageDimensions: Dimensions, - ): FaceDetection; -} - -export interface FaceCropService { - method: Versioned; - - getFaceCrop( - imageBitmap: ImageBitmap, - face: FaceDetection, - config: FaceCropConfig, - ): Promise; -} - -export interface FaceAlignmentService { - method: Versioned; - getFaceAlignment(faceDetection: FaceDetection): FaceAlignment; -} - -export interface FaceEmbeddingService { - method: Versioned; - faceSize: number; - - getFaceEmbeddings(faceImages: Float32Array): Promise>; -} - -export interface BlurDetectionService { - method: Versioned; - detectBlur(alignedFaces: Float32Array, faces: Face[]): number[]; -} - -export interface ClusteringService { - method: Versioned; - - cluster( - input: ClusteringInput, - config: ClusteringConfig, - ): Promise; -} - -export interface ClusteringConfig { - method: ClusteringMethod; - minClusterSize: number; - minSamples?: number; - 
clusterSelectionEpsilon?: number; - clusterSelectionMethod?: "eom" | "leaf"; - maxDistanceInsideCluster?: number; - minInputSize?: number; - generateDebugInfo?: boolean; -} - -export declare type ClusteringInput = Array>; - -export interface MachineLearningWorker { - closeLocalSyncContext(): Promise; - - syncLocalFile( - token: string, - userID: number, - enteFile: EnteFile, - localFile: globalThis.File, - ): Promise; - - sync(token: string, userID: number): Promise; - - close(): void; -} diff --git a/web/apps/photos/src/types/machineLearning/ui.ts b/web/apps/photos/src/types/machineLearning/ui.ts deleted file mode 100644 index cd9f63f185..0000000000 --- a/web/apps/photos/src/types/machineLearning/ui.ts +++ /dev/null @@ -1,7 +0,0 @@ -export interface IndexStatus { - outOfSyncFilesExists: boolean; - nSyncedFiles: number; - nTotalFiles: number; - localFilesSynced: boolean; - peopleIndexSynced: boolean; -} diff --git a/web/apps/photos/src/types/search/index.ts b/web/apps/photos/src/types/search/index.ts index cf50f4a060..33f5eba9a0 100644 --- a/web/apps/photos/src/types/search/index.ts +++ b/web/apps/photos/src/types/search/index.ts @@ -1,9 +1,9 @@ import { FILE_TYPE } from "@/media/file-type"; +import { IndexStatus } from "services/face/db"; +import type { Person } from "services/face/people"; import { City } from "services/locationSearchService"; import { LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; -import { Person } from "types/machineLearning"; -import { IndexStatus } from "types/machineLearning/ui"; export enum SuggestionType { DATE = "DATE", diff --git a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts deleted file mode 100644 index f312a2c5c0..0000000000 --- a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { haveWindow } from "@/next/env"; -import { ComlinkWorker } from "@/next/worker/comlink-worker"; -import { type 
DedicatedMLWorker } from "worker/ml.worker"; - -export const getDedicatedMLWorker = (name: string) => { - if (haveWindow()) { - const cryptoComlinkWorker = new ComlinkWorker( - name ?? "ente-ml-worker", - new Worker(new URL("worker/ml.worker.ts", import.meta.url)), - ); - return cryptoComlinkWorker; - } -}; diff --git a/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts index 4886bacda5..0d7c52a965 100644 --- a/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts +++ b/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts @@ -5,11 +5,13 @@ import { type DedicatedSearchWorker } from "worker/search.worker"; class ComlinkSearchWorker { private comlinkWorkerInstance: Remote; + private comlinkWorker: ComlinkWorker; async getInstance() { if (!this.comlinkWorkerInstance) { - this.comlinkWorkerInstance = - await getDedicatedSearchWorker().remote; + if (!this.comlinkWorker) + this.comlinkWorker = getDedicatedSearchWorker(); + this.comlinkWorkerInstance = await this.comlinkWorker.remote; } return this.comlinkWorkerInstance; } diff --git a/web/apps/photos/src/utils/common/job.ts b/web/apps/photos/src/utils/common/job.ts deleted file mode 100644 index 365f879e95..0000000000 --- a/web/apps/photos/src/utils/common/job.ts +++ /dev/null @@ -1,82 +0,0 @@ -import log from "@/next/log"; -import { JobConfig, JobResult, JobState } from "types/common/job"; - -export class SimpleJob { - private config: JobConfig; - private runCallback: () => Promise; - private state: JobState; - private stopped: boolean; - private intervalSec: number; - private nextTimeoutId: ReturnType; - - constructor(config: JobConfig, runCallback: () => Promise) { - this.config = config; - this.runCallback = runCallback; - this.state = "NotScheduled"; - this.stopped = true; - this.intervalSec = this.config.intervalSec; - } - - public resetInterval() { - this.intervalSec = this.config.intervalSec; - } - - public start() { - this.stopped = false; - 
this.resetInterval(); - if (this.state !== "Running") { - this.scheduleNext(); - } else { - log.info("Job already running, not scheduling"); - } - } - - private scheduleNext() { - if (this.state === "Scheduled" || this.nextTimeoutId) { - this.clearScheduled(); - } - - this.nextTimeoutId = setTimeout( - () => this.run(), - this.intervalSec * 1000, - ); - this.state = "Scheduled"; - log.info("Scheduled next job after: ", this.intervalSec); - } - - async run() { - this.nextTimeoutId = undefined; - this.state = "Running"; - - try { - const jobResult = await this.runCallback(); - if (jobResult && jobResult.shouldBackoff) { - this.intervalSec = Math.min( - this.config.maxItervalSec, - this.intervalSec * this.config.backoffMultiplier, - ); - } else { - this.resetInterval(); - } - log.info("Job completed"); - } catch (e) { - console.error("Error while running Job: ", e); - } finally { - this.state = "NotScheduled"; - !this.stopped && this.scheduleNext(); - } - } - - // currently client is responsible to terminate running job - public stop() { - this.stopped = true; - this.clearScheduled(); - } - - private clearScheduled() { - clearTimeout(this.nextTimeoutId); - this.nextTimeoutId = undefined; - this.state = "NotScheduled"; - log.info("Cleared next job"); - } -} diff --git a/web/apps/photos/src/utils/embedding.ts b/web/apps/photos/src/utils/embedding.ts deleted file mode 100644 index 00012f174f..0000000000 --- a/web/apps/photos/src/utils/embedding.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { Embedding } from "types/embedding"; -import { FileML } from "./machineLearning/mldataMappers"; - -export const getLatestVersionEmbeddings = (embeddings: Embedding[]) => { - const latestVersionEntities = new Map(); - embeddings.forEach((embedding) => { - if (!embedding?.fileID) { - return; - } - const existingEmbeddings = latestVersionEntities.get(embedding.fileID); - if ( - !existingEmbeddings || - existingEmbeddings.updatedAt < embedding.updatedAt - ) { - 
latestVersionEntities.set(embedding.fileID, embedding); - } - }); - return Array.from(latestVersionEntities.values()); -}; - -export const getLatestVersionFileEmbeddings = (embeddings: FileML[]) => { - const latestVersionEntities = new Map(); - embeddings.forEach((embedding) => { - if (!embedding?.fileID) { - return; - } - const existingEmbeddings = latestVersionEntities.get(embedding.fileID); - if ( - !existingEmbeddings || - existingEmbeddings.updatedAt < embedding.updatedAt - ) { - latestVersionEntities.set(embedding.fileID, embedding); - } - }); - return Array.from(latestVersionEntities.values()); -}; diff --git a/web/apps/photos/src/utils/file/index.ts b/web/apps/photos/src/utils/file/index.ts index 98a8dd9481..f2f9932dd8 100644 --- a/web/apps/photos/src/utils/file/index.ts +++ b/web/apps/photos/src/utils/file/index.ts @@ -5,10 +5,11 @@ import { lowercaseExtension } from "@/next/file"; import log from "@/next/log"; import { CustomErrorMessage, type Electron } from "@/next/types/ipc"; import { workerBridge } from "@/next/worker/worker-bridge"; +import { withTimeout } from "@/utils/promise"; import ComlinkCryptoWorker from "@ente/shared/crypto"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { User } from "@ente/shared/user/types"; -import { downloadUsingAnchor, withTimeout } from "@ente/shared/utils"; +import { downloadUsingAnchor } from "@ente/shared/utils"; import { t } from "i18next"; import isElectron from "is-electron"; import { moveToHiddenCollection } from "services/collectionService"; @@ -261,15 +262,10 @@ export async function decryptFile( } } -export function generateStreamFromArrayBuffer(data: Uint8Array) { - return new ReadableStream({ - async start(controller: ReadableStreamDefaultController) { - controller.enqueue(data); - controller.close(); - }, - }); -} - +/** + * The returned blob.type is filled in, whenever possible, with the MIME type of + * the data that we're dealing with. 
+ */ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => { try { const tempFile = new File([imageBlob], fileName); @@ -283,7 +279,16 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => { if (!isNonWebImageFileExtension(extension)) { // Either it is something that the browser already knows how to // render, or something we don't even about yet. - return imageBlob; + const mimeType = fileTypeInfo.mimeType; + if (!mimeType) { + log.info( + "Trying to render a file without a MIME type", + fileName, + ); + return imageBlob; + } else { + return new Blob([imageBlob], { type: mimeType }); + } } const available = !moduleState.isNativeJPEGConversionNotAvailable; @@ -324,7 +329,7 @@ const nativeConvertToJPEG = async (imageBlob: Blob) => { ? await electron.convertToJPEG(imageData) : await workerBridge.convertToJPEG(imageData); log.debug(() => `Native JPEG conversion took ${Date.now() - startTime} ms`); - return new Blob([jpegData]); + return new Blob([jpegData], { type: "image/jpeg" }); }; export function isSupportedRawFormat(exactType: string) { @@ -635,7 +640,7 @@ async function downloadFileDesktop( imageFileName, fs.exists, ); - const imageStream = generateStreamFromArrayBuffer(imageData); + const imageStream = new Response(imageData).body; await writeStream( electron, `${downloadDir}/${imageExportName}`, @@ -647,7 +652,7 @@ async function downloadFileDesktop( videoFileName, fs.exists, ); - const videoStream = generateStreamFromArrayBuffer(videoData); + const videoStream = new Response(videoData).body; await writeStream( electron, `${downloadDir}/${videoExportName}`, diff --git a/web/apps/photos/src/utils/image/index.ts b/web/apps/photos/src/utils/image/index.ts deleted file mode 100644 index e4884716cd..0000000000 --- a/web/apps/photos/src/utils/image/index.ts +++ /dev/null @@ -1,465 +0,0 @@ -// these utils only work in env where OffscreenCanvas is available - -import { Matrix, inverse } from "ml-matrix"; -import { 
BlobOptions, Dimensions } from "types/image"; -import { FaceAlignment } from "types/machineLearning"; -import { enlargeBox } from "utils/machineLearning"; -import { Box } from "../../../thirdparty/face-api/classes"; - -export function normalizePixelBetween0And1(pixelValue: number) { - return pixelValue / 255.0; -} - -export function normalizePixelBetweenMinus1And1(pixelValue: number) { - return pixelValue / 127.5 - 1.0; -} - -export function unnormalizePixelFromBetweenMinus1And1(pixelValue: number) { - return clamp(Math.round((pixelValue + 1.0) * 127.5), 0, 255); -} - -export function readPixelColor( - imageData: Uint8ClampedArray, - width: number, - height: number, - x: number, - y: number, -) { - if (x < 0 || x >= width || y < 0 || y >= height) { - return { r: 0, g: 0, b: 0, a: 0 }; - } - const index = (y * width + x) * 4; - return { - r: imageData[index], - g: imageData[index + 1], - b: imageData[index + 2], - a: imageData[index + 3], - }; -} - -export function clamp(value: number, min: number, max: number) { - return Math.min(max, Math.max(min, value)); -} - -export function getPixelBicubic( - fx: number, - fy: number, - imageData: Uint8ClampedArray, - imageWidth: number, - imageHeight: number, -) { - // Clamp to image boundaries - fx = clamp(fx, 0, imageWidth - 1); - fy = clamp(fy, 0, imageHeight - 1); - - const x = Math.trunc(fx) - (fx >= 0.0 ? 0 : 1); - const px = x - 1; - const nx = x + 1; - const ax = x + 2; - const y = Math.trunc(fy) - (fy >= 0.0 ? 0 : 1); - const py = y - 1; - const ny = y + 1; - const ay = y + 2; - const dx = fx - x; - const dy = fy - y; - - function cubic( - dx: number, - ipp: number, - icp: number, - inp: number, - iap: number, - ) { - return ( - icp + - 0.5 * - (dx * (-ipp + inp) + - dx * dx * (2 * ipp - 5 * icp + 4 * inp - iap) + - dx * dx * dx * (-ipp + 3 * icp - 3 * inp + iap)) - ); - } - - const icc = readPixelColor(imageData, imageWidth, imageHeight, x, y); - - const ipp = - px < 0 || py < 0 - ? 
icc - : readPixelColor(imageData, imageWidth, imageHeight, px, py); - const icp = - px < 0 - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, x, py); - const inp = - py < 0 || nx >= imageWidth - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, nx, py); - const iap = - ax >= imageWidth || py < 0 - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, ax, py); - - const ip0 = cubic(dx, ipp.r, icp.r, inp.r, iap.r); - const ip1 = cubic(dx, ipp.g, icp.g, inp.g, iap.g); - const ip2 = cubic(dx, ipp.b, icp.b, inp.b, iap.b); - // const ip3 = cubic(dx, ipp.a, icp.a, inp.a, iap.a); - - const ipc = - px < 0 - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, px, y); - const inc = - nx >= imageWidth - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, nx, y); - const iac = - ax >= imageWidth - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, ax, y); - - const ic0 = cubic(dx, ipc.r, icc.r, inc.r, iac.r); - const ic1 = cubic(dx, ipc.g, icc.g, inc.g, iac.g); - const ic2 = cubic(dx, ipc.b, icc.b, inc.b, iac.b); - // const ic3 = cubic(dx, ipc.a, icc.a, inc.a, iac.a); - - const ipn = - px < 0 || ny >= imageHeight - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, px, ny); - const icn = - ny >= imageHeight - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, x, ny); - const inn = - nx >= imageWidth || ny >= imageHeight - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, nx, ny); - const ian = - ax >= imageWidth || ny >= imageHeight - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, ax, ny); - - const in0 = cubic(dx, ipn.r, icn.r, inn.r, ian.r); - const in1 = cubic(dx, ipn.g, icn.g, inn.g, ian.g); - const in2 = cubic(dx, ipn.b, icn.b, inn.b, ian.b); - // const in3 = cubic(dx, ipn.a, icn.a, inn.a, ian.a); - - const ipa = - px < 0 || ay >= imageHeight - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, px, ay); - const ica = - ay >= imageHeight - ? 
icc - : readPixelColor(imageData, imageWidth, imageHeight, x, ay); - const ina = - nx >= imageWidth || ay >= imageHeight - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, nx, ay); - const iaa = - ax >= imageWidth || ay >= imageHeight - ? icc - : readPixelColor(imageData, imageWidth, imageHeight, ax, ay); - - const ia0 = cubic(dx, ipa.r, ica.r, ina.r, iaa.r); - const ia1 = cubic(dx, ipa.g, ica.g, ina.g, iaa.g); - const ia2 = cubic(dx, ipa.b, ica.b, ina.b, iaa.b); - // const ia3 = cubic(dx, ipa.a, ica.a, ina.a, iaa.a); - - const c0 = Math.trunc(clamp(cubic(dy, ip0, ic0, in0, ia0), 0, 255)); - const c1 = Math.trunc(clamp(cubic(dy, ip1, ic1, in1, ia1), 0, 255)); - const c2 = Math.trunc(clamp(cubic(dy, ip2, ic2, in2, ia2), 0, 255)); - // const c3 = cubic(dy, ip3, ic3, in3, ia3); - - return { r: c0, g: c1, b: c2 }; -} - -/// Returns the pixel value (RGB) at the given coordinates using bilinear interpolation. -export function getPixelBilinear( - fx: number, - fy: number, - imageData: Uint8ClampedArray, - imageWidth: number, - imageHeight: number, -) { - // Clamp to image boundaries - fx = clamp(fx, 0, imageWidth - 1); - fy = clamp(fy, 0, imageHeight - 1); - - // Get the surrounding coordinates and their weights - const x0 = Math.floor(fx); - const x1 = Math.ceil(fx); - const y0 = Math.floor(fy); - const y1 = Math.ceil(fy); - const dx = fx - x0; - const dy = fy - y0; - const dx1 = 1.0 - dx; - const dy1 = 1.0 - dy; - - // Get the original pixels - const pixel1 = readPixelColor(imageData, imageWidth, imageHeight, x0, y0); - const pixel2 = readPixelColor(imageData, imageWidth, imageHeight, x1, y0); - const pixel3 = readPixelColor(imageData, imageWidth, imageHeight, x0, y1); - const pixel4 = readPixelColor(imageData, imageWidth, imageHeight, x1, y1); - - function bilinear(val1: number, val2: number, val3: number, val4: number) { - return Math.round( - val1 * dx1 * dy1 + - val2 * dx * dy1 + - val3 * dx1 * dy + - val4 * dx * dy, - ); - } - - // Interpolate the pixel 
values - const red = bilinear(pixel1.r, pixel2.r, pixel3.r, pixel4.r); - const green = bilinear(pixel1.g, pixel2.g, pixel3.g, pixel4.g); - const blue = bilinear(pixel1.b, pixel2.b, pixel3.b, pixel4.b); - - return { r: red, g: green, b: blue }; -} - -export function warpAffineFloat32List( - imageBitmap: ImageBitmap, - faceAlignment: FaceAlignment, - faceSize: number, - inputData: Float32Array, - inputStartIndex: number, -): void { - // Get the pixel data - const offscreenCanvas = new OffscreenCanvas( - imageBitmap.width, - imageBitmap.height, - ); - const ctx = offscreenCanvas.getContext("2d"); - ctx.drawImage(imageBitmap, 0, 0, imageBitmap.width, imageBitmap.height); - const imageData = ctx.getImageData( - 0, - 0, - imageBitmap.width, - imageBitmap.height, - ); - const pixelData = imageData.data; - - const transformationMatrix = faceAlignment.affineMatrix.map((row) => - row.map((val) => (val != 1.0 ? val * faceSize : 1.0)), - ); // 3x3 - - const A: Matrix = new Matrix([ - [transformationMatrix[0][0], transformationMatrix[0][1]], - [transformationMatrix[1][0], transformationMatrix[1][1]], - ]); - const Ainverse = inverse(A); - - const b00 = transformationMatrix[0][2]; - const b10 = transformationMatrix[1][2]; - const a00Prime = Ainverse.get(0, 0); - const a01Prime = Ainverse.get(0, 1); - const a10Prime = Ainverse.get(1, 0); - const a11Prime = Ainverse.get(1, 1); - - for (let yTrans = 0; yTrans < faceSize; ++yTrans) { - for (let xTrans = 0; xTrans < faceSize; ++xTrans) { - // Perform inverse affine transformation - const xOrigin = - a00Prime * (xTrans - b00) + a01Prime * (yTrans - b10); - const yOrigin = - a10Prime * (xTrans - b00) + a11Prime * (yTrans - b10); - - // Get the pixel from interpolation - const pixel = getPixelBicubic( - xOrigin, - yOrigin, - pixelData, - imageBitmap.width, - imageBitmap.height, - ); - - // Set the pixel in the input data - const index = (yTrans * faceSize + xTrans) * 3; - inputData[inputStartIndex + index] = - 
normalizePixelBetweenMinus1And1(pixel.r); - inputData[inputStartIndex + index + 1] = - normalizePixelBetweenMinus1And1(pixel.g); - inputData[inputStartIndex + index + 2] = - normalizePixelBetweenMinus1And1(pixel.b); - } - } -} - -export function createGrayscaleIntMatrixFromNormalized2List( - imageList: Float32Array, - faceNumber: number, - width: number = 112, - height: number = 112, -): number[][] { - const startIndex = faceNumber * width * height * 3; - return Array.from({ length: height }, (_, y) => - Array.from({ length: width }, (_, x) => { - // 0.299 ∙ Red + 0.587 ∙ Green + 0.114 ∙ Blue - const pixelIndex = startIndex + 3 * (y * width + x); - return clamp( - Math.round( - 0.299 * - unnormalizePixelFromBetweenMinus1And1( - imageList[pixelIndex], - ) + - 0.587 * - unnormalizePixelFromBetweenMinus1And1( - imageList[pixelIndex + 1], - ) + - 0.114 * - unnormalizePixelFromBetweenMinus1And1( - imageList[pixelIndex + 2], - ), - ), - 0, - 255, - ); - }), - ); -} - -export function resizeToSquare(img: ImageBitmap, size: number) { - const scale = size / Math.max(img.height, img.width); - const width = scale * img.width; - const height = scale * img.height; - const offscreen = new OffscreenCanvas(size, size); - const ctx = offscreen.getContext("2d"); - ctx.imageSmoothingQuality = "high"; - ctx.drawImage(img, 0, 0, width, height); - const resizedImage = offscreen.transferToImageBitmap(); - return { image: resizedImage, width, height }; -} - -export function transform( - imageBitmap: ImageBitmap, - affineMat: number[][], - outputWidth: number, - outputHeight: number, -) { - const offscreen = new OffscreenCanvas(outputWidth, outputHeight); - const context = offscreen.getContext("2d"); - context.imageSmoothingQuality = "high"; - - context.transform( - affineMat[0][0], - affineMat[1][0], - affineMat[0][1], - affineMat[1][1], - affineMat[0][2], - affineMat[1][2], - ); - - context.drawImage(imageBitmap, 0, 0); - return offscreen.transferToImageBitmap(); -} - -export function 
crop(imageBitmap: ImageBitmap, cropBox: Box, size: number) { - const dimensions: Dimensions = { - width: size, - height: size, - }; - - return cropWithRotation(imageBitmap, cropBox, 0, dimensions, dimensions); -} - -export function cropWithRotation( - imageBitmap: ImageBitmap, - cropBox: Box, - rotation?: number, - maxSize?: Dimensions, - minSize?: Dimensions, -) { - const box = cropBox.round(); - - const outputSize = { width: box.width, height: box.height }; - if (maxSize) { - const minScale = Math.min( - maxSize.width / box.width, - maxSize.height / box.height, - ); - if (minScale < 1) { - outputSize.width = Math.round(minScale * box.width); - outputSize.height = Math.round(minScale * box.height); - } - } - - if (minSize) { - const maxScale = Math.max( - minSize.width / box.width, - minSize.height / box.height, - ); - if (maxScale > 1) { - outputSize.width = Math.round(maxScale * box.width); - outputSize.height = Math.round(maxScale * box.height); - } - } - - // log.info({ imageBitmap, box, outputSize }); - - const offscreen = new OffscreenCanvas(outputSize.width, outputSize.height); - const offscreenCtx = offscreen.getContext("2d"); - offscreenCtx.imageSmoothingQuality = "high"; - - offscreenCtx.translate(outputSize.width / 2, outputSize.height / 2); - rotation && offscreenCtx.rotate(rotation); - - const outputBox = new Box({ - x: -outputSize.width / 2, - y: -outputSize.height / 2, - width: outputSize.width, - height: outputSize.height, - }); - - const enlargedBox = enlargeBox(box, 1.5); - const enlargedOutputBox = enlargeBox(outputBox, 1.5); - - offscreenCtx.drawImage( - imageBitmap, - enlargedBox.x, - enlargedBox.y, - enlargedBox.width, - enlargedBox.height, - enlargedOutputBox.x, - enlargedOutputBox.y, - enlargedOutputBox.width, - enlargedOutputBox.height, - ); - - return offscreen.transferToImageBitmap(); -} - -export function addPadding(image: ImageBitmap, padding: number) { - const scale = 1 + padding * 2; - const width = scale * image.width; - const 
height = scale * image.height; - const offscreen = new OffscreenCanvas(width, height); - const ctx = offscreen.getContext("2d"); - ctx.imageSmoothingEnabled = false; - ctx.drawImage( - image, - width / 2 - image.width / 2, - height / 2 - image.height / 2, - image.width, - image.height, - ); - - return offscreen.transferToImageBitmap(); -} - -export async function imageBitmapToBlob( - imageBitmap: ImageBitmap, - options?: BlobOptions, -) { - const offscreen = new OffscreenCanvas( - imageBitmap.width, - imageBitmap.height, - ); - offscreen.getContext("2d").drawImage(imageBitmap, 0, 0); - - return offscreen.convertToBlob(options); -} - -export async function imageBitmapFromBlob(blob: Blob) { - return createImageBitmap(blob); -} diff --git a/web/apps/photos/src/utils/machineLearning/config.ts b/web/apps/photos/src/utils/machineLearning/config.ts deleted file mode 100644 index 0c25356aba..0000000000 --- a/web/apps/photos/src/utils/machineLearning/config.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { - DEFAULT_ML_SEARCH_CONFIG, - DEFAULT_ML_SYNC_CONFIG, - DEFAULT_ML_SYNC_JOB_CONFIG, -} from "constants/mlConfig"; -import { JobConfig } from "types/common/job"; -import { MLSearchConfig, MLSyncConfig } from "types/machineLearning"; -import mlIDbStorage, { - ML_SEARCH_CONFIG_NAME, - ML_SYNC_CONFIG_NAME, - ML_SYNC_JOB_CONFIG_NAME, -} from "utils/storage/mlIDbStorage"; -import { isInternalUserForML } from "utils/user"; - -export async function getMLSyncJobConfig() { - return mlIDbStorage.getConfig( - ML_SYNC_JOB_CONFIG_NAME, - DEFAULT_ML_SYNC_JOB_CONFIG, - ); -} - -export async function getMLSyncConfig() { - return mlIDbStorage.getConfig(ML_SYNC_CONFIG_NAME, DEFAULT_ML_SYNC_CONFIG); -} - -export async function getMLSearchConfig() { - if (isInternalUserForML()) { - return mlIDbStorage.getConfig( - ML_SEARCH_CONFIG_NAME, - DEFAULT_ML_SEARCH_CONFIG, - ); - } - // Force disabled for everyone else while we finalize it to avoid redundant - // reindexing for users. 
- return DEFAULT_ML_SEARCH_CONFIG; -} - -export async function updateMLSyncJobConfig(newConfig: JobConfig) { - return mlIDbStorage.putConfig(ML_SYNC_JOB_CONFIG_NAME, newConfig); -} - -export async function updateMLSyncConfig(newConfig: MLSyncConfig) { - return mlIDbStorage.putConfig(ML_SYNC_CONFIG_NAME, newConfig); -} - -export async function updateMLSearchConfig(newConfig: MLSearchConfig) { - return mlIDbStorage.putConfig(ML_SEARCH_CONFIG_NAME, newConfig); -} diff --git a/web/apps/photos/src/utils/machineLearning/faceAlign.ts b/web/apps/photos/src/utils/machineLearning/faceAlign.ts deleted file mode 100644 index beb98cea9a..0000000000 --- a/web/apps/photos/src/utils/machineLearning/faceAlign.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { Matrix } from "ml-matrix"; -import { getSimilarityTransformation } from "similarity-transformation"; -import { FaceAlignment, FaceDetection } from "types/machineLearning"; -import { Point } from "../../../thirdparty/face-api/classes"; - -const ARCFACE_LANDMARKS = [ - [38.2946, 51.6963], - [73.5318, 51.5014], - [56.0252, 71.7366], - [56.1396, 92.2848], -] as Array<[number, number]>; - -const ARCFACE_LANDMARKS_FACE_SIZE = 112; - -const ARC_FACE_5_LANDMARKS = [ - [38.2946, 51.6963], - [73.5318, 51.5014], - [56.0252, 71.7366], - [41.5493, 92.3655], - [70.7299, 92.2041], -] as Array<[number, number]>; - -export function getArcfaceAlignment( - faceDetection: FaceDetection, -): FaceAlignment { - const landmarkCount = faceDetection.landmarks.length; - return getFaceAlignmentUsingSimilarityTransform( - faceDetection, - normalizeLandmarks( - landmarkCount === 5 ? 
ARC_FACE_5_LANDMARKS : ARCFACE_LANDMARKS, - ARCFACE_LANDMARKS_FACE_SIZE, - ), - ); -} - -function getFaceAlignmentUsingSimilarityTransform( - faceDetection: FaceDetection, - alignedLandmarks: Array<[number, number]>, - // alignmentMethod: Versioned -): FaceAlignment { - const landmarksMat = new Matrix( - faceDetection.landmarks - .map((p) => [p.x, p.y]) - .slice(0, alignedLandmarks.length), - ).transpose(); - const alignedLandmarksMat = new Matrix(alignedLandmarks).transpose(); - - const simTransform = getSimilarityTransformation( - landmarksMat, - alignedLandmarksMat, - ); - - const RS = Matrix.mul(simTransform.rotation, simTransform.scale); - const TR = simTransform.translation; - - const affineMatrix = [ - [RS.get(0, 0), RS.get(0, 1), TR.get(0, 0)], - [RS.get(1, 0), RS.get(1, 1), TR.get(1, 0)], - [0, 0, 1], - ]; - - const size = 1 / simTransform.scale; - const meanTranslation = simTransform.toMean.sub(0.5).mul(size); - const centerMat = simTransform.fromMean.sub(meanTranslation); - const center = new Point(centerMat.get(0, 0), centerMat.get(1, 0)); - const rotation = -Math.atan2( - simTransform.rotation.get(0, 1), - simTransform.rotation.get(0, 0), - ); - // log.info({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size }); - - return { - affineMatrix, - center, - size, - rotation, - }; -} - -function normalizeLandmarks( - landmarks: Array<[number, number]>, - faceSize: number, -): Array<[number, number]> { - return landmarks.map((landmark) => - landmark.map((p) => p / faceSize), - ) as Array<[number, number]>; -} diff --git a/web/apps/photos/src/utils/machineLearning/faceCrop.ts b/web/apps/photos/src/utils/machineLearning/faceCrop.ts deleted file mode 100644 index d437a942dc..0000000000 --- a/web/apps/photos/src/utils/machineLearning/faceCrop.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { FaceAlignment, FaceCrop, FaceCropConfig } from "types/machineLearning"; -import { cropWithRotation } from 
"utils/image"; -import { enlargeBox } from "."; -import { Box } from "../../../thirdparty/face-api/classes"; - -export function getFaceCrop( - imageBitmap: ImageBitmap, - alignment: FaceAlignment, - config: FaceCropConfig, -): FaceCrop { - const alignmentBox = new Box({ - x: alignment.center.x - alignment.size / 2, - y: alignment.center.y - alignment.size / 2, - width: alignment.size, - height: alignment.size, - }).round(); - const scaleForPadding = 1 + config.padding * 2; - const paddedBox = enlargeBox(alignmentBox, scaleForPadding).round(); - const faceImageBitmap = cropWithRotation(imageBitmap, paddedBox, 0, { - width: config.maxSize, - height: config.maxSize, - }); - - return { - image: faceImageBitmap, - imageBox: paddedBox, - }; -} diff --git a/web/apps/photos/src/utils/machineLearning/index.ts b/web/apps/photos/src/utils/machineLearning/index.ts deleted file mode 100644 index bc9ae39749..0000000000 --- a/web/apps/photos/src/utils/machineLearning/index.ts +++ /dev/null @@ -1,284 +0,0 @@ -import { FILE_TYPE } from "@/media/file-type"; -import { decodeLivePhoto } from "@/media/live-photo"; -import log from "@/next/log"; -import PQueue from "p-queue"; -import DownloadManager from "services/download"; -import { getLocalFiles } from "services/fileService"; -import { EnteFile } from "types/file"; -import { Dimensions } from "types/image"; -import { - DetectedFace, - Face, - FaceAlignment, - MlFileData, - Person, - Versioned, -} from "types/machineLearning"; -import { getRenderableImage } from "utils/file"; -import { clamp, warpAffineFloat32List } from "utils/image"; -import mlIDbStorage from "utils/storage/mlIDbStorage"; -import { Box, Point } from "../../../thirdparty/face-api/classes"; - -export function newBox(x: number, y: number, width: number, height: number) { - return new Box({ x, y, width, height }); -} - -export function getBoxCenterPt(topLeft: Point, bottomRight: Point): Point { - return topLeft.add(bottomRight.sub(topLeft).div(new Point(2, 2))); -} - 
-export function getBoxCenter(box: Box): Point { - return getBoxCenterPt(box.topLeft, box.bottomRight); -} - -export function enlargeBox(box: Box, factor: number = 1.5) { - const center = getBoxCenter(box); - const size = new Point(box.width, box.height); - const newHalfSize = new Point((factor * size.x) / 2, (factor * size.y) / 2); - - return new Box({ - left: center.x - newHalfSize.x, - top: center.y - newHalfSize.y, - right: center.x + newHalfSize.x, - bottom: center.y + newHalfSize.y, - }); -} - -export function getAllFacesFromMap(allFacesMap: Map>) { - const allFaces = [...allFacesMap.values()].flat(); - - return allFaces; -} - -export async function getLocalFile(fileId: number) { - const localFiles = await getLocalFiles(); - return localFiles.find((f) => f.id === fileId); -} - -export async function extractFaceImagesToFloat32( - faceAlignments: Array, - faceSize: number, - image: ImageBitmap, -): Promise { - const faceData = new Float32Array( - faceAlignments.length * faceSize * faceSize * 3, - ); - for (let i = 0; i < faceAlignments.length; i++) { - const alignedFace = faceAlignments[i]; - const faceDataOffset = i * faceSize * faceSize * 3; - warpAffineFloat32List( - image, - alignedFace, - faceSize, - faceData, - faceDataOffset, - ); - } - return faceData; -} - -export function getFaceId(detectedFace: DetectedFace, imageDims: Dimensions) { - const xMin = clamp( - detectedFace.detection.box.x / imageDims.width, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - const yMin = clamp( - detectedFace.detection.box.y / imageDims.height, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - const xMax = clamp( - (detectedFace.detection.box.x + detectedFace.detection.box.width) / - imageDims.width, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - const yMax = clamp( - (detectedFace.detection.box.y + detectedFace.detection.box.height) / - imageDims.height, - 0.0, - 0.999999, - ) - .toFixed(5) - .substring(2); - - const rawFaceID = 
`${xMin}_${yMin}_${xMax}_${yMax}`; - const faceID = `${detectedFace.fileId}_${rawFaceID}`; - - return faceID; -} - -export async function getImageBlobBitmap(blob: Blob): Promise { - return await createImageBitmap(blob); -} - -async function getOriginalFile(file: EnteFile, queue?: PQueue) { - let fileStream; - if (queue) { - fileStream = await queue.add(() => DownloadManager.getFile(file)); - } else { - fileStream = await DownloadManager.getFile(file); - } - return new Response(fileStream).blob(); -} - -async function getOriginalConvertedFile(file: EnteFile, queue?: PQueue) { - const fileBlob = await getOriginalFile(file, queue); - if (file.metadata.fileType === FILE_TYPE.IMAGE) { - return await getRenderableImage(file.metadata.title, fileBlob); - } else { - const { imageFileName, imageData } = await decodeLivePhoto( - file.metadata.title, - fileBlob, - ); - return await getRenderableImage(imageFileName, new Blob([imageData])); - } -} - -export async function getOriginalImageBitmap(file: EnteFile, queue?: PQueue) { - const fileBlob = await getOriginalConvertedFile(file, queue); - log.info("[MLService] Got file: ", file.id.toString()); - return getImageBlobBitmap(fileBlob); -} - -export async function getThumbnailImageBitmap(file: EnteFile) { - const thumb = await DownloadManager.getThumbnail(file); - log.info("[MLService] Got thumbnail: ", file.id.toString()); - - return getImageBlobBitmap(new Blob([thumb])); -} - -export async function getLocalFileImageBitmap( - enteFile: EnteFile, - localFile: globalThis.File, -) { - let fileBlob = localFile as Blob; - fileBlob = await getRenderableImage(enteFile.metadata.title, fileBlob); - return getImageBlobBitmap(fileBlob); -} - -export async function getPeopleList(file: EnteFile): Promise> { - let startTime = Date.now(); - const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id); - log.info( - "getPeopleList:mlFilesStore:getItem", - Date.now() - startTime, - "ms", - ); - if (!mlFileData?.faces || 
mlFileData.faces.length < 1) { - return []; - } - - const peopleIds = mlFileData.faces - .filter((f) => f.personId !== null && f.personId !== undefined) - .map((f) => f.personId); - if (!peopleIds || peopleIds.length < 1) { - return []; - } - // log.info("peopleIds: ", peopleIds); - startTime = Date.now(); - const peoplePromises = peopleIds.map( - (p) => mlIDbStorage.getPerson(p) as Promise, - ); - const peopleList = await Promise.all(peoplePromises); - log.info( - "getPeopleList:mlPeopleStore:getItems", - Date.now() - startTime, - "ms", - ); - // log.info("peopleList: ", peopleList); - - return peopleList; -} - -export async function getUnidentifiedFaces( - file: EnteFile, -): Promise> { - const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id); - - return mlFileData?.faces?.filter( - (f) => f.personId === null || f.personId === undefined, - ); -} - -export async function getAllPeople(limit: number = undefined) { - let people: Array = await mlIDbStorage.getAllPeople(); - // await mlPeopleStore.iterate((person) => { - // people.push(person); - // }); - people = people ?? 
[]; - return people - .sort((p1, p2) => p2.files.length - p1.files.length) - .slice(0, limit); -} - -export function findFirstIfSorted( - elements: Array, - comparator: (a: T, b: T) => number, -) { - if (!elements || elements.length < 1) { - return; - } - let first = elements[0]; - - for (let i = 1; i < elements.length; i++) { - const comp = comparator(elements[i], first); - if (comp < 0) { - first = elements[i]; - } - } - - return first; -} - -export function isDifferentOrOld( - method: Versioned, - thanMethod: Versioned, -) { - return ( - !method || - method.value !== thanMethod.value || - method.version < thanMethod.version - ); -} - -function primitiveArrayEquals(a, b) { - return ( - Array.isArray(a) && - Array.isArray(b) && - a.length === b.length && - a.every((val, index) => val === b[index]) - ); -} - -export function areFaceIdsSame(ofFaces: Array, toFaces: Array) { - if ( - (ofFaces === null || ofFaces === undefined) && - (toFaces === null || toFaces === undefined) - ) { - return true; - } - return primitiveArrayEquals( - ofFaces?.map((f) => f.id), - toFaces?.map((f) => f.id), - ); -} - -export function logQueueStats(queue: PQueue, name: string) { - queue.on("active", () => - log.info( - `queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`, - ), - ); - queue.on("idle", () => log.info(`queuestats: ${name}: Idle`)); - queue.on("error", (error) => - console.error(`queuestats: ${name}: Error, `, error), - ); -} diff --git a/web/apps/photos/src/utils/machineLearning/mldataMappers.ts b/web/apps/photos/src/utils/machineLearning/mldataMappers.ts deleted file mode 100644 index fb91420aa8..0000000000 --- a/web/apps/photos/src/utils/machineLearning/mldataMappers.ts +++ /dev/null @@ -1,265 +0,0 @@ -import { - Face, - FaceDetection, - Landmark, - MlFileData, -} from "types/machineLearning"; -import { ClipEmbedding } from "types/machineLearning/data/clip"; - -export interface FileML extends ServerFileMl { - updatedAt: number; -} - -class 
ServerFileMl { - public fileID: number; - public height?: number; - public width?: number; - public faceEmbedding: ServerFaceEmbeddings; - public clipEmbedding?: ClipEmbedding; - - public constructor( - fileID: number, - faceEmbedding: ServerFaceEmbeddings, - clipEmbedding?: ClipEmbedding, - height?: number, - width?: number, - ) { - this.fileID = fileID; - this.height = height; - this.width = width; - this.faceEmbedding = faceEmbedding; - this.clipEmbedding = clipEmbedding; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFileMl { - return JSON.parse(json); - } -} - -class ServerFaceEmbeddings { - public faces: ServerFace[]; - public version: number; - public client?: string; - public error?: boolean; - - public constructor( - faces: ServerFace[], - version: number, - client?: string, - error?: boolean, - ) { - this.faces = faces; - this.version = version; - this.client = client; - this.error = error; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFaceEmbeddings { - return JSON.parse(json); - } -} - -class ServerFace { - public fileID: number; - public faceID: string; - public embeddings: number[]; - public detection: ServerDetection; - public score: number; - public blur: number; - public fileInfo?: ServerFileInfo; - - public constructor( - fileID: number, - faceID: string, - embeddings: number[], - detection: ServerDetection, - score: number, - blur: number, - fileInfo?: ServerFileInfo, - ) { - this.fileID = fileID; - this.faceID = faceID; - this.embeddings = embeddings; - this.detection = detection; - this.score = score; - this.blur = blur; - this.fileInfo = fileInfo; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFace { - return JSON.parse(json); - } -} - -class ServerFileInfo { - public imageWidth?: number; - public imageHeight?: number; - - public constructor(imageWidth?: number, imageHeight?: 
number) { - this.imageWidth = imageWidth; - this.imageHeight = imageHeight; - } -} - -class ServerDetection { - public box: ServerFaceBox; - public landmarks: Landmark[]; - - public constructor(box: ServerFaceBox, landmarks: Landmark[]) { - this.box = box; - this.landmarks = landmarks; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerDetection { - return JSON.parse(json); - } -} - -class ServerFaceBox { - public xMin: number; - public yMin: number; - public width: number; - public height: number; - - public constructor( - xMin: number, - yMin: number, - width: number, - height: number, - ) { - this.xMin = xMin; - this.yMin = yMin; - this.width = width; - this.height = height; - } - - toJson(): string { - return JSON.stringify(this); - } - - static fromJson(json: string): ServerFaceBox { - return JSON.parse(json); - } -} - -export function LocalFileMlDataToServerFileMl( - localFileMlData: MlFileData, -): ServerFileMl { - if ( - localFileMlData.errorCount > 0 && - localFileMlData.lastErrorMessage !== undefined - ) { - return null; - } - const imageDimensions = localFileMlData.imageDimensions; - const fileInfo = new ServerFileInfo( - imageDimensions.width, - imageDimensions.height, - ); - const faces: ServerFace[] = []; - for (let i = 0; i < localFileMlData.faces.length; i++) { - const face: Face = localFileMlData.faces[i]; - const faceID = face.id; - const embedding = face.embedding; - const score = face.detection.probability; - const blur = face.blurValue; - const detection: FaceDetection = face.detection; - const box = detection.box; - const landmarks = detection.landmarks; - const newBox = new ServerFaceBox(box.x, box.y, box.width, box.height); - const newLandmarks: Landmark[] = []; - for (let j = 0; j < landmarks.length; j++) { - newLandmarks.push({ - x: landmarks[j].x, - y: landmarks[j].y, - } as Landmark); - } - - const newFaceObject = new ServerFace( - localFileMlData.fileId, - faceID, - 
Array.from(embedding), - new ServerDetection(newBox, newLandmarks), - score, - blur, - fileInfo, - ); - faces.push(newFaceObject); - } - const faceEmbeddings = new ServerFaceEmbeddings( - faces, - 1, - localFileMlData.lastErrorMessage, - ); - return new ServerFileMl( - localFileMlData.fileId, - faceEmbeddings, - null, - imageDimensions.height, - imageDimensions.width, - ); -} - -// // Not sure if this actually works -// export function ServerFileMlToLocalFileMlData( -// serverFileMl: ServerFileMl, -// ): MlFileData { -// const faces: Face[] = []; -// const mlVersion: number = serverFileMl.faceEmbeddings.version; -// const errorCount = serverFileMl.faceEmbeddings.error ? 1 : 0; -// for (let i = 0; i < serverFileMl.faceEmbeddings.faces.length; i++) { -// const face = serverFileMl.faceEmbeddings.faces[i]; -// if(face.detection.landmarks.length === 0) { -// continue; -// } -// const detection = face.detection; -// const box = detection.box; -// const landmarks = detection.landmarks; -// const newBox = new FaceBox( -// box.xMin, -// box.yMin, -// box.width, -// box.height, -// ); -// const newLandmarks: Landmark[] = []; -// for (let j = 0; j < landmarks.length; j++) { -// newLandmarks.push( -// { -// x: landmarks[j].x, -// y: landmarks[j].y, -// } as Landmark -// ); -// } -// const newDetection = new Detection(newBox, newLandmarks); -// const newFace = { - -// } as Face -// faces.push(newFace); -// } -// return { -// fileId: serverFileMl.fileID, -// imageDimensions: { -// width: serverFileMl.width, -// height: serverFileMl.height, -// }, -// faces, -// mlVersion, -// errorCount, -// }; -// } diff --git a/web/apps/photos/src/utils/machineLearning/transform.ts b/web/apps/photos/src/utils/machineLearning/transform.ts deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/web/apps/photos/src/utils/native-stream.ts b/web/apps/photos/src/utils/native-stream.ts index 4ed9da753a..e922c26219 100644 --- a/web/apps/photos/src/utils/native-stream.ts +++ 
b/web/apps/photos/src/utils/native-stream.ts @@ -111,7 +111,79 @@ export const writeStream = async ( const res = await fetch(req); if (!res.ok) - throw new Error( - `Failed to write stream to ${path}: HTTP ${res.status}`, - ); + throw new Error(`Failed to write stream to ${url}: HTTP ${res.status}`); +}; + +/** + * Variant of {@link writeStream} tailored for video conversion. + * + * @param blob The video to convert. + * + * @returns a token that can then be passed to {@link readConvertToMP4Stream} to + * read back the converted video. See: [Note: Convert to MP4]. + */ +export const writeConvertToMP4Stream = async (_: Electron, blob: Blob) => { + const url = "stream://convert-to-mp4"; + + const req = new Request(url, { + method: "POST", + body: blob, + // @ts-expect-error TypeScript's libdom.d.ts does not include the + // "duplex" parameter, e.g. see + // https://github.com/node-fetch/node-fetch/issues/1769. + duplex: "half", + }); + + const res = await fetch(req); + if (!res.ok) + throw new Error(`Failed to write stream to ${url}: HTTP ${res.status}`); + + const token = res.text(); + return token; +}; + +/** + * Variant of {@link readStream} tailored for video conversion. + * + * @param token A token obtained from {@link writeConvertToMP4Stream}. + * + * @returns the contents of the converted video. See: [Note: Convert to MP4]. + */ +export const readConvertToMP4Stream = async ( + _: Electron, + token: string, +): Promise => { + const params = new URLSearchParams({ token }); + const url = new URL(`stream://convert-to-mp4?${params.toString()}`); + + const req = new Request(url, { method: "GET" }); + + const res = await fetch(req); + if (!res.ok) + throw new Error( + `Failed to read stream from ${url}: HTTP ${res.status}`, + ); + + return res.blob(); +}; + +/** + * Sibling of {@link readConvertToMP4Stream} to let the native side know when we + * are done reading the response, and they can dispose any temporary resources + * it was using. 
+ * + * @param token A token obtained from {@link writeConvertToMP4Stream}. + */ +export const readConvertToMP4Done = async ( + _: Electron, + token: string, +): Promise => { + // The value for `done` is arbitrary, only its presence matters. + const params = new URLSearchParams({ token, done: "1" }); + const url = new URL(`stream://convert-to-mp4?${params.toString()}`); + + const req = new Request(url, { method: "GET" }); + const res = await fetch(req); + if (!res.ok) + throw new Error(`Failed to close stream at ${url}: HTTP ${res.status}`); }; diff --git a/web/apps/photos/src/utils/ui/index.tsx b/web/apps/photos/src/utils/ui/index.tsx index 8f4895ead5..c930f47c8c 100644 --- a/web/apps/photos/src/utils/ui/index.tsx +++ b/web/apps/photos/src/utils/ui/index.tsx @@ -1,11 +1,9 @@ import { ensureElectron } from "@/next/electron"; import { AppUpdate } from "@/next/types/ipc"; -import { logoutUser } from "@ente/accounts/services/user"; import { DialogBoxAttributes } from "@ente/shared/components/DialogBox/types"; import AutoAwesomeOutlinedIcon from "@mui/icons-material/AutoAwesomeOutlined"; import InfoOutlined from "@mui/icons-material/InfoRounded"; import { Link } from "@mui/material"; -import { OPEN_STREET_MAP_LINK } from "components/Sidebar/EnableMap"; import { t } from "i18next"; import { Trans } from "react-i18next"; import { Subscription } from "types/billing"; @@ -69,6 +67,7 @@ export const getUpdateReadyToInstallMessage = ({ variant: "secondary", action: () => ensureElectron().updateOnNextRestart(version), }, + staticBackdrop: true, }); export const getUpdateAvailableForDownloadMessage = ({ @@ -121,14 +120,16 @@ export const getSubscriptionPurchaseSuccessMessage = ( ), }); -export const getSessionExpiredMessage = (): DialogBoxAttributes => ({ +export const getSessionExpiredMessage = ( + action: () => void, +): DialogBoxAttributes => ({ title: t("SESSION_EXPIRED"), content: t("SESSION_EXPIRED_MESSAGE"), nonClosable: true, proceed: { text: t("LOGIN"), - action: 
logoutUser, + action, variant: "accent", }, }); @@ -141,7 +142,12 @@ export const getMapEnableConfirmationDialog = ( , + a: ( + + ), }} /> ), diff --git a/web/apps/photos/src/utils/units.ts b/web/apps/photos/src/utils/units.ts index 1eb1ffb81c..229ec2ab9d 100644 --- a/web/apps/photos/src/utils/units.ts +++ b/web/apps/photos/src/utils/units.ts @@ -1,5 +1,11 @@ import { t } from "i18next"; +/** + * Localized unit keys. + * + * For each of these, there is expected to be a localized key under + * "storage_unit". e.g. "storage_unit.tb". + */ const units = ["b", "kb", "mb", "gb", "tb"]; /** @@ -21,13 +27,16 @@ export const bytesInGB = (bytes: number, precision = 0): string => * Defaults to 2. */ export function formattedByteSize(bytes: number, precision = 2): string { - if (bytes === 0 || isNaN(bytes)) { - return "0 MB"; - } + if (bytes <= 0) return `0 ${t("storage_unit.mb")}`; - const i = Math.floor(Math.log(bytes) / Math.log(1024)); - const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; - return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i]; + const i = Math.min( + Math.floor(Math.log(bytes) / Math.log(1024)), + units.length - 1, + ); + const quantity = bytes / Math.pow(1024, i); + const unit = units[i]; + + return `${quantity.toFixed(precision)} ${t(`storage_unit.${unit}`)}`; } interface FormattedStorageByteSizeOptions { @@ -50,7 +59,7 @@ interface FormattedStorageByteSizeOptions { * displaying the "storage size" (in different contexts) as opposed to, say, a * generic "file size". * - * @param options + * @param options {@link FormattedStorageByteSizeOptions}. * * @return A user visible string, including the localized unit suffix. 
*/ @@ -58,21 +67,27 @@ export const formattedStorageByteSize = ( bytes: number, options?: FormattedStorageByteSizeOptions, ): string => { - if (bytes <= 0) { - return `0 ${t("storage_unit.mb")}`; - } - const i = Math.floor(Math.log(bytes) / Math.log(1024)); + if (bytes <= 0) return `0 ${t("storage_unit.mb")}`; + + const i = Math.min( + Math.floor(Math.log(bytes) / Math.log(1024)), + units.length - 1, + ); let quantity = bytes / Math.pow(1024, i); let unit = units[i]; - if (quantity > 100 && unit !== "GB") { + // Round up bytes, KBs and MBs to the bigger unit whenever they'll come of + // as more than 0.1. + if (quantity > 100 && i < units.length - 2) { quantity /= 1024; unit = units[i + 1]; } quantity = Number(quantity.toFixed(1)); + // Truncate or round storage sizes to trim off unnecessary and potentially + // obscuring precision when they are larger that 10 GB. if (bytes >= 10 * 1024 * 1024 * 1024 /* 10 GB */) { if (options?.round) { quantity = Math.ceil(quantity); diff --git a/web/apps/photos/src/utils/user/index.ts b/web/apps/photos/src/utils/user/index.ts index 68ffc9bbd7..0f8ef142fb 100644 --- a/web/apps/photos/src/utils/user/index.ts +++ b/web/apps/photos/src/utils/user/index.ts @@ -14,8 +14,8 @@ export const isInternalUser = () => { }; export const isInternalUserForML = () => { - const userId = (getData(LS_KEYS.USER) as User)?.id; - if (userId == 1) return true; + const userID = (getData(LS_KEYS.USER) as User)?.id; + if (userID == 1 || userID == 2) return true; return isInternalUser(); }; diff --git a/web/apps/photos/src/worker/ffmpeg.worker.ts b/web/apps/photos/src/worker/ffmpeg.worker.ts index 946a2090f0..06ba05be9e 100644 --- a/web/apps/photos/src/worker/ffmpeg.worker.ts +++ b/web/apps/photos/src/worker/ffmpeg.worker.ts @@ -1,5 +1,4 @@ import log from "@/next/log"; -import { withTimeout } from "@ente/shared/utils"; import QueueProcessor from "@ente/shared/utils/queueProcessor"; import { expose } from "comlink"; import { @@ -7,6 +6,24 @@ import { 
inputPathPlaceholder, outputPathPlaceholder, } from "constants/ffmpeg"; + +// When we run tsc on CI, the line below errors out +// +// > Error: src/worker/ffmpeg.worker.ts(10,38): error TS2307: Cannot find module +// 'ffmpeg-wasm' or its corresponding type declarations. +// +// Building and running works fine. And this error does not occur when running +// tsc locally either. +// +// Of course, there is some misconfiguration, but we plan to move off our old +// fork and onto upstream ffmpeg-wasm, and the reason can be figured out then. +// For now, disable the error to allow the CI lint to complete. +// +// Note that we can't use @ts-expect-error since it doesn't error out when +// actually building! +// +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore import { FFmpeg, createFFmpeg } from "ffmpeg-wasm"; export class DedicatedFFmpegWorker { @@ -30,15 +47,11 @@ export class DedicatedFFmpegWorker { command: string[], blob: Blob, outputFileExtension: string, - timeoutMs, ): Promise { if (!this.ffmpeg.isLoaded()) await this.ffmpeg.load(); - const go = () => - ffmpegExec(this.ffmpeg, command, outputFileExtension, blob); - const request = this.ffmpegTaskQueue.queueUpRequest(() => - timeoutMs ? 
withTimeout(go(), timeoutMs) : go(), + ffmpegExec(this.ffmpeg, command, outputFileExtension, blob), ); return await request.promise; @@ -69,7 +82,7 @@ const ffmpegExec = async ( const result = ffmpeg.FS("readFile", outputPath); - const ms = Math.round(Date.now() - startTime); + const ms = Date.now() - startTime; log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`); return result; } finally { diff --git a/web/apps/photos/src/worker/ml.worker.ts b/web/apps/photos/src/worker/ml.worker.ts deleted file mode 100644 index ed46b7bd44..0000000000 --- a/web/apps/photos/src/worker/ml.worker.ts +++ /dev/null @@ -1,42 +0,0 @@ -import log from "@/next/log"; -import { expose } from "comlink"; -import mlService from "services/machineLearning/machineLearningService"; -import { EnteFile } from "types/file"; -import { MachineLearningWorker } from "types/machineLearning"; - -export class DedicatedMLWorker implements MachineLearningWorker { - constructor() { - log.info("DedicatedMLWorker constructor called"); - } - - public async closeLocalSyncContext() { - return mlService.closeLocalSyncContext(); - } - - public async syncLocalFile( - token: string, - userID: number, - enteFile: EnteFile, - localFile: globalThis.File, - ) { - return mlService.syncLocalFile(token, userID, enteFile, localFile); - } - - public async sync(token: string, userID: number) { - return mlService.sync(token, userID); - } - - public async regenerateFaceCrop( - token: string, - userID: number, - faceID: string, - ) { - return mlService.regenerateFaceCrop(token, userID, faceID); - } - - public close() { - self.close(); - } -} - -expose(DedicatedMLWorker, self); diff --git a/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts b/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts deleted file mode 100644 index 7263b4b96c..0000000000 --- a/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Box } from './Box'; - -export interface IBoundingBox { - 
left: number - top: number - right: number - bottom: number -} - -export class BoundingBox extends Box implements IBoundingBox { - constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions: boolean = false) { - super({ left, top, right, bottom }, allowNegativeDimensions) - } -} \ No newline at end of file diff --git a/web/apps/photos/thirdparty/face-api/classes/Box.ts b/web/apps/photos/thirdparty/face-api/classes/Box.ts deleted file mode 100644 index fcf1cbebb3..0000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Box.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { IBoundingBox } from './BoundingBox'; -import { IDimensions } from './Dimensions'; -import { Point } from './Point'; -import { IRect } from './Rect'; - -export class Box implements IBoundingBox, IRect { - - public static isRect(rect: any): boolean { - return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber) - } - - public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions: boolean = false) { - if (!Box.isRect(box)) { - throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`) - } - - if (!allowNegativeDimensions && (box.width < 0 || box.height < 0)) { - throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`) - } - } - - public x: number - public y: number - public width: number - public height: number - - constructor(_box: IBoundingBox | IRect, allowNegativeDimensions: boolean = true) { - const box = (_box || {}) as any - - const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber) - const isRect = [box.x, box.y, box.width, box.height].every(isValidNumber) - - if (!isRect && !isBbox) { - throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`) - } - - const [x, y, width, height] = isRect - ? 
[box.x, box.y, box.width, box.height] - : [box.left, box.top, box.right - box.left, box.bottom - box.top] - - Box.assertIsValidBox({ x, y, width, height }, 'Box.constructor', allowNegativeDimensions) - - this.x = x - this.y = y - this.width = width - this.height = height - } - - // public get x(): number { return this._x } - // public get y(): number { return this._y } - // public get width(): number { return this._width } - // public get height(): number { return this._height } - public get left(): number { return this.x } - public get top(): number { return this.y } - public get right(): number { return this.x + this.width } - public get bottom(): number { return this.y + this.height } - public get area(): number { return this.width * this.height } - public get topLeft(): Point { return new Point(this.left, this.top) } - public get topRight(): Point { return new Point(this.right, this.top) } - public get bottomLeft(): Point { return new Point(this.left, this.bottom) } - public get bottomRight(): Point { return new Point(this.right, this.bottom) } - - public round(): Box { - const [x, y, width, height] = [this.x, this.y, this.width, this.height] - .map(val => Math.round(val)) - return new Box({ x, y, width, height }) - } - - public floor(): Box { - const [x, y, width, height] = [this.x, this.y, this.width, this.height] - .map(val => Math.floor(val)) - return new Box({ x, y, width, height }) - } - - public toSquare(): Box { - let { x, y, width, height } = this - const diff = Math.abs(width - height) - if (width < height) { - x -= (diff / 2) - width += diff - } - if (height < width) { - y -= (diff / 2) - height += diff - } - - return new Box({ x, y, width, height }) - } - - public rescale(s: IDimensions | number): Box { - const scaleX = isDimensions(s) ? (s as IDimensions).width : s as number - const scaleY = isDimensions(s) ? 
(s as IDimensions).height : s as number - return new Box({ - x: this.x * scaleX, - y: this.y * scaleY, - width: this.width * scaleX, - height: this.height * scaleY - }) - } - - public pad(padX: number, padY: number): Box { - let [x, y, width, height] = [ - this.x - (padX / 2), - this.y - (padY / 2), - this.width + padX, - this.height + padY - ] - return new Box({ x, y, width, height }) - } - - public clipAtImageBorders(imgWidth: number, imgHeight: number): Box { - const { x, y, right, bottom } = this - const clippedX = Math.max(x, 0) - const clippedY = Math.max(y, 0) - - const newWidth = right - clippedX - const newHeight = bottom - clippedY - const clippedWidth = Math.min(newWidth, imgWidth - clippedX) - const clippedHeight = Math.min(newHeight, imgHeight - clippedY) - - return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight})).floor() - } - - public shift(sx: number, sy: number): Box { - const { width, height } = this - const x = this.x + sx - const y = this.y + sy - - return new Box({ x, y, width, height }) - } - - public padAtBorders(imageHeight: number, imageWidth: number) { - const w = this.width + 1 - const h = this.height + 1 - - let dx = 1 - let dy = 1 - let edx = w - let edy = h - - let x = this.left - let y = this.top - let ex = this.right - let ey = this.bottom - - if (ex > imageWidth) { - edx = -ex + imageWidth + w - ex = imageWidth - } - if (ey > imageHeight) { - edy = -ey + imageHeight + h - ey = imageHeight - } - if (x < 1) { - edy = 2 - x - x = 1 - } - if (y < 1) { - edy = 2 - y - y = 1 - } - - return { dy, edy, dx, edx, y, ey, x, ex, w, h } - } - - public calibrate(region: Box) { - return new Box({ - left: this.left + (region.left * this.width), - top: this.top + (region.top * this.height), - right: this.right + (region.right * this.width), - bottom: this.bottom + (region.bottom * this.height) - }).toSquare().round() - } -} - -export function isValidNumber(num: any) { - return !!num && num !== Infinity && num !== 
-Infinity && !isNaN(num) || num === 0 -} - -export function isDimensions(obj: any): boolean { - return obj && obj.width && obj.height -} diff --git a/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts b/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts deleted file mode 100644 index 0129f3b678..0000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { isValidNumber } from './Box'; - -export interface IDimensions { - width: number - height: number -} - -export class Dimensions implements IDimensions { - - private _width: number - private _height: number - - constructor(width: number, height: number) { - if (!isValidNumber(width) || !isValidNumber(height)) { - throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({ width, height })}`) - } - - this._width = width - this._height = height - } - - public get width(): number { return this._width } - public get height(): number { return this._height } - - public reverse(): Dimensions { - return new Dimensions(1 / this.width, 1 / this.height) - } -} diff --git a/web/apps/photos/thirdparty/face-api/classes/Point.ts b/web/apps/photos/thirdparty/face-api/classes/Point.ts deleted file mode 100644 index 3c32d5bc19..0000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Point.ts +++ /dev/null @@ -1,55 +0,0 @@ -export interface IPoint { - x: number - y: number -} - -export class Point implements IPoint { - public x: number - public y: number - - constructor(x: number, y: number) { - this.x = x - this.y = y - } - - // get x(): number { return this._x } - // get y(): number { return this._y } - - public add(pt: IPoint): Point { - return new Point(this.x + pt.x, this.y + pt.y) - } - - public sub(pt: IPoint): Point { - return new Point(this.x - pt.x, this.y - pt.y) - } - - public mul(pt: IPoint): Point { - return new Point(this.x * pt.x, this.y * pt.y) - } - - public div(pt: IPoint): Point { - 
return new Point(this.x / pt.x, this.y / pt.y) - } - - public abs(): Point { - return new Point(Math.abs(this.x), Math.abs(this.y)) - } - - public magnitude(): number { - return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2)) - } - - public floor(): Point { - return new Point(Math.floor(this.x), Math.floor(this.y)) - } - - public round(): Point { - return new Point(Math.round(this.x), Math.round(this.y)) - } - - public bound(lower: number, higher: number): Point { - const x = Math.max(lower, Math.min(higher, this.x)); - const y = Math.max(lower, Math.min(higher, this.y)); - return new Point(x, y); - } -} \ No newline at end of file diff --git a/web/apps/photos/thirdparty/face-api/classes/Rect.ts b/web/apps/photos/thirdparty/face-api/classes/Rect.ts deleted file mode 100644 index 550676984a..0000000000 --- a/web/apps/photos/thirdparty/face-api/classes/Rect.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { Box } from './Box'; - -export interface IRect { - x: number - y: number - width: number - height: number -} - -export class Rect extends Box implements IRect { - constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions: boolean = false) { - super({ x, y, width, height }, allowNegativeDimensions) - } -} \ No newline at end of file diff --git a/web/apps/photos/thirdparty/face-api/classes/index.ts b/web/apps/photos/thirdparty/face-api/classes/index.ts deleted file mode 100644 index 9bb7cccf40..0000000000 --- a/web/apps/photos/thirdparty/face-api/classes/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -export * from './BoundingBox' -export * from './Box' -export * from './Dimensions' -export * from './Point' -export * from './Rect' \ No newline at end of file diff --git a/web/docs/README.md b/web/docs/README.md index 365d3bea00..699b7adad8 100644 --- a/web/docs/README.md +++ b/web/docs/README.md @@ -7,3 +7,7 @@ If you just want to run Ente's web apps locally or develop them, you can do The docs in this directory provide more details that some 
developers might find useful. + +> [!TIP] +> +> To prepare your machine, see [new](new.md). diff --git a/web/docs/dependencies.md b/web/docs/dependencies.md index 83c4c16c84..2ff8e40172 100644 --- a/web/docs/dependencies.md +++ b/web/docs/dependencies.md @@ -5,23 +5,40 @@ These are some global dev dependencies in the root `package.json`. These set the baseline for how our code be in all the workspaces in this (yarn) monorepo. -- "prettier" - Formatter -- "eslint" - Linter -- "typescript" - Type checker +- [prettier](https://prettier.io) - Formatter + +- [eslint](https://eslint.org) - Linter + +- [typescript](https://www.typescriptlang.org/) - Type checker They also need some support packages, which come from the leaf `@/build-config` package: -- "@typescript-eslint/parser" - Tells ESLint how to read TypeScript syntax -- "@typescript-eslint/eslint-plugin" - Provides TypeScript rules and presets -- "eslint-plugin-react-hooks", "eslint-plugin-react-namespace-import" - Some - React specific ESLint rules and configurations that are used by the - workspaces that have React code. -- "eslint-plugin-react-refresh" - A plugin to ensure that React components are - exported in a way that they can be HMR-ed. -- "prettier-plugin-organize-imports" - A Prettier plugin to sort imports. -- "prettier-plugin-packagejson" - A Prettier plugin to also prettify - `package.json`. +- [@typescript-eslint/parser](https://typescript-eslint.io/packages/eslint-plugin/) + \- Tells ESLint how to read TypeScript syntax. + +- [@typescript-eslint/eslint-plugin](https://typescript-eslint.io/packages/eslint-plugin/) + \- Provides TypeScript rules and presets + +- [eslint-plugin-react-hooks](https://github.com/jsx-eslint/eslint-plugin-react), + [eslint-plugin-react-hooks](https://reactjs.org/) \- Some React specific + ESLint rules and configurations that are used by the workspaces that have + React code. 
+ +- [eslint-plugin-react-refresh](https://github.com/ArnaudBarre/eslint-plugin-react-refresh) + \- A plugin to ensure that React components are exported in a way that they + can be HMR-ed. + +- [prettier-plugin-organize-imports](https://github.com/simonhaenisch/prettier-plugin-organize-imports) + \- A Prettier plugin to sort imports. + +- [prettier-plugin-packagejson](https://github.com/matzkoh/prettier-plugin-packagejson) + \- A Prettier plugin to also prettify `package.json`. + +The root `package.json` also has a convenience dev dependency: + +- [concurrently](https://github.com/open-cli-tools/concurrently) for spawning + parallel tasks when we invoke various yarn scripts. ## Utils @@ -133,17 +150,46 @@ some cases. ## Media -- ["jszip"](https://github.com/Stuk/jszip) is used for reading zip files in +- [jszip](https://github.com/Stuk/jszip) is used for reading zip files in JavaScript (Live photos are zip files under the hood). -- ["file-type"](https://github.com/sindresorhus/file-type) is used for MIME - type detection. We are at an old version 16.5.4 because v17 onwards the - package became ESM only - for our limited use case, the custom Webpack - configuration that entails is not worth the upgrade. +- [file-type](https://github.com/sindresorhus/file-type) is used for MIME type + detection. We are at an old version 16.5.4 because v17 onwards the package + became ESM only - for our limited use case, the custom Webpack configuration + that entails is not worth the upgrade. + +- [heic-convert](https://github.com/catdad-experiments/heic-convert) is used + for converting HEIC files (which browsers don't natively support) into JPEG. + +## Processing + +- [comlink](https://github.com/GoogleChromeLabs/comlink) provides a minimal + layer on top of Web Workers to make them more easier to use. ## Photos app specific -### Misc +- [react-dropzone](https://github.com/react-dropzone/react-dropzone/) is a + React hook to create a drag-and-drop input zone. 
-- "sanitize-filename" is for converting arbitrary strings into strings that - are suitable for being used as filenames. +- [sanitize-filename](https://github.com/parshap/node-sanitize-filename) is + for converting arbitrary strings into strings that are suitable for being + used as filenames. + +## Face search + +- [transformation-matrix](https://github.com/chrvadala/transformation-matrix) + is used for performing 2D affine transformations using transformation + matrices. It is used during face detection. + +- [matrix](https://github.com/mljs/matrix) is mathematical matrix abstraction. + It is used alongwith + [similarity-transformation](https://github.com/shaileshpandit/similarity-transformation-js) + during face alignment. + + > Note that while both `transformation-matrix` and `matrix` are "matrix" + > libraries, they have different foci and purposes: `transformation-matrix` + > provides affine transforms, while `matrix` is for performing computations + > on matrices, say inverting them or performing their decomposition. + +- [hdbscan](https://github.com/shaileshpandit/hdbscan-js) is used for face + clustering. diff --git a/web/docs/deploy.md b/web/docs/deploy.md index 6358cb87f2..75c3106d18 100644 --- a/web/docs/deploy.md +++ b/web/docs/deploy.md @@ -1,50 +1,46 @@ # Deploying The various web apps and static sites in this repository are deployed on -Cloudflare Pages. +Cloudflare Pages using GitHub workflows. -- Production deployments are triggered by pushing to the `deploy/*` branches. +- Automated production deployments of `main` daily 8:00 AM IST. + +- Automated staging deployments `*.ente.sh` of `main` daily 3:00 PM IST. - [help.ente.io](https://help.ente.io) gets deployed whenever a PR that changes anything inside `docs/` gets merged to `main`. -- Every night, all the web apps get automatically deployed to a nightly - preview URLs (`*.ente.sh`) using the current code in main. 
+- Production or staging deployments can made manually by triggering the + corresponding workflow. There is variant to deploy a single app to + production using the `web-deploy-one.yml` workflow, and a variant to deploy + any one of the apps to `preview.ente.sh` (see below). -- A preview deployment can be made by triggering the "Preview (web)" workflow. - This allows us to deploy a build of any of the apps from an arbitrary branch - to [preview.ente.sh](https://preview.ente.sh). - -Use the various `yarn deploy:*` commands to help with production deployments. -For example, `yarn deploy:photos` will open a PR to merge the current `main` -onto `deploy/photos`, which'll trigger the deployment workflow, which'll build -and publish to [web.ente.io](https://web.ente.io). - -> When merging these deployment PRs, remember to use rebase and merge so that -> their HEAD is a fast forward of `main` instead of diverging from it because of -> the merge commit. +These GitHub workflows use the various `yarn deploy:*` commands. For example, +`yarn deploy:photos` will open a PR to merge the current `main` onto +`deploy/photos`, which'll trigger the deployment workflow, which'll build and +publish to [web.ente.io](https://web.ente.io). 
## Deployments Here is a list of all the deployments, whether or not they are production deployments, and the action that triggers them: -| URL | Type | Deployment action | -| -------------------------------------------- | ---------- | -------------------------------------------- | -| [web.ente.io](https://web.ente.io) | Production | Push to `deploy/photos` | -| [photos.ente.io](https://photos.ente.io) | Production | Alias of [web.ente.io](https://web.ente.io) | -| [auth.ente.io](https://auth.ente.io) | Production | Push to `deploy/auth` | -| [accounts.ente.io](https://accounts.ente.io) | Production | Push to `deploy/accounts` | -| [cast.ente.io](https://cast.ente.io) | Production | Push to `deploy/cast` | -| [payments.ente.io](https://payments.ente.io) | Production | Push to `deploy/payments` | -| [help.ente.io](https://help.ente.io) | Production | Push to `main` + changes in `docs/` | -| [staff.ente.sh](https://staff.ente.sh) | Production | Push to `main` + changes in `web/apps/staff` | -| [accounts.ente.sh](https://accounts.ente.sh) | Preview | Nightly deploy of `main` | -| [auth.ente.sh](https://auth.ente.sh) | Preview | Nightly deploy of `main` | -| [cast.ente.sh](https://cast.ente.sh) | Preview | Nightly deploy of `main` | -| [payments.ente.sh](https://payments.ente.sh) | Preview | Nightly deploy of `main` | -| [photos.ente.sh](https://photos.ente.sh) | Preview | Nightly deploy of `main` | -| [preview.ente.sh](https://preview.ente.sh) | Preview | Manually triggered | +| URL | Type | Deployment action | +| -------------------------------------------- | ---------- | --------------------------------------------- | +| [web.ente.io](https://web.ente.io) | Production | Daily deploy of `main` | +| [photos.ente.io](https://photos.ente.io) | Production | Alias of [web.ente.io](https://web.ente.io) | +| [auth.ente.io](https://auth.ente.io) | Production | Daily deploy of `main` | +| [accounts.ente.io](https://accounts.ente.io) | Production | Daily deploy of `main` | +| 
[cast.ente.io](https://cast.ente.io) | Production | Daily deploy of `main` | +| [payments.ente.io](https://payments.ente.io) | Production | Daily deploy of `main` | +| [help.ente.io](https://help.ente.io) | Production | Changes in `docs/` on push to `main` | +| [staff.ente.sh](https://staff.ente.sh) | Production | Changes in `web/apps/staff` on push to `main` | +| [accounts.ente.sh](https://accounts.ente.sh) | Preview | Daily deploy of `main` | +| [auth.ente.sh](https://auth.ente.sh) | Preview | Daily deploy of `main` | +| [cast.ente.sh](https://cast.ente.sh) | Preview | Daily deploy of `main` | +| [payments.ente.sh](https://payments.ente.sh) | Preview | Daily deploy of `main` | +| [photos.ente.sh](https://photos.ente.sh) | Preview | Daily deploy of `main` | +| [preview.ente.sh](https://preview.ente.sh) | Preview | Manually triggered | ### Other subdomains @@ -60,10 +56,10 @@ Apart from this, there are also some other deployments: ### Preview deployments -To trigger a preview deployment, manually trigger the "Preview (web)" workflow -from the Actions tab on GitHub. You'll need to select the app to build, and the -branch to use. This'll then build the specified app (e.g. "photos") from that -branch, and deploy it to [preview.ente.sh](https://preview.ente.sh). +To trigger a preview deployment, manually trigger the "Deploy preview (web)" +workflow from the Actions tab on GitHub. You'll need to select the app to build, +and the branch to use. This'll then build the specified app (e.g. "photos") from +that branch, and deploy it to [preview.ente.sh](https://preview.ente.sh). The workflow can also be triggered using GitHub's CLI, gh. e.g. diff --git a/web/docs/new.md b/web/docs/new.md index 4500617b5f..0617a8ac65 100644 --- a/web/docs/new.md +++ b/web/docs/new.md @@ -1,26 +1,35 @@ # Welcome! 
-If you're new to this sort of stuff or coming back to it after mobile/backend +If you're new to web stuff or coming back to it after mobile/backend development, here is a recommended workflow: -1. Install VS Code. +1. Install **VS Code**. -2. Install the Prettier and ESLint extensions. +2. Install the **Prettier** and **ESLint** extensions. 3. Enable the VS Code setting to format on save. -4. Install node on your machine `brew install node@20`. Our package manager, - `yarn` comes with it. +4. Install **node** on your machine. There are myriad ways to do this, here are + some examples: + + - macOS: `brew install node@20` + + - Ubuntu: `sudo apt install nodejs npm && sudo npm i -g corepack` + +5. Enable corepack. This allows us to use the correct version of our package + manager (**Yarn**): + + ```sh + + corepack enable + ``` + + If now you run `yarn --version` in the web directory, you should be seeing a + 1.22.xx version, otherwise your `yarn install` will fail. + + ```sh + $ yarn --version + 1.22.21 + ``` That's it. Enjoy coding! - -## Yarn - -Note that we use Yarn classic - -``` -$ yarn --version -1.22.21 -``` - -You should be seeing a 1.xx.xx version, otherwise your `yarn install` will fail. diff --git a/web/package.json b/web/package.json index 647ee3ba3a..ec096189ad 100644 --- a/web/package.json +++ b/web/package.json @@ -22,13 +22,13 @@ "dev": "yarn dev:photos", "dev:accounts": "yarn workspace accounts next dev -p 3001", "dev:albums": "yarn workspace photos next dev -p 3002", - "dev:auth": "yarn workspace auth next dev", + "dev:auth": "yarn workspace auth next dev -p 3000", "dev:cast": "yarn workspace cast next dev -p 3001", "dev:payments": "yarn workspace payments dev", - "dev:photos": "yarn workspace photos next dev", + "dev:photos": "yarn workspace photos next dev -p 3000", "dev:staff": "yarn workspace staff dev", - "lint": "yarn prettier --check --log-level warn . 
&& yarn workspaces run eslint --report-unused-disable-directives .", - "lint-fix": "yarn prettier --write --log-level warn . && yarn workspaces run eslint --fix .", + "lint": "concurrently --names 'prettier,eslint,tsc' \"yarn prettier --check --log-level warn .\" \"yarn workspaces run eslint --report-unused-disable-directives .\" \"yarn workspaces run tsc\"", + "lint-fix": "concurrently --names 'prettier,eslint,tsc' \"yarn prettier --write --log-level warn .\" \"yarn workspaces run eslint --report-unused-disable-directives --fix .\" \"yarn workspaces run tsc\"", "preview": "yarn preview:photos", "preview:accounts": "yarn build:accounts && python3 -m http.server -d apps/accounts/out 3001", "preview:auth": "yarn build:auth && python3 -m http.server -d apps/auth/out 3000", @@ -41,8 +41,10 @@ "libsodium": "0.7.9" }, "devDependencies": { + "concurrently": "^8.2.2", "eslint": "^8", "prettier": "^3", "typescript": "^5" - } + }, + "packageManager": "yarn@1.22.21" } diff --git a/web/packages/accounts/api/user.ts b/web/packages/accounts/api/user.ts index 7a072064e0..7e313b38e5 100644 --- a/web/packages/accounts/api/user.ts +++ b/web/packages/accounts/api/user.ts @@ -43,7 +43,7 @@ export const putAttributes = (token: string, keyAttributes: KeyAttributes) => }, ); -export const _logout = async () => { +export const logout = async () => { try { const token = getToken(); await HTTPService.post(`${ENDPOINT}/users/logout`, null, undefined, { diff --git a/web/packages/accounts/components/ChangeEmail.tsx b/web/packages/accounts/components/ChangeEmail.tsx index ec647e6712..0b175344bc 100644 --- a/web/packages/accounts/components/ChangeEmail.tsx +++ b/web/packages/accounts/components/ChangeEmail.tsx @@ -1,3 +1,4 @@ +import { wait } from "@/utils/promise"; import { changeEmail, sendOTTForEmailChange } from "@ente/accounts/api/user"; import { APP_HOMES } from "@ente/shared/apps/constants"; import { PageProps } from "@ente/shared/apps/types"; @@ -6,7 +7,6 @@ import FormPaperFooter from 
"@ente/shared/components/Form/FormPaper/Footer"; import LinkButton from "@ente/shared/components/LinkButton"; import SubmitButton from "@ente/shared/components/SubmitButton"; import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; -import { wait } from "@ente/shared/utils"; import { Alert, Box, TextField } from "@mui/material"; import { Formik, FormikHelpers } from "formik"; import { t } from "i18next"; diff --git a/web/packages/accounts/components/two-factor/VerifyForm.tsx b/web/packages/accounts/components/two-factor/VerifyForm.tsx index b7f7fc2781..76fd87ba05 100644 --- a/web/packages/accounts/components/two-factor/VerifyForm.tsx +++ b/web/packages/accounts/components/two-factor/VerifyForm.tsx @@ -1,16 +1,15 @@ -import { Formik, FormikHelpers } from "formik"; -import { t } from "i18next"; -import { useRef, useState } from "react"; -import OtpInput from "react-otp-input"; - +import { wait } from "@/utils/promise"; import InvalidInputMessage from "@ente/accounts/components/two-factor/InvalidInputMessage"; import { CenteredFlex, VerticallyCentered, } from "@ente/shared/components/Container"; import SubmitButton from "@ente/shared/components/SubmitButton"; -import { wait } from "@ente/shared/utils"; import { Box, Typography } from "@mui/material"; +import { Formik, FormikHelpers } from "formik"; +import { t } from "i18next"; +import { useRef, useState } from "react"; +import OtpInput from "react-otp-input"; interface formValues { otp: string; diff --git a/web/packages/accounts/pages/credentials.tsx b/web/packages/accounts/pages/credentials.tsx index 3e8fbabbe6..777fe97da6 100644 --- a/web/packages/accounts/pages/credentials.tsx +++ b/web/packages/accounts/pages/credentials.tsx @@ -1,3 +1,4 @@ +import { isDevBuild } from "@/next/env"; import log from "@/next/log"; import { APP_HOMES } from "@ente/shared/apps/constants"; import { PageProps } from "@ente/shared/apps/types"; @@ -5,7 +6,6 @@ import { VerticallyCentered } from 
"@ente/shared/components/Container"; import EnteSpinner from "@ente/shared/components/EnteSpinner"; import FormPaper from "@ente/shared/components/Form/FormPaper"; import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer"; -import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title"; import LinkButton from "@ente/shared/components/LinkButton"; import VerifyMasterPasswordForm, { VerifyMasterPasswordFormProps, @@ -19,7 +19,7 @@ import { } from "@ente/shared/crypto/helpers"; import { B64EncryptionResult } from "@ente/shared/crypto/types"; import { CustomError } from "@ente/shared/error"; -import { getAccountsURL } from "@ente/shared/network/api"; +import { getAccountsURL, getEndpoint } from "@ente/shared/network/api"; import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; import { LS_KEYS, @@ -39,6 +39,7 @@ import { setKey, } from "@ente/shared/storage/sessionStorage"; import { KeyAttributes, User } from "@ente/shared/user/types"; +import { Typography, styled } from "@mui/material"; import { t } from "i18next"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; @@ -49,10 +50,11 @@ import { generateSRPSetupAttributes, loginViaSRP, } from "../services/srp"; -import { logoutUser } from "../services/user"; import { SRPAttributes } from "../types/srp"; export default function Credentials({ appContext, appName }: PageProps) { + const { logout } = appContext; + const [srpAttributes, setSrpAttributes] = useState(); const [keyAttributes, setKeyAttributes] = useState(); const [user, setUser] = useState(); @@ -259,7 +261,7 @@ export default function Credentials({ appContext, appName }: PageProps) { return ( - {t("PASSWORD")} +
{user.email}
+ {t("FORGOT_PASSWORD")} - + {t("CHANGE_EMAIL")} + + {isDevBuild && }
); } + +const Header: React.FC = ({ children }) => { + return ( + + {t("PASSWORD")} + {children} + + ); +}; + +const Header_ = styled("div")` + margin-block-end: 4rem; + display: flex; + flex-direction: column; + gap: 8px; +`; + +const ConnectionDetails: React.FC = () => { + const apiOrigin = new URL(getEndpoint()); + + return ( + + + {apiOrigin.host} + + + ); +}; + +const ConnectionDetails_ = styled("div")` + margin-block-start: 1rem; +`; diff --git a/web/packages/accounts/pages/generate.tsx b/web/packages/accounts/pages/generate.tsx index fb92edb147..11c15a4f05 100644 --- a/web/packages/accounts/pages/generate.tsx +++ b/web/packages/accounts/pages/generate.tsx @@ -1,7 +1,6 @@ import log from "@/next/log"; import { putAttributes } from "@ente/accounts/api/user"; import { configureSRP } from "@ente/accounts/services/srp"; -import { logoutUser } from "@ente/accounts/services/user"; import { generateKeyAndSRPAttributes } from "@ente/accounts/utils/srp"; import { generateAndSaveIntermediateKeyAttributes, @@ -31,6 +30,8 @@ import { KeyAttributes, User } from "@ente/shared/user/types"; import { useRouter } from "next/router"; export default function Generate({ appContext, appName }: PageProps) { + const { logout } = appContext; + const [token, setToken] = useState(); const [user, setUser] = useState(); const [recoverModalView, setRecoveryModalView] = useState(false); @@ -113,7 +114,7 @@ export default function Generate({ appContext, appName }: PageProps) { buttonText={t("SET_PASSPHRASE")} /> - + {t("GO_BACK")} diff --git a/web/packages/accounts/pages/two-factor/recover.tsx b/web/packages/accounts/pages/two-factor/recover.tsx index 150bd47de5..8ed187e0e5 100644 --- a/web/packages/accounts/pages/two-factor/recover.tsx +++ b/web/packages/accounts/pages/two-factor/recover.tsx @@ -2,7 +2,6 @@ import log from "@/next/log"; import { recoverTwoFactor, removeTwoFactor } from "@ente/accounts/api/user"; import { PAGES } from "@ente/accounts/constants/pages"; import { TwoFactorType 
} from "@ente/accounts/constants/twofactor"; -import { logoutUser } from "@ente/accounts/services/user"; import { PageProps } from "@ente/shared/apps/types"; import { VerticallyCentered } from "@ente/shared/components/Container"; import { DialogBoxAttributesV2 } from "@ente/shared/components/DialogBoxV2/types"; @@ -33,6 +32,8 @@ export default function Recover({ appContext, twoFactorType = TwoFactorType.TOTP, }: PageProps) { + const { logout } = appContext; + const [encryptedTwoFactorSecret, setEncryptedTwoFactorSecret] = useState(null); const [sessionID, setSessionID] = useState(null); @@ -77,7 +78,7 @@ export default function Recover({ e instanceof ApiError && e.httpStatusCode === HttpStatusCode.NotFound ) { - logoutUser(); + logout(); } else { log.error("two factor recovery page setup failed", e); setDoesHaveEncryptedRecoveryKey(false); diff --git a/web/packages/accounts/pages/two-factor/verify.tsx b/web/packages/accounts/pages/two-factor/verify.tsx index 5498211aef..1ec6e437d8 100644 --- a/web/packages/accounts/pages/two-factor/verify.tsx +++ b/web/packages/accounts/pages/two-factor/verify.tsx @@ -3,7 +3,7 @@ import VerifyTwoFactor, { VerifyTwoFactorCallback, } from "@ente/accounts/components/two-factor/VerifyForm"; import { PAGES } from "@ente/accounts/constants/pages"; -import { logoutUser } from "@ente/accounts/services/user"; + import type { PageProps } from "@ente/shared/apps/types"; import { VerticallyCentered } from "@ente/shared/components/Container"; import FormPaper from "@ente/shared/components/Form/FormPaper"; @@ -19,7 +19,11 @@ import { t } from "i18next"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -export const TwoFactorVerify: React.FC = () => { +export const TwoFactorVerify: React.FC = ({ + appContext, +}: PageProps) => { + const { logout } = appContext; + const [sessionID, setSessionID] = useState(""); const router = useRouter(); @@ -60,7 +64,7 @@ export const TwoFactorVerify: React.FC = () => { e 
instanceof ApiError && e.httpStatusCode === HttpStatusCode.NotFound ) { - logoutUser(); + logout(); } else { throw e; } @@ -79,7 +83,7 @@ export const TwoFactorVerify: React.FC = () => { > {t("LOST_DEVICE")} - + {t("CHANGE_EMAIL")} diff --git a/web/packages/accounts/pages/verify.tsx b/web/packages/accounts/pages/verify.tsx index 6515a96b76..2a410fd6f2 100644 --- a/web/packages/accounts/pages/verify.tsx +++ b/web/packages/accounts/pages/verify.tsx @@ -16,7 +16,7 @@ import SingleInputForm, { import { ApiError } from "@ente/shared/error"; import { getAccountsURL } from "@ente/shared/network/api"; import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore"; -import { clearFiles } from "@ente/shared/storage/localForage/helpers"; +import localForage from "@ente/shared/storage/localForage"; import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; import { getLocalReferralSource, @@ -30,10 +30,11 @@ import { useRouter } from "next/router"; import { putAttributes, sendOtt, verifyOtt } from "../api/user"; import { PAGES } from "../constants/pages"; import { configureSRP } from "../services/srp"; -import { logoutUser } from "../services/user"; import { SRPSetupAttributes } from "../types/srp"; export default function VerifyPage({ appContext, appName }: PageProps) { + const { logout } = appContext; + const [email, setEmail] = useState(""); const [resend, setResend] = useState(0); @@ -121,7 +122,7 @@ export default function VerifyPage({ appContext, appName }: PageProps) { await configureSRP(srpSetupAttributes); } } - clearFiles(); + localForage.clear(); setIsFirstLogin(true); const redirectURL = InMemoryStore.get(MS_KEYS.REDIRECT_URL); InMemoryStore.delete(MS_KEYS.REDIRECT_URL); @@ -191,7 +192,7 @@ export default function VerifyPage({ appContext, appName }: PageProps) { )} {resend === 1 && {t("SENDING")}} {resend === 2 && {t("SENT")}} - + {t("CHANGE_EMAIL")} diff --git a/web/packages/accounts/services/logout.ts 
b/web/packages/accounts/services/logout.ts new file mode 100644 index 0000000000..1858ec7cc3 --- /dev/null +++ b/web/packages/accounts/services/logout.ts @@ -0,0 +1,50 @@ +import { clearBlobCaches } from "@/next/blob-cache"; +import log from "@/next/log"; +import InMemoryStore from "@ente/shared/storage/InMemoryStore"; +import localForage from "@ente/shared/storage/localForage"; +import { clearData } from "@ente/shared/storage/localStorage"; +import { clearKeys } from "@ente/shared/storage/sessionStorage"; +import { logout as remoteLogout } from "../api/user"; + +/** + * Logout sequence common to all apps that rely on the accounts package. + * + * [Note: Do not throw during logout] + * + * This function is guaranteed to not thrown any errors, and will try to + * independently complete all the steps in the sequence that can be completed. + * This allows the user to logout and start again even if somehow their account + * gets in an unexpected state. + */ +export const accountLogout = async () => { + try { + await remoteLogout(); + } catch (e) { + log.error("Ignoring error during logout (remote)", e); + } + try { + InMemoryStore.clear(); + } catch (e) { + log.error("Ignoring error during logout (in-memory store)", e); + } + try { + clearKeys(); + } catch (e) { + log.error("Ignoring error during logout (session store)", e); + } + try { + clearData(); + } catch (e) { + log.error("Ignoring error during logout (local storage)", e); + } + try { + await localForage.clear(); + } catch (e) { + log.error("Ignoring error during logout (local forage)", e); + } + try { + await clearBlobCaches(); + } catch (e) { + log.error("Ignoring error during logout (cache)", e); + } +}; diff --git a/web/packages/accounts/services/user.ts b/web/packages/accounts/services/user.ts deleted file mode 100644 index 8f6d6609a1..0000000000 --- a/web/packages/accounts/services/user.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { clearCaches } from "@/next/blob-cache"; -import log from "@/next/log"; 
-import { Events, eventBus } from "@ente/shared/events"; -import InMemoryStore from "@ente/shared/storage/InMemoryStore"; -import { clearFiles } from "@ente/shared/storage/localForage/helpers"; -import { clearData } from "@ente/shared/storage/localStorage"; -import { clearKeys } from "@ente/shared/storage/sessionStorage"; -import router from "next/router"; -import { _logout } from "../api/user"; -import { PAGES } from "../constants/pages"; - -export const logoutUser = async () => { - try { - await _logout(); - } catch (e) { - log.error("Ignoring error during POST /users/logout", e); - } - try { - InMemoryStore.clear(); - } catch (e) { - log.error("Ignoring error when clearing in-memory store", e); - } - try { - clearKeys(); - } catch (e) { - log.error("Ignoring error when clearing keys", e); - } - try { - clearData(); - } catch (e) { - log.error("Ignoring error when clearing data", e); - } - try { - await clearCaches(); - } catch (e) { - log.error("Ignoring error when clearing caches", e); - } - try { - await clearFiles(); - } catch (e) { - log.error("Ignoring error when clearing files", e); - } - const electron = globalThis.electron; - if (electron) { - try { - await electron.watch.reset(); - } catch (e) { - log.error("Ignoring error when resetting native folder watches", e); - } - try { - await electron.clearStores(); - } catch (e) { - log.error("Ignoring error when clearing native stores", e); - } - } - try { - eventBus.emit(Events.LOGOUT); - } catch (e) { - log.error("Ignoring error in event-bus logout handlers", e); - } - router.push(PAGES.ROOT); -}; diff --git a/web/packages/build-config/package.json b/web/packages/build-config/package.json index e46bb96b1c..bacc6e8bb1 100644 --- a/web/packages/build-config/package.json +++ b/web/packages/build-config/package.json @@ -7,8 +7,8 @@ "@typescript-eslint/parser": "^7", "eslint-plugin-react": "^7.34", "eslint-plugin-react-hooks": "^4.6", - "eslint-plugin-react-refresh": "^0.4.6", + "eslint-plugin-react-refresh": 
"^0.4.7", "prettier-plugin-organize-imports": "^3.2", - "prettier-plugin-packagejson": "^2.4" + "prettier-plugin-packagejson": "^2.5" } } diff --git a/web/packages/media/formats.ts b/web/packages/media/formats.ts index 24d2c7c877..1316b654f4 100644 --- a/web/packages/media/formats.ts +++ b/web/packages/media/formats.ts @@ -24,3 +24,11 @@ const nonWebImageFileExtensions = [ */ export const isNonWebImageFileExtension = (extension: string) => nonWebImageFileExtensions.includes(extension.toLowerCase()); + +/** + * Return `true` if {@link extension} in for an HEIC-like file. + */ +export const isHEICExtension = (extension: string) => { + const ext = extension.toLowerCase(); + return ext == "heic" || ext == "heif"; +}; diff --git a/web/packages/media/image.ts b/web/packages/media/image.ts new file mode 100644 index 0000000000..2912af02a4 --- /dev/null +++ b/web/packages/media/image.ts @@ -0,0 +1,33 @@ +/** + * Compute optimal dimensions for a resized version of an image while + * maintaining aspect ratio of the source image. + * + * @param width The width of the source image. + * + * @param height The height of the source image. + * + * @param maxDimension The maximum width of height of the resized image. + * + * This function returns a new size limiting it to maximum width and height + * (both specified by {@link maxDimension}), while maintaining aspect ratio of + * the source {@link width} and {@link height}. + * + * It returns `{0, 0}` for invalid inputs. 
+ */ +export const scaledImageDimensions = ( + width: number, + height: number, + maxDimension: number, +): { width: number; height: number } => { + if (width == 0 || height == 0) return { width: 0, height: 0 }; + const widthScaleFactor = maxDimension / width; + const heightScaleFactor = maxDimension / height; + const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor); + const resizedDimensions = { + width: Math.round(width * scaleFactor), + height: Math.round(height * scaleFactor), + }; + if (resizedDimensions.width == 0 || resizedDimensions.height == 0) + return { width: 0, height: 0 }; + return resizedDimensions; +}; diff --git a/web/packages/media/package.json b/web/packages/media/package.json index 8be7e8bb6c..bf71ed37b2 100644 --- a/web/packages/media/package.json +++ b/web/packages/media/package.json @@ -5,6 +5,10 @@ "dependencies": { "@/next": "*", "file-type": "16.5.4", + "heic-convert": "^2.1", "jszip": "^3.10" + }, + "devDependencies": { + "@types/heic-convert": "^1.2.3" } } diff --git a/web/packages/media/tsconfig.json b/web/packages/media/tsconfig.json index f29c348113..bcc1151c11 100644 --- a/web/packages/media/tsconfig.json +++ b/web/packages/media/tsconfig.json @@ -1,5 +1,13 @@ { "extends": "@/build-config/tsconfig-typecheck.json", + "compilerOptions": { + /* Also indicate expectation of a WebWorker runtime */ + "lib": ["ESnext", "DOM", "DOM.Iterable", "WebWorker"] + }, /* Typecheck all files with the given extensions (here or in subfolders) */ - "include": ["**/*.ts", "**/*.tsx"] + "include": [ + "**/*.ts", + "**/*.tsx", + "../../packages/next/global-electron.d.ts" + ] } diff --git a/web/packages/media/worker/heic-convert.ts b/web/packages/media/worker/heic-convert.ts new file mode 100644 index 0000000000..476eac00a3 --- /dev/null +++ b/web/packages/media/worker/heic-convert.ts @@ -0,0 +1,11 @@ +import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import type { DedicatedHEICConvertWorker } from "./heic-convert.worker"; + +export 
const createHEICConvertWebWorker = () => + new Worker(new URL("heic-convert.worker.ts", import.meta.url)); + +export const createHEICConvertComlinkWorker = () => + new ComlinkWorker( + "heic-convert-worker", + createHEICConvertWebWorker(), + ); diff --git a/web/apps/photos/src/worker/heic-convert.worker.ts b/web/packages/media/worker/heic-convert.worker.ts similarity index 84% rename from web/apps/photos/src/worker/heic-convert.worker.ts rename to web/packages/media/worker/heic-convert.worker.ts index 96a1a94684..ffb5eb1582 100644 --- a/web/apps/photos/src/worker/heic-convert.worker.ts +++ b/web/packages/media/worker/heic-convert.worker.ts @@ -7,7 +7,7 @@ export class DedicatedHEICConvertWorker { } } -expose(DedicatedHEICConvertWorker, self); +expose(DedicatedHEICConvertWorker); /** * Convert a HEIC file to a JPEG file. @@ -18,5 +18,5 @@ export const heicToJPEG = async (heicBlob: Blob): Promise => { const buffer = new Uint8Array(await heicBlob.arrayBuffer()); const result = await HeicConvert({ buffer, format: "JPEG" }); const convertedData = new Uint8Array(result); - return new Blob([convertedData]); + return new Blob([convertedData], { type: "image/jpeg" }); }; diff --git a/web/packages/next/blob-cache.ts b/web/packages/next/blob-cache.ts index e6c3734df2..0db9464521 100644 --- a/web/packages/next/blob-cache.ts +++ b/web/packages/next/blob-cache.ts @@ -20,8 +20,8 @@ export type BlobCacheNamespace = (typeof blobCacheNames)[number]; * * This cache is suitable for storing large amounts of data (entire files). * - * To obtain a cache for a given namespace, use {@link openCache}. To clear all - * cached data (e.g. during logout), use {@link clearCaches}. + * To obtain a cache for a given namespace, use {@link openBlobCache}. To clear all + * cached data (e.g. during logout), use {@link clearBlobCaches}. 
* * [Note: Caching files] * @@ -69,14 +69,31 @@ export interface BlobCache { delete: (key: string) => Promise; } +const cachedCaches = new Map(); + /** * Return the {@link BlobCache} corresponding to the given {@link name}. * + * This is a wrapper over {@link openBlobCache} that caches (pun intended) the + * cache and returns the same one each time it is called with the same name. + * It'll open the cache lazily the first time it is invoked. + */ +export const blobCache = async ( + name: BlobCacheNamespace, +): Promise => { + let c = cachedCaches.get(name); + if (!c) cachedCaches.set(name, (c = await openBlobCache(name))); + return c; +}; + +/** + * Create a new {@link BlobCache} corresponding to the given {@link name}. + * * @param name One of the arbitrary but predefined namespaces of type * {@link BlobCacheNamespace} which group related data and allow us to use the * same key across namespaces. */ -export const openCache = async ( +export const openBlobCache = async ( name: BlobCacheNamespace, ): Promise => isElectron() ? openOPFSCacheWeb(name) : openWebCache(name); @@ -119,6 +136,10 @@ export const openCache = async ( * * new Blob([arrayBuffer, andOrAnyArray, andOrstring]) * + * To convert from a Uint8Array/ArrayBuffer/Blob to a ReadableStream + * + * new Response(array).body + * * Refs: * - https://github.com/yigitunallar/arraybuffer-vs-blob * - https://stackoverflow.com/questions/11821096/what-is-the-difference-between-an-arraybuffer-and-a-blob @@ -194,7 +215,7 @@ export const cachedOrNew = async ( key: string, get: () => Promise, ): Promise => { - const cache = await openCache(cacheName); + const cache = await openBlobCache(cacheName); const cachedBlob = await cache.get(key); if (cachedBlob) return cachedBlob; @@ -204,15 +225,17 @@ export const cachedOrNew = async ( }; /** - * Delete all cached data. + * Delete all cached data, including cached caches. * * Meant for use during logout, to reset the state of the user's account. 
*/ -export const clearCaches = async () => - isElectron() ? clearOPFSCaches() : clearWebCaches(); +export const clearBlobCaches = async () => { + cachedCaches.clear(); + return isElectron() ? clearOPFSCaches() : clearWebCaches(); +}; const clearWebCaches = async () => { - await Promise.all(blobCacheNames.map((name) => caches.delete(name))); + await Promise.allSettled(blobCacheNames.map((name) => caches.delete(name))); }; const clearOPFSCaches = async () => { diff --git a/web/packages/next/locales/bg-BG/translation.json b/web/packages/next/locales/bg-BG/translation.json index 52ce2c47b3..dbdfc6e266 100644 --- a/web/packages/next/locales/bg-BG/translation.json +++ b/web/packages/next/locales/bg-BG/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", "CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/de-DE/translation.json b/web/packages/next/locales/de-DE/translation.json index c9e479ecbb..183b1d8037 100644 --- a/web/packages/next/locales/de-DE/translation.json +++ b/web/packages/next/locales/de-DE/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Erhalte 2 Monate kostenlos bei Jahresabonnements", "POPULAR": "Beliebt", "FREE_PLAN_OPTION_LABEL": "Mit kostenloser Testversion fortfahren", - "FREE_PLAN_DESCRIPTION": "1 GB für 1 Jahr", + "free_plan_description": "{{storage}} für 1 Jahr", "CURRENT_USAGE": "Aktuelle Nutzung ist {{usage}}", "WEAK_DEVICE": "Dein Browser ist nicht leistungsstark genug, um deine Bilder zu verschlüsseln. 
Versuche, dich an einem Computer bei Ente anzumelden, oder lade dir die Ente-App für dein Gerät (Handy oder Desktop) herunter.", "DRAG_AND_DROP_HINT": "Oder ziehe Dateien per Drag-and-Drop in das Ente-Fenster", @@ -565,6 +565,9 @@ "IMAGE": "Bild", "VIDEO": "Video", "LIVE_PHOTO": "Live-Foto", + "editor": { + "crop": "" + }, "CONVERT": "Konvertieren", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Editor wirklich schließen?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Lade dein bearbeitetes Bild herunter oder speichere es in Ente, um die Änderungen nicht zu verlieren.", @@ -588,7 +591,7 @@ "ROTATION": "Drehen", "RESET": "Zurücksetzen", "PHOTO_EDITOR": "Foto-Editor", - "FASTER_UPLOAD": "Schnelleres hochladen", + "FASTER_UPLOAD": "Schnelleres Hochladen", "FASTER_UPLOAD_DESCRIPTION": "Uploads über nahegelegene Server leiten", "MAGIC_SEARCH_STATUS": "Status der magischen Suche", "INDEXED_ITEMS": "Indizierte Elemente", diff --git a/web/packages/next/locales/en-US/translation.json b/web/packages/next/locales/en-US/translation.json index 9a363f3df6..f7acb63c89 100644 --- a/web/packages/next/locales/en-US/translation.json +++ b/web/packages/next/locales/en-US/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Get 2 months free on yearly plans", "POPULAR": "Popular", "FREE_PLAN_OPTION_LABEL": "Continue with free trial", - "FREE_PLAN_DESCRIPTION": "1 GB for 1 year", + "free_plan_description": "{{storage}} for 1 year", "CURRENT_USAGE": "Current usage is {{usage}}", "WEAK_DEVICE": "The web browser you're using is not powerful enough to encrypt your photos. 
Please try to log in to Ente on your computer, or download the Ente mobile/desktop app.", "DRAG_AND_DROP_HINT": "Or drag and drop into the Ente window", @@ -565,6 +565,9 @@ "IMAGE": "Image", "VIDEO": "Video", "LIVE_PHOTO": "Live Photo", + "editor": { + "crop": "Crop" + }, "CONVERT": "Convert", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Are you sure you want to close the editor?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Download your edited image or save a copy to Ente to persist your changes.", diff --git a/web/packages/next/locales/es-ES/translation.json b/web/packages/next/locales/es-ES/translation.json index 69b783207a..abd06b510d 100644 --- a/web/packages/next/locales/es-ES/translation.json +++ b/web/packages/next/locales/es-ES/translation.json @@ -352,7 +352,7 @@ "ADD_COLLABORATORS": "", "ADD_NEW_EMAIL": "", "shared_with_people_zero": "", - "shared_with_people_one": "", + "shared_with_people_one": "Compartido con 1 persona", "shared_with_people_other": "", "participants_zero": "", "participants_one": "", @@ -362,8 +362,8 @@ "CHANGE_PERMISSIONS_TO_COLLABORATOR": "", "CONVERT_TO_VIEWER": "", "CONVERT_TO_COLLABORATOR": "", - "CHANGE_PERMISSION": "", - "REMOVE_PARTICIPANT": "", + "CHANGE_PERMISSION": "¿Cambiar Permiso?", + "REMOVE_PARTICIPANT": "¿Eliminar?", "CONFIRM_REMOVE": "", "MANAGE": "", "ADDED_AS": "", @@ -415,8 +415,8 @@ "albums_other": "{{count}} álbumes", "ALL_ALBUMS": "Todos los álbumes", "ALBUMS": "Álbumes", - "ALL_HIDDEN_ALBUMS": "", - "HIDDEN_ALBUMS": "", + "ALL_HIDDEN_ALBUMS": "Todos los álbumes ocultos", + "HIDDEN_ALBUMS": "Álbumes ocultos", "HIDDEN_ITEMS": "", "ENTER_TWO_FACTOR_OTP": "Ingrese el código de seis dígitos de su aplicación de autenticación a continuación.", "CREATE_ACCOUNT": "Crear cuenta", @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Obtén 2 meses gratis en planes anuales", "POPULAR": "Popular", "FREE_PLAN_OPTION_LABEL": "Continuar con el plan gratuito", - "FREE_PLAN_DESCRIPTION": "1 GB por 1 año", + "free_plan_description": "{{storage}} por 1 año", 
"CURRENT_USAGE": "El uso actual es {{usage}}", "WEAK_DEVICE": "El navegador web que está utilizando no es lo suficientemente poderoso para cifrar sus fotos. Por favor, intente iniciar sesión en ente en su computadora, o descargue la aplicación ente para móvil/escritorio.", "DRAG_AND_DROP_HINT": "O arrastre y suelte en la ventana ente", @@ -518,7 +518,7 @@ "PUBLIC_COLLECT_SUBTEXT": "Permitir a las personas con el enlace añadir fotos al álbum compartido.", "STOP_EXPORT": "Stop", "EXPORT_PROGRESS": "{{progress.success}} / {{progress.total}} archivos exportados", - "MIGRATING_EXPORT": "", + "MIGRATING_EXPORT": "Preparando...", "RENAMING_COLLECTION_FOLDERS": "", "TRASHING_DELETED_FILES": "", "TRASHING_DELETED_COLLECTIONS": "", @@ -543,7 +543,7 @@ "at": "a las", "AUTH_NEXT": "siguiente", "AUTH_DOWNLOAD_MOBILE_APP": "Descarga nuestra aplicación móvil para administrar tus secretos", - "HIDDEN": "", + "HIDDEN": "Oculto", "HIDE": "Ocultar", "UNHIDE": "Mostrar", "UNHIDE_TO_COLLECTION": "Hacer visible al álbum", @@ -565,10 +565,13 @@ "IMAGE": "", "VIDEO": "Video", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", - "BRIGHTNESS": "", + "BRIGHTNESS": "Brillo", "CONTRAST": "", "SATURATION": "", "BLUR": "", @@ -617,7 +620,7 @@ "PASSKEY_LOGIN_FAILED": "", "PASSKEY_LOGIN_URL_INVALID": "", "PASSKEY_LOGIN_ERRORED": "", - "TRY_AGAIN": "", + "TRY_AGAIN": "Inténtelo de nuevo", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", "LOGIN_WITH_PASSKEY": "", "autogenerated_first_album_name": "", diff --git a/web/packages/next/locales/fa-IR/translation.json b/web/packages/next/locales/fa-IR/translation.json index 34977aa3db..ce0e8e6e10 100644 --- a/web/packages/next/locales/fa-IR/translation.json +++ b/web/packages/next/locales/fa-IR/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", 
"CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/fi-FI/translation.json b/web/packages/next/locales/fi-FI/translation.json index b94891efdf..9f549eb49b 100644 --- a/web/packages/next/locales/fi-FI/translation.json +++ b/web/packages/next/locales/fi-FI/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", "CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/fr-FR/translation.json b/web/packages/next/locales/fr-FR/translation.json index 796b40a44c..9af40b690c 100644 --- a/web/packages/next/locales/fr-FR/translation.json +++ b/web/packages/next/locales/fr-FR/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Obtenir 2 mois gratuits sur les plans annuels", "POPULAR": "Populaire", "FREE_PLAN_OPTION_LABEL": "Poursuivre avec la version d'essai gratuite", - "FREE_PLAN_DESCRIPTION": "1 Go pour 1 an", + "free_plan_description": "{{storage}} pour 1 an", "CURRENT_USAGE": "L'utilisation actuelle est de {{usage}}", "WEAK_DEVICE": "Le navigateur que vous utilisez n'est pas assez puissant pour chiffrer vos photos. 
Veuillez essayer de vous connecter à Ente sur votre ordinateur, ou télécharger l'appli Ente mobile/ordinateur.", "DRAG_AND_DROP_HINT": "Sinon glissez déposez dans la fenêtre Ente", @@ -565,6 +565,9 @@ "IMAGE": "Image", "VIDEO": "Vidéo", "LIVE_PHOTO": "Photos en direct", + "editor": { + "crop": "" + }, "CONVERT": "Convertir", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Êtes-vous sûr de vouloir fermer l'éditeur ?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Téléchargez votre image modifiée ou enregistrez une copie sur Ente pour maintenir vos modifications.", diff --git a/web/packages/next/locales/is-IS/translation.json b/web/packages/next/locales/is-IS/translation.json new file mode 100644 index 0000000000..80f443b5de --- /dev/null +++ b/web/packages/next/locales/is-IS/translation.json @@ -0,0 +1,628 @@ +{ + "HERO_SLIDE_1_TITLE": "", + "HERO_SLIDE_1": "", + "HERO_SLIDE_2_TITLE": "", + "HERO_SLIDE_2": "", + "HERO_SLIDE_3_TITLE": "", + "HERO_SLIDE_3": "", + "LOGIN": "", + "SIGN_UP": "", + "NEW_USER": "", + "EXISTING_USER": "", + "ENTER_NAME": "", + "PUBLIC_UPLOADER_NAME_MESSAGE": "", + "ENTER_EMAIL": "", + "EMAIL_ERROR": "", + "REQUIRED": "", + "EMAIL_SENT": "", + "CHECK_INBOX": "", + "ENTER_OTT": "", + "RESEND_MAIL": "", + "VERIFY": "", + "UNKNOWN_ERROR": "", + "INVALID_CODE": "", + "EXPIRED_CODE": "", + "SENDING": "", + "SENT": "", + "PASSWORD": "Lykilorð", + "LINK_PASSWORD": "", + "RETURN_PASSPHRASE_HINT": "Lykilorð", + "SET_PASSPHRASE": "", + "VERIFY_PASSPHRASE": "", + "INCORRECT_PASSPHRASE": "Rangt lykilorð", + "ENTER_ENC_PASSPHRASE": "", + "PASSPHRASE_DISCLAIMER": "", + "WELCOME_TO_ENTE_HEADING": "", + "WELCOME_TO_ENTE_SUBHEADING": "", + "WHERE_YOUR_BEST_PHOTOS_LIVE": "", + "KEY_GENERATION_IN_PROGRESS_MESSAGE": "", + "PASSPHRASE_HINT": "", + "CONFIRM_PASSPHRASE": "", + "REFERRAL_CODE_HINT": "", + "REFERRAL_INFO": "", + "PASSPHRASE_MATCH_ERROR": "", + "CREATE_COLLECTION": "", + "ENTER_ALBUM_NAME": "", + "CLOSE_OPTION": "", + "ENTER_FILE_NAME": "", + "CLOSE": "Loka", + "NO": "Nei", 
+ "NOTHING_HERE": "Ekkert að sjá hér ennþá 👀", + "UPLOAD": "Hlaða upp", + "IMPORT": "", + "ADD_PHOTOS": "", + "ADD_MORE_PHOTOS": "", + "add_photos_one": "", + "add_photos_other": "", + "SELECT_PHOTOS": "", + "FILE_UPLOAD": "", + "UPLOAD_STAGE_MESSAGE": { + "0": "", + "1": "", + "2": "", + "3": "", + "4": "", + "5": "" + }, + "FILE_NOT_UPLOADED_LIST": "", + "SUBSCRIPTION_EXPIRED": "", + "SUBSCRIPTION_EXPIRED_MESSAGE": "", + "STORAGE_QUOTA_EXCEEDED": "", + "INITIAL_LOAD_DELAY_WARNING": "", + "USER_DOES_NOT_EXIST": "", + "NO_ACCOUNT": "", + "ACCOUNT_EXISTS": "", + "CREATE": "", + "DOWNLOAD": "", + "DOWNLOAD_OPTION": "", + "DOWNLOAD_FAVORITES": "", + "DOWNLOAD_UNCATEGORIZED": "", + "DOWNLOAD_HIDDEN_ITEMS": "", + "COPY_OPTION": "", + "TOGGLE_FULLSCREEN": "", + "ZOOM_IN_OUT": "", + "PREVIOUS": "", + "NEXT": "", + "TITLE_PHOTOS": "", + "TITLE_ALBUMS": "", + "TITLE_AUTH": "", + "UPLOAD_FIRST_PHOTO": "", + "IMPORT_YOUR_FOLDERS": "", + "UPLOAD_DROPZONE_MESSAGE": "", + "WATCH_FOLDER_DROPZONE_MESSAGE": "", + "TRASH_FILES_TITLE": "", + "TRASH_FILE_TITLE": "", + "DELETE_FILES_TITLE": "", + "DELETE_FILES_MESSAGE": "", + "DELETE": "Eyða", + "DELETE_OPTION": "", + "FAVORITE_OPTION": "", + "UNFAVORITE_OPTION": "", + "MULTI_FOLDER_UPLOAD": "", + "UPLOAD_STRATEGY_CHOICE": "", + "UPLOAD_STRATEGY_SINGLE_COLLECTION": "", + "OR": "eða", + "UPLOAD_STRATEGY_COLLECTION_PER_FOLDER": "", + "SESSION_EXPIRED_MESSAGE": "", + "SESSION_EXPIRED": "", + "PASSWORD_GENERATION_FAILED": "", + "CHANGE_PASSWORD": "", + "GO_BACK": "Fara til baka", + "RECOVERY_KEY": "", + "SAVE_LATER": "Gera þetta seinna", + "SAVE": "Vista Lykil", + "RECOVERY_KEY_DESCRIPTION": "", + "RECOVER_KEY_GENERATION_FAILED": "", + "KEY_NOT_STORED_DISCLAIMER": "", + "FORGOT_PASSWORD": "Gleymt lykilorð", + "RECOVER_ACCOUNT": "Endurheimta Reikning", + "RECOVERY_KEY_HINT": "Endurheimtunarlykill", + "RECOVER": "Endurheimta", + "NO_RECOVERY_KEY": "Enginn endurheimtunarlykill?", + "INCORRECT_RECOVERY_KEY": "", + "SORRY": "Fyrirgefðu", + 
"NO_RECOVERY_KEY_MESSAGE": "", + "NO_TWO_FACTOR_RECOVERY_KEY_MESSAGE": "", + "CONTACT_SUPPORT": "", + "REQUEST_FEATURE": "", + "SUPPORT": "", + "CONFIRM": "Staðfesta", + "CANCEL": "Hætta við", + "LOGOUT": "Útskrá", + "DELETE_ACCOUNT": "Eyða aðgangi", + "DELETE_ACCOUNT_MESSAGE": "", + "LOGOUT_MESSAGE": "Ertu viss um að þú viljir skrá þig út?", + "CHANGE_EMAIL": "Breyta netfangi", + "OK": "Í lagi", + "SUCCESS": "Tókst", + "ERROR": "Villa", + "MESSAGE": "Skilaboð", + "INSTALL_MOBILE_APP": "", + "DOWNLOAD_APP_MESSAGE": "", + "DOWNLOAD_APP": "", + "EXPORT": "", + "SUBSCRIPTION": "Áskrift", + "SUBSCRIBE": "Gerast áskrifandi", + "MANAGEMENT_PORTAL": "", + "MANAGE_FAMILY_PORTAL": "", + "LEAVE_FAMILY_PLAN": "", + "LEAVE": "", + "LEAVE_FAMILY_CONFIRM": "", + "CHOOSE_PLAN": "", + "MANAGE_PLAN": "", + "ACTIVE": "Virkur", + "OFFLINE_MSG": "", + "FREE_SUBSCRIPTION_INFO": "", + "FAMILY_SUBSCRIPTION_INFO": "", + "RENEWAL_ACTIVE_SUBSCRIPTION_STATUS": "", + "RENEWAL_CANCELLED_SUBSCRIPTION_STATUS": "", + "RENEWAL_CANCELLED_SUBSCRIPTION_INFO": "", + "ADD_ON_AVAILABLE_TILL": "", + "STORAGE_QUOTA_EXCEEDED_SUBSCRIPTION_INFO": "Þú hefur farið yfir geymsluplássið þitt, vinsamlegast uppfærðu", + "SUBSCRIPTION_PURCHASE_SUCCESS": "", + "SUBSCRIPTION_PURCHASE_CANCELLED": "", + "SUBSCRIPTION_PURCHASE_FAILED": "", + "SUBSCRIPTION_UPDATE_FAILED": "", + "UPDATE_PAYMENT_METHOD_MESSAGE": "", + "STRIPE_AUTHENTICATION_FAILED": "", + "UPDATE_PAYMENT_METHOD": "", + "MONTHLY": "", + "YEARLY": "", + "update_subscription_title": "", + "UPDATE_SUBSCRIPTION_MESSAGE": "", + "UPDATE_SUBSCRIPTION": "", + "CANCEL_SUBSCRIPTION": "", + "CANCEL_SUBSCRIPTION_MESSAGE": "", + "CANCEL_SUBSCRIPTION_WITH_ADDON_MESSAGE": "", + "SUBSCRIPTION_CANCEL_FAILED": "", + "SUBSCRIPTION_CANCEL_SUCCESS": "", + "REACTIVATE_SUBSCRIPTION": "", + "REACTIVATE_SUBSCRIPTION_MESSAGE": "", + "SUBSCRIPTION_ACTIVATE_SUCCESS": "", + "SUBSCRIPTION_ACTIVATE_FAILED": "", + "SUBSCRIPTION_PURCHASE_SUCCESS_TITLE": "", + 
"CANCEL_SUBSCRIPTION_ON_MOBILE": "", + "CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE": "", + "MAIL_TO_MANAGE_SUBSCRIPTION": "", + "RENAME": "", + "RENAME_FILE": "", + "RENAME_COLLECTION": "", + "DELETE_COLLECTION_TITLE": "", + "DELETE_COLLECTION": "", + "DELETE_COLLECTION_MESSAGE": "", + "DELETE_PHOTOS": "", + "KEEP_PHOTOS": "", + "SHARE_COLLECTION": "", + "SHARE_WITH_SELF": "", + "ALREADY_SHARED": "", + "SHARING_BAD_REQUEST_ERROR": "", + "SHARING_DISABLED_FOR_FREE_ACCOUNTS": "", + "DOWNLOAD_COLLECTION": "", + "CREATE_ALBUM_FAILED": "", + "SEARCH": "", + "SEARCH_RESULTS": "", + "NO_RESULTS": "", + "SEARCH_HINT": "", + "SEARCH_TYPE": { + "COLLECTION": "", + "LOCATION": "", + "CITY": "", + "DATE": "", + "FILE_NAME": "", + "THING": "", + "FILE_CAPTION": "", + "FILE_TYPE": "", + "CLIP": "" + }, + "photos_count_zero": "", + "photos_count_one": "", + "photos_count_other": "", + "TERMS_AND_CONDITIONS": "", + "ADD_TO_COLLECTION": "", + "SELECTED": "", + "PEOPLE": "", + "INDEXING_SCHEDULED": "", + "ANALYZING_PHOTOS": "", + "INDEXING_PEOPLE": "", + "INDEXING_DONE": "", + "UNIDENTIFIED_FACES": "", + "OBJECTS": "", + "TEXT": "", + "INFO": "", + "INFO_OPTION": "", + "FILE_NAME": "", + "CAPTION_PLACEHOLDER": "", + "LOCATION": "", + "SHOW_ON_MAP": "", + "MAP": "", + "MAP_SETTINGS": "", + "ENABLE_MAPS": "", + "ENABLE_MAP": "", + "DISABLE_MAPS": "", + "ENABLE_MAP_DESCRIPTION": "", + "DISABLE_MAP_DESCRIPTION": "", + "DISABLE_MAP": "", + "DETAILS": "", + "VIEW_EXIF": "", + "NO_EXIF": "", + "EXIF": "", + "ISO": "", + "TWO_FACTOR": "", + "TWO_FACTOR_AUTHENTICATION": "", + "TWO_FACTOR_QR_INSTRUCTION": "", + "ENTER_CODE_MANUALLY": "", + "TWO_FACTOR_MANUAL_CODE_INSTRUCTION": "", + "SCAN_QR_CODE": "", + "ENABLE_TWO_FACTOR": "", + "ENABLE": "", + "LOST_DEVICE": "", + "INCORRECT_CODE": "", + "TWO_FACTOR_INFO": "", + "DISABLE_TWO_FACTOR_LABEL": "", + "UPDATE_TWO_FACTOR_LABEL": "", + "DISABLE": "", + "RECONFIGURE": "", + "UPDATE_TWO_FACTOR": "", + "UPDATE_TWO_FACTOR_MESSAGE": "", + "UPDATE": "", + 
"DISABLE_TWO_FACTOR": "", + "DISABLE_TWO_FACTOR_MESSAGE": "", + "TWO_FACTOR_DISABLE_FAILED": "", + "EXPORT_DATA": "", + "SELECT_FOLDER": "", + "DESTINATION": "", + "START": "", + "LAST_EXPORT_TIME": "", + "EXPORT_AGAIN": "", + "LOCAL_STORAGE_NOT_ACCESSIBLE": "", + "LOCAL_STORAGE_NOT_ACCESSIBLE_MESSAGE": "", + "SEND_OTT": "", + "EMAIl_ALREADY_OWNED": "", + "ETAGS_BLOCKED": "", + "LIVE_PHOTOS_DETECTED": "", + "RETRY_FAILED": "", + "FAILED_UPLOADS": "", + "SKIPPED_FILES": "", + "THUMBNAIL_GENERATION_FAILED_UPLOADS": "", + "UNSUPPORTED_FILES": "", + "SUCCESSFUL_UPLOADS": "", + "SKIPPED_INFO": "", + "UNSUPPORTED_INFO": "", + "BLOCKED_UPLOADS": "", + "INPROGRESS_METADATA_EXTRACTION": "", + "INPROGRESS_UPLOADS": "", + "TOO_LARGE_UPLOADS": "", + "LARGER_THAN_AVAILABLE_STORAGE_UPLOADS": "", + "LARGER_THAN_AVAILABLE_STORAGE_INFO": "", + "TOO_LARGE_INFO": "", + "THUMBNAIL_GENERATION_FAILED_INFO": "", + "UPLOAD_TO_COLLECTION": "", + "UNCATEGORIZED": "", + "ARCHIVE": "", + "FAVORITES": "", + "ARCHIVE_COLLECTION": "", + "ARCHIVE_SECTION_NAME": "", + "ALL_SECTION_NAME": "", + "MOVE_TO_COLLECTION": "", + "UNARCHIVE": "", + "UNARCHIVE_COLLECTION": "", + "HIDE_COLLECTION": "", + "UNHIDE_COLLECTION": "", + "MOVE": "", + "ADD": "", + "REMOVE": "", + "YES_REMOVE": "", + "REMOVE_FROM_COLLECTION": "", + "TRASH": "", + "MOVE_TO_TRASH": "", + "TRASH_FILES_MESSAGE": "", + "TRASH_FILE_MESSAGE": "", + "DELETE_PERMANENTLY": "", + "RESTORE": "", + "RESTORE_TO_COLLECTION": "", + "EMPTY_TRASH": "", + "EMPTY_TRASH_TITLE": "", + "EMPTY_TRASH_MESSAGE": "", + "LEAVE_SHARED_ALBUM": "", + "LEAVE_ALBUM": "", + "LEAVE_SHARED_ALBUM_TITLE": "", + "LEAVE_SHARED_ALBUM_MESSAGE": "", + "NOT_FILE_OWNER": "", + "CONFIRM_SELF_REMOVE_MESSAGE": "", + "CONFIRM_SELF_AND_OTHER_REMOVE_MESSAGE": "", + "SORT_BY_CREATION_TIME_ASCENDING": "", + "SORT_BY_UPDATION_TIME_DESCENDING": "", + "SORT_BY_NAME": "", + "FIX_CREATION_TIME": "", + "FIX_CREATION_TIME_IN_PROGRESS": "", + "CREATION_TIME_UPDATED": "", + 
"UPDATE_CREATION_TIME_NOT_STARTED": "", + "UPDATE_CREATION_TIME_COMPLETED": "", + "UPDATE_CREATION_TIME_COMPLETED_WITH_ERROR": "", + "CAPTION_CHARACTER_LIMIT": "hámark 5000 stafir", + "DATE_TIME_ORIGINAL": "", + "DATE_TIME_DIGITIZED": "", + "METADATA_DATE": "", + "CUSTOM_TIME": "", + "REOPEN_PLAN_SELECTOR_MODAL": "", + "OPEN_PLAN_SELECTOR_MODAL_FAILED": "", + "INSTALL": "", + "SHARING_DETAILS": "", + "MODIFY_SHARING": "", + "ADD_COLLABORATORS": "", + "ADD_NEW_EMAIL": "", + "shared_with_people_zero": "", + "shared_with_people_one": "", + "shared_with_people_other": "", + "participants_zero": "", + "participants_one": "", + "participants_other": "", + "ADD_VIEWERS": "", + "CHANGE_PERMISSIONS_TO_VIEWER": "", + "CHANGE_PERMISSIONS_TO_COLLABORATOR": "", + "CONVERT_TO_VIEWER": "", + "CONVERT_TO_COLLABORATOR": "", + "CHANGE_PERMISSION": "", + "REMOVE_PARTICIPANT": "", + "CONFIRM_REMOVE": "", + "MANAGE": "", + "ADDED_AS": "", + "COLLABORATOR_RIGHTS": "", + "REMOVE_PARTICIPANT_HEAD": "", + "OWNER": "Eigandi", + "COLLABORATORS": "", + "ADD_MORE": "", + "VIEWERS": "", + "OR_ADD_EXISTING": "", + "REMOVE_PARTICIPANT_MESSAGE": "", + "NOT_FOUND": "404 - fannst ekki", + "LINK_EXPIRED": "Hlekkur rann út", + "LINK_EXPIRED_MESSAGE": "", + "MANAGE_LINK": "Stjórna hlekk", + "LINK_TOO_MANY_REQUESTS": "", + "FILE_DOWNLOAD": "", + "LINK_PASSWORD_LOCK": "", + "PUBLIC_COLLECT": "", + "LINK_DEVICE_LIMIT": "", + "NO_DEVICE_LIMIT": "", + "LINK_EXPIRY": "", + "NEVER": "", + "DISABLE_FILE_DOWNLOAD": "", + "DISABLE_FILE_DOWNLOAD_MESSAGE": "", + "SHARED_USING": "", + "SHARING_REFERRAL_CODE": "", + "LIVE": "", + "DISABLE_PASSWORD": "", + "DISABLE_PASSWORD_MESSAGE": "", + "PASSWORD_LOCK": "", + "LOCK": "", + "DOWNLOAD_UPLOAD_LOGS": "", + "UPLOAD_FILES": "", + "UPLOAD_DIRS": "", + "UPLOAD_GOOGLE_TAKEOUT": "", + "DEDUPLICATE_FILES": "", + "NO_DUPLICATES_FOUND": "", + "FILES": "", + "EACH": "", + "DEDUPLICATE_BASED_ON_SIZE": "", + "STOP_ALL_UPLOADS_MESSAGE": "", + "STOP_UPLOADS_HEADER": "", + 
"YES_STOP_UPLOADS": "", + "STOP_DOWNLOADS_HEADER": "", + "YES_STOP_DOWNLOADS": "", + "STOP_ALL_DOWNLOADS_MESSAGE": "", + "albums_one": "", + "albums_other": "", + "ALL_ALBUMS": "", + "ALBUMS": "", + "ALL_HIDDEN_ALBUMS": "", + "HIDDEN_ALBUMS": "", + "HIDDEN_ITEMS": "", + "ENTER_TWO_FACTOR_OTP": "", + "CREATE_ACCOUNT": "", + "COPIED": "", + "WATCH_FOLDERS": "", + "UPGRADE_NOW": "", + "RENEW_NOW": "", + "STORAGE": "", + "USED": "", + "YOU": "", + "FAMILY": "", + "FREE": "", + "OF": "", + "WATCHED_FOLDERS": "", + "NO_FOLDERS_ADDED": "", + "FOLDERS_AUTOMATICALLY_MONITORED": "", + "UPLOAD_NEW_FILES_TO_ENTE": "", + "REMOVE_DELETED_FILES_FROM_ENTE": "", + "ADD_FOLDER": "", + "STOP_WATCHING": "", + "STOP_WATCHING_FOLDER": "", + "STOP_WATCHING_DIALOG_MESSAGE": "", + "YES_STOP": "", + "MONTH_SHORT": "", + "YEAR": "", + "FAMILY_PLAN": "", + "DOWNLOAD_LOGS": "", + "DOWNLOAD_LOGS_MESSAGE": "", + "CHANGE_FOLDER": "", + "TWO_MONTHS_FREE": "", + "POPULAR": "", + "FREE_PLAN_OPTION_LABEL": "", + "free_plan_description": "", + "CURRENT_USAGE": "", + "WEAK_DEVICE": "", + "DRAG_AND_DROP_HINT": "", + "CONFIRM_ACCOUNT_DELETION_MESSAGE": "", + "AUTHENTICATE": "", + "UPLOADED_TO_SINGLE_COLLECTION": "", + "UPLOADED_TO_SEPARATE_COLLECTIONS": "", + "NEVERMIND": "", + "UPDATE_AVAILABLE": "", + "UPDATE_INSTALLABLE_MESSAGE": "", + "INSTALL_NOW": "", + "INSTALL_ON_NEXT_LAUNCH": "", + "UPDATE_AVAILABLE_MESSAGE": "", + "DOWNLOAD_AND_INSTALL": "", + "IGNORE_THIS_VERSION": "", + "TODAY": "", + "YESTERDAY": "", + "NAME_PLACEHOLDER": "", + "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED": "", + "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED_MESSAGE": "", + "CHOSE_THEME": "", + "ML_SEARCH": "", + "ENABLE_ML_SEARCH_DESCRIPTION": "", + "ML_MORE_DETAILS": "", + "ENABLE_FACE_SEARCH": "", + "ENABLE_FACE_SEARCH_TITLE": "", + "ENABLE_FACE_SEARCH_DESCRIPTION": "", + "DISABLE_BETA": "", + "DISABLE_FACE_SEARCH": "", + "DISABLE_FACE_SEARCH_TITLE": "", + "DISABLE_FACE_SEARCH_DESCRIPTION": "", + "ADVANCED": "", + 
"FACE_SEARCH_CONFIRMATION": "", + "LABS": "", + "YOURS": "", + "PASSPHRASE_STRENGTH_WEAK": "", + "PASSPHRASE_STRENGTH_MODERATE": "", + "PASSPHRASE_STRENGTH_STRONG": "", + "PREFERENCES": "", + "LANGUAGE": "", + "EXPORT_DIRECTORY_DOES_NOT_EXIST": "", + "EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "", + "SUBSCRIPTION_VERIFICATION_ERROR": "", + "storage_unit": { + "b": "", + "kb": "", + "mb": "", + "gb": "", + "tb": "" + }, + "AFTER_TIME": { + "HOUR": "", + "DAY": "", + "WEEK": "", + "MONTH": "", + "YEAR": "" + }, + "COPY_LINK": "", + "DONE": "", + "LINK_SHARE_TITLE": "", + "REMOVE_LINK": "", + "CREATE_PUBLIC_SHARING": "", + "PUBLIC_LINK_CREATED": "", + "PUBLIC_LINK_ENABLED": "", + "COLLECT_PHOTOS": "", + "PUBLIC_COLLECT_SUBTEXT": "", + "STOP_EXPORT": "", + "EXPORT_PROGRESS": "", + "MIGRATING_EXPORT": "", + "RENAMING_COLLECTION_FOLDERS": "", + "TRASHING_DELETED_FILES": "", + "TRASHING_DELETED_COLLECTIONS": "", + "CONTINUOUS_EXPORT": "", + "PENDING_ITEMS": "", + "EXPORT_STARTING": "", + "DELETE_ACCOUNT_REASON_LABEL": "", + "DELETE_ACCOUNT_REASON_PLACEHOLDER": "", + "DELETE_REASON": { + "MISSING_FEATURE": "", + "BROKEN_BEHAVIOR": "", + "FOUND_ANOTHER_SERVICE": "", + "NOT_LISTED": "" + }, + "DELETE_ACCOUNT_FEEDBACK_LABEL": "", + "DELETE_ACCOUNT_FEEDBACK_PLACEHOLDER": "", + "CONFIRM_DELETE_ACCOUNT_CHECKBOX_LABEL": "", + "CONFIRM_DELETE_ACCOUNT": "", + "FEEDBACK_REQUIRED": "", + "FEEDBACK_REQUIRED_FOUND_ANOTHER_SERVICE": "", + "RECOVER_TWO_FACTOR": "", + "at": "", + "AUTH_NEXT": "", + "AUTH_DOWNLOAD_MOBILE_APP": "", + "HIDDEN": "", + "HIDE": "Fela", + "UNHIDE": "", + "UNHIDE_TO_COLLECTION": "", + "SORT_BY": "Raða eftir", + "NEWEST_FIRST": "Nýjast fyrst", + "OLDEST_FIRST": "Elsta fyrst", + "CONVERSION_FAILED_NOTIFICATION_MESSAGE": "", + "SELECT_COLLECTION": "", + "PIN_ALBUM": "", + "UNPIN_ALBUM": "", + "DOWNLOAD_COMPLETE": "", + "DOWNLOADING_COLLECTION": "", + "DOWNLOAD_FAILED": "", + "DOWNLOAD_PROGRESS": "", + "CHRISTMAS": "", + "CHRISTMAS_EVE": "", + "NEW_YEAR": "Nýtt ár", + 
"NEW_YEAR_EVE": "", + "IMAGE": "Mynd", + "VIDEO": "Mynband", + "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, + "CONVERT": "", + "CONFIRM_EDITOR_CLOSE_MESSAGE": "", + "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", + "BRIGHTNESS": "", + "CONTRAST": "", + "SATURATION": "", + "BLUR": "", + "INVERT_COLORS": "", + "ASPECT_RATIO": "", + "SQUARE": "", + "ROTATE_LEFT": "", + "ROTATE_RIGHT": "", + "FLIP_VERTICALLY": "", + "FLIP_HORIZONTALLY": "", + "DOWNLOAD_EDITED": "", + "SAVE_A_COPY_TO_ENTE": "", + "RESTORE_ORIGINAL": "", + "TRANSFORM": "", + "COLORS": "", + "FLIP": "", + "ROTATION": "", + "RESET": "", + "PHOTO_EDITOR": "", + "FASTER_UPLOAD": "", + "FASTER_UPLOAD_DESCRIPTION": "", + "MAGIC_SEARCH_STATUS": "", + "INDEXED_ITEMS": "", + "CAST_ALBUM_TO_TV": "", + "ENTER_CAST_PIN_CODE": "", + "PAIR_DEVICE_TO_TV": "", + "TV_NOT_FOUND": "", + "AUTO_CAST_PAIR": "", + "AUTO_CAST_PAIR_DESC": "", + "PAIR_WITH_PIN": "", + "CHOOSE_DEVICE_FROM_BROWSER": "", + "PAIR_WITH_PIN_DESC": "", + "VISIT_CAST_ENTE_IO": "", + "CAST_AUTO_PAIR_FAILED": "", + "FREEHAND": "", + "APPLY_CROP": "", + "PHOTO_EDIT_REQUIRED_TO_SAVE": "", + "PASSKEYS": "", + "DELETE_PASSKEY": "", + "DELETE_PASSKEY_CONFIRMATION": "", + "RENAME_PASSKEY": "", + "ADD_PASSKEY": "", + "ENTER_PASSKEY_NAME": "", + "PASSKEYS_DESCRIPTION": "", + "CREATED_AT": "", + "PASSKEY_LOGIN_FAILED": "", + "PASSKEY_LOGIN_URL_INVALID": "", + "PASSKEY_LOGIN_ERRORED": "", + "TRY_AGAIN": "", + "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" +} diff --git a/web/packages/next/locales/it-IT/translation.json b/web/packages/next/locales/it-IT/translation.json index 0d4298c29e..d935126f7c 100644 --- a/web/packages/next/locales/it-IT/translation.json +++ b/web/packages/next/locales/it-IT/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Ottieni 2 mesi gratis sui piani annuali", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - 
"FREE_PLAN_DESCRIPTION": "1 GB per 1 anno", + "free_plan_description": "{{storage}} per 1 anno", "CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/ko-KR/translation.json b/web/packages/next/locales/ko-KR/translation.json index b9709ee92e..cec77e0e40 100644 --- a/web/packages/next/locales/ko-KR/translation.json +++ b/web/packages/next/locales/ko-KR/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", "CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/nl-NL/translation.json b/web/packages/next/locales/nl-NL/translation.json index a364061a3d..47775c0c21 100644 --- a/web/packages/next/locales/nl-NL/translation.json +++ b/web/packages/next/locales/nl-NL/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Krijg 2 maanden gratis op jaarlijkse abonnementen", "POPULAR": "Populair", "FREE_PLAN_OPTION_LABEL": "Doorgaan met gratis account", - "FREE_PLAN_DESCRIPTION": "1 GB voor 1 jaar", + "free_plan_description": "{{storage}} voor 1 jaar", "CURRENT_USAGE": "Huidig gebruik is {{usage}}", "WEAK_DEVICE": "De webbrowser die u gebruikt is niet krachtig genoeg om uw foto's te versleutelen. 
Probeer in te loggen op uw computer, of download de Ente mobiel/desktop app.", "DRAG_AND_DROP_HINT": "Of sleep en plaats in het Ente venster", @@ -565,6 +565,9 @@ "IMAGE": "Afbeelding", "VIDEO": "Video", "LIVE_PHOTO": "Live foto", + "editor": { + "crop": "" + }, "CONVERT": "Converteren", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Weet u zeker dat u de editor wilt afsluiten?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Download uw bewerkte afbeelding of sla een kopie op in Ente om uw wijzigingen te behouden.", diff --git a/web/packages/next/locales/pt-BR/translation.json b/web/packages/next/locales/pt-BR/translation.json index 3edd83f6b7..5adb222bca 100644 --- a/web/packages/next/locales/pt-BR/translation.json +++ b/web/packages/next/locales/pt-BR/translation.json @@ -2,7 +2,7 @@ "HERO_SLIDE_1_TITLE": "
Backups privados
para as suas memórias
", "HERO_SLIDE_1": "Criptografia de ponta a ponta por padrão", "HERO_SLIDE_2_TITLE": "
Armazenado com segurança
em um abrigo avançado
", - "HERO_SLIDE_2": "Feito para ter logenvidade", + "HERO_SLIDE_2": "Feito para ter longevidade", "HERO_SLIDE_3_TITLE": "
Disponível
em qualquer lugar
", "HERO_SLIDE_3": "Android, iOS, Web, Desktop", "LOGIN": "Entrar", @@ -410,7 +410,7 @@ "YES_STOP_UPLOADS": "Sim, parar envios", "STOP_DOWNLOADS_HEADER": "Parar downloads?", "YES_STOP_DOWNLOADS": "Sim, parar downloads", - "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos as transferências em andamento?", + "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos os downloads em andamento?", "albums_one": "1 Álbum", "albums_other": "{{count, number}} Álbuns", "ALL_ALBUMS": "Todos os álbuns", @@ -428,7 +428,7 @@ "USED": "usado", "YOU": "Você", "FAMILY": "Família", - "FREE": "grátis", + "FREE": "livre", "OF": "de", "WATCHED_FOLDERS": "Pastas monitoradas", "NO_FOLDERS_ADDED": "Nenhuma pasta adicionada ainda!", @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Obtenha 2 meses gratuitos em planos anuais", "POPULAR": "Popular", "FREE_PLAN_OPTION_LABEL": "Continuar com teste gratuito", - "FREE_PLAN_DESCRIPTION": "1 GB por 1 ano", + "free_plan_description": "{{storage}} por 1 ano", "CURRENT_USAGE": "O uso atual é {{usage}}", "WEAK_DEVICE": "O navegador da web que você está usando não é poderoso o suficiente para criptografar suas fotos. 
Por favor, tente entrar para o ente no computador ou baixe o aplicativo móvel.", "DRAG_AND_DROP_HINT": "Ou arraste e solte na janela ente", @@ -556,7 +556,7 @@ "UNPIN_ALBUM": "Desafixar álbum", "DOWNLOAD_COMPLETE": "Download concluído", "DOWNLOADING_COLLECTION": "Fazendo download de {{name}}", - "DOWNLOAD_FAILED": "Falha ao baixar", + "DOWNLOAD_FAILED": "Falha no download", "DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} arquivos", "CHRISTMAS": "Natal", "CHRISTMAS_EVE": "Véspera de Natal", @@ -565,6 +565,9 @@ "IMAGE": "Imagem", "VIDEO": "Vídeo", "LIVE_PHOTO": "Fotos em movimento", + "editor": { + "crop": "Cortar" + }, "CONVERT": "Converter", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Tem certeza de que deseja fechar o editor?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Baixe sua imagem editada ou salve uma cópia para o ente para persistir nas alterações.", diff --git a/web/packages/next/locales/pt-PT/translation.json b/web/packages/next/locales/pt-PT/translation.json index d6751f32cd..981f33126a 100644 --- a/web/packages/next/locales/pt-PT/translation.json +++ b/web/packages/next/locales/pt-PT/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", "CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/ru-RU/translation.json b/web/packages/next/locales/ru-RU/translation.json index 68816b47b3..7861d339ac 100644 --- a/web/packages/next/locales/ru-RU/translation.json +++ b/web/packages/next/locales/ru-RU/translation.json @@ -168,7 +168,7 @@ "UPDATE_PAYMENT_METHOD": "Обновить платёжную информацию", "MONTHLY": "Ежемесячно", "YEARLY": "Ежегодно", - "update_subscription_title": "", + "update_subscription_title": "Подтвердить 
изменение плана", "UPDATE_SUBSCRIPTION_MESSAGE": "Хотите сменить текущий план?", "UPDATE_SUBSCRIPTION": "Изменить план", "CANCEL_SUBSCRIPTION": "Отменить подписку", @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "Получите 2 месяца бесплатно по годовым планам", "POPULAR": "Популярный", "FREE_PLAN_OPTION_LABEL": "Продолжайте пользоваться бесплатной пробной версией", - "FREE_PLAN_DESCRIPTION": "1 ГБ на 1 год", + "free_plan_description": "{{storage}} на 1 год", "CURRENT_USAGE": "Текущее использование составляет {{usage}}", "WEAK_DEVICE": "Используемый вами веб-браузер недостаточно мощный, чтобы зашифровать ваши фотографии. Пожалуйста, попробуйте войти в Ente на своем компьютере или загрузить мобильное/настольное приложение Ente.", "DRAG_AND_DROP_HINT": "Или перетащите в основное окно", @@ -565,6 +565,9 @@ "IMAGE": "Изображение", "VIDEO": "Видео", "LIVE_PHOTO": "Живое фото", + "editor": { + "crop": "" + }, "CONVERT": "Преобразовать", "CONFIRM_EDITOR_CLOSE_MESSAGE": "Вы уверены, что хотите закрыть редактор?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Загрузите отредактированное изображение или сохраните копию в ente, чтобы сохранить внесенные изменения.", @@ -620,6 +623,6 @@ "TRY_AGAIN": "Пробовать снова", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Следуйте инструкциям в вашем браузере, чтобы продолжить вход в систему.", "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля", - "autogenerated_first_album_name": "", - "autogenerated_default_album_name": "" + "autogenerated_first_album_name": "Мой первый альбом", + "autogenerated_default_album_name": "Новый альбом" } diff --git a/web/packages/next/locales/sv-SE/translation.json b/web/packages/next/locales/sv-SE/translation.json index 775bb5a605..2ec0352b0c 100644 --- a/web/packages/next/locales/sv-SE/translation.json +++ b/web/packages/next/locales/sv-SE/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", 
"CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "Bild", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/th-TH/translation.json b/web/packages/next/locales/th-TH/translation.json index b94891efdf..9f549eb49b 100644 --- a/web/packages/next/locales/th-TH/translation.json +++ b/web/packages/next/locales/th-TH/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", "CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/tr-TR/translation.json b/web/packages/next/locales/tr-TR/translation.json index b94891efdf..9f549eb49b 100644 --- a/web/packages/next/locales/tr-TR/translation.json +++ b/web/packages/next/locales/tr-TR/translation.json @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "", "POPULAR": "", "FREE_PLAN_OPTION_LABEL": "", - "FREE_PLAN_DESCRIPTION": "", + "free_plan_description": "", "CURRENT_USAGE": "", "WEAK_DEVICE": "", "DRAG_AND_DROP_HINT": "", @@ -565,6 +565,9 @@ "IMAGE": "", "VIDEO": "", "LIVE_PHOTO": "", + "editor": { + "crop": "" + }, "CONVERT": "", "CONFIRM_EDITOR_CLOSE_MESSAGE": "", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", diff --git a/web/packages/next/locales/zh-CN/translation.json b/web/packages/next/locales/zh-CN/translation.json index 9e10cdce86..4ac62c7964 100644 --- a/web/packages/next/locales/zh-CN/translation.json +++ b/web/packages/next/locales/zh-CN/translation.json @@ -7,7 +7,7 @@ "HERO_SLIDE_3": "安卓, iOS, 网页端, 桌面端", "LOGIN": "登录", "SIGN_UP": "注册", - "NEW_USER": "刚来到 Ente", + "NEW_USER": "初来 Ente", "EXISTING_USER": 
"现有用户", "ENTER_NAME": "输入名字", "PUBLIC_UPLOADER_NAME_MESSAGE": "请添加一个名字,以便您的朋友知晓该感谢谁拍摄了这些精美的照片!", @@ -449,7 +449,7 @@ "TWO_MONTHS_FREE": "在年度计划上免费获得 2 个月", "POPULAR": "流行的", "FREE_PLAN_OPTION_LABEL": "继续免费试用", - "FREE_PLAN_DESCRIPTION": "1 GB 1年", + "free_plan_description": "{{storage}} 1年", "CURRENT_USAGE": "当前使用量是 {{usage}}", "WEAK_DEVICE": "您使用的网络浏览器功能不够强大,无法加密您的照片。 请尝试在电脑上登录Ente,或下载Ente移动/桌面应用程序。", "DRAG_AND_DROP_HINT": "或者拖动并拖动到 Ente 窗口", @@ -565,6 +565,9 @@ "IMAGE": "图像", "VIDEO": "视频", "LIVE_PHOTO": "实况照片", + "editor": { + "crop": "裁剪" + }, "CONVERT": "转换", "CONFIRM_EDITOR_CLOSE_MESSAGE": "您确定要关闭编辑器吗?", "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "下载已编辑的图片或将副本保存到 Ente 以保留您的更改。", diff --git a/web/packages/next/log.ts b/web/packages/next/log.ts index f9ef7e5493..e69d22b07e 100644 --- a/web/packages/next/log.ts +++ b/web/packages/next/log.ts @@ -3,6 +3,19 @@ import { isDevBuild } from "./env"; import { logToDisk as webLogToDisk } from "./log-web"; import { workerBridge } from "./worker/worker-bridge"; +/** + * Whether logs go to disk or are always emitted to the console. + */ +let shouldLogToDisk = true; + +/** + * By default, logs get saved into a ring buffer in the browser's local storage. + * However, in some contexts, e.g. when we're running as the cast app, there is + * no mechanism for the user to retrieve these logs. So this function exists as + * a way to disable the on disk logging and always use the console. + */ +export const disableDiskLogs = () => (shouldLogToDisk = false); + /** * Write a {@link message} to the on-disk log. 
* @@ -45,14 +58,14 @@ const messageWithError = (message: string, e?: unknown) => { const logError = (message: string, e?: unknown) => { const m = `[error] ${messageWithError(message, e)}`; - if (isDevBuild) console.error(m); - logToDisk(m); + console.error(m); + if (shouldLogToDisk) logToDisk(m); }; const logWarn = (message: string, e?: unknown) => { const m = `[warn] ${messageWithError(message, e)}`; - if (isDevBuild) console.error(m); - logToDisk(m); + console.error(m); + if (shouldLogToDisk) logToDisk(m); }; const logInfo = (...params: unknown[]) => { @@ -60,8 +73,8 @@ const logInfo = (...params: unknown[]) => { .map((p) => (typeof p == "string" ? p : JSON.stringify(p))) .join(" "); const m = `[info] ${message}`; - if (isDevBuild) console.log(m); - logToDisk(m); + if (isDevBuild || !shouldLogToDisk) console.log(m); + if (shouldLogToDisk) logToDisk(m); }; const logDebug = (param: () => unknown) => { @@ -71,8 +84,8 @@ const logDebug = (param: () => unknown) => { /** * Ente's logger. * - * This is an object that provides three functions to log at the corresponding - * levels - error, info or debug. + * This is an object that provides functions to log at the corresponding levels: + * error, warn, info or debug. * * Whenever we need to save a log message to disk, * @@ -89,8 +102,7 @@ export default { * any arbitrary object that we obtain, say, when in a try-catch handler (in * JavaScript any arbitrary value can be thrown). * - * The log is written to disk. In development builds, the log is also - * printed to the browser console. + * The log is written to disk and printed to the browser console. */ error: logError, /** @@ -104,8 +116,10 @@ export default { * This is meant as a replacement of {@link console.log}, and takes an * arbitrary number of arbitrary parameters that it then serializes. * - * The log is written to disk. In development builds, the log is also - * printed to the browser console. + * The log is written to disk. 
However, if logging to disk is disabled by + * using {@link disableDiskLogs}, then the log is printed to the console. + * + * In development builds, the log is always printed to the browser console. */ info: logInfo, /** @@ -118,8 +132,8 @@ export default { * The function can return an arbitrary value which is serialized before * being logged. * - * This log is NOT written to disk. And it is printed to the browser - * console, but only in development builds. + * This log is NOT written to disk. It is printed to the browser console, + * but only in development builds. */ debug: logDebug, }; diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts index b4ef2b6b24..806a00cd5e 100644 --- a/web/packages/next/types/ipc.ts +++ b/web/packages/next/types/ipc.ts @@ -64,12 +64,9 @@ export interface Electron { selectDirectory: () => Promise; /** - * Clear any stored data. - * - * This is a coarse single shot cleanup, meant for use in clearing any - * Electron side state during logout. + * Perform any logout related cleanup of native side state. */ - clearStores: () => void; + logout: () => Promise; /** * Return the previously saved encryption key from persistent safe storage. @@ -260,7 +257,7 @@ export interface Electron { * This executes the command using a FFmpeg executable we bundle with our * desktop app. We also have a wasm FFmpeg wasm implementation that we use * when running on the web, which has a sibling function with the same - * parameters. See [Note: ffmpeg in Electron]. + * parameters. See [Note:FFmpeg in Electron]. * * @param command An array of strings, each representing one positional * parameter in the command to execute. Placeholders for the input, output @@ -280,9 +277,6 @@ export interface Electron { * just return its contents, for some FFmpeg command the extension matters * (e.g. conversion to a JPEG fails if the extension is arbitrary). 
* - * @param timeoutMS If non-zero, then abort and throw a timeout error if the - * ffmpeg command takes more than the given number of milliseconds. - * * @returns The contents of the output file produced by the ffmpeg command * (specified as {@link outputPathPlaceholder} in {@link command}). */ @@ -290,7 +284,6 @@ export interface Electron { command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ) => Promise; // - ML @@ -304,7 +297,9 @@ export interface Electron { * * @returns A CLIP embedding. */ - clipImageEmbedding: (jpegImageData: Uint8Array) => Promise; + computeCLIPImageEmbedding: ( + jpegImageData: Uint8Array, + ) => Promise; /** * Return a CLIP embedding of the given image if we already have the model @@ -326,7 +321,7 @@ export interface Electron { * * @returns A CLIP embedding. */ - clipTextEmbeddingIfAvailable: ( + computeCLIPTextEmbeddingIfAvailable: ( text: string, ) => Promise; @@ -339,34 +334,12 @@ export interface Electron { detectFaces: (input: Float32Array) => Promise; /** - * Return a MobileFaceNet embedding for the given face data. + * Return a MobileFaceNet embeddings for the given faces. * * Both the input and output are opaque binary data whose internal structure * is specific to our implementation and the model (MobileFaceNet) we use. */ - faceEmbedding: (input: Float32Array) => Promise; - - /** - * Return a face crop stored by a previous version of ML. - * - * [Note: Legacy face crops] - * - * Older versions of ML generated and stored face crops in a "face-crops" - * cache directory on the Electron side. For the time being, we have - * disabled the face search whilst we put finishing touches to it. However, - * it'll be nice to still show the existing faces that have been clustered - * for people who opted in to the older beta. - * - * So we retain the older "face-crops" disk cache, and use this method to - * serve faces from it when needed. 
- * - * @param faceID An identifier corresponding to which the face crop had been - * stored by the older version of our app. - * - * @returns the JPEG data of the face crop if a file is found for the given - * {@link faceID}, otherwise undefined. - */ - legacyFaceCrop: (faceID: string) => Promise; + computeFaceEmbeddings: (input: Float32Array) => Promise; // - Watch @@ -484,17 +457,6 @@ export interface Electron { * The returned paths are guaranteed to use POSIX separators ('/'). */ findFiles: (folderPath: string) => Promise; - - /** - * Stop watching all existing folder watches and remove any callbacks. - * - * This function is meant to be called when the user logs out. It stops - * all existing folder watches and forgets about any "on*" callback - * functions that have been registered. - * - * The persisted state itself gets cleared via {@link clearStores}. - */ - reset: () => Promise; }; // - Upload diff --git a/web/packages/next/worker/comlink-worker.ts b/web/packages/next/worker/comlink-worker.ts index 5929e5361b..b388cd413c 100644 --- a/web/packages/next/worker/comlink-worker.ts +++ b/web/packages/next/worker/comlink-worker.ts @@ -47,8 +47,8 @@ const workerBridge = { convertToJPEG: (imageData: Uint8Array) => ensureElectron().convertToJPEG(imageData), detectFaces: (input: Float32Array) => ensureElectron().detectFaces(input), - faceEmbedding: (input: Float32Array) => - ensureElectron().faceEmbedding(input), + computeFaceEmbeddings: (input: Float32Array) => + ensureElectron().computeFaceEmbeddings(input), }; export type WorkerBridge = typeof workerBridge; diff --git a/web/packages/shared/apps/constants.ts b/web/packages/shared/apps/constants.ts index d35a5e8c47..b679fb9123 100644 --- a/web/packages/shared/apps/constants.ts +++ b/web/packages/shared/apps/constants.ts @@ -14,6 +14,8 @@ export const CLIENT_PACKAGE_NAMES = new Map([ [APPS.ACCOUNTS, "io.ente.accounts.web"], ]); +export const clientPackageNamePhotosDesktop = "io.ente.photos.desktop"; + export const 
APP_TITLES = new Map([ [APPS.ALBUMS, "Ente Albums"], [APPS.PHOTOS, "Ente Photos"], diff --git a/web/packages/shared/apps/types.ts b/web/packages/shared/apps/types.ts index 0d5d1aa1a6..bd3a2d4c5c 100644 --- a/web/packages/shared/apps/types.ts +++ b/web/packages/shared/apps/types.ts @@ -7,6 +7,7 @@ export interface PageProps { showNavBar: (show: boolean) => void; isMobile: boolean; setDialogBoxAttributesV2: SetDialogBoxAttributesV2; + logout: () => void; }; appName: APPS; twoFactorType?: TwoFactorType; diff --git a/web/packages/shared/components/DialogBox/types.ts b/web/packages/shared/components/DialogBox/types.ts index 6d076fd5ab..08b52fe4c3 100644 --- a/web/packages/shared/components/DialogBox/types.ts +++ b/web/packages/shared/components/DialogBox/types.ts @@ -3,6 +3,10 @@ import { ButtonProps } from "@mui/material"; export interface DialogBoxAttributes { icon?: React.ReactNode; title?: string; + /** + * Set this to `true` to prevent the dialog from being closed when the user + * clicks the backdrop outside the dialog. + */ staticBackdrop?: boolean; nonClosable?: boolean; content?: any; diff --git a/web/packages/shared/hooks/useFileInput.tsx b/web/packages/shared/hooks/useFileInput.tsx index 71f027cefe..88c247ecc1 100644 --- a/web/packages/shared/hooks/useFileInput.tsx +++ b/web/packages/shared/hooks/useFileInput.tsx @@ -1,56 +1,71 @@ import { useCallback, useRef, useState } from "react"; interface UseFileInputParams { + /** + * If `true`, the file open dialog will ask the user to select directories. + * Otherwise it'll ask the user to select files (default). + */ directory?: boolean; + /** + * If specified, it'll restrict the type of files that the user can select + * by setting the "accept" attribute of the underlying HTML input element we + * use to surface the file selector dialog. For value of accept can be an + * extension or a MIME type (See + * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept). 
+ */ accept?: string; } +interface UseFileInputResult { + /** + * A function to call to get the properties that should be passed to a dummy + * `input` element that needs to be created to anchor the select file + * dialog. This input HTML element is not going to be visible, but it needs + * to be part of the DOM for {@link openSelector} to have effect. + */ + getInputProps: () => React.HTMLAttributes; + /** + * A function that can be called to open the select file / directory dialog. + */ + openSelector: () => void; + /** + * The list of {@link File}s that the user selected. + * + * This will be a list even if the user selected directories - in that case, + * it will be the recursive list of files within this directory. + */ + selectedFiles: File[]; +} + /** - * Return three things: + * Wrap a open file selector into an easy to use package. * - * - A function that can be called to trigger the showing of the select file / - * directory dialog. + * Returns a {@link UseFileInputResult} which contains a function to get the + * props for an input element, a function to open the file selector, and the + * list of selected files. * - * - The list of properties that should be passed to a dummy `input` element - * that needs to be created to anchor the select file dialog. This input HTML - * element is not going to be visible, but it needs to be part of the DOM fro - * the open trigger to have effect. - * - * - The list of files that the user selected. This will be a list even if the - * user selected directories - in that case, it will be the recursive list of - * files within this directory. - * - * @param param0 - * - * - If {@link directory} is true, the file open dialog will ask the user to - * select directories. Otherwise it'll ask the user to select files. 
- * - * - If {@link accept} is specified, it'll restrict the type of files that the - * user can select by setting the "accept" attribute of the underlying HTML - * input element we use to surface the file selector dialog. For value of - * accept can be an extension or a MIME type (See - * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept). + * See the documentation of {@link UseFileInputParams} and + * {@link UseFileInputResult} for more details. */ -export default function useFileInput({ +export const useFileInput = ({ directory, accept, -}: UseFileInputParams) { +}: UseFileInputParams): UseFileInputResult => { const [selectedFiles, setSelectedFiles] = useState([]); const inputRef = useRef(); - const openSelectorDialog = useCallback(() => { + const openSelector = useCallback(() => { if (inputRef.current) { inputRef.current.value = null; inputRef.current.click(); } }, []); - const handleChange: React.ChangeEventHandler = async ( + const handleChange: React.ChangeEventHandler = ( event, ) => { - if (!!event.target && !!event.target.files) { - setSelectedFiles([...event.target.files]); - } + const files = event.target?.files; + if (files) setSelectedFiles([...files]); }; // [Note: webkitRelativePath] @@ -78,12 +93,8 @@ export default function useFileInput({ onChange: handleChange, ...(accept ? 
{ accept } : {}), }), - [], + [directoryOpts, accept, handleChange], ); - return { - getInputProps, - open: openSelectorDialog, - selectedFiles: selectedFiles, - }; -} + return { getInputProps, openSelector, selectedFiles }; +}; diff --git a/web/packages/shared/network/HTTPService.ts b/web/packages/shared/network/HTTPService.ts index eda0709f55..7ef99e0d74 100644 --- a/web/packages/shared/network/HTTPService.ts +++ b/web/packages/shared/network/HTTPService.ts @@ -28,8 +28,8 @@ class HTTPService { const responseData = response.data; log.error( `HTTP Service Error - ${JSON.stringify({ - url: config.url, - method: config.method, + url: config?.url, + method: config?.method, xRequestId: response.headers["x-request-id"], httpStatus: response.status, errMessage: responseData.message, diff --git a/web/packages/shared/next/pages/404.tsx b/web/packages/shared/next/pages/404.tsx index 7cc4a6ff09..8e6e06cfa3 100644 --- a/web/packages/shared/next/pages/404.tsx +++ b/web/packages/shared/next/pages/404.tsx @@ -1,19 +1,30 @@ -import { VerticallyCentered } from "@ente/shared/components/Container"; -import { t } from "i18next"; -import { useEffect, useState } from "react"; +import { PAGES } from "@ente/accounts/constants/pages"; +import { useRouter } from "next/router"; +import { useEffect } from "react"; -import { PageProps } from "@ente/shared/apps/types"; -import EnteSpinner from "@ente/shared/components/EnteSpinner"; +const Page: React.FC = () => { + // [Note: 404 back to home] + // + // In the desktop app, if the user presses the refresh button when the URL + // has an attached query parameter, e.g. "/gallery?collectionId=xxx", then + // the code in next-electron-server blindly appends the html extension to + // this URL, resulting in it trying to open "gallery?collectionId=xxx.html" + // instead of "gallery.html". It doesn't find such a file, causing it open + // "404.html" (the static file generated from this file). + // + // One way around is to patch the package, e.g. 
+ // https://github.com/ente-io/next-electron-server/pull/1/files + // + // However, redirecting back to the root is arguably a better fallback in + // all cases (even when running on our website), since our app is a SPA. + + const router = useRouter(); -export default function NotFound({ appContext }: PageProps) { - const [loading, setLoading] = useState(true); useEffect(() => { - appContext.showNavBar(true); - setLoading(false); + router.push(PAGES.ROOT); }, []); - return ( - - {loading ? : t("NOT_FOUND")} - - ); -} + + return <>; +}; + +export default Page; diff --git a/web/packages/shared/storage/localForage/index.ts b/web/packages/shared/storage/localForage.ts similarity index 100% rename from web/packages/shared/storage/localForage/index.ts rename to web/packages/shared/storage/localForage.ts diff --git a/web/packages/shared/storage/localForage/helpers.ts b/web/packages/shared/storage/localForage/helpers.ts deleted file mode 100644 index 913b9f52f9..0000000000 --- a/web/packages/shared/storage/localForage/helpers.ts +++ /dev/null @@ -1,5 +0,0 @@ -import localForage from "."; - -export const clearFiles = async () => { - await localForage.clear(); -}; diff --git a/web/packages/shared/storage/localStorage/index.ts b/web/packages/shared/storage/localStorage/index.ts index 70b9687cdc..c6ec3f57f4 100644 --- a/web/packages/shared/storage/localStorage/index.ts +++ b/web/packages/shared/storage/localStorage/index.ts @@ -7,7 +7,6 @@ export enum LS_KEYS { ORIGINAL_KEY_ATTRIBUTES = "originalKeyAttributes", SUBSCRIPTION = "subscription", FAMILY_DATA = "familyData", - PLANS = "plans", IS_FIRST_LOGIN = "isFirstLogin", JUST_SIGNED_UP = "justSignedUp", SHOW_BACK_BUTTON = "showBackButton", diff --git a/web/packages/shared/utils/index.ts b/web/packages/shared/utils/index.ts index 568ec5cc40..8b46f62670 100644 --- a/web/packages/shared/utils/index.ts +++ b/web/packages/shared/utils/index.ts @@ -1,11 +1,4 @@ -/** - * Wait for {@link ms} milliseconds - * - * This function is a 
promisified `setTimeout`. It returns a promise that - * resolves after {@link ms} milliseconds. - */ -export const wait = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); +import { wait } from "@/utils/promise"; export function downloadAsFile(filename: string, content: string) { const file = new Blob([content], { @@ -52,23 +45,3 @@ export async function retryAsyncFunction( } } } - -/** - * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it - * does not resolve within {@link timeoutMS}, then reject with a timeout error. - */ -export const withTimeout = async (promise: Promise, ms: number) => { - let timeoutId: ReturnType; - const rejectOnTimeout = new Promise((_, reject) => { - timeoutId = setTimeout( - () => reject(new Error("Operation timed out")), - ms, - ); - }); - const promiseAndCancelTimeout = async () => { - const result = await promise; - clearTimeout(timeoutId); - return result; - }; - return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); -}; diff --git a/web/packages/utils/array.ts b/web/packages/utils/array.ts index 660aef6795..10030b189e 100644 --- a/web/packages/utils/array.ts +++ b/web/packages/utils/array.ts @@ -13,3 +13,18 @@ export const shuffled = (xs: T[]) => .map((x) => [Math.random(), x]) .sort() .map(([, x]) => x) as T[]; + +/** + * Return the first non-empty string from the given list of strings. + * + * This function is needed because the `a ?? b` idiom doesn't do what you'd + * expect when a is "". Perhaps the behaviour is wrong, perhaps the expecation + * is wrong; this function papers over the differences. + * + * If none of the strings are non-empty, or if there are no strings in the given + * array, return undefined. 
+ */ +export const firstNonEmpty = (ss: (string | undefined)[]) => { + for (const s of ss) if (s && s.length > 0) return s; + return undefined; +}; diff --git a/web/packages/utils/ensure.ts b/web/packages/utils/ensure.ts index 93706bfb61..41639ea2b5 100644 --- a/web/packages/utils/ensure.ts +++ b/web/packages/utils/ensure.ts @@ -3,7 +3,7 @@ */ export const ensure = (v: T | null | undefined): T => { if (v === null) throw new Error("Required value was null"); - if (v === undefined) throw new Error("Required value was not found"); + if (v === undefined) throw new Error("Required value was undefined"); return v; }; diff --git a/web/packages/utils/promise.ts b/web/packages/utils/promise.ts new file mode 100644 index 0000000000..34f821b6dd --- /dev/null +++ b/web/packages/utils/promise.ts @@ -0,0 +1,32 @@ +/** + * Wait for {@link ms} milliseconds + * + * This function is a promisified `setTimeout`. It returns a promise that + * resolves after {@link ms} milliseconds. + */ +export const wait = (ms: number) => + new Promise((resolve) => setTimeout(resolve, ms)); + +/** + * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it + * does not resolve within {@link timeoutMS}, then reject with a timeout error. + * + * Note that this does not abort {@link promise} itself - it will still get + * resolved to completion, just its result will be ignored if it gets resolved + * after we've already timed out. 
+ */ +export const withTimeout = async (promise: Promise, ms: number) => { + let timeoutId: ReturnType; + const rejectOnTimeout = new Promise((_, reject) => { + timeoutId = setTimeout( + () => reject(new Error("Operation timed out")), + ms, + ); + }); + const promiseAndCancelTimeout = async () => { + const result = await promise; + clearTimeout(timeoutId); + return result; + }; + return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); +}; diff --git a/web/yarn.lock b/web/yarn.lock index 972b14df1c..bb12308316 100644 --- a/web/yarn.lock +++ b/web/yarn.lock @@ -1015,6 +1015,11 @@ resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.14.tgz#319b63ad6df705ee2a65a73ef042c8271e696613" integrity sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg== +"@types/heic-convert@^1.2.3": + version "1.2.3" + resolved "https://registry.yarnpkg.com/@types/heic-convert/-/heic-convert-1.2.3.tgz#0705f36e467e7b6180806edd0b3f1e673514ff8c" + integrity sha512-5LJ2fGuVk/gnOLihoT56xJwrXxfnNepGvrHwlW5ZtT3HS4jO1AqBaAHCxXUpnY9UaD3zYcyxXMRM2fNN1AFF/Q== + "@types/hoist-non-react-statics@^3.3.1": version "3.3.5" resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.5.tgz#dab7867ef789d87e2b4b0003c9d65c49cc44a494" @@ -1645,7 +1650,7 @@ chalk@^2.4.2: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.0.0: +chalk@^4.0.0, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -1665,6 +1670,15 @@ client-only@0.0.1: resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1" integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA== +cliui@^8.0.1: + version "8.0.1" + resolved 
"https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clsx@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.2.1.tgz#0ddc4a20a549b59c93a4116bb26f5294ca17dc12" @@ -1721,6 +1735,21 @@ concat-map@0.0.1: resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== +concurrently@^8.2.2: + version "8.2.2" + resolved "https://registry.yarnpkg.com/concurrently/-/concurrently-8.2.2.tgz#353141985c198cfa5e4a3ef90082c336b5851784" + integrity sha512-1dP4gpXFhei8IOtlXRE/T/4H88ElHgTiUzh71YUmtjTEHMSRS2Z/fgOxHSxxusGHogsRfxNq1vyAwxSC+EVyDg== + dependencies: + chalk "^4.1.2" + date-fns "^2.30.0" + lodash "^4.17.21" + rxjs "^7.8.1" + shell-quote "^1.8.1" + spawn-command "0.0.2" + supports-color "^8.1.1" + tree-kill "^1.2.2" + yargs "^17.7.2" + convert-source-map@^1.5.0: version "1.9.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" @@ -1793,7 +1822,7 @@ data-view-byte-offset@^1.0.0: es-errors "^1.3.0" is-data-view "^1.0.1" -date-fns@^2: +date-fns@^2, date-fns@^2.30.0: version "2.30.0" resolved "https://registry.yarnpkg.com/date-fns/-/date-fns-2.30.0.tgz#f367e644839ff57894ec6ac480de40cae4b0f4d0" integrity sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw== @@ -1867,11 +1896,6 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== 
-density-clustering@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/density-clustering/-/density-clustering-1.3.0.tgz#dc9f59c8f0ab97e1624ac64930fd3194817dcac5" - integrity sha512-icpmBubVTwLnsaor9qH/4tG5+7+f61VcqMN3V3pm9sxxSCt2Jcs0zWOgwZW9ARJYaKD3FumIgHiMOcIMRRAzFQ== - dequal@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/dequal/-/dequal-2.0.3.tgz#2644214f1997d39ed0ee0ece72335490a7ac67be" @@ -2290,15 +2314,20 @@ eslint-plugin-jsx-a11y@^6.7.1: object.entries "^1.1.7" object.fromentries "^2.0.7" -"eslint-plugin-react-hooks@^4.5.0 || 5.0.0-canary-7118f5dd7-20230705", eslint-plugin-react-hooks@^4.6: +"eslint-plugin-react-hooks@^4.5.0 || 5.0.0-canary-7118f5dd7-20230705": version "4.6.0" resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== -eslint-plugin-react-refresh@^0.4.6: - version "0.4.6" - resolved "https://registry.yarnpkg.com/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.6.tgz#e8e8accab681861baed00c5c12da70267db0936f" - integrity sha512-NjGXdm7zgcKRkKMua34qVO9doI7VOxZ6ancSvBELJSSoX97jyndXcSoa8XBh69JoB31dNz3EEzlMcizZl7LaMA== +eslint-plugin-react-hooks@^4.6: + version "4.6.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.2.tgz#c829eb06c0e6f484b3fbb85a97e57784f328c596" + integrity sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ== + +eslint-plugin-react-refresh@^0.4.7: + version "0.4.7" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.7.tgz#1f597f9093b254f10ee0961c139a749acb19af7d" + integrity sha512-yrj+KInFmwuQS2UQcg1SF83ha1tuHC1jMQbRNyuWtlEzzKRDgAl7L4Yp4NlDUZTZNlWvHEzOtJhMi40R7JxcSw== eslint-plugin-react@^7.33.2: version "7.33.2" @@ -2503,12 +2532,12 @@ 
file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" -file-selector@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.4.0.tgz#59ec4f27aa5baf0841e9c6385c8386bef4d18b17" - integrity sha512-iACCiXeMYOvZqlF1kTiYINzgepRBymz1wwjiuup9u9nayhb6g4fSwiyJ/6adli+EPwrWtpgQAh2PoS7HukEGEg== +file-selector@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.6.0.tgz#fa0a8d9007b829504db4d07dd4de0310b65287dc" + integrity sha512-QlZ5yJC0VxHxQQsQhXvBaC7VRJ2uaxTf+Tfpu4Z/OcVQJVpZO+DGU0rkoVW5ce2SccxugvpBJoMvUs59iILYdw== dependencies: - tslib "^2.0.3" + tslib "^2.4.0" file-type@16.5.4: version "16.5.4" @@ -2631,6 +2660,11 @@ gensync@^1.0.0-beta.2: resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" @@ -2839,7 +2873,7 @@ hdbscan@0.0.1-alpha.5: dependencies: kd-tree-javascript "^1.0.3" -heic-convert@^2.0.0: +heic-convert@^2.1: version "2.1.0" resolved "https://registry.yarnpkg.com/heic-convert/-/heic-convert-2.1.0.tgz#7f764529e37591ae263ef49582d1d0c13491526e" integrity sha512-1qDuRvEHifTVAj3pFIgkqGgJIr0M3X7cxEPjEp0oG4mo8GFjq99DpCo8Eg3kg17Cy0MTjxpFdoBHOatj7ZVKtg== @@ -3433,7 +3467,7 @@ mime-db@1.52.0: resolved 
"https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== -mime-types@^2.1.12, mime-types@^2.1.35: +mime-types@^2.1.12: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== @@ -3494,7 +3528,7 @@ ml-array-rescale@^1.3.7: ml-array-max "^1.2.4" ml-array-min "^1.2.3" -ml-matrix@^6.10.4: +ml-matrix@^6.11: version "6.11.0" resolved "https://registry.yarnpkg.com/ml-matrix/-/ml-matrix-6.11.0.tgz#3cf2260ef04cbb8e0e0425e71d200f5cbcf82772" integrity sha512-7jr9NmFRkaUxbKslfRu3aZOjJd2LkSitCGv+QH9PF0eJoEG7jIpjXra1Vw8/kgao8+kHCSsJONG6vfWmXQ+/Eg== @@ -3821,12 +3855,12 @@ prettier-plugin-organize-imports@^3.2: resolved "https://registry.yarnpkg.com/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.4.tgz#77967f69d335e9c8e6e5d224074609309c62845e" integrity sha512-6m8WBhIp0dfwu0SkgfOxJqh+HpdyfqSSLfKKRZSFbDuEQXDDndb8fTpRWkUrX/uBenkex3MgnVk0J3b3Y5byog== -prettier-plugin-packagejson@^2.4: - version "2.4.12" - resolved "https://registry.yarnpkg.com/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.4.12.tgz#eeb917dad83ae42d0caccc9f26d3728b5c4f2434" - integrity sha512-hifuuOgw5rHHTdouw9VrhT8+Nd7UwxtL1qco8dUfd4XUFQL6ia3xyjSxhPQTsGnSYFraTWy5Omb+MZm/OWDTpQ== +prettier-plugin-packagejson@^2.5: + version "2.5.0" + resolved "https://registry.yarnpkg.com/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.5.0.tgz#23d2cb8b1f7840702d35e3a5078e564ea0bc63e0" + integrity sha512-6XkH3rpin5QEQodBSVNg+rBo4r91g/1mCaRwS1YGdQJZ6jwqrg2UchBsIG9tpS1yK1kNBvOt84OILsX8uHzBGg== dependencies: - sort-package-json "2.8.0" + sort-package-json "2.10.0" synckit "0.9.0" prettier@^3: @@ -3887,13 +3921,13 @@ react-dom@^18: loose-envify "^1.1.0" scheduler 
"^0.23.0" -react-dropzone@^11.2.4: - version "11.7.1" - resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-11.7.1.tgz#3851bb75b26af0bf1b17ce1449fd980e643b9356" - integrity sha512-zxCMwhfPy1olUEbw3FLNPLhAm/HnaYH5aELIEglRbqabizKAdHs0h+WuyOpmA+v1JXn0++fpQDdNfUagWt5hJQ== +react-dropzone@^14.2: + version "14.2.3" + resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-14.2.3.tgz#0acab68308fda2d54d1273a1e626264e13d4e84b" + integrity sha512-O3om8I+PkFKbxCukfIR3QAGftYXDZfOE2N1mr/7qebQJHs7U+/RSL/9xomJNpRg9kM5h9soQSdf0Gc7OHF5Fug== dependencies: attr-accept "^2.2.2" - file-selector "^0.4.0" + file-selector "^0.6.0" prop-types "^15.8.1" react-fast-compare@^2.0.1: @@ -4046,6 +4080,11 @@ regexp.prototype.flags@^1.5.0, regexp.prototype.flags@^1.5.2: es-errors "^1.3.0" set-function-name "^2.0.1" +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + resolve-from@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" @@ -4122,6 +4161,13 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" +rxjs@^7.8.1: + version "7.8.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== + dependencies: + tslib "^2.1.0" + safe-array-concat@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.0.tgz#8d0cae9cb806d6d1c06e08ab13d847293ebe0692" @@ -4231,6 +4277,11 @@ shebang-regex@^3.0.0: resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity 
sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== +shell-quote@^1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" + integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== + side-channel@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.5.tgz#9a84546599b48909fb6af1211708d23b1946221b" @@ -4285,10 +4336,10 @@ sort-object-keys@^1.1.3: resolved "https://registry.yarnpkg.com/sort-object-keys/-/sort-object-keys-1.1.3.tgz#bff833fe85cab147b34742e45863453c1e190b45" integrity sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg== -sort-package-json@2.8.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/sort-package-json/-/sort-package-json-2.8.0.tgz#6a46439ad0fef77f091e678e103f03ecbea575c8" - integrity sha512-PxeNg93bTJWmDGnu0HADDucoxfFiKkIr73Kv85EBThlI1YQPdc0XovBgg2llD0iABZbu2SlKo8ntGmOP9wOj/g== +sort-package-json@2.10.0: + version "2.10.0" + resolved "https://registry.yarnpkg.com/sort-package-json/-/sort-package-json-2.10.0.tgz#6be07424bf3b7db9fbb1bdd69e7945f301026d8a" + integrity sha512-MYecfvObMwJjjJskhxYfuOADkXp1ZMMnCFC8yhp+9HDsk7HhR336hd7eiBs96lTXfiqmUNI+WQCeCMRBhl251g== dependencies: detect-indent "^7.0.1" detect-newline "^4.0.0" @@ -4296,6 +4347,7 @@ sort-package-json@2.8.0: git-hooks-list "^3.0.0" globby "^13.1.2" is-plain-obj "^4.1.0" + semver "^7.6.0" sort-object-keys "^1.1.3" source-map-js@^1.0.2: @@ -4313,13 +4365,17 @@ source-map@^0.5.7: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== +spawn-command@0.0.2: + version "0.0.2" + resolved 
"https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2.tgz#9544e1a43ca045f8531aac1a48cb29bdae62338e" + integrity sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ== + streamsearch@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0: - name string-width-cjs +"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -4497,6 +4553,13 @@ supports-color@^7.1.0: dependencies: has-flag "^4.0.0" +supports-color@^8.1.1: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" @@ -4565,11 +4628,16 @@ tr46@~0.0.3: resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== -transformation-matrix@^2.15.0: +transformation-matrix@^2.16: version "2.16.1" resolved "https://registry.yarnpkg.com/transformation-matrix/-/transformation-matrix-2.16.1.tgz#4a2de06331b94ae953193d1b9a5ba002ec5f658a" integrity 
sha512-tdtC3wxVEuzU7X/ydL131Q3JU5cPMEn37oqVLITjRDSDsnSHVFzW2JiCLfZLIQEgWzZHdSy3J6bZzvKEN24jGA== +tree-kill@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc" + integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A== + truncate-utf8-bytes@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz#405923909592d56f78a5818434b0b78489ca5f2b" @@ -4592,7 +4660,7 @@ tsconfig-paths@^3.15.0: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^2.0.0, tslib@^2.0.3, tslib@^2.4.0, tslib@^2.6.2: +tslib@^2.0.0, tslib@^2.1.0, tslib@^2.4.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== @@ -4841,7 +4909,7 @@ which@^2.0.1: dependencies: isexe "^2.0.0" -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -4876,6 +4944,11 @@ xml-js@^1.6.11: dependencies: sax "^1.2.4" +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + yallist@^3.0.2: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" @@ -4891,6 +4964,24 @@ yaml@^1.10.0: resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity 
sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== +yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs@^17.7.2: + version "17.7.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yocto-queue@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"