diff --git a/.github/workflows/auth-crowdin.yml b/.github/workflows/auth-crowdin.yml index 811def9396..bd92f14591 100644 --- a/.github/workflows/auth-crowdin.yml +++ b/.github/workflows/auth-crowdin.yml @@ -30,7 +30,7 @@ jobs: upload_sources: true upload_translations: false download_translations: true - localization_branch_name: crowdin-translations-auth + localization_branch_name: translations/auth create_pull_request: true skip_untranslated_strings: true pull_request_title: "[auth] New translations" diff --git a/.github/workflows/auth-release.yml b/.github/workflows/auth-release.yml index cc3e598e36..174b6c1d33 100644 --- a/.github/workflows/auth-release.yml +++ b/.github/workflows/auth-release.yml @@ -17,8 +17,8 @@ name: "Release (auth)" # We use a suffix like `-test` to indicate that these are test tags, and that # they belong to a pre-release. # -# If you need to do multiple tests, add a +x at the end of the tag. e.g. -# `auth-v1.2.3-test+1`. +# If you need to do multiple tests, add a .x at the end of the tag. e.g. +# `auth-v1.2.3-test.1`. # # Once the testing is done, also delete the tag(s) please. 
@@ -85,7 +85,7 @@ jobs: - name: Install dependencies for desktop build run: | sudo apt-get update -y - sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 + sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu' - name: Install appimagetool diff --git a/.github/workflows/desktop-lint.yml b/.github/workflows/desktop-lint.yml new file mode 100644 index 0000000000..0b8263f3d3 --- /dev/null +++ b/.github/workflows/desktop-lint.yml @@ -0,0 +1,30 @@ +name: "Lint (desktop)" + +on: + # Run on every push to a branch other than main that changes desktop/ + push: + branches-ignore: [main, "deploy/**"] + paths: + - "desktop/**" + - ".github/workflows/desktop-lint.yml" + +jobs: + lint: + runs-on: ubuntu-latest + defaults: + run: + working-directory: desktop + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "desktop/yarn.lock" + + - run: yarn install + + - run: yarn lint diff --git a/.github/workflows/mobile-crowdin.yml b/.github/workflows/mobile-crowdin.yml index 5c52b59ad1..556ac45f24 100644 --- a/.github/workflows/mobile-crowdin.yml +++ b/.github/workflows/mobile-crowdin.yml @@ -30,7 +30,7 @@ jobs: upload_sources: true upload_translations: false download_translations: true - localization_branch_name: crowdin-translations-mobile + localization_branch_name: translations/mobile create_pull_request: true skip_untranslated_strings: true pull_request_title: "[mobile] New translations" diff --git a/.github/workflows/mobile-internal-release.yml 
b/.github/workflows/mobile-internal-release.yml new file mode 100644 index 0000000000..4ee7367424 --- /dev/null +++ b/.github/workflows/mobile-internal-release.yml @@ -0,0 +1,57 @@ +name: "Internal Release - Photos" + +on: + workflow_dispatch: # Allow manually running the action + +env: + FLUTTER_VERSION: "3.19.3" + +jobs: + build: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: mobile + + steps: + - name: Checkout code and submodules + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup JDK 17 + uses: actions/setup-java@v1 + with: + java-version: 17 + + - name: Install Flutter ${{ env.FLUTTER_VERSION }} + uses: subosito/flutter-action@v2 + with: + channel: "stable" + flutter-version: ${{ env.FLUTTER_VERSION }} + cache: true + + - name: Setup keys + uses: timheuer/base64-to-file@v1 + with: + fileName: "keystore/ente_photos_key.jks" + encodedString: ${{ secrets.SIGNING_KEY_PHOTOS }} + + - name: Build PlayStore AAB + run: | + flutter build appbundle --release --flavor playstore + env: + SIGNING_KEY_PATH: "/home/runner/work/_temp/keystore/ente_photos_key.jks" + SIGNING_KEY_ALIAS: ${{ secrets.SIGNING_KEY_ALIAS_PHOTOS }} + SIGNING_KEY_PASSWORD: ${{ secrets.SIGNING_KEY_PASSWORD_PHOTOS }} + SIGNING_STORE_PASSWORD: ${{ secrets.SIGNING_STORE_PASSWORD_PHOTOS }} + + - name: Upload AAB to PlayStore + uses: r0adkll/upload-google-play@v1 + with: + serviceAccountJsonPlainText: ${{ secrets.SERVICE_ACCOUNT_JSON }} + packageName: io.ente.photos + releaseFiles: mobile/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab + track: internal + changesNotSentForReview: true diff --git a/.github/workflows/mobile-release.yml b/.github/workflows/mobile-release.yml index 0f45df751d..6211f2c262 100644 --- a/.github/workflows/mobile-release.yml +++ b/.github/workflows/mobile-release.yml @@ -9,7 +9,7 @@ on: - "photos-v*" env: - FLUTTER_VERSION: "3.19.5" + FLUTTER_VERSION: "3.19.3" jobs: build: @@ -25,6 +25,11 @@ jobs: with: submodules: 
recursive + - name: Setup JDK 17 + uses: actions/setup-java@v1 + with: + java-version: 17 + - name: Install Flutter ${{ env.FLUTTER_VERSION }} uses: subosito/flutter-action@v2 with: diff --git a/.github/workflows/web-crowdin-update.yml b/.github/workflows/web-crowdin-update.yml new file mode 100644 index 0000000000..63a643cfcf --- /dev/null +++ b/.github/workflows/web-crowdin-update.yml @@ -0,0 +1,39 @@ +name: "Update Crowdin translations (web)" + +# This is a variant of web-crowdin.yml that also uploads the translated strings +# (in addition to the source strings). This allows us to change the strings in +# our source code for an automated refactoring (e.g. renaming a key), and then +# run this workflow to update the data in Crowdin taking our source code as the +# source of truth. + +on: + # Only allow running manually. + workflow_dispatch: + +jobs: + synchronize-with-crowdin: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Crowdin's action + uses: crowdin/github-action@v1 + with: + base_path: "web/" + config: "web/crowdin.yml" + upload_sources: true + # This is what differs from web-crowdin.yml + upload_translations: true + download_translations: true + localization_branch_name: translations/web + create_pull_request: true + skip_untranslated_strings: true + pull_request_title: "[web] Updated translations" + pull_request_body: "Updated translations from [Crowdin](https://crowdin.com/project/ente-photos-web)" + pull_request_base_branch_name: "main" + project_id: 569613 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }} diff --git a/.github/workflows/web-crowdin.yml b/.github/workflows/web-crowdin.yml index d986850653..b20b19ce3f 100644 --- a/.github/workflows/web-crowdin.yml +++ b/.github/workflows/web-crowdin.yml @@ -36,7 +36,7 @@ jobs: upload_sources: true upload_translations: false download_translations: true - localization_branch_name: 
crowdin-translations-web + localization_branch_name: translations/web create_pull_request: true skip_untranslated_strings: true pull_request_title: "[web] New translations" diff --git a/.github/workflows/web-deploy-accounts.yml b/.github/workflows/web-deploy-accounts.yml index 61411cac6f..33da5ee6f1 100644 --- a/.github/workflows/web-deploy-accounts.yml +++ b/.github/workflows/web-deploy-accounts.yml @@ -3,7 +3,7 @@ name: "Deploy (accounts)" on: push: # Run workflow on pushes to the deploy/accounts - branches: [deploy/accounts] + branches: [deploy/accounts, deploy-f/accounts] jobs: deploy: diff --git a/.github/workflows/web-deploy-cast.yml b/.github/workflows/web-deploy-cast.yml index c5bbca9542..01e17486d0 100644 --- a/.github/workflows/web-deploy-cast.yml +++ b/.github/workflows/web-deploy-cast.yml @@ -3,7 +3,7 @@ name: "Deploy (cast)" on: push: # Run workflow on pushes to the deploy/cast - branches: [deploy/cast] + branches: [deploy/cast, deploy-f/cast] jobs: deploy: diff --git a/auth/lib/l10n/arb/app_ar.arb b/auth/lib/l10n/arb/app_ar.arb index 68bd38900e..f9d37c7ba9 100644 --- a/auth/lib/l10n/arb/app_ar.arb +++ b/auth/lib/l10n/arb/app_ar.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "المصدِّر", "codeSecretKeyHint": "الرمز السري", "codeAccountHint": "الحساب (you@domain.com)", - "accountKeyType": "نوع المفتاح", "sessionExpired": "انتهت صلاحية الجلسة", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_de.arb b/auth/lib/l10n/arb/app_de.arb index f3ea23b512..0c4d29eaf3 100644 --- a/auth/lib/l10n/arb/app_de.arb +++ b/auth/lib/l10n/arb/app_de.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Aussteller", "codeSecretKeyHint": "Geheimer Schlüssel", "codeAccountHint": "Konto (you@domain.com)", - "accountKeyType": "Art des Schlüssels", "sessionExpired": "Sitzung abgelaufen", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" @@ -78,12 
+77,14 @@ "data": "Datei", "importCodes": "Codes importieren", "importTypePlainText": "Klartext", + "importTypeEnteEncrypted": "Verschlüsselter Ente-Export", "passwordForDecryptingExport": "Passwort um den Export zu entschlüsseln", "passwordEmptyError": "Passwort kann nicht leer sein", "importFromApp": "Importiere Codes von {appName}", "importGoogleAuthGuide": "Exportiere deine Accounts von Google Authenticator zu einem QR-Code, durch die \"Konten übertragen\" Option. Scanne den QR-Code danach mit einem anderen Gerät.\n\nTipp: Du kannst die Kamera eines Laptops verwenden, um ein Foto den dem QR-Code zu erstellen.", "importSelectJsonFile": "Wähle eine JSON-Datei", "importSelectAppExport": "{appName} Exportdatei auswählen", + "importEnteEncGuide": "Wähle die von Ente exportierte, verschlüsselte JSON-Datei", "importRaivoGuide": "Verwenden Sie die Option \"Export OTPs to Zip archive\" in den Raivo-Einstellungen.\n\nEntpacken Sie die Zip-Datei und importieren Sie die JSON-Datei.", "importBitwardenGuide": "Verwenden Sie die Option \"Tresor exportieren\" innerhalb der Bitwarden Tools und importieren Sie die unverschlüsselte JSON-Datei.", "importAegisGuide": "Verwenden Sie die Option \"Tresor exportieren\" in den Aegis-Einstellungen.\n\nFalls Ihr Tresor verschlüsselt ist, müssen Sie das Passwort für den Tresor eingeben, um ihn zu entschlüsseln.", @@ -121,12 +122,14 @@ "suggestFeatures": "Features vorschlagen", "faq": "FAQ", "faq_q_1": "Wie sicher ist Auth?", + "faq_a_1": "Alle Codes, die du über Auth sicherst, werden Ende-zu-Ende-verschlüsselt gespeichert. Das bedeutet, dass nur du auf deine Codes zugreifen kannst. 
Unsere Anwendungen sind quelloffen und unsere Kryptografie wurde extern geprüft.", "faq_q_2": "Kann ich auf meine Codes auf dem Desktop zugreifen?", "faq_a_2": "Sie können auf Ihre Codes im Web via auth.ente.io zugreifen.", "faq_q_3": "Wie kann ich Codes löschen?", "faq_a_3": "Sie können einen Code löschen, indem Sie auf dem Code nach links wischen.", "faq_q_4": "Wie kann ich das Projekt unterstützen?", "faq_a_4": "Sie können die Entwicklung dieses Projekts unterstützen, indem Sie unsere Fotos-App auf ente.io abonnieren.", + "faq_q_5": "Wie kann ich die FaceID-Sperre in Auth aktivieren", "faq_a_5": "Sie können FaceID unter Einstellungen → Sicherheit → Sperrbildschirm aktivieren.", "somethingWentWrongMessage": "Ein Fehler ist aufgetreten, bitte versuchen Sie es erneut", "leaveFamily": "Familie verlassen", @@ -196,6 +199,9 @@ "doThisLater": "Auf später verschieben", "saveKey": "Schlüssel speichern", "save": "Speichern", + "send": "Senden", + "saveOrSendDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) oder an andere Apps senden?", + "saveOnlyDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) speichern?", "back": "Zurück", "createAccount": "Account erstellen", "passwordStrength": "Passwortstärke: {passwordStrengthValue}", @@ -343,6 +349,7 @@ "deleteCodeAuthMessage": "Authentifizieren, um Code zu löschen", "showQRAuthMessage": "Authentifizieren, um QR-Code anzuzeigen", "confirmAccountDeleteTitle": "Kontolöschung bestätigen", + "confirmAccountDeleteMessage": "Dieses Konto ist mit anderen Ente-Apps verknüpft, falls du welche verwendest.\n\nDeine hochgeladenen Daten werden in allen Ente-Apps zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht.", "androidBiometricHint": "Identität bestätigen", "@androidBiometricHint": { "description": "Hint message advising the user how to authenticate with biometrics. It is used on Android side. Maximum 60 characters." 
diff --git a/auth/lib/l10n/arb/app_en.arb b/auth/lib/l10n/arb/app_en.arb index e16a39c799..c22bac930d 100644 --- a/auth/lib/l10n/arb/app_en.arb +++ b/auth/lib/l10n/arb/app_en.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Issuer", "codeSecretKeyHint": "Secret Key", "codeAccountHint": "Account (you@domain.com)", - "accountKeyType": "Type of key", "sessionExpired": "Session expired", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_es.arb b/auth/lib/l10n/arb/app_es.arb index 41113f0b9a..f0c8971a0f 100644 --- a/auth/lib/l10n/arb/app_es.arb +++ b/auth/lib/l10n/arb/app_es.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Emisor", "codeSecretKeyHint": "Llave Secreta", "codeAccountHint": "Cuenta (tu@dominio.com)", - "accountKeyType": "Tipo de llave", "sessionExpired": "La sesión ha expirado", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" @@ -113,6 +112,7 @@ "copied": "Copiado", "pleaseTryAgain": "Por favor, inténtalo nuevamente", "existingUser": "Usuario existente", + "newUser": "Nuevo a Ente", "delete": "Borrar", "enterYourPasswordHint": "Ingrese su contraseña", "forgotPassword": "Olvidé mi contraseña", @@ -138,6 +138,8 @@ "enterCodeHint": "Ingrese el código de seis dígitos de su aplicación de autenticación", "lostDeviceTitle": "¿Perdió su dispositivo?", "twoFactorAuthTitle": "Autenticación de dos factores", + "passkeyAuthTitle": "Verificación de llave de acceso", + "verifyPasskey": "Verificar llave de acceso", "recoverAccount": "Recuperar cuenta", "enterRecoveryKeyHint": "Introduzca su clave de recuperación", "recover": "Recuperar", @@ -191,6 +193,8 @@ "recoveryKeySaveDescription": "Nosotros no almacenamos esta clave, por favor guarde dicha clave de 24 palabras en un lugar seguro.", "doThisLater": "Hacer esto más tarde", "saveKey": "Guardar Clave", + "save": "Guardar", + "send": "Enviar", "back": "Atrás", "createAccount": "Crear 
cuenta", "passwordStrength": "Fortaleza de la contraseña: {passwordStrengthValue}", @@ -397,5 +401,8 @@ "signOutOtherDevices": "Cerrar la sesión de otros dispositivos", "doNotSignOut": "No cerrar la sesión", "hearUsWhereTitle": "¿Cómo conoció Ente? (opcional)", - "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!" + "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!", + "passkey": "Llave de acceso", + "developerSettingsWarning": "¿Estás seguro de que quieres modificar los ajustes de desarrollador?", + "developerSettings": "Ajustes de desarrollador" } \ No newline at end of file diff --git a/auth/lib/l10n/arb/app_fa.arb b/auth/lib/l10n/arb/app_fa.arb index 0cba193a96..948aa8b223 100644 --- a/auth/lib/l10n/arb/app_fa.arb +++ b/auth/lib/l10n/arb/app_fa.arb @@ -14,7 +14,6 @@ "codeIssuerHint": "صادر کننده", "codeSecretKeyHint": "کلید مخفی", "codeAccountHint": "حساب (you@domain.com)", - "accountKeyType": "نوع کلید", "sessionExpired": "نشست منقضی شده است", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_fi.arb b/auth/lib/l10n/arb/app_fi.arb index 72309b3310..2a04041475 100644 --- a/auth/lib/l10n/arb/app_fi.arb +++ b/auth/lib/l10n/arb/app_fi.arb @@ -12,7 +12,6 @@ "codeIssuerHint": "Myöntäjä", "codeSecretKeyHint": "Salainen avain", "codeAccountHint": "Tili (sinun@jokinosoite.com)", - "accountKeyType": "Avaimen tyyppi", "sessionExpired": "Istunto on vanheutunut", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_fr.arb b/auth/lib/l10n/arb/app_fr.arb index 04a7058c7c..71ddc0b31c 100644 --- a/auth/lib/l10n/arb/app_fr.arb +++ b/auth/lib/l10n/arb/app_fr.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Émetteur", "codeSecretKeyHint": "Clé secrète", "codeAccountHint": "Compte 
(vous@exemple.com)", - "accountKeyType": "Type de clé", "sessionExpired": "Session expirée", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_he.arb b/auth/lib/l10n/arb/app_he.arb index 3305850979..8f22e1e82c 100644 --- a/auth/lib/l10n/arb/app_he.arb +++ b/auth/lib/l10n/arb/app_he.arb @@ -19,7 +19,6 @@ "codeIssuerHint": "מנפיק", "codeSecretKeyHint": "מפתח סודי", "codeAccountHint": "חשבון(you@domain.com)", - "accountKeyType": "סוג מפתח", "sessionExpired": "זמן החיבור הסתיים", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_it.arb b/auth/lib/l10n/arb/app_it.arb index e35fd11dc0..92543ed821 100644 --- a/auth/lib/l10n/arb/app_it.arb +++ b/auth/lib/l10n/arb/app_it.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Emittente", "codeSecretKeyHint": "Codice segreto", "codeAccountHint": "Account (username@dominio.it)", - "accountKeyType": "Tipo di chiave", "sessionExpired": "Sessione scaduta", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ja.arb b/auth/lib/l10n/arb/app_ja.arb index 60d0a51507..8fea34c5e1 100644 --- a/auth/lib/l10n/arb/app_ja.arb +++ b/auth/lib/l10n/arb/app_ja.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "発行者", "codeSecretKeyHint": "秘密鍵", "codeAccountHint": "アカウント (you@domain.com)", - "accountKeyType": "鍵の種類", "sessionExpired": "セッションが失効しました", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ka.arb b/auth/lib/l10n/arb/app_ka.arb index cb7dc82818..93631df2d5 100644 --- a/auth/lib/l10n/arb/app_ka.arb +++ b/auth/lib/l10n/arb/app_ka.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "მომწოდებელი", "codeSecretKeyHint": "გასაღები", "codeAccountHint": "ანგარიში (you@domain.com)", - "accountKeyType": "გასაღების ტიპი", 
"sessionExpired": "სესიის დრო ამოიწურა", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_nl.arb b/auth/lib/l10n/arb/app_nl.arb index 2e84ae11bb..36280f69dc 100644 --- a/auth/lib/l10n/arb/app_nl.arb +++ b/auth/lib/l10n/arb/app_nl.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Uitgever", "codeSecretKeyHint": "Geheime sleutel", "codeAccountHint": "Account (jij@domein.nl)", - "accountKeyType": "Type sleutel", "sessionExpired": "Sessie verlopen", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_pl.arb b/auth/lib/l10n/arb/app_pl.arb index 8ebc935dc8..3132f66608 100644 --- a/auth/lib/l10n/arb/app_pl.arb +++ b/auth/lib/l10n/arb/app_pl.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Wydawca", "codeSecretKeyHint": "Tajny klucz", "codeAccountHint": "Konto (ty@domena.com)", - "accountKeyType": "Rodzaj klucza", "sessionExpired": "Sesja wygasła", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_pt.arb b/auth/lib/l10n/arb/app_pt.arb index b27a018fba..9b1f5b1b0a 100644 --- a/auth/lib/l10n/arb/app_pt.arb +++ b/auth/lib/l10n/arb/app_pt.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Emissor", "codeSecretKeyHint": "Chave secreta", "codeAccountHint": "Conta (voce@dominio.com)", - "accountKeyType": "Tipo de chave", "sessionExpired": "Sessão expirada", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ru.arb b/auth/lib/l10n/arb/app_ru.arb index 7ae37a87b9..ca98611ee1 100644 --- a/auth/lib/l10n/arb/app_ru.arb +++ b/auth/lib/l10n/arb/app_ru.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Эмитент", "codeSecretKeyHint": "Секретный ключ", "codeAccountHint": "Аккаунт (you@domain.com)", - "accountKeyType": "Тип ключа", "sessionExpired": "Сеанс истек", 
"@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_sv.arb b/auth/lib/l10n/arb/app_sv.arb index cfb41d7bdc..9761325ce1 100644 --- a/auth/lib/l10n/arb/app_sv.arb +++ b/auth/lib/l10n/arb/app_sv.arb @@ -16,7 +16,6 @@ "codeIssuerHint": "Utfärdare", "codeSecretKeyHint": "Secret Key", "codeAccountHint": "Konto (du@domän.com)", - "accountKeyType": "Typ av nyckel", "sessionExpired": "Sessionen har gått ut", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ti.arb b/auth/lib/l10n/arb/app_ti.arb index 27147ebb6e..b41128f6ea 100644 --- a/auth/lib/l10n/arb/app_ti.arb +++ b/auth/lib/l10n/arb/app_ti.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "ኣዋጂ", "codeSecretKeyHint": "ምስጢራዊ መፍትሕ", "codeAccountHint": "ሕሳብ (you@domain.com)", - "accountKeyType": "ዓይነት መፍትሕ", "sessionExpired": "ክፍለ ግዜኡ ኣኺሉ።", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_tr.arb b/auth/lib/l10n/arb/app_tr.arb index 9b847faf0f..322af5f48c 100644 --- a/auth/lib/l10n/arb/app_tr.arb +++ b/auth/lib/l10n/arb/app_tr.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Yayınlayan", "codeSecretKeyHint": "Gizli Anahtar", "codeAccountHint": "Hesap (ornek@domain.com)", - "accountKeyType": "Anahtar türü", "sessionExpired": "Oturum süresi doldu", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_vi.arb b/auth/lib/l10n/arb/app_vi.arb index e318f9b557..a8cccdbec5 100644 --- a/auth/lib/l10n/arb/app_vi.arb +++ b/auth/lib/l10n/arb/app_vi.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Nhà phát hành", "codeSecretKeyHint": "Khóa bí mật", "codeAccountHint": "Tài khoản (bạn@miền.com)", - "accountKeyType": "Loại khóa", "sessionExpired": "Phiên làm việc đã hết hạn", "@sessionExpired": { 
"description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_zh.arb b/auth/lib/l10n/arb/app_zh.arb index 077ee26fdf..c50e76c1dd 100644 --- a/auth/lib/l10n/arb/app_zh.arb +++ b/auth/lib/l10n/arb/app_zh.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "发行人", "codeSecretKeyHint": "私钥", "codeAccountHint": "账户 (you@domain.com)", - "accountKeyType": "密钥类型", "sessionExpired": "会话已过期", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/main.dart b/auth/lib/main.dart index 09b85d8b35..d8d22ca4fe 100644 --- a/auth/lib/main.dart +++ b/auth/lib/main.dart @@ -37,6 +37,7 @@ import 'package:window_manager/window_manager.dart'; final _logger = Logger("main"); Future initSystemTray() async { + if (PlatformUtil.isMobile()) return; String path = Platform.isWindows ? 'assets/icons/auth-icon.ico' : 'assets/icons/auth-icon.png'; diff --git a/auth/lib/models/code.dart b/auth/lib/models/code.dart index 7853eb19d0..bd6077326c 100644 --- a/auth/lib/models/code.dart +++ b/auth/lib/models/code.dart @@ -2,6 +2,7 @@ import 'package:ente_auth/utils/totp_util.dart'; class Code { static const defaultDigits = 6; + static const steamDigits = 5; static const defaultPeriod = 30; int? generatedID; @@ -57,36 +58,42 @@ class Code { updatedAlgo, updatedType, updatedCounter, - "otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}&digits=$updatedDigits&issuer=$updateIssuer&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}", + "otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}" + "&digits=$updatedDigits&issuer=$updateIssuer" + "&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? 
"&counter=$updatedCounter" : ""}", generatedID: generatedID, ); } static Code fromAccountAndSecret( + Type type, String account, String issuer, String secret, + int digits, ) { return Code( account, issuer, - defaultDigits, + digits, defaultPeriod, secret, Algorithm.sha1, - Type.totp, + type, 0, - "otpauth://totp/$issuer:$account?algorithm=SHA1&digits=6&issuer=$issuer&period=30&secret=$secret", + "otpauth://${type.name}/$issuer:$account?algorithm=SHA1&digits=$digits&issuer=$issuer&period=30&secret=$secret", ); } static Code fromRawData(String rawData) { Uri uri = Uri.parse(rawData); + final issuer = _getIssuer(uri); + try { return Code( _getAccount(uri), - _getIssuer(uri), - _getDigits(uri), + issuer, + _getDigits(uri, issuer), _getPeriod(uri), getSanitizedSecret(uri.queryParameters['secret']!), _getAlgorithm(uri), @@ -140,10 +147,13 @@ class Code { } } - static int _getDigits(Uri uri) { + static int _getDigits(Uri uri, String issuer) { try { return int.parse(uri.queryParameters['digits']!); } catch (e) { + if (issuer.toLowerCase() == "steam") { + return steamDigits; + } return defaultDigits; } } @@ -186,6 +196,8 @@ class Code { static Type _getType(Uri uri) { if (uri.host == "totp") { return Type.totp; + } else if (uri.host == "steam") { + return Type.steam; } else if (uri.host == "hotp") { return Type.hotp; } @@ -223,6 +235,9 @@ class Code { enum Type { totp, hotp, + steam; + + bool get isTOTPCompatible => this == totp || this == steam; } enum Algorithm { diff --git a/auth/lib/onboarding/view/setup_enter_secret_key_page.dart b/auth/lib/onboarding/view/setup_enter_secret_key_page.dart index 3937142d6c..57edcc2e1a 100644 --- a/auth/lib/onboarding/view/setup_enter_secret_key_page.dart +++ b/auth/lib/onboarding/view/setup_enter_secret_key_page.dart @@ -61,6 +61,8 @@ class _SetupEnterSecretKeyPageState extends State { }, decoration: InputDecoration( hintText: l10n.codeIssuerHint, + floatingLabelBehavior: FloatingLabelBehavior.auto, + labelText: l10n.codeIssuerHint, ), 
controller: _issuerController, autofocus: true, @@ -78,6 +80,8 @@ class _SetupEnterSecretKeyPageState extends State { }, decoration: InputDecoration( hintText: l10n.codeSecretKeyHint, + floatingLabelBehavior: FloatingLabelBehavior.auto, + labelText: l10n.codeSecretKeyHint, suffixIcon: IconButton( onPressed: () { setState(() { @@ -105,12 +109,12 @@ class _SetupEnterSecretKeyPageState extends State { }, decoration: InputDecoration( hintText: l10n.codeAccountHint, + floatingLabelBehavior: FloatingLabelBehavior.auto, + labelText: l10n.codeAccountHint, ), controller: _accountController, ), - const SizedBox( - height: 40, - ), + const SizedBox(height: 40), SizedBox( width: 400, child: OutlinedButton( @@ -152,6 +156,7 @@ class _SetupEnterSecretKeyPageState extends State { final account = _accountController.text.trim(); final issuer = _issuerController.text.trim(); final secret = _secretController.text.trim().replaceAll(' ', ''); + final isStreamCode = issuer.toLowerCase() == "steam"; if (widget.code != null && widget.code!.secret != secret) { ButtonResult? result = await showChoiceActionSheet( context, @@ -168,9 +173,11 @@ class _SetupEnterSecretKeyPageState extends State { } final Code newCode = widget.code == null ? Code.fromAccountAndSecret( + isStreamCode ? Type.steam : Type.totp, account, issuer, secret, + isStreamCode ? 
Code.steamDigits : Code.defaultDigits, ) : widget.code!.copyWith( account: account, diff --git a/auth/lib/ui/code_widget.dart b/auth/lib/ui/code_widget.dart index f97e865ec7..d989edf18f 100644 --- a/auth/lib/ui/code_widget.dart +++ b/auth/lib/ui/code_widget.dart @@ -53,7 +53,7 @@ class _CodeWidgetState extends State { String newCode = _getCurrentOTP(); if (newCode != _currentCode.value) { _currentCode.value = newCode; - if (widget.code.type == Type.totp) { + if (widget.code.type.isTOTPCompatible) { _nextCode.value = _getNextTotp(); } } @@ -78,7 +78,7 @@ class _CodeWidgetState extends State { _shouldShowLargeIcon = PreferenceService.instance.shouldShowLargeIcons(); if (!_isInitialized) { _currentCode.value = _getCurrentOTP(); - if (widget.code.type == Type.totp) { + if (widget.code.type.isTOTPCompatible) { _nextCode.value = _getNextTotp(); } _isInitialized = true; @@ -213,7 +213,7 @@ class _CodeWidgetState extends State { crossAxisAlignment: CrossAxisAlignment.start, mainAxisAlignment: MainAxisAlignment.center, children: [ - if (widget.code.type == Type.totp) + if (widget.code.type.isTOTPCompatible) CodeTimerProgress( period: widget.code.period, ), @@ -263,7 +263,7 @@ class _CodeWidgetState extends State { }, ), ), - widget.code.type == Type.totp + widget.code.type.isTOTPCompatible ? 
GestureDetector( onTap: () { _copyNextToClipboard(); @@ -481,7 +481,7 @@ class _CodeWidgetState extends State { String _getNextTotp() { try { - assert(widget.code.type == Type.totp); + assert(widget.code.type.isTOTPCompatible); return getNextTotp(widget.code); } catch (e) { return context.l10n.error; diff --git a/auth/lib/ui/settings/data/import/bitwarden_import.dart b/auth/lib/ui/settings/data/import/bitwarden_import.dart index 90e527dde0..7a562d82b5 100644 --- a/auth/lib/ui/settings/data/import/bitwarden_import.dart +++ b/auth/lib/ui/settings/data/import/bitwarden_import.dart @@ -92,9 +92,11 @@ Future _processBitwardenExportFile( var account = item['login']['username']; code = Code.fromAccountAndSecret( + Type.totp, account, issuer, totp, + Code.defaultDigits, ); } diff --git a/auth/lib/utils/totp_util.dart b/auth/lib/utils/totp_util.dart index a494485249..0d6a8bd68f 100644 --- a/auth/lib/utils/totp_util.dart +++ b/auth/lib/utils/totp_util.dart @@ -3,7 +3,7 @@ import 'package:flutter/foundation.dart'; import 'package:otp/otp.dart' as otp; String getOTP(Code code) { - if(code.type == Type.hotp) { + if (code.type == Type.hotp) { return _getHOTPCode(code); } return otp.OTP.generateTOTPCodeString( @@ -60,4 +60,4 @@ String safeDecode(String value) { debugPrint("Failed to decode $e"); return value; } -} \ No newline at end of file +} diff --git a/auth/linux/packaging/rpm/make_config.yaml b/auth/linux/packaging/rpm/make_config.yaml index 5d5f3aab53..e82dd63bfb 100644 --- a/auth/linux/packaging/rpm/make_config.yaml +++ b/auth/linux/packaging/rpm/make_config.yaml @@ -11,7 +11,7 @@ display_name: Auth requires: - libsqlite3x - - webkit2gtk-4.0 + - webkit2gtk4.0 - libsodium - libsecret - libappindicator diff --git a/auth/pubspec.lock b/auth/pubspec.lock index 2d61b77c39..7724160420 100644 --- a/auth/pubspec.lock +++ b/auth/pubspec.lock @@ -293,9 +293,9 @@ packages: dependency: "direct main" description: path: "packages/desktop_webview_window" - ref: HEAD - resolved-ref: 
"8cbbf9cd6efcfee5e0f420a36f7f8e7e64b667a1" - url: "https://github.com/MixinNetwork/flutter-plugins" + ref: fix-webkit-version + resolved-ref: fe2223e4edfecdbb3a97bb9e3ced73db4ae9d979 + url: "https://github.com/ente-io/flutter-desktopwebview-fork" source: git version: "0.2.4" device_info_plus: diff --git a/auth/pubspec.yaml b/auth/pubspec.yaml index 2ef543aa69..b7a35b6996 100644 --- a/auth/pubspec.yaml +++ b/auth/pubspec.yaml @@ -1,6 +1,6 @@ name: ente_auth description: ente two-factor authenticator -version: 2.0.55+255 +version: 2.0.57+257 publish_to: none environment: @@ -20,7 +20,8 @@ dependencies: convert: ^3.1.1 desktop_webview_window: git: - url: https://github.com/MixinNetwork/flutter-plugins + url: https://github.com/ente-io/flutter-desktopwebview-fork + ref: fix-webkit-version path: packages/desktop_webview_window device_info_plus: ^9.1.1 dio: ^5.4.0 diff --git a/cli/README.md b/cli/README.md index 8fc9aa6948..40858da0f8 100644 --- a/cli/README.md +++ b/cli/README.md @@ -36,7 +36,8 @@ ente --help ### Accounts -If you wish, you can add multiple accounts (your own and that of your family members) and export all data using this tool. +If you wish, you can add multiple accounts (your own and that of your family +members) and export all data using this tool. #### Add an account @@ -44,6 +45,12 @@ If you wish, you can add multiple accounts (your own and that of your family mem ente account add ``` +> [!NOTE] +> +> `ente account add` does not create new accounts, it just adds pre-existing +> accounts to the list of accounts that the CLI knows about so that you can use +> them for other actions. 
+ #### List accounts ```shell diff --git a/cli/cmd/account.go b/cli/cmd/account.go index a4c78fb10e..4bc48dcf30 100644 --- a/cli/cmd/account.go +++ b/cli/cmd/account.go @@ -27,7 +27,8 @@ var listAccCmd = &cobra.Command{ // Subcommand for 'account add' var addAccCmd = &cobra.Command{ Use: "add", - Short: "Add a new account", + Short: "login into existing account", + Long: "Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app", Run: func(cmd *cobra.Command, args []string) { recoverWithLog() ctrl.AddAccount(context.Background()) diff --git a/cli/docs/generated/ente.md b/cli/docs/generated/ente.md index b9d3cde176..4f85dd0980 100644 --- a/cli/docs/generated/ente.md +++ b/cli/docs/generated/ente.md @@ -25,4 +25,4 @@ ente [flags] * [ente export](ente_export.md) - Starts the export process * [ente version](ente_version.md) - Prints the current version -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account.md b/cli/docs/generated/ente_account.md index c48a653365..41c37b0547 100644 --- a/cli/docs/generated/ente_account.md +++ b/cli/docs/generated/ente_account.md @@ -11,9 +11,9 @@ Manage account settings ### SEE ALSO * [ente](ente.md) - CLI tool for exporting your photos from ente.io -* [ente account add](ente_account_add.md) - Add a new account +* [ente account add](ente_account_add.md) - login into existing account * [ente account get-token](ente_account_get-token.md) - Get token for an account for a specific app * [ente account list](ente_account_list.md) - list configured accounts * [ente account update](ente_account_update.md) - Update an existing account's export directory -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_add.md b/cli/docs/generated/ente_account_add.md index 1904ca3702..1e86ae12f7 100644 --- 
a/cli/docs/generated/ente_account_add.md +++ b/cli/docs/generated/ente_account_add.md @@ -1,6 +1,10 @@ ## ente account add -Add a new account +login into existing account + +### Synopsis + +Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app ``` ente account add [flags] @@ -16,4 +20,4 @@ ente account add [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_get-token.md b/cli/docs/generated/ente_account_get-token.md index d7ee77255c..3d8814d7d1 100644 --- a/cli/docs/generated/ente_account_get-token.md +++ b/cli/docs/generated/ente_account_get-token.md @@ -18,4 +18,4 @@ ente account get-token [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_list.md b/cli/docs/generated/ente_account_list.md index cfc59bb8d2..a7677eb855 100644 --- a/cli/docs/generated/ente_account_list.md +++ b/cli/docs/generated/ente_account_list.md @@ -16,4 +16,4 @@ ente account list [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_update.md b/cli/docs/generated/ente_account_update.md index acb65412aa..8d9c8d7e54 100644 --- a/cli/docs/generated/ente_account_update.md +++ b/cli/docs/generated/ente_account_update.md @@ -19,4 +19,4 @@ ente account update [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin.md b/cli/docs/generated/ente_admin.md index aafe51b396..5ac72489d6 100644 --- 
a/cli/docs/generated/ente_admin.md +++ b/cli/docs/generated/ente_admin.md @@ -21,4 +21,4 @@ Commands for admin actions like disable or enabling 2fa, bumping up the storage * [ente admin list-users](ente_admin_list-users.md) - List all users * [ente admin update-subscription](ente_admin_update-subscription.md) - Update subscription for user -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_delete-user.md b/cli/docs/generated/ente_admin_delete-user.md index 56c96841ed..a1d52a73d2 100644 --- a/cli/docs/generated/ente_admin_delete-user.md +++ b/cli/docs/generated/ente_admin_delete-user.md @@ -18,4 +18,4 @@ ente admin delete-user [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_disable-2fa.md b/cli/docs/generated/ente_admin_disable-2fa.md index 333f0912e3..23cd330800 100644 --- a/cli/docs/generated/ente_admin_disable-2fa.md +++ b/cli/docs/generated/ente_admin_disable-2fa.md @@ -18,4 +18,4 @@ ente admin disable-2fa [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_get-user-id.md b/cli/docs/generated/ente_admin_get-user-id.md index 3d26f624ac..47d632abb6 100644 --- a/cli/docs/generated/ente_admin_get-user-id.md +++ b/cli/docs/generated/ente_admin_get-user-id.md @@ -18,4 +18,4 @@ ente admin get-user-id [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_list-users.md b/cli/docs/generated/ente_admin_list-users.md index 8841df57b5..635e8ec3cd 100644 --- 
a/cli/docs/generated/ente_admin_list-users.md +++ b/cli/docs/generated/ente_admin_list-users.md @@ -17,4 +17,4 @@ ente admin list-users [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_update-subscription.md b/cli/docs/generated/ente_admin_update-subscription.md index cc1fa96234..d0fadcd2ba 100644 --- a/cli/docs/generated/ente_admin_update-subscription.md +++ b/cli/docs/generated/ente_admin_update-subscription.md @@ -23,4 +23,4 @@ ente admin update-subscription [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_auth.md b/cli/docs/generated/ente_auth.md index 5770f36f39..e0e97d84fc 100644 --- a/cli/docs/generated/ente_auth.md +++ b/cli/docs/generated/ente_auth.md @@ -13,4 +13,4 @@ Authenticator commands * [ente](ente.md) - CLI tool for exporting your photos from ente.io * [ente auth decrypt](ente_auth_decrypt.md) - Decrypt authenticator export -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_auth_decrypt.md b/cli/docs/generated/ente_auth_decrypt.md index e573db2a33..c9db6ea545 100644 --- a/cli/docs/generated/ente_auth_decrypt.md +++ b/cli/docs/generated/ente_auth_decrypt.md @@ -16,4 +16,4 @@ ente auth decrypt [input] [output] [flags] * [ente auth](ente_auth.md) - Authenticator commands -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_export.md b/cli/docs/generated/ente_export.md index c5783236cf..d809e06e46 100644 --- a/cli/docs/generated/ente_export.md +++ b/cli/docs/generated/ente_export.md @@ -16,4 +16,4 @@ ente export [flags] * [ente](ente.md) - CLI tool for 
exporting your photos from ente.io -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_version.md b/cli/docs/generated/ente_version.md index b51055697f..08f384b52f 100644 --- a/cli/docs/generated/ente_version.md +++ b/cli/docs/generated/ente_version.md @@ -16,4 +16,4 @@ ente version [flags] * [ente](ente.md) - CLI tool for exporting your photos from ente.io -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/pkg/account.go b/cli/pkg/account.go index 9363e2f80b..e411ffacd5 100644 --- a/cli/pkg/account.go +++ b/cli/pkg/account.go @@ -59,7 +59,7 @@ func (c *ClICtrl) AddAccount(cxt context.Context) { authResponse, flowErr = c.validateTOTP(cxt, authResponse) } if authResponse.EncryptedToken == "" || authResponse.KeyAttributes == nil { - panic("no encrypted token or keyAttributes") + log.Fatalf("missing key attributes or token.\nNote: Please use the mobile,web or desktop app to create a new account.\nIf you are trying to login to an existing account, report a bug.") } secretInfo, decErr := c.decryptAccSecretInfo(cxt, authResponse, keyEncKey) if decErr != nil { diff --git a/desktop/.eslintrc.js b/desktop/.eslintrc.js index a47eb483fd..44d03ef0c1 100644 --- a/desktop/.eslintrc.js +++ b/desktop/.eslintrc.js @@ -1,26 +1,36 @@ /* eslint-env node */ module.exports = { + root: true, extends: [ "eslint:recommended", "plugin:@typescript-eslint/eslint-recommended", - /* What we really want eventually */ - // "plugin:@typescript-eslint/strict-type-checked", - // "plugin:@typescript-eslint/stylistic-type-checked", + "plugin:@typescript-eslint/strict-type-checked", + "plugin:@typescript-eslint/stylistic-type-checked", ], - /* Temporarily add a global - Enhancement: Remove me */ - globals: { - NodeJS: "readonly", - }, plugins: ["@typescript-eslint"], parser: "@typescript-eslint/parser", parserOptions: { project: true, }, - 
root: true, ignorePatterns: [".eslintrc.js", "app", "out", "dist"], env: { es2022: true, node: true, }, + rules: { + /* Allow numbers to be used in template literals */ + "@typescript-eslint/restrict-template-expressions": [ + "error", + { + allowNumber: true, + }, + ], + /* Allow void expressions as the entire body of an arrow function */ + "@typescript-eslint/no-confusing-void-expression": [ + "error", + { + ignoreArrowShorthand: true, + }, + ], + }, }; diff --git a/desktop/.github/workflows/build.yml b/desktop/.github/workflows/build.yml deleted file mode 100644 index acd744c056..0000000000 --- a/desktop/.github/workflows/build.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Build/release - -on: - push: - tags: - - v* - -jobs: - release: - runs-on: ${{ matrix.os }} - - strategy: - matrix: - os: [macos-latest, ubuntu-latest, windows-latest] - - steps: - - name: Check out Git repository - uses: actions/checkout@v3 - with: - submodules: recursive - - - name: Install Node.js, NPM and Yarn - uses: actions/setup-node@v3 - with: - node-version: 20 - - - name: Prepare for app notarization - if: startsWith(matrix.os, 'macos') - # Import Apple API key for app notarization on macOS - run: | - mkdir -p ~/private_keys/ - echo '${{ secrets.api_key }}' > ~/private_keys/AuthKey_${{ secrets.api_key_id }}.p8 - - - name: Install libarchive-tools for pacman build # Related https://github.com/electron-userland/electron-builder/issues/4181 - if: startsWith(matrix.os, 'ubuntu') - run: sudo apt-get install libarchive-tools - - - name: Ente Electron Builder Action - uses: ente-io/action-electron-builder@v1.0.0 - with: - # GitHub token, automatically provided to the action - # (No need to define this secret in the repo settings) - github_token: ${{ secrets.github_token }} - - # If the commit is tagged with a version (e.g. 
"v1.0.0"), - # release the app after building - release: ${{ startsWith(github.ref, 'refs/tags/v') }} - - mac_certs: ${{ secrets.mac_certs }} - mac_certs_password: ${{ secrets.mac_certs_password }} - env: - # macOS notarization API key - API_KEY_ID: ${{ secrets.api_key_id }} - API_KEY_ISSUER_ID: ${{ secrets.api_key_issuer_id}} - USE_HARD_LINKS: false diff --git a/desktop/.github/workflows/desktop-draft-release.yml b/desktop/.github/workflows/desktop-draft-release.yml new file mode 100644 index 0000000000..8c0652dfcd --- /dev/null +++ b/desktop/.github/workflows/desktop-draft-release.yml @@ -0,0 +1,70 @@ +name: "Draft release" + +# Build the desktop/draft-release branch and update the existing draft release +# with the resultant artifacts. +# +# This is meant for doing tests that require the app to be signed and packaged. +# Such releases should not be published to end users. +# +# Workflow: +# +# 1. Push your changes to the "desktop/draft-release" branch on +# https://github.com/ente-io/ente. +# +# 2. Create a draft release with tag equal to the version in the `package.json`. +# +# 3. Trigger this workflow. You can trigger it multiple times, each time it'll +# just update the artifacts attached to the same draft. +# +# 4. Once testing is done delete the draft. + +on: + # Trigger manually or `gh workflow run desktop-draft-release.yml`. 
+ workflow_dispatch: + +jobs: + release: + runs-on: macos-latest + + defaults: + run: + working-directory: desktop + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + repository: ente-io/ente + ref: desktop/draft-release + submodules: recursive + + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install dependencies + run: yarn install + + - name: Build + uses: ente-io/action-electron-builder@v1.0.0 + with: + package_root: desktop + + # GitHub token, automatically provided to the action + # (No need to define this secret in the repo settings) + github_token: ${{ secrets.GITHUB_TOKEN }} + + # If the commit is tagged with a version (e.g. "v1.0.0"), + # release the app after building. + release: ${{ startsWith(github.ref, 'refs/tags/v') }} + + mac_certs: ${{ secrets.MAC_CERTS }} + mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }} + env: + # macOS notarization credentials key details + APPLE_ID: ${{ secrets.APPLE_ID }} + APPLE_APP_SPECIFIC_PASSWORD: + ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }} + APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} + USE_HARD_LINKS: false diff --git a/desktop/.github/workflows/desktop-release.yml b/desktop/.github/workflows/desktop-release.yml new file mode 100644 index 0000000000..2fa3823767 --- /dev/null +++ b/desktop/.github/workflows/desktop-release.yml @@ -0,0 +1,83 @@ +name: "Release" + +# This will create a new draft release with public artifacts. +# +# Note that a release will only get created if there is an associated tag +# (GitHub releases need a corresponding tag). +# +# The canonical source for this action is in the repository where we keep the +# source code for the Ente Photos desktop app: https://github.com/ente-io/ente +# +# However, it actually lives and runs in the repository that we use for making +# releases: https://github.com/ente-io/photos-desktop +# +# We need two repositories because Electron updater currently doesn't work well +# with monorepos. 
For more details, see `docs/release.md`. + +on: + push: + # Run when a tag matching the pattern "v*"" is pushed. + # + # See: [Note: Testing release workflows that are triggered by tags]. + tags: + - "v*" + +jobs: + release: + runs-on: ${{ matrix.os }} + + defaults: + run: + working-directory: desktop + + strategy: + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + # Checkout the tag photosd-v1.x.x from the source code + # repository when we're invoked for tag v1.x.x on the releases + # repository. + repository: ente-io/ente + ref: photosd-${{ github.ref_name }} + submodules: recursive + + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install dependencies + run: yarn install + + - name: Install libarchive-tools for pacman build + if: startsWith(matrix.os, 'ubuntu') + # See: + # https://github.com/electron-userland/electron-builder/issues/4181 + run: sudo apt-get install libarchive-tools + + - name: Build + uses: ente-io/action-electron-builder@v1.0.0 + with: + package_root: desktop + + # GitHub token, automatically provided to the action + # (No need to define this secret in the repo settings) + github_token: ${{ secrets.GITHUB_TOKEN }} + + # If the commit is tagged with a version (e.g. "v1.0.0"), + # release the app after building. 
+ release: ${{ startsWith(github.ref, 'refs/tags/v') }} + + mac_certs: ${{ secrets.MAC_CERTS }} + mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }} + env: + # macOS notarization credentials key details + APPLE_ID: ${{ secrets.APPLE_ID }} + APPLE_APP_SPECIFIC_PASSWORD: + ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }} + APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} + USE_HARD_LINKS: false diff --git a/desktop/CHANGELOG.md b/desktop/CHANGELOG.md index 83d2123d86..eb118a424d 100644 --- a/desktop/CHANGELOG.md +++ b/desktop/CHANGELOG.md @@ -1,5 +1,13 @@ # CHANGELOG +## v1.7.0 (Unreleased) + +v1.7 is a major rewrite to improve the security of our app. We have enabled +sandboxing and disabled node integration for the renderer process. All this +required restructuring our IPC mechanisms, which resulted in a lot of under the +hood changes. The outcome is a more secure app that also uses the latest and +greatest Electron recommendations. + ## v1.6.63 ### New diff --git a/desktop/README.md b/desktop/README.md index 05149f5d0c..39b7663fab 100644 --- a/desktop/README.md +++ b/desktop/README.md @@ -10,12 +10,6 @@ To know more about Ente, see [our main README](../README.md) or visit ## Building from source -> [!CAUTION] -> -> We're improving the security of the desktop app further by migrating to -> Electron's sandboxing and contextIsolation. These updates are still WIP and -> meanwhile the instructions below might not fully work on the main branch. - Fetch submodules ```sh diff --git a/desktop/docs/dependencies.md b/desktop/docs/dependencies.md index 62f70e8e46..6052357033 100644 --- a/desktop/docs/dependencies.md +++ b/desktop/docs/dependencies.md @@ -13,7 +13,7 @@ Electron embeds Chromium and Node.js in the generated app's binary. The generated app thus consists of two separate processes - the _main_ process, and a _renderer_ process. -- The _main_ process is runs the embedded node. This process can deal with the +- The _main_ process runs the embedded node. 
This process can deal with the host OS - it is conceptually like a `node` repl running on your machine. In our case, the TypeScript code (in the `src/` directory) gets transpiled by `tsc` into JavaScript in the `build/app/` directory, which gets bundled in @@ -90,16 +90,19 @@ Some extra ones specific to the code here are: Unix commands in our `package.json` scripts. This allows us to use the same commands (like `ln`) across different platforms like Linux and Windows. +- [@tsconfig/recommended](https://github.com/tsconfig/bases) gives us a base + tsconfig for the Node.js version that our current Electron version uses. + ## Functionality ### Format conversion -The main tool we use is for arbitrary conversions is FFMPEG. To bundle a +The main tool we use is for arbitrary conversions is ffmpeg. To bundle a (platform specific) static binary of ffmpeg with our app, we use [ffmpeg-static](https://github.com/eugeneware/ffmpeg-static). > There is a significant (~20x) speed difference between using the compiled -> FFMPEG binary and using the WASM one (that our renderer process already has). +> ffmpeg binary and using the wasm one (that our renderer process already has). > Which is why we bundle it to speed up operations on the desktop app. In addition, we also bundle a static Linux binary of imagemagick in our extra diff --git a/desktop/docs/release.md b/desktop/docs/release.md index 7254e26fc1..b55c96326d 100644 --- a/desktop/docs/release.md +++ b/desktop/docs/release.md @@ -1,43 +1,47 @@ ## Releases -> [!NOTE] -> -> TODO(MR): This document needs to be audited and changed as we do the first -> release from this new monorepo. +Conceptually, the release is straightforward: We push a tag, a GitHub workflow +gets triggered that creates a draft release with artifacts built from that tag. +We then publish that release. The download links on our website, and existing +apps already know how to check for the latest GitHub release and update +accordingly. 
-The Github Action that builds the desktop binaries is triggered by pushing a tag -matching the pattern `photos-desktop-v1.2.3`. This value should match the -version in `package.json`. +The complication comes by the fact that Electron Updater (the mechanism that we +use for auto updates) doesn't work well with monorepos. So we need to keep a +separate (non-mono) repository just for doing releases. -So the process for doing a release would be. +- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente). -1. Create a new branch (can be named anything). On this branch, include your - changes. +- Releases are done from + [ente-io/photos-desktop](https://github.com/ente-io/photos-desktop). -2. Mention the changes in `CHANGELOG.md`. +## Workflow -3. Changing the `version` in `package.json` to `1.x.x`. +The workflow is: -4. Commit and push to remote +1. Finalize the changes in the source repo. + + - Update the CHANGELOG. + - Update the version in `package.json` + - `git commit -m "[photosd] Release v1.2.3"` + - Open PR, merge into main. + +2. Tag the merge commit with a tag matching the pattern `photosd-v1.2.3`, where + `1.2.3` is the version in `package.json` ```sh - git add package.json && git commit -m 'Release v1.x.x' - git tag v1.x.x - git push && git push --tags + git tag photosd-v1.x.x + git push origin photosd-v1.x.x ``` -This by itself will already trigger a new release. The GitHub action will create -a new draft release that can then be used as descibed below. +3. Head over to the releases repository and run the trigger script, passing it + the tag _without_ the `photosd-` prefix. -To wrap up, we also need to merge back these changes into main. So for that, + ```sh + ./.github/trigger-release.sh v1.x.x + ``` -5. Open a PR for the branch that we're working on (where the above tag was - pushed from) to get it merged into main. - -6. In this PR, also increase the version number for the next release train. 
That - is, supposed we just released `v4.0.1`. Then we'll change the version number - in main to `v4.0.2-next.0`. Each pre-release will modify the `next.0` part. - Finally, at the time of the next release, this'll become `v4.0.2`. +## Post build The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts defined in the `build` value in `package.json`. @@ -46,29 +50,11 @@ defined in the `build` value in `package.json`. - Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`) - macOS - A universal DMG -Additionally, the GitHub action notarizes the macOS DMG. For this it needs -credentials provided via GitHub secrets. +Additionally, the GitHub action notarizes and signs the macOS DMG (For this it +uses credentials provided via GitHub secrets). -During the build the Sentry webpack plugin checks to see if SENTRY_AUTH_TOKEN is -defined. If so, it uploads the sourcemaps for the renderer process to Sentry -(For our GitHub action, the SENTRY_AUTH_TOKEN is defined as a GitHub secret). - -The sourcemaps for the main (node) process are currently not sent to Sentry -(this works fine in practice since the node process files are not minified, we -only run `tsc`). - -Once the build is done, a draft release with all these artifacts attached is -created. The build is idempotent, so if something goes wrong and we need to -re-run the GitHub action, just delete the draft release (if it got created) and -start a new run by pushing a new tag (if some code changes are required). - -If no code changes are required, say the build failed for some transient network -or sentry issue, we can even be re-run by the build by going to Github Action -age and rerun from there. This will re-trigger for the same tag. - -If everything goes well, we'll have a release on GitHub, and the corresponding -source maps for the renderer process uploaded to Sentry. There isn't anything -else to do: +To rollout the build, we need to publish the draft release. 
Thereafter, +everything is automated: - The website automatically redirects to the latest release on GitHub when people try to download. @@ -76,7 +62,7 @@ else to do: - The file formats with support auto update (Windows `exe`, the Linux AppImage and the macOS DMG) also check the latest GitHub release automatically to download and apply the update (the rest of the formats don't support auto - updates). + updates yet). - We're not putting the desktop app in other stores currently. It is available as a `brew cask`, but we only had to open a PR to add the initial formula, @@ -87,6 +73,4 @@ else to do: We can also publish the draft releases by checking the "pre-release" option. Such releases don't cause any of the channels (our website, or the desktop app auto updater, or brew) to be notified, instead these are useful for giving links -to pre-release builds to customers. Generally, in the version number for these -we'll add a label to the version, e.g. the "beta.x" in `1.x.x-beta.x`. This -should be done both in `package.json`, and what we tag the commit with. +to pre-release builds to customers. diff --git a/desktop/package.json b/desktop/package.json index 69d54f75be..462857a8bf 100644 --- a/desktop/package.json +++ b/desktop/package.json @@ -1,8 +1,9 @@ { "name": "ente", - "version": "1.6.63", + "version": "1.7.0-beta.0", "private": true, "description": "Desktop client for Ente Photos", + "repository": "github:ente-io/photos-desktop", "author": "Ente ", "main": "app/main.js", "scripts": { @@ -15,8 +16,11 @@ "dev-main": "tsc && electron app/main.js", "dev-renderer": "cd ../web && yarn install && yarn dev:photos", "postinstall": "electron-builder install-app-deps", - "lint": "yarn prettier --check . && eslint --ext .ts src", - "lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src" + "lint": "yarn prettier --check --log-level warn . && eslint --ext .ts src && yarn tsc", + "lint-fix": "yarn prettier --write --log-level warn . 
&& eslint --fix --ext .ts src && yarn tsc" + }, + "resolutions": { + "jackspeak": "2.1.1" }, "dependencies": { "any-shell-escape": "^0.1", @@ -34,13 +38,14 @@ "onnxruntime-node": "^1.17" }, "devDependencies": { + "@tsconfig/node20": "^20.1.4", "@types/auto-launch": "^5.0", "@types/ffmpeg-static": "^3.0", "@typescript-eslint/eslint-plugin": "^7", "@typescript-eslint/parser": "^7", "concurrently": "^8", - "electron": "^29", - "electron-builder": "^24", + "electron": "^30", + "electron-builder": "25.0.0-alpha.6", "electron-builder-notarize": "^1.5", "eslint": "^8", "prettier": "^3", diff --git a/desktop/src/main.ts b/desktop/src/main.ts index 8526e23632..9cba9178df 100644 --- a/desktop/src/main.ts +++ b/desktop/src/main.ts @@ -8,32 +8,30 @@ * * https://www.electronjs.org/docs/latest/tutorial/process-model#the-main-process */ -import { nativeImage } from "electron"; -import { app, BrowserWindow, Menu, protocol, Tray } from "electron/main"; + +import { nativeImage, shell } from "electron/common"; +import type { WebContents } from "electron/main"; +import { BrowserWindow, Menu, Tray, app, protocol } from "electron/main"; import serveNextAt from "next-electron-server"; import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import os from "node:os"; import path from "node:path"; -import { - addAllowOriginHeader, - handleDownloads, - handleExternalLinks, -} from "./main/init"; import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc"; import log, { initLogging } from "./main/log"; import { createApplicationMenu, createTrayContextMenu } from "./main/menu"; import { setupAutoUpdater } from "./main/services/app-update"; -import autoLauncher from "./main/services/autoLauncher"; -import { initWatcher } from "./main/services/chokidar"; +import autoLauncher from "./main/services/auto-launcher"; +import { createWatcher } from "./main/services/watch"; import { userPreferences } from "./main/stores/user-preferences"; +import { 
migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch"; import { registerStreamProtocol } from "./main/stream"; -import { isDev } from "./main/util"; +import { isDev } from "./main/utils/electron"; /** * The URL where the renderer HTML is being served from. */ -export const rendererURL = "ente://app"; +const rendererURL = "ente://app"; /** * We want to hide our window instead of closing it when the user presses the @@ -129,54 +127,22 @@ const registerPrivilegedSchemes = () => { { scheme: "stream", privileges: { - // TODO(MR): Remove the commented bits if we don't end up - // needing them by the time the IPC refactoring is done. - - // Prevent the insecure origin issues when fetching this - // secure: true, - // Allow the web fetch API in the renderer to use this scheme. supportFetchAPI: true, - // Allow it to be used with video tags. - // stream: true, }, }, ]); }; -/** - * [Note: Increased disk cache for the desktop app] - * - * Set the "disk-cache-size" command line flag to ask the Chromium process to - * use a larger size for the caches that it keeps on disk. This allows us to use - * the web based caching mechanisms on both the web and the desktop app, just - * ask the embedded Chromium to be a bit more generous in disk usage when - * running as the desktop app. - * - * The size we provide is in bytes. - * https://www.electronjs.org/docs/latest/api/command-line-switches#--disk-cache-sizesize - * - * Note that increasing the disk cache size does not guarantee that Chromium - * will respect in verbatim, it uses its own heuristics atop this hint. - * https://superuser.com/questions/378991/what-is-chrome-default-cache-size-limit/1577693#1577693 - * - * See also: [Note: Caching files]. - */ -const increaseDiskCache = () => - app.commandLine.appendSwitch( - "disk-cache-size", - `${5 * 1024 * 1024 * 1024}`, // 5 GB - ); - /** * Create an return the {@link BrowserWindow} that will form our app's UI. 
* * This window will show the HTML served from {@link rendererURL}. */ -const createMainWindow = async () => { +const createMainWindow = () => { // Create the main window. This'll show our web content. const window = new BrowserWindow({ webPreferences: { - preload: path.join(app.getAppPath(), "preload.js"), + preload: path.join(__dirname, "preload.js"), sandbox: true, }, // The color to show in the window until the web content gets loaded. @@ -186,7 +152,7 @@ const createMainWindow = async () => { show: false, }); - const wasAutoLaunched = await autoLauncher.wasAutoLaunched(); + const wasAutoLaunched = autoLauncher.wasAutoLaunched(); if (wasAutoLaunched) { // Don't automatically show the app's window if we were auto-launched. // On macOS, also hide the dock icon on macOS. @@ -196,19 +162,19 @@ const createMainWindow = async () => { window.maximize(); } - window.loadURL(rendererURL); - // Open the DevTools automatically when running in dev mode if (isDev) window.webContents.openDevTools(); window.webContents.on("render-process-gone", (_, details) => { - log.error(`render-process-gone: ${details}`); + log.error(`render-process-gone: ${details.reason}`); window.webContents.reload(); }); + // "The unresponsive event is fired when Chromium detects that your + // webContents is not responding to input messages for > 30 seconds." 
window.webContents.on("unresponsive", () => { log.error( - "Main window's webContents are unresponsive, will restart the renderer process", + "MainWindow's webContents are unresponsive, will restart the renderer process", ); window.webContents.forcefullyCrashRenderer(); }); @@ -229,7 +195,7 @@ const createMainWindow = async () => { }); window.on("show", () => { - if (process.platform == "darwin") app.dock.show(); + if (process.platform == "darwin") void app.dock.show(); }); // Let ipcRenderer know when mainWindow is in the foreground so that it can @@ -239,6 +205,58 @@ const createMainWindow = async () => { return window; }; +/** + * Automatically set the save path for user initiated downloads to the system's + * "downloads" directory instead of asking the user to select a save location. + */ +export const setDownloadPath = (webContents: WebContents) => { + webContents.session.on("will-download", (_, item) => { + item.setSavePath( + uniqueSavePath(app.getPath("downloads"), item.getFilename()), + ); + }); +}; + +const uniqueSavePath = (dirPath: string, fileName: string) => { + const { name, ext } = path.parse(fileName); + + let savePath = path.join(dirPath, fileName); + let n = 1; + while (existsSync(savePath)) { + const suffixedName = [`${name}(${n})`, ext].filter((x) => x).join("."); + savePath = path.join(dirPath, suffixedName); + n++; + } + return savePath; +}; + +/** + * Allow opening external links, e.g. when the user clicks on the "Feature + * requests" button in the sidebar (to open our GitHub repository), or when they + * click the "Support" button to send an email to support. + * + * @param webContents The renderer to configure. + */ +export const allowExternalLinks = (webContents: WebContents) => { + // By default, if the user were open a link, say + // https://github.com/ente-io/ente/discussions, then it would open a _new_ + // BrowserWindow within our app. 
+ // + // This is not the behaviour we want; what we want is to ask the system to + // handle the link (e.g. open the URL in the default browser, or if it is a + // mailto: link, then open the user's mail client). + // + // Returning `action` "deny" accomplishes this. + webContents.setWindowOpenHandler(({ url }) => { + if (!url.startsWith(rendererURL)) { + void shell.openExternal(url); + return { action: "deny" }; + } else { + return { action: "allow" }; + } + }); +}; + /** * Add an icon for our app in the system tray. * @@ -269,30 +287,61 @@ const setupTrayItem = (mainWindow: BrowserWindow) => { /** * Older versions of our app used to maintain a cache dir using the main - * process. This has been deprecated in favor of using a normal web cache. + * process. This has been removed in favor of cache on the web layer. * - * See [Note: Increased disk cache for the desktop app] + * Delete the old cache dir if it exists. * - * Delete the old cache dir if it exists. This code was added March 2024, and - * can be removed after some time once most people have upgraded to newer - * versions. + * This will happen in two phases. The cache had three subdirectories: + * + * - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024). + * + * - The third one, "face-crops" will be removed once we finish the face search + * changes. See: [Note: Legacy face crops]. + * + * This migration code can be removed after some time once most people have + * upgraded to newer versions. */ const deleteLegacyDiskCacheDirIfExists = async () => { - // The existing code was passing "cache" as a parameter to getPath. This is - // incorrect if we go by the types - "cache" is not a valid value for the - // parameter to `app.getPath`. 
+ const removeIfExists = async (dirPath: string) => { + if (existsSync(dirPath)) { + log.info(`Removing legacy disk cache from ${dirPath}`); + await fs.rm(dirPath, { recursive: true }); + } + }; + // [Note: Getting the cache path] // - // It might be an issue in the types, since at runtime it seems to work. For - // example, on macOS I get `~/Library/Caches`. + // The existing code was passing "cache" as a parameter to getPath. + // + // However, "cache" is not a valid parameter to getPath. It works! (for + // example, on macOS I get `~/Library/Caches`), but it is intentionally not + // documented as part of the public API: + // + // - docs: remove "cache" from app.getPath + // https://github.com/electron/electron/pull/33509 // // Irrespective, we replicate the original behaviour so that we get back the - // same path that the old got was getting. + // same path that the old code was getting. // - // @ts-expect-error + // @ts-expect-error "cache" works but is not part of the public API. const cacheDir = path.join(app.getPath("cache"), "ente"); if (existsSync(cacheDir)) { - log.info(`Removing legacy disk cache from ${cacheDir}`); - await fs.rm(cacheDir, { recursive: true }); + await removeIfExists(path.join(cacheDir, "thumbs")); + await removeIfExists(path.join(cacheDir, "files")); + } +}; + +/** + * Older versions of our app used to keep a keys.json. It is not needed anymore, + * remove it if it exists. + * + * This code was added March 2024, and can be removed after some time once most + * people have upgraded to newer versions. 
+ */ +const deleteLegacyKeysStoreIfExists = async () => { + const keysStore = path.join(app.getPath("userData"), "keys.json"); + if (existsSync(keysStore)) { + log.info(`Removing legacy keys store at ${keysStore}`); + await fs.rm(keysStore); } }; @@ -310,7 +359,7 @@ const main = () => { // The order of the next two calls is important setupRendererServer(); registerPrivilegedSchemes(); - increaseDiskCache(); + migrateLegacyWatchStoreIfNeeded(); app.on("second-instance", () => { // Someone tried to run a second instance, we should focus our window. @@ -324,25 +373,35 @@ const main = () => { // Emitted once, when Electron has finished initializing. // // Note that some Electron APIs can only be used after this event occurs. - app.on("ready", async () => { - mainWindow = await createMainWindow(); - Menu.setApplicationMenu(await createApplicationMenu(mainWindow)); - setupTrayItem(mainWindow); - attachIPCHandlers(); - attachFSWatchIPCHandlers(initWatcher(mainWindow)); - registerStreamProtocol(); - if (!isDev) setupAutoUpdater(mainWindow); - handleDownloads(mainWindow); - handleExternalLinks(mainWindow); - addAllowOriginHeader(mainWindow); + void app.whenReady().then(() => { + void (async () => { + // Create window and prepare for the renderer. + mainWindow = createMainWindow(); + attachIPCHandlers(); + attachFSWatchIPCHandlers(createWatcher(mainWindow)); + registerStreamProtocol(); - try { - deleteLegacyDiskCacheDirIfExists(); - } catch (e) { - // Log but otherwise ignore errors during non-critical startup - // actions. - log.error("Ignoring startup error", e); - } + // Configure the renderer's environment. + setDownloadPath(mainWindow.webContents); + allowExternalLinks(mainWindow.webContents); + + // Start loading the renderer. + void mainWindow.loadURL(rendererURL); + + // Continue on with the rest of the startup sequence. 
+ Menu.setApplicationMenu(await createApplicationMenu(mainWindow)); + setupTrayItem(mainWindow); + setupAutoUpdater(mainWindow); + + try { + await deleteLegacyDiskCacheDirIfExists(); + await deleteLegacyKeysStoreIfExists(); + } catch (e) { + // Log but otherwise ignore errors during non-critical startup + // actions. + log.error("Ignoring startup error", e); + } + })(); }); // This is a macOS only event. Show our window when the user activates the diff --git a/desktop/src/main/dialogs.ts b/desktop/src/main/dialogs.ts deleted file mode 100644 index 2f91f5c400..0000000000 --- a/desktop/src/main/dialogs.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { dialog } from "electron/main"; -import path from "node:path"; -import type { ElectronFile } from "../types/ipc"; -import { getDirFilePaths, getElectronFile } from "./services/fs"; -import { getElectronFilesFromGoogleZip } from "./services/upload"; - -export const selectDirectory = async () => { - const result = await dialog.showOpenDialog({ - properties: ["openDirectory"], - }); - if (result.filePaths && result.filePaths.length > 0) { - return result.filePaths[0]?.split(path.sep)?.join(path.posix.sep); - } -}; - -export const showUploadFilesDialog = async () => { - const selectedFiles = await dialog.showOpenDialog({ - properties: ["openFile", "multiSelections"], - }); - const filePaths = selectedFiles.filePaths; - return await Promise.all(filePaths.map(getElectronFile)); -}; - -export const showUploadDirsDialog = async () => { - const dir = await dialog.showOpenDialog({ - properties: ["openDirectory", "multiSelections"], - }); - - let filePaths: string[] = []; - for (const dirPath of dir.filePaths) { - filePaths = [...filePaths, ...(await getDirFilePaths(dirPath))]; - } - - return await Promise.all(filePaths.map(getElectronFile)); -}; - -export const showUploadZipDialog = async () => { - const selectedFiles = await dialog.showOpenDialog({ - properties: ["openFile", "multiSelections"], - filters: [{ name: "Zip File", 
extensions: ["zip"] }], - }); - const filePaths = selectedFiles.filePaths; - - let files: ElectronFile[] = []; - - for (const filePath of filePaths) { - files = [...files, ...(await getElectronFilesFromGoogleZip(filePath))]; - } - - return { - zipPaths: filePaths, - files, - }; -}; diff --git a/desktop/src/main/fs.ts b/desktop/src/main/fs.ts deleted file mode 100644 index 36de710c34..0000000000 --- a/desktop/src/main/fs.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * @file file system related functions exposed over the context bridge. - */ -import { existsSync } from "node:fs"; -import fs from "node:fs/promises"; - -export const fsExists = (path: string) => existsSync(path); - -export const fsRename = (oldPath: string, newPath: string) => - fs.rename(oldPath, newPath); - -export const fsMkdirIfNeeded = (dirPath: string) => - fs.mkdir(dirPath, { recursive: true }); - -export const fsRmdir = (path: string) => fs.rmdir(path); - -export const fsRm = (path: string) => fs.rm(path); - -export const fsReadTextFile = async (filePath: string) => - fs.readFile(filePath, "utf-8"); - -export const fsWriteFile = (path: string, contents: string) => - fs.writeFile(path, contents); - -/* TODO: Audit below this */ - -export const isFolder = async (dirPath: string) => { - if (!existsSync(dirPath)) return false; - const stats = await fs.stat(dirPath); - return stats.isDirectory(); -}; diff --git a/desktop/src/main/init.ts b/desktop/src/main/init.ts deleted file mode 100644 index d3e9b28b4b..0000000000 --- a/desktop/src/main/init.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { BrowserWindow, app, shell } from "electron"; -import { existsSync } from "node:fs"; -import path from "node:path"; -import { rendererURL } from "../main"; - -export function handleDownloads(mainWindow: BrowserWindow) { - mainWindow.webContents.session.on("will-download", (_, item) => { - item.setSavePath( - getUniqueSavePath(item.getFilename(), app.getPath("downloads")), - ); - }); -} - -export function 
handleExternalLinks(mainWindow: BrowserWindow) { - mainWindow.webContents.setWindowOpenHandler(({ url }) => { - if (!url.startsWith(rendererURL)) { - shell.openExternal(url); - return { action: "deny" }; - } else { - return { action: "allow" }; - } - }); -} - -export function getUniqueSavePath(filename: string, directory: string): string { - let uniqueFileSavePath = path.join(directory, filename); - const { name: filenameWithoutExtension, ext: extension } = - path.parse(filename); - let n = 0; - while (existsSync(uniqueFileSavePath)) { - n++; - // filter need to remove undefined extension from the array - // else [`${fileName}`, undefined].join(".") will lead to `${fileName}.` as joined string - const fileNameWithNumberedSuffix = [ - `${filenameWithoutExtension}(${n})`, - extension, - ] - .filter((x) => x) // filters out undefined/null values - .join(""); - uniqueFileSavePath = path.join(directory, fileNameWithNumberedSuffix); - } - return uniqueFileSavePath; -} - -function lowerCaseHeaders(responseHeaders: Record) { - const headers: Record = {}; - for (const key of Object.keys(responseHeaders)) { - headers[key.toLowerCase()] = responseHeaders[key]; - } - return headers; -} - -export function addAllowOriginHeader(mainWindow: BrowserWindow) { - mainWindow.webContents.session.webRequest.onHeadersReceived( - (details, callback) => { - details.responseHeaders = lowerCaseHeaders(details.responseHeaders); - details.responseHeaders["access-control-allow-origin"] = ["*"]; - callback({ - responseHeaders: details.responseHeaders, - }); - }, - ); -} diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts index a5de4514f9..1393f4bfd3 100644 --- a/desktop/src/main/ipc.ts +++ b/desktop/src/main/ipc.ts @@ -10,23 +10,12 @@ import type { FSWatcher } from "chokidar"; import { ipcMain } from "electron/main"; -import type { ElectronFile, FILE_PATH_TYPE, FolderWatch } from "../types/ipc"; -import { - selectDirectory, - showUploadDirsDialog, - showUploadFilesDialog, - 
showUploadZipDialog, -} from "./dialogs"; -import { - fsExists, - fsMkdirIfNeeded, - fsReadTextFile, - fsRename, - fsRm, - fsRmdir, - fsWriteFile, - isFolder, -} from "./fs"; +import type { + CollectionMapping, + FolderWatch, + PendingUploads, + ZipItem, +} from "../types/ipc"; import { logToDisk } from "./log"; import { appVersion, @@ -34,13 +23,28 @@ import { updateAndRestart, updateOnNextRestart, } from "./services/app-update"; -import { runFFmpegCmd } from "./services/ffmpeg"; -import { getDirFiles } from "./services/fs"; import { - convertToJPEG, - generateImageThumbnail, -} from "./services/imageProcessor"; -import { clipImageEmbedding, clipTextEmbedding } from "./services/ml-clip"; + legacyFaceCrop, + openDirectory, + openLogDirectory, + selectDirectory, +} from "./services/dir"; +import { ffmpegExec } from "./services/ffmpeg"; +import { + fsExists, + fsIsDir, + fsMkdirIfNeeded, + fsReadTextFile, + fsRename, + fsRm, + fsRmdir, + fsWriteFile, +} from "./services/fs"; +import { convertToJPEG, generateImageThumbnail } from "./services/image"; +import { + clipImageEmbedding, + clipTextEmbeddingIfAvailable, +} from "./services/ml-clip"; import { detectFaces, faceEmbedding } from "./services/ml-face"; import { clearStores, @@ -48,19 +52,23 @@ import { saveEncryptionKey, } from "./services/store"; import { - getElectronFilesFromGoogleZip, - getPendingUploads, - setToUploadCollection, - setToUploadFiles, + clearPendingUploads, + listZipItems, + markUploadedFiles, + markUploadedZipItems, + pathOrZipItemSize, + pendingUploads, + setPendingUploads, } from "./services/upload"; import { - addWatchMapping, - getWatchMappings, - removeWatchMapping, - updateWatchMappingIgnoredFiles, - updateWatchMappingSyncedFiles, + watchAdd, + watchFindFiles, + watchGet, + watchRemove, + watchReset, + watchUpdateIgnoredFiles, + watchUpdateSyncedFiles, } from "./services/watch"; -import { openDirectory, openLogDirectory } from "./util"; /** * Listen for IPC events sent/invoked by the 
renderer process, and route them to @@ -87,16 +95,20 @@ export const attachIPCHandlers = () => { ipcMain.handle("appVersion", () => appVersion()); - ipcMain.handle("openDirectory", (_, dirPath) => openDirectory(dirPath)); + ipcMain.handle("openDirectory", (_, dirPath: string) => + openDirectory(dirPath), + ); ipcMain.handle("openLogDirectory", () => openLogDirectory()); // See [Note: Catching exception during .send/.on] - ipcMain.on("logToDisk", (_, message) => logToDisk(message)); + ipcMain.on("logToDisk", (_, message: string) => logToDisk(message)); + + ipcMain.handle("selectDirectory", () => selectDirectory()); ipcMain.on("clearStores", () => clearStores()); - ipcMain.handle("saveEncryptionKey", (_, encryptionKey) => + ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) => saveEncryptionKey(encryptionKey), ); @@ -106,21 +118,23 @@ export const attachIPCHandlers = () => { ipcMain.on("updateAndRestart", () => updateAndRestart()); - ipcMain.on("updateOnNextRestart", (_, version) => + ipcMain.on("updateOnNextRestart", (_, version: string) => updateOnNextRestart(version), ); - ipcMain.on("skipAppUpdate", (_, version) => skipAppUpdate(version)); + ipcMain.on("skipAppUpdate", (_, version: string) => skipAppUpdate(version)); // - FS - ipcMain.handle("fsExists", (_, path) => fsExists(path)); + ipcMain.handle("fsExists", (_, path: string) => fsExists(path)); ipcMain.handle("fsRename", (_, oldPath: string, newPath: string) => fsRename(oldPath, newPath), ); - ipcMain.handle("fsMkdirIfNeeded", (_, dirPath) => fsMkdirIfNeeded(dirPath)); + ipcMain.handle("fsMkdirIfNeeded", (_, dirPath: string) => + fsMkdirIfNeeded(dirPath), + ); ipcMain.handle("fsRmdir", (_, path: string) => fsRmdir(path)); @@ -132,27 +146,39 @@ export const attachIPCHandlers = () => { fsWriteFile(path, contents), ); + ipcMain.handle("fsIsDir", (_, dirPath: string) => fsIsDir(dirPath)); + // - Conversion - ipcMain.handle("convertToJPEG", (_, fileData, filename) => - convertToJPEG(fileData, 
filename), + ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) => + convertToJPEG(imageData), ); ipcMain.handle( "generateImageThumbnail", - (_, inputFile, maxDimension, maxSize) => - generateImageThumbnail(inputFile, maxDimension, maxSize), + ( + _, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, + maxDimension: number, + maxSize: number, + ) => generateImageThumbnail(dataOrPathOrZipItem, maxDimension, maxSize), ); ipcMain.handle( - "runFFmpegCmd", + "ffmpegExec", ( _, - cmd: string[], - inputFile: File | ElectronFile, - outputFileName: string, - dontTimeout?: boolean, - ) => runFFmpegCmd(cmd, inputFile, outputFileName, dontTimeout), + command: string[], + dataOrPathOrZipItem: Uint8Array | string | ZipItem, + outputFileExtension: string, + timeoutMS: number, + ) => + ffmpegExec( + command, + dataOrPathOrZipItem, + outputFileExtension, + timeoutMS, + ), ); // - ML @@ -161,8 +187,8 @@ export const attachIPCHandlers = () => { clipImageEmbedding(jpegImageData), ); - ipcMain.handle("clipTextEmbedding", (_, text: string) => - clipTextEmbedding(text), + ipcMain.handle("clipTextEmbeddingIfAvailable", (_, text: string) => + clipTextEmbeddingIfAvailable(text), ); ipcMain.handle("detectFaces", (_, input: Float32Array) => @@ -173,39 +199,37 @@ export const attachIPCHandlers = () => { faceEmbedding(input), ); - // - File selection - - ipcMain.handle("selectDirectory", () => selectDirectory()); - - ipcMain.handle("showUploadFilesDialog", () => showUploadFilesDialog()); - - ipcMain.handle("showUploadDirsDialog", () => showUploadDirsDialog()); - - ipcMain.handle("showUploadZipDialog", () => showUploadZipDialog()); - - // - FS Legacy - - ipcMain.handle("isFolder", (_, dirPath: string) => isFolder(dirPath)); + ipcMain.handle("legacyFaceCrop", (_, faceID: string) => + legacyFaceCrop(faceID), + ); // - Upload - ipcMain.handle("getPendingUploads", () => getPendingUploads()); + ipcMain.handle("listZipItems", (_, zipPath: string) => + listZipItems(zipPath), + ); + + 
ipcMain.handle("pathOrZipItemSize", (_, pathOrZipItem: string | ZipItem) => + pathOrZipItemSize(pathOrZipItem), + ); + + ipcMain.handle("pendingUploads", () => pendingUploads()); + + ipcMain.handle("setPendingUploads", (_, pendingUploads: PendingUploads) => + setPendingUploads(pendingUploads), + ); ipcMain.handle( - "setToUploadFiles", - (_, type: FILE_PATH_TYPE, filePaths: string[]) => - setToUploadFiles(type, filePaths), + "markUploadedFiles", + (_, paths: PendingUploads["filePaths"]) => markUploadedFiles(paths), ); - ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) => - getElectronFilesFromGoogleZip(filePath), + ipcMain.handle( + "markUploadedZipItems", + (_, items: PendingUploads["zipItems"]) => markUploadedZipItems(items), ); - ipcMain.handle("setToUploadCollection", (_, collectionName: string) => - setToUploadCollection(collectionName), - ); - - ipcMain.handle("getDirFiles", (_, dirPath: string) => getDirFiles(dirPath)); + ipcMain.handle("clearPendingUploads", () => clearPendingUploads()); }; /** @@ -213,42 +237,38 @@ export const attachIPCHandlers = () => { * watch folder functionality. * * It gets passed a {@link FSWatcher} instance which it can then forward to the - * actual handlers. + * actual handlers if they need access to it to do their thing. 
*/ export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => { // - Watch - ipcMain.handle( - "addWatchMapping", - ( - _, - collectionName: string, - folderPath: string, - uploadStrategy: number, - ) => - addWatchMapping( - watcher, - collectionName, - folderPath, - uploadStrategy, - ), - ); - - ipcMain.handle("removeWatchMapping", (_, folderPath: string) => - removeWatchMapping(watcher, folderPath), - ); - - ipcMain.handle("getWatchMappings", () => getWatchMappings()); + ipcMain.handle("watchGet", () => watchGet(watcher)); ipcMain.handle( - "updateWatchMappingSyncedFiles", - (_, folderPath: string, files: FolderWatch["syncedFiles"]) => - updateWatchMappingSyncedFiles(folderPath, files), + "watchAdd", + (_, folderPath: string, collectionMapping: CollectionMapping) => + watchAdd(watcher, folderPath, collectionMapping), + ); + + ipcMain.handle("watchRemove", (_, folderPath: string) => + watchRemove(watcher, folderPath), ); ipcMain.handle( - "updateWatchMappingIgnoredFiles", - (_, folderPath: string, files: FolderWatch["ignoredFiles"]) => - updateWatchMappingIgnoredFiles(folderPath, files), + "watchUpdateSyncedFiles", + (_, syncedFiles: FolderWatch["syncedFiles"], folderPath: string) => + watchUpdateSyncedFiles(syncedFiles, folderPath), ); + + ipcMain.handle( + "watchUpdateIgnoredFiles", + (_, ignoredFiles: FolderWatch["ignoredFiles"], folderPath: string) => + watchUpdateIgnoredFiles(ignoredFiles, folderPath), + ); + + ipcMain.handle("watchFindFiles", (_, folderPath: string) => + watchFindFiles(folderPath), + ); + + ipcMain.handle("watchReset", () => watchReset(watcher)); }; diff --git a/desktop/src/main/log.ts b/desktop/src/main/log.ts index d43161feaf..cf1404a90a 100644 --- a/desktop/src/main/log.ts +++ b/desktop/src/main/log.ts @@ -1,15 +1,15 @@ import log from "electron-log"; import util from "node:util"; -import { isDev } from "./util"; +import { isDev } from "./utils/electron"; /** * Initialize logging in the main process. 
* * This will set our underlying logger up to log to a file named `ente.log`, * - * - on Linux at ~/.config/ente/logs/main.log - * - on macOS at ~/Library/Logs/ente/main.log - * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\main.log + * - on Linux at ~/.config/ente/logs/ente.log + * - on macOS at ~/Library/Logs/ente/ente.log + * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log * * On dev builds, it will also log to the console. */ @@ -65,7 +65,7 @@ const logError_ = (message: string) => { if (isDev) console.error(`[error] ${message}`); }; -const logInfo = (...params: any[]) => { +const logInfo = (...params: unknown[]) => { const message = params .map((p) => (typeof p == "string" ? p : util.inspect(p))) .join(" "); @@ -73,7 +73,7 @@ const logInfo = (...params: any[]) => { if (isDev) console.log(`[info] ${message}`); }; -const logDebug = (param: () => any) => { +const logDebug = (param: () => unknown) => { if (isDev) { const p = param(); console.log(`[debug] ${typeof p == "string" ? 
p : util.inspect(p)}`); diff --git a/desktop/src/main/menu.ts b/desktop/src/main/menu.ts index 3441f3f2ab..45cbd63624 100644 --- a/desktop/src/main/menu.ts +++ b/desktop/src/main/menu.ts @@ -7,9 +7,9 @@ import { } from "electron"; import { allowWindowClose } from "../main"; import { forceCheckForAppUpdates } from "./services/app-update"; -import autoLauncher from "./services/autoLauncher"; +import autoLauncher from "./services/auto-launcher"; +import { openLogDirectory } from "./services/dir"; import { userPreferences } from "./stores/user-preferences"; -import { openLogDirectory } from "./util"; /** Create and return the entries in the app's main menu bar */ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { @@ -18,7 +18,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { // Whenever the menu is redrawn the current value of these variables is used // to set the checked state for the various settings checkboxes. let isAutoLaunchEnabled = await autoLauncher.isEnabled(); - let shouldHideDockIcon = userPreferences.get("hideDockIcon"); + let shouldHideDockIcon = !!userPreferences.get("hideDockIcon"); const macOSOnly = (options: MenuItemConstructorOptions[]) => process.platform == "darwin" ? 
options : []; @@ -26,12 +26,12 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow); const handleViewChangelog = () => - shell.openExternal( + void shell.openExternal( "https://github.com/ente-io/ente/blob/main/desktop/CHANGELOG.md", ); const toggleAutoLaunch = () => { - autoLauncher.toggleAutoLaunch(); + void autoLauncher.toggleAutoLaunch(); isAutoLaunchEnabled = !isAutoLaunchEnabled; }; @@ -42,13 +42,15 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { shouldHideDockIcon = !shouldHideDockIcon; }; - const handleHelp = () => shell.openExternal("https://help.ente.io/photos/"); + const handleHelp = () => + void shell.openExternal("https://help.ente.io/photos/"); - const handleSupport = () => shell.openExternal("mailto:support@ente.io"); + const handleSupport = () => + void shell.openExternal("mailto:support@ente.io"); - const handleBlog = () => shell.openExternal("https://ente.io/blog/"); + const handleBlog = () => void shell.openExternal("https://ente.io/blog/"); - const handleViewLogs = openLogDirectory; + const handleViewLogs = () => void openLogDirectory(); return Menu.buildFromTemplate([ { @@ -124,11 +126,11 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { submenu: [ { role: "startSpeaking", - label: "start speaking", + label: "Start Speaking", }, { role: "stopSpeaking", - label: "stop speaking", + label: "Stop Speaking", }, ], }, diff --git a/desktop/src/main/platform.ts b/desktop/src/main/platform.ts deleted file mode 100644 index 1c3bb4584d..0000000000 --- a/desktop/src/main/platform.ts +++ /dev/null @@ -1,19 +0,0 @@ -export function isPlatform(platform: "mac" | "windows" | "linux") { - return getPlatform() === platform; -} - -export function getPlatform(): "mac" | "windows" | "linux" { - switch (process.platform) { - case "aix": - case "freebsd": - case "linux": - case "openbsd": - case "android": - 
return "linux"; - case "darwin": - case "sunos": - return "mac"; - case "win32": - return "windows"; - } -} diff --git a/desktop/src/main/services/app-update.ts b/desktop/src/main/services/app-update.ts index b474485016..5788b9b27a 100644 --- a/desktop/src/main/services/app-update.ts +++ b/desktop/src/main/services/app-update.ts @@ -1,19 +1,28 @@ import { compareVersions } from "compare-versions"; -import { app, BrowserWindow } from "electron"; import { default as electronLog } from "electron-log"; import { autoUpdater } from "electron-updater"; +import { app, BrowserWindow } from "electron/main"; import { allowWindowClose } from "../../main"; -import { AppUpdateInfo } from "../../types/ipc"; +import { AppUpdate } from "../../types/ipc"; import log from "../log"; import { userPreferences } from "../stores/user-preferences"; +import { isDev } from "../utils/electron"; export const setupAutoUpdater = (mainWindow: BrowserWindow) => { autoUpdater.logger = electronLog; autoUpdater.autoDownload = false; + // Skip checking for updates automatically in dev builds. Installing an + // update would fail anyway since (at least on macOS), the auto update + // process requires signed builds. + // + // Even though this is skipped on app start, we can still use the "Check for + // updates..." menu option to trigger the update if we wish in dev builds. 
+ if (isDev) return; + const oneDay = 1 * 24 * 60 * 60 * 1000; - setInterval(() => checkForUpdatesAndNotify(mainWindow), oneDay); - checkForUpdatesAndNotify(mainWindow); + setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay); + void checkForUpdatesAndNotify(mainWindow); }; /** @@ -22,7 +31,7 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => { export const forceCheckForAppUpdates = (mainWindow: BrowserWindow) => { userPreferences.delete("skipAppVersion"); userPreferences.delete("muteUpdateNotificationVersion"); - checkForUpdatesAndNotify(mainWindow); + void checkForUpdatesAndNotify(mainWindow); }; const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { @@ -36,39 +45,42 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { log.debug(() => `Update check found version ${version}`); + if (!version) + throw new Error("Unexpected empty version obtained from auto-updater"); + if (compareVersions(version, app.getVersion()) <= 0) { log.debug(() => "Skipping update, already at latest version"); return; } - if (version === userPreferences.get("skipAppVersion")) { + if (version == userPreferences.get("skipAppVersion")) { log.info(`User chose to skip version ${version}`); return; } const mutedVersion = userPreferences.get("muteUpdateNotificationVersion"); - if (version === mutedVersion) { + if (version == mutedVersion) { log.info(`User has muted update notifications for version ${version}`); return; } - const showUpdateDialog = (updateInfo: AppUpdateInfo) => - mainWindow.webContents.send("appUpdateAvailable", updateInfo); + const showUpdateDialog = (update: AppUpdate) => + mainWindow.webContents.send("appUpdateAvailable", update); log.debug(() => "Attempting auto update"); - autoUpdater.downloadUpdate(); + await autoUpdater.downloadUpdate(); - let timeout: NodeJS.Timeout; + let timeoutId: ReturnType; const fiveMinutes = 5 * 60 * 1000; autoUpdater.on("update-downloaded", () => { - timeout = setTimeout( + 
timeoutId = setTimeout( () => showUpdateDialog({ autoUpdatable: true, version }), fiveMinutes, ); }); autoUpdater.on("error", (error) => { - clearTimeout(timeout); + clearTimeout(timeoutId); log.error("Auto update failed", error); showUpdateDialog({ autoUpdatable: false, version }); }); diff --git a/desktop/src/main/services/auto-launcher.ts b/desktop/src/main/services/auto-launcher.ts new file mode 100644 index 0000000000..0942a49359 --- /dev/null +++ b/desktop/src/main/services/auto-launcher.ts @@ -0,0 +1,50 @@ +import AutoLaunch from "auto-launch"; +import { app } from "electron/main"; + +class AutoLauncher { + /** + * This property will be set and used on Linux and Windows. On macOS, + * there's a separate API + */ + private autoLaunch?: AutoLaunch; + + constructor() { + if (process.platform != "darwin") { + this.autoLaunch = new AutoLaunch({ + name: "ente", + isHidden: true, + }); + } + } + + async isEnabled() { + const autoLaunch = this.autoLaunch; + if (autoLaunch) { + return await autoLaunch.isEnabled(); + } else { + return app.getLoginItemSettings().openAtLogin; + } + } + + async toggleAutoLaunch() { + const wasEnabled = await this.isEnabled(); + const autoLaunch = this.autoLaunch; + if (autoLaunch) { + if (wasEnabled) await autoLaunch.disable(); + else await autoLaunch.enable(); + } else { + const openAtLogin = !wasEnabled; + app.setLoginItemSettings({ openAtLogin }); + } + } + + wasAutoLaunched() { + if (this.autoLaunch) { + return app.commandLine.hasSwitch("hidden"); + } else { + return app.getLoginItemSettings().openAtLogin; + } + } +} + +export default new AutoLauncher(); diff --git a/desktop/src/main/services/autoLauncher.ts b/desktop/src/main/services/autoLauncher.ts deleted file mode 100644 index 614c151bac..0000000000 --- a/desktop/src/main/services/autoLauncher.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { AutoLauncherClient } from "../../types/main"; -import { isPlatform } from "../platform"; -import linuxAndWinAutoLauncher from 
"./autoLauncherClients/linuxAndWinAutoLauncher"; -import macAutoLauncher from "./autoLauncherClients/macAutoLauncher"; - -class AutoLauncher { - private client: AutoLauncherClient; - async init() { - if (isPlatform("linux") || isPlatform("windows")) { - this.client = linuxAndWinAutoLauncher; - } else { - this.client = macAutoLauncher; - } - // migrate old auto launch settings for windows from mac auto launcher to linux and windows auto launcher - if (isPlatform("windows") && (await macAutoLauncher.isEnabled())) { - await macAutoLauncher.toggleAutoLaunch(); - await linuxAndWinAutoLauncher.toggleAutoLaunch(); - } - } - async isEnabled() { - if (!this.client) { - await this.init(); - } - return await this.client.isEnabled(); - } - async toggleAutoLaunch() { - if (!this.client) { - await this.init(); - } - await this.client.toggleAutoLaunch(); - } - - async wasAutoLaunched() { - if (!this.client) { - await this.init(); - } - return this.client.wasAutoLaunched(); - } -} - -export default new AutoLauncher(); diff --git a/desktop/src/main/services/autoLauncherClients/linuxAndWinAutoLauncher.ts b/desktop/src/main/services/autoLauncherClients/linuxAndWinAutoLauncher.ts deleted file mode 100644 index 0d2c1bb427..0000000000 --- a/desktop/src/main/services/autoLauncherClients/linuxAndWinAutoLauncher.ts +++ /dev/null @@ -1,39 +0,0 @@ -import AutoLaunch from "auto-launch"; -import { app } from "electron"; -import { AutoLauncherClient } from "../../../types/main"; - -const LAUNCHED_AS_HIDDEN_FLAG = "hidden"; - -class LinuxAndWinAutoLauncher implements AutoLauncherClient { - private instance: AutoLaunch; - constructor() { - const autoLauncher = new AutoLaunch({ - name: "ente", - isHidden: true, - }); - this.instance = autoLauncher; - } - async isEnabled() { - return await this.instance.isEnabled(); - } - async toggleAutoLaunch() { - if (await this.isEnabled()) { - await this.disableAutoLaunch(); - } else { - await this.enableAutoLaunch(); - } - } - - async wasAutoLaunched() { - 
return app.commandLine.hasSwitch(LAUNCHED_AS_HIDDEN_FLAG); - } - - private async disableAutoLaunch() { - await this.instance.disable(); - } - private async enableAutoLaunch() { - await this.instance.enable(); - } -} - -export default new LinuxAndWinAutoLauncher(); diff --git a/desktop/src/main/services/autoLauncherClients/macAutoLauncher.ts b/desktop/src/main/services/autoLauncherClients/macAutoLauncher.ts deleted file mode 100644 index 00320e870f..0000000000 --- a/desktop/src/main/services/autoLauncherClients/macAutoLauncher.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { app } from "electron"; -import { AutoLauncherClient } from "../../../types/main"; - -class MacAutoLauncher implements AutoLauncherClient { - async isEnabled() { - return app.getLoginItemSettings().openAtLogin; - } - async toggleAutoLaunch() { - if (await this.isEnabled()) { - this.disableAutoLaunch(); - } else { - this.enableAutoLaunch(); - } - } - - async wasAutoLaunched() { - return app.getLoginItemSettings().wasOpenedAtLogin; - } - - private disableAutoLaunch() { - app.setLoginItemSettings({ openAtLogin: false }); - } - private enableAutoLaunch() { - app.setLoginItemSettings({ openAtLogin: true }); - } -} - -export default new MacAutoLauncher(); diff --git a/desktop/src/main/services/chokidar.ts b/desktop/src/main/services/chokidar.ts deleted file mode 100644 index 5d7284d2a2..0000000000 --- a/desktop/src/main/services/chokidar.ts +++ /dev/null @@ -1,45 +0,0 @@ -import chokidar from "chokidar"; -import { BrowserWindow } from "electron"; -import path from "path"; -import log from "../log"; -import { getElectronFile } from "./fs"; -import { getWatchMappings } from "./watch"; - -/** - * Convert a file system {@link filePath} that uses the local system specific - * path separators into a path that uses POSIX file separators. 
- */ -const normalizeToPOSIX = (filePath: string) => - filePath.split(path.sep).join(path.posix.sep); - -export function initWatcher(mainWindow: BrowserWindow) { - const mappings = getWatchMappings(); - const folderPaths = mappings.map((mapping) => { - return mapping.folderPath; - }); - - const watcher = chokidar.watch(folderPaths, { - awaitWriteFinish: true, - }); - watcher - .on("add", async (path) => { - mainWindow.webContents.send( - "watch-add", - await getElectronFile(normalizeToPOSIX(path)), - ); - }) - .on("unlink", (path) => { - mainWindow.webContents.send("watch-unlink", normalizeToPOSIX(path)); - }) - .on("unlinkDir", (path) => { - mainWindow.webContents.send( - "watch-unlink-dir", - normalizeToPOSIX(path), - ); - }) - .on("error", (error) => { - log.error("Error while watching files", error); - }); - - return watcher; -} diff --git a/desktop/src/main/services/dir.ts b/desktop/src/main/services/dir.ts new file mode 100644 index 0000000000..293a720f01 --- /dev/null +++ b/desktop/src/main/services/dir.ts @@ -0,0 +1,89 @@ +import { shell } from "electron/common"; +import { app, dialog } from "electron/main"; +import { existsSync } from "fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { posixPath } from "../utils/electron"; + +export const selectDirectory = async () => { + const result = await dialog.showOpenDialog({ + properties: ["openDirectory"], + }); + const dirPath = result.filePaths[0]; + return dirPath ? posixPath(dirPath) : undefined; +}; + +/** + * Open the given {@link dirPath} in the system's folder viewer. + * + * For example, on macOS this'll open {@link dirPath} in Finder. 
+ */ +export const openDirectory = async (dirPath: string) => { + // We need to use `path.normalize` because `shell.openPath; does not support + // POSIX path, it needs to be a platform specific path: + // https://github.com/electron/electron/issues/28831#issuecomment-826370589 + const res = await shell.openPath(path.normalize(dirPath)); + // `shell.openPath` resolves with a string containing the error message + // corresponding to the failure if a failure occurred, otherwise "". + if (res) throw new Error(`Failed to open directory ${dirPath}: res`); +}; + +/** + * Open the app's log directory in the system's folder viewer. + * + * @see {@link openDirectory} + */ +export const openLogDirectory = () => openDirectory(logDirectoryPath()); + +/** + * Return the path where the logs for the app are saved. + * + * [Note: Electron app paths] + * + * There are three paths we need to be aware of usually. + * + * First is the "appData". We can obtain this with `app.getPath("appData")`. + * This is per-user application data directory. This is usually the following: + * + * - Windows: `%APPDATA%`, e.g. `C:\Users\\AppData\Local` + * - Linux: `~/.config` + * - macOS: `~/Library/Application Support` + * + * Now, if we suffix the app's name onto the appData directory, we get the + * "userData" directory. This is the **primary** place applications are meant to + * store user's data, e.g. various configuration files and saved state. + * + * During development, our app name is "Electron", so this'd be, for example, + * `~/Library/Application Support/Electron` if we run using `yarn dev`. For the + * packaged production app, our app name is "ente", so this would be: + * + * - Windows: `%APPDATA%\ente`, e.g. `C:\Users\\AppData\Local\ente` + * - Linux: `~/.config/ente` + * - macOS: `~/Library/Application Support/ente` + * + * Note that Chromium also stores the browser state, e.g. localStorage or disk + * caches, in userData. + * + * Finally, there is the "logs" directory. 
This is not within "appData" but has + * a slightly different OS specific path. Since our log file is named + * "ente.log", it can be found at: + * + * - macOS: ~/Library/Logs/ente/ente.log (production) + * - macOS: ~/Library/Logs/Electron/ente.log (dev) + * + * https://www.electronjs.org/docs/latest/api/app + */ +const logDirectoryPath = () => app.getPath("logs"); + +/** + * See: [Note: Legacy face crops] + */ +export const legacyFaceCrop = async ( + faceID: string, +): Promise => { + // See: [Note: Getting the cache path] + // @ts-expect-error "cache" works but is not part of the public API. + const cacheDir = path.join(app.getPath("cache"), "ente"); + const filePath = path.join(cacheDir, "face-crops", faceID); + return existsSync(filePath) ? await fs.readFile(filePath) : undefined; +}; diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts index 2597bae60f..0a5c4eed2c 100644 --- a/desktop/src/main/services/ffmpeg.ts +++ b/desktop/src/main/services/ffmpeg.ts @@ -1,33 +1,37 @@ import pathToFfmpeg from "ffmpeg-static"; -import { existsSync } from "node:fs"; import fs from "node:fs/promises"; -import { ElectronFile } from "../../types/ipc"; +import type { ZipItem } from "../../types/ipc"; import log from "../log"; -import { writeStream } from "../stream"; -import { generateTempFilePath, getTempDirPath } from "../temp"; -import { execAsync } from "../util"; +import { ensure, withTimeout } from "../utils/common"; +import { execAsync } from "../utils/electron"; +import { + deleteTempFile, + makeFileForDataOrPathOrZipItem, + makeTempFilePath, +} from "../utils/temp"; -const INPUT_PATH_PLACEHOLDER = "INPUT"; -const FFMPEG_PLACEHOLDER = "FFMPEG"; -const OUTPUT_PATH_PLACEHOLDER = "OUTPUT"; +/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). 
*/ +const ffmpegPathPlaceholder = "FFMPEG"; +const inputPathPlaceholder = "INPUT"; +const outputPathPlaceholder = "OUTPUT"; /** - * Run a ffmpeg command + * Run a FFmpeg command * - * [Note: FFMPEG in Electron] + * [Note: FFmpeg in Electron] * - * There is a wasm build of FFMPEG, but that is currently 10-20 times slower + * There is a wasm build of FFmpeg, but that is currently 10-20 times slower * that the native build. That is slow enough to be unusable for our purposes. * https://ffmpegwasm.netlify.app/docs/performance * - * So the alternative is to bundle a ffmpeg binary with our app. e.g. + * So the alternative is to bundle a FFmpeg executable binary with our app. e.g. * * yarn add fluent-ffmpeg ffmpeg-static ffprobe-static * * (we only use ffmpeg-static, the rest are mentioned for completeness' sake). * - * Interestingly, Electron already bundles an ffmpeg library (it comes from the - * ffmpeg fork maintained by Chromium). + * Interestingly, Electron already bundles an binary FFmpeg library (it comes + * from the ffmpeg fork maintained by Chromium). * https://chromium.googlesource.com/chromium/third_party/ffmpeg * https://stackoverflow.com/questions/53963672/what-version-of-ffmpeg-is-bundled-inside-electron * @@ -36,126 +40,75 @@ const OUTPUT_PATH_PLACEHOLDER = "OUTPUT"; * $ file ente.app/Contents/Frameworks/Electron\ Framework.framework/Versions/Current/Libraries/libffmpeg.dylib * .../libffmpeg.dylib: Mach-O 64-bit dynamically linked shared library arm64 * - * I'm not sure if our code is supposed to be able to use it, and how. + * But I'm not sure if our code is supposed to be able to use it, and how. 
*/ -export async function runFFmpegCmd( - cmd: string[], - inputFile: File | ElectronFile, - outputFileName: string, - dontTimeout?: boolean, -) { - let inputFilePath = null; - let createdTempInputFile = null; +export const ffmpegExec = async ( + command: string[], + dataOrPathOrZipItem: Uint8Array | string | ZipItem, + outputFileExtension: string, + timeoutMS: number, +): Promise => { + // TODO (MR): This currently copies files for both input (when + // dataOrPathOrZipItem is data) and output. This needs to be tested + // extremely large video files when invoked downstream of `convertToMP4` in + // the web code. + + const { + path: inputFilePath, + isFileTemporary: isInputFileTemporary, + writeToTemporaryFile: writeToTemporaryInputFile, + } = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem); + + const outputFilePath = await makeTempFilePath(outputFileExtension); try { - if (!existsSync(inputFile.path)) { - const tempFilePath = await generateTempFilePath(inputFile.name); - await writeStream(tempFilePath, await inputFile.stream()); - inputFilePath = tempFilePath; - createdTempInputFile = true; - } else { - inputFilePath = inputFile.path; - } - const outputFileData = await runFFmpegCmd_( - cmd, + await writeToTemporaryInputFile(); + + const cmd = substitutePlaceholders( + command, inputFilePath, - outputFileName, - dontTimeout, + outputFilePath, ); - return new File([outputFileData], outputFileName); + + if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000); + else await execAsync(cmd); + + return fs.readFile(outputFilePath); } finally { - if (createdTempInputFile) { - await deleteTempFile(inputFilePath); + try { + if (isInputFileTemporary) await deleteTempFile(inputFilePath); + await deleteTempFile(outputFilePath); + } catch (e) { + log.error("Could not clean up temp files", e); } } -} +}; -export async function runFFmpegCmd_( - cmd: string[], +const substitutePlaceholders = ( + command: string[], inputFilePath: string, - outputFileName: string, - 
dontTimeout = false, -) { - let tempOutputFilePath: string; - try { - tempOutputFilePath = await generateTempFilePath(outputFileName); - - cmd = cmd.map((cmdPart) => { - if (cmdPart === FFMPEG_PLACEHOLDER) { - return ffmpegBinaryPath(); - } else if (cmdPart === INPUT_PATH_PLACEHOLDER) { - return inputFilePath; - } else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) { - return tempOutputFilePath; - } else { - return cmdPart; - } - }); - - if (dontTimeout) { - await execAsync(cmd); + outputFilePath: string, +) => + command.map((segment) => { + if (segment == ffmpegPathPlaceholder) { + return ffmpegBinaryPath(); + } else if (segment == inputPathPlaceholder) { + return inputFilePath; + } else if (segment == outputPathPlaceholder) { + return outputFilePath; } else { - await promiseWithTimeout(execAsync(cmd), 30 * 1000); + return segment; } - - if (!existsSync(tempOutputFilePath)) { - throw new Error("ffmpeg output file not found"); - } - const outputFile = await fs.readFile(tempOutputFilePath); - return new Uint8Array(outputFile); - } catch (e) { - log.error("FFMPEG command failed", e); - throw e; - } finally { - await deleteTempFile(tempOutputFilePath); - } -} + }); /** * Return the path to the `ffmpeg` binary. 
* - * At runtime, the ffmpeg binary is present in a path like (macOS example): + * At runtime, the FFmpeg binary is present in a path like (macOS example): * `ente.app/Contents/Resources/app.asar.unpacked/node_modules/ffmpeg-static/ffmpeg` */ const ffmpegBinaryPath = () => { // This substitution of app.asar by app.asar.unpacked is suggested by the // ffmpeg-static library author themselves: // https://github.com/eugeneware/ffmpeg-static/issues/16 - return pathToFfmpeg.replace("app.asar", "app.asar.unpacked"); -}; - -export async function writeTempFile(fileStream: Uint8Array, fileName: string) { - const tempFilePath = await generateTempFilePath(fileName); - await fs.writeFile(tempFilePath, fileStream); - return tempFilePath; -} - -export async function deleteTempFile(tempFilePath: string) { - const tempDirPath = await getTempDirPath(); - if (!tempFilePath.startsWith(tempDirPath)) - log.error("Attempting to delete a non-temp file ${tempFilePath}"); - await fs.rm(tempFilePath, { force: true }); -} - -const promiseWithTimeout = async ( - request: Promise, - timeout: number, -): Promise => { - const timeoutRef: { - current: NodeJS.Timeout; - } = { current: null }; - const rejectOnTimeout = new Promise((_, reject) => { - timeoutRef.current = setTimeout( - () => reject(new Error("Operation timed out")), - timeout, - ); - }); - const requestWithTimeOutCancellation = async () => { - const resp = await request; - clearTimeout(timeoutRef.current); - return resp; - }; - return await Promise.race([ - requestWithTimeOutCancellation(), - rejectOnTimeout, - ]); + return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked"); }; diff --git a/desktop/src/main/services/fs.ts b/desktop/src/main/services/fs.ts index 7a29d581b9..4570a4a33a 100644 --- a/desktop/src/main/services/fs.ts +++ b/desktop/src/main/services/fs.ts @@ -1,190 +1,30 @@ -import StreamZip from "node-stream-zip"; +/** + * @file file system related functions exposed over the context bridge. 
+ */ + import { existsSync } from "node:fs"; import fs from "node:fs/promises"; -import path from "node:path"; -import { ElectronFile } from "../../types/ipc"; -import log from "../log"; -const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024; +export const fsExists = (path: string) => existsSync(path); -export async function getDirFiles(dirPath: string) { - const files = await getDirFilePaths(dirPath); - const electronFiles = await Promise.all(files.map(getElectronFile)); - return electronFiles; -} +export const fsRename = (oldPath: string, newPath: string) => + fs.rename(oldPath, newPath); -// https://stackoverflow.com/a/63111390 -export const getDirFilePaths = async (dirPath: string) => { - if (!(await fs.stat(dirPath)).isDirectory()) { - return [dirPath]; - } +export const fsMkdirIfNeeded = (dirPath: string) => + fs.mkdir(dirPath, { recursive: true }); - let files: string[] = []; - const filePaths = await fs.readdir(dirPath); +export const fsRmdir = (path: string) => fs.rmdir(path); - for (const filePath of filePaths) { - const absolute = path.join(dirPath, filePath); - files = [...files, ...(await getDirFilePaths(absolute))]; - } +export const fsRm = (path: string) => fs.rm(path); - return files; -}; - -const getFileStream = async (filePath: string) => { - const file = await fs.open(filePath, "r"); - let offset = 0; - const readableStream = new ReadableStream({ - async pull(controller) { - try { - const buff = new Uint8Array(FILE_STREAM_CHUNK_SIZE); - const bytesRead = (await file.read( - buff, - 0, - FILE_STREAM_CHUNK_SIZE, - offset, - )) as unknown as number; - offset += bytesRead; - if (bytesRead === 0) { - controller.close(); - await file.close(); - } else { - controller.enqueue(buff.slice(0, bytesRead)); - } - } catch (e) { - await file.close(); - } - }, - async cancel() { - await file.close(); - }, - }); - return readableStream; -}; - -export async function getElectronFile(filePath: string): Promise { - const fileStats = await fs.stat(filePath); - return 
{ - path: filePath.split(path.sep).join(path.posix.sep), - name: path.basename(filePath), - size: fileStats.size, - lastModified: fileStats.mtime.valueOf(), - stream: async () => { - if (!existsSync(filePath)) { - throw new Error("electronFile does not exist"); - } - return await getFileStream(filePath); - }, - blob: async () => { - if (!existsSync(filePath)) { - throw new Error("electronFile does not exist"); - } - const blob = await fs.readFile(filePath); - return new Blob([new Uint8Array(blob)]); - }, - arrayBuffer: async () => { - if (!existsSync(filePath)) { - throw new Error("electronFile does not exist"); - } - const blob = await fs.readFile(filePath); - return new Uint8Array(blob); - }, - }; -} - -export const getValidPaths = (paths: string[]) => { - if (!paths) { - return [] as string[]; - } - return paths.filter(async (path) => { - try { - await fs.stat(path).then((stat) => stat.isFile()); - } catch (e) { - return false; - } - }); -}; - -export const getZipFileStream = async ( - zip: StreamZip.StreamZipAsync, - filePath: string, -) => { - const stream = await zip.stream(filePath); - const done = { - current: false, - }; - const inProgress = { - current: false, - }; - // eslint-disable-next-line no-unused-vars - let resolveObj: (value?: any) => void = null; - // eslint-disable-next-line no-unused-vars - let rejectObj: (reason?: any) => void = null; - stream.on("readable", () => { - try { - if (resolveObj) { - inProgress.current = true; - const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer; - if (chunk) { - resolveObj(new Uint8Array(chunk)); - resolveObj = null; - } - inProgress.current = false; - } - } catch (e) { - rejectObj(e); - } - }); - stream.on("end", () => { - try { - done.current = true; - if (resolveObj && !inProgress.current) { - resolveObj(null); - resolveObj = null; - } - } catch (e) { - rejectObj(e); - } - }); - stream.on("error", (e) => { - try { - done.current = true; - if (rejectObj) { - rejectObj(e); - rejectObj = null; - } - } 
catch (e) { - rejectObj(e); - } - }); - - const readStreamData = async () => { - return new Promise((resolve, reject) => { - const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer; - - if (chunk || done.current) { - resolve(chunk); - } else { - resolveObj = resolve; - rejectObj = reject; - } - }); - }; - - const readableStream = new ReadableStream({ - async pull(controller) { - try { - const data = await readStreamData(); - - if (data) { - controller.enqueue(data); - } else { - controller.close(); - } - } catch (e) { - log.error("Failed to pull from readableStream", e); - controller.close(); - } - }, - }); - return readableStream; +export const fsReadTextFile = async (filePath: string) => + fs.readFile(filePath, "utf-8"); + +export const fsWriteFile = (path: string, contents: string) => + fs.writeFile(path, contents); + +export const fsIsDir = async (dirPath: string) => { + if (!existsSync(dirPath)) return false; + const stat = await fs.stat(dirPath); + return stat.isDirectory(); }; diff --git a/desktop/src/main/services/image.ts b/desktop/src/main/services/image.ts new file mode 100644 index 0000000000..957fe81200 --- /dev/null +++ b/desktop/src/main/services/image.ts @@ -0,0 +1,159 @@ +/** @file Image format conversions and thumbnail generation */ + +import fs from "node:fs/promises"; +import path from "node:path"; +import { CustomErrorMessage, type ZipItem } from "../../types/ipc"; +import log from "../log"; +import { execAsync, isDev } from "../utils/electron"; +import { + deleteTempFile, + makeFileForDataOrPathOrZipItem, + makeTempFilePath, +} from "../utils/temp"; + +export const convertToJPEG = async (imageData: Uint8Array) => { + const inputFilePath = await makeTempFilePath(); + const outputFilePath = await makeTempFilePath("jpeg"); + + // Construct the command first, it may throw NotAvailable on win32. 
+ const command = convertToJPEGCommand(inputFilePath, outputFilePath); + + try { + await fs.writeFile(inputFilePath, imageData); + await execAsync(command); + return new Uint8Array(await fs.readFile(outputFilePath)); + } finally { + try { + await deleteTempFile(inputFilePath); + await deleteTempFile(outputFilePath); + } catch (e) { + log.error("Could not clean up temp files", e); + } + } +}; + +const convertToJPEGCommand = ( + inputFilePath: string, + outputFilePath: string, +) => { + switch (process.platform) { + case "darwin": + return [ + "sips", + "-s", + "format", + "jpeg", + inputFilePath, + "--out", + outputFilePath, + ]; + + case "linux": + return [ + imageMagickPath(), + inputFilePath, + "-quality", + "100%", + outputFilePath, + ]; + + default: // "win32" + throw new Error(CustomErrorMessage.NotAvailable); + } +}; + +/** Path to the Linux image-magick executable bundled with our app */ +const imageMagickPath = () => + path.join(isDev ? "build" : process.resourcesPath, "image-magick"); + +export const generateImageThumbnail = async ( + dataOrPathOrZipItem: Uint8Array | string | ZipItem, + maxDimension: number, + maxSize: number, +): Promise => { + const { + path: inputFilePath, + isFileTemporary: isInputFileTemporary, + writeToTemporaryFile: writeToTemporaryInputFile, + } = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem); + + const outputFilePath = await makeTempFilePath("jpeg"); + + // Construct the command first, it may throw `NotAvailable` on win32. 
+ let quality = 70; + let command = generateImageThumbnailCommand( + inputFilePath, + outputFilePath, + maxDimension, + quality, + ); + + try { + await writeToTemporaryInputFile(); + + let thumbnail: Uint8Array; + do { + await execAsync(command); + thumbnail = new Uint8Array(await fs.readFile(outputFilePath)); + quality -= 10; + command = generateImageThumbnailCommand( + inputFilePath, + outputFilePath, + maxDimension, + quality, + ); + } while (thumbnail.length > maxSize && quality > 50); + return thumbnail; + } finally { + try { + if (isInputFileTemporary) await deleteTempFile(inputFilePath); + await deleteTempFile(outputFilePath); + } catch (e) { + log.error("Could not clean up temp files", e); + } + } +}; + +const generateImageThumbnailCommand = ( + inputFilePath: string, + outputFilePath: string, + maxDimension: number, + quality: number, +) => { + switch (process.platform) { + case "darwin": + return [ + "sips", + "-s", + "format", + "jpeg", + "-s", + "formatOptions", + `${quality}`, + "-Z", + `${maxDimension}`, + inputFilePath, + "--out", + outputFilePath, + ]; + + case "linux": + return [ + imageMagickPath(), + inputFilePath, + "-auto-orient", + "-define", + `jpeg:size=${2 * maxDimension}x${2 * maxDimension}`, + "-thumbnail", + `${maxDimension}x${maxDimension}>`, + "-unsharp", + "0x.5", + "-quality", + `${quality}`, + outputFilePath, + ]; + + default: // "win32" + throw new Error(CustomErrorMessage.NotAvailable); + } +}; diff --git a/desktop/src/main/services/imageProcessor.ts b/desktop/src/main/services/imageProcessor.ts deleted file mode 100644 index 696119d80f..0000000000 --- a/desktop/src/main/services/imageProcessor.ts +++ /dev/null @@ -1,294 +0,0 @@ -import { existsSync } from "fs"; -import fs from "node:fs/promises"; -import path from "path"; -import { CustomErrors, ElectronFile } from "../../types/ipc"; -import log from "../log"; -import { isPlatform } from "../platform"; -import { writeStream } from "../stream"; -import { generateTempFilePath } 
from "../temp"; -import { execAsync, isDev } from "../util"; -import { deleteTempFile } from "./ffmpeg"; - -const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK"; -const MAX_DIMENSION_PLACEHOLDER = "MAX_DIMENSION"; -const SAMPLE_SIZE_PLACEHOLDER = "SAMPLE_SIZE"; -const INPUT_PATH_PLACEHOLDER = "INPUT"; -const OUTPUT_PATH_PLACEHOLDER = "OUTPUT"; -const QUALITY_PLACEHOLDER = "QUALITY"; - -const MAX_QUALITY = 70; -const MIN_QUALITY = 50; - -const SIPS_HEIC_CONVERT_COMMAND_TEMPLATE = [ - "sips", - "-s", - "format", - "jpeg", - INPUT_PATH_PLACEHOLDER, - "--out", - OUTPUT_PATH_PLACEHOLDER, -]; - -const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [ - "sips", - "-s", - "format", - "jpeg", - "-s", - "formatOptions", - QUALITY_PLACEHOLDER, - "-Z", - MAX_DIMENSION_PLACEHOLDER, - INPUT_PATH_PLACEHOLDER, - "--out", - OUTPUT_PATH_PLACEHOLDER, -]; - -const IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE = [ - IMAGE_MAGICK_PLACEHOLDER, - INPUT_PATH_PLACEHOLDER, - "-quality", - "100%", - OUTPUT_PATH_PLACEHOLDER, -]; - -const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [ - IMAGE_MAGICK_PLACEHOLDER, - INPUT_PATH_PLACEHOLDER, - "-auto-orient", - "-define", - `jpeg:size=${SAMPLE_SIZE_PLACEHOLDER}x${SAMPLE_SIZE_PLACEHOLDER}`, - "-thumbnail", - `${MAX_DIMENSION_PLACEHOLDER}x${MAX_DIMENSION_PLACEHOLDER}>`, - "-unsharp", - "0x.5", - "-quality", - QUALITY_PLACEHOLDER, - OUTPUT_PATH_PLACEHOLDER, -]; - -function getImageMagickStaticPath() { - return isDev - ? 
"resources/image-magick" - : path.join(process.resourcesPath, "image-magick"); -} - -export async function convertToJPEG( - fileData: Uint8Array, - filename: string, -): Promise { - if (isPlatform("windows")) { - throw Error(CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED); - } - const convertedFileData = await convertToJPEG_(fileData, filename); - return convertedFileData; -} - -async function convertToJPEG_( - fileData: Uint8Array, - filename: string, -): Promise { - let tempInputFilePath: string; - let tempOutputFilePath: string; - try { - tempInputFilePath = await generateTempFilePath(filename); - tempOutputFilePath = await generateTempFilePath("output.jpeg"); - - await fs.writeFile(tempInputFilePath, fileData); - - await execAsync( - constructConvertCommand(tempInputFilePath, tempOutputFilePath), - ); - - return new Uint8Array(await fs.readFile(tempOutputFilePath)); - } catch (e) { - log.error("Failed to convert HEIC", e); - throw e; - } finally { - try { - await fs.rm(tempInputFilePath, { force: true }); - } catch (e) { - log.error(`Failed to remove tempInputFile ${tempInputFilePath}`, e); - } - try { - await fs.rm(tempOutputFilePath, { force: true }); - } catch (e) { - log.error( - `Failed to remove tempOutputFile ${tempOutputFilePath}`, - e, - ); - } - } -} - -function constructConvertCommand( - tempInputFilePath: string, - tempOutputFilePath: string, -) { - let convertCmd: string[]; - if (isPlatform("mac")) { - convertCmd = SIPS_HEIC_CONVERT_COMMAND_TEMPLATE.map((cmdPart) => { - if (cmdPart === INPUT_PATH_PLACEHOLDER) { - return tempInputFilePath; - } - if (cmdPart === OUTPUT_PATH_PLACEHOLDER) { - return tempOutputFilePath; - } - return cmdPart; - }); - } else if (isPlatform("linux")) { - convertCmd = IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE.map( - (cmdPart) => { - if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) { - return getImageMagickStaticPath(); - } - if (cmdPart === INPUT_PATH_PLACEHOLDER) { - return tempInputFilePath; - } - if (cmdPart === 
OUTPUT_PATH_PLACEHOLDER) { - return tempOutputFilePath; - } - return cmdPart; - }, - ); - } else { - throw new Error(`Unsupported OS ${process.platform}`); - } - return convertCmd; -} - -export async function generateImageThumbnail( - inputFile: File | ElectronFile, - maxDimension: number, - maxSize: number, -): Promise { - let inputFilePath = null; - let createdTempInputFile = null; - try { - if (isPlatform("windows")) { - throw Error( - CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED, - ); - } - if (!existsSync(inputFile.path)) { - const tempFilePath = await generateTempFilePath(inputFile.name); - await writeStream(tempFilePath, await inputFile.stream()); - inputFilePath = tempFilePath; - createdTempInputFile = true; - } else { - inputFilePath = inputFile.path; - } - const thumbnail = await generateImageThumbnail_( - inputFilePath, - maxDimension, - maxSize, - ); - return thumbnail; - } finally { - if (createdTempInputFile) { - try { - await deleteTempFile(inputFilePath); - } catch (e) { - log.error(`Failed to deleteTempFile ${inputFilePath}`, e); - } - } - } -} - -async function generateImageThumbnail_( - inputFilePath: string, - width: number, - maxSize: number, -): Promise { - let tempOutputFilePath: string; - let quality = MAX_QUALITY; - try { - tempOutputFilePath = await generateTempFilePath("thumb.jpeg"); - let thumbnail: Uint8Array; - do { - await execAsync( - constructThumbnailGenerationCommand( - inputFilePath, - tempOutputFilePath, - width, - quality, - ), - ); - thumbnail = new Uint8Array(await fs.readFile(tempOutputFilePath)); - quality -= 10; - } while (thumbnail.length > maxSize && quality > MIN_QUALITY); - return thumbnail; - } catch (e) { - log.error("Failed to generate image thumbnail", e); - throw e; - } finally { - try { - await fs.rm(tempOutputFilePath, { force: true }); - } catch (e) { - log.error( - `Failed to remove tempOutputFile ${tempOutputFilePath}`, - e, - ); - } - } -} - -function constructThumbnailGenerationCommand( - 
inputFilePath: string, - tempOutputFilePath: string, - maxDimension: number, - quality: number, -) { - let thumbnailGenerationCmd: string[]; - if (isPlatform("mac")) { - thumbnailGenerationCmd = SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map( - (cmdPart) => { - if (cmdPart === INPUT_PATH_PLACEHOLDER) { - return inputFilePath; - } - if (cmdPart === OUTPUT_PATH_PLACEHOLDER) { - return tempOutputFilePath; - } - if (cmdPart === MAX_DIMENSION_PLACEHOLDER) { - return maxDimension.toString(); - } - if (cmdPart === QUALITY_PLACEHOLDER) { - return quality.toString(); - } - return cmdPart; - }, - ); - } else if (isPlatform("linux")) { - thumbnailGenerationCmd = - IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map((cmdPart) => { - if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) { - return getImageMagickStaticPath(); - } - if (cmdPart === INPUT_PATH_PLACEHOLDER) { - return inputFilePath; - } - if (cmdPart === OUTPUT_PATH_PLACEHOLDER) { - return tempOutputFilePath; - } - if (cmdPart.includes(SAMPLE_SIZE_PLACEHOLDER)) { - return cmdPart.replaceAll( - SAMPLE_SIZE_PLACEHOLDER, - (2 * maxDimension).toString(), - ); - } - if (cmdPart.includes(MAX_DIMENSION_PLACEHOLDER)) { - return cmdPart.replaceAll( - MAX_DIMENSION_PLACEHOLDER, - maxDimension.toString(), - ); - } - if (cmdPart === QUALITY_PLACEHOLDER) { - return quality.toString(); - } - return cmdPart; - }); - } else { - throw new Error(`Unsupported OS ${process.platform}`); - } - return thumbnailGenerationCmd; -} diff --git a/desktop/src/main/services/ml-clip.ts b/desktop/src/main/services/ml-clip.ts index 46af2552bc..e3dd99204a 100644 --- a/desktop/src/main/services/ml-clip.ts +++ b/desktop/src/main/services/ml-clip.ts @@ -5,117 +5,25 @@ * * @see `web/apps/photos/src/services/clip-service.ts` for more details. 
*/ -import { existsSync } from "fs"; import jpeg from "jpeg-js"; import fs from "node:fs/promises"; import * as ort from "onnxruntime-node"; import Tokenizer from "../../thirdparty/clip-bpe-ts/mod"; -import { CustomErrors } from "../../types/ipc"; import log from "../log"; import { writeStream } from "../stream"; -import { generateTempFilePath } from "../temp"; -import { deleteTempFile } from "./ffmpeg"; -import { - createInferenceSession, - downloadModel, - modelPathDownloadingIfNeeded, - modelSavePath, -} from "./ml"; +import { ensure } from "../utils/common"; +import { deleteTempFile, makeTempFilePath } from "../utils/temp"; +import { makeCachedInferenceSession } from "./ml"; -const textModelName = "clip-text-vit-32-uint8.onnx"; -const textModelByteSize = 64173509; // 61.2 MB - -const imageModelName = "clip-image-vit-32-float32.onnx"; -const imageModelByteSize = 351468764; // 335.2 MB - -let activeImageModelDownload: Promise | undefined; - -const imageModelPathDownloadingIfNeeded = async () => { - try { - if (activeImageModelDownload) { - log.info("Waiting for CLIP image model download to finish"); - await activeImageModelDownload; - } else { - activeImageModelDownload = modelPathDownloadingIfNeeded( - imageModelName, - imageModelByteSize, - ); - return await activeImageModelDownload; - } - } finally { - activeImageModelDownload = undefined; - } -}; - -let textModelDownloadInProgress = false; - -/* TODO(MR): use the generic method. 
Then we can remove the exports for the - internal details functions that we use here */ -const textModelPathDownloadingIfNeeded = async () => { - if (textModelDownloadInProgress) - throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING); - - const modelPath = modelSavePath(textModelName); - if (!existsSync(modelPath)) { - log.info("CLIP text model not found, downloading"); - textModelDownloadInProgress = true; - downloadModel(modelPath, textModelName) - .catch((e) => { - // log but otherwise ignore - log.error("CLIP text model download failed", e); - }) - .finally(() => { - textModelDownloadInProgress = false; - }); - throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING); - } else { - const localFileSize = (await fs.stat(modelPath)).size; - if (localFileSize !== textModelByteSize) { - log.error( - `CLIP text model size ${localFileSize} does not match the expected size, downloading again`, - ); - textModelDownloadInProgress = true; - downloadModel(modelPath, textModelName) - .catch((e) => { - // log but otherwise ignore - log.error("CLIP text model download failed", e); - }) - .finally(() => { - textModelDownloadInProgress = false; - }); - throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING); - } - } - - return modelPath; -}; - -let imageSessionPromise: Promise | undefined; - -const onnxImageSession = async () => { - if (!imageSessionPromise) { - imageSessionPromise = (async () => { - const modelPath = await imageModelPathDownloadingIfNeeded(); - return createInferenceSession(modelPath); - })(); - } - return imageSessionPromise; -}; - -let _textSession: any = null; - -const onnxTextSession = async () => { - if (!_textSession) { - const modelPath = await textModelPathDownloadingIfNeeded(); - _textSession = await createInferenceSession(modelPath); - } - return _textSession; -}; +const cachedCLIPImageSession = makeCachedInferenceSession( + "clip-image-vit-32-float32.onnx", + 351468764 /* 335.2 MB */, +); export const clipImageEmbedding = async (jpegImageData: Uint8Array) => { - const 
tempFilePath = await generateTempFilePath(""); + const tempFilePath = await makeTempFilePath(); const imageStream = new Response(jpegImageData.buffer).body; - await writeStream(tempFilePath, imageStream); + await writeStream(tempFilePath, ensure(imageStream)); try { return await clipImageEmbedding_(tempFilePath); } finally { @@ -124,42 +32,43 @@ export const clipImageEmbedding = async (jpegImageData: Uint8Array) => { }; const clipImageEmbedding_ = async (jpegFilePath: string) => { - const imageSession = await onnxImageSession(); + const session = await cachedCLIPImageSession(); const t1 = Date.now(); const rgbData = await getRGBData(jpegFilePath); const feeds = { input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]), }; const t2 = Date.now(); - const results = await imageSession.run(feeds); + const results = await session.run(feeds); log.debug( () => `onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, ); - const imageEmbedding = results["output"].data; // Float32Array + /* Need these model specific casts to type the result */ + const imageEmbedding = ensure(results.output).data as Float32Array; return normalizeEmbedding(imageEmbedding); }; -const getRGBData = async (jpegFilePath: string) => { +const getRGBData = async (jpegFilePath: string): Promise => { const jpegData = await fs.readFile(jpegFilePath); const rawImageData = jpeg.decode(jpegData, { useTArray: true, formatAsRGBA: false, }); - const nx: number = rawImageData.width; - const ny: number = rawImageData.height; - const inputImage: Uint8Array = rawImageData.data; + const nx = rawImageData.width; + const ny = rawImageData.height; + const inputImage = rawImageData.data; - const nx2: number = 224; - const ny2: number = 224; - const totalSize: number = 3 * nx2 * ny2; + const nx2 = 224; + const ny2 = 224; + const totalSize = 3 * nx2 * ny2; - const result: number[] = Array(totalSize).fill(0); - const scale: number = Math.max(nx, ny) / 224; + const 
result = Array(totalSize).fill(0); + const scale = Math.max(nx, ny) / 224; - const nx3: number = Math.round(nx / scale); - const ny3: number = Math.round(ny / scale); + const nx3 = Math.round(nx / scale); + const ny3 = Math.round(ny / scale); const mean: number[] = [0.48145466, 0.4578275, 0.40821073]; const std: number[] = [0.26862954, 0.26130258, 0.27577711]; @@ -168,40 +77,40 @@ const getRGBData = async (jpegFilePath: string) => { for (let x = 0; x < nx3; x++) { for (let c = 0; c < 3; c++) { // Linear interpolation - const sx: number = (x + 0.5) * scale - 0.5; - const sy: number = (y + 0.5) * scale - 0.5; + const sx = (x + 0.5) * scale - 0.5; + const sy = (y + 0.5) * scale - 0.5; - const x0: number = Math.max(0, Math.floor(sx)); - const y0: number = Math.max(0, Math.floor(sy)); + const x0 = Math.max(0, Math.floor(sx)); + const y0 = Math.max(0, Math.floor(sy)); - const x1: number = Math.min(x0 + 1, nx - 1); - const y1: number = Math.min(y0 + 1, ny - 1); + const x1 = Math.min(x0 + 1, nx - 1); + const y1 = Math.min(y0 + 1, ny - 1); - const dx: number = sx - x0; - const dy: number = sy - y0; + const dx = sx - x0; + const dy = sy - y0; - const j00: number = 3 * (y0 * nx + x0) + c; - const j01: number = 3 * (y0 * nx + x1) + c; - const j10: number = 3 * (y1 * nx + x0) + c; - const j11: number = 3 * (y1 * nx + x1) + c; + const j00 = 3 * (y0 * nx + x0) + c; + const j01 = 3 * (y0 * nx + x1) + c; + const j10 = 3 * (y1 * nx + x0) + c; + const j11 = 3 * (y1 * nx + x1) + c; - const v00: number = inputImage[j00]; - const v01: number = inputImage[j01]; - const v10: number = inputImage[j10]; - const v11: number = inputImage[j11]; + const v00 = inputImage[j00] ?? 0; + const v01 = inputImage[j01] ?? 0; + const v10 = inputImage[j10] ?? 0; + const v11 = inputImage[j11] ?? 
0; - const v0: number = v00 * (1 - dx) + v01 * dx; - const v1: number = v10 * (1 - dx) + v11 * dx; + const v0 = v00 * (1 - dx) + v01 * dx; + const v1 = v10 * (1 - dx) + v11 * dx; - const v: number = v0 * (1 - dy) + v1 * dy; + const v = v0 * (1 - dy) + v1 * dy; - const v2: number = Math.min(Math.max(Math.round(v), 0), 255); + const v2 = Math.min(Math.max(Math.round(v), 0), 255); // createTensorWithDataList is dumb compared to reshape and // hence has to be given with one channel after another - const i: number = y * nx3 + x + (c % 3) * 224 * 224; + const i = y * nx3 + x + (c % 3) * 224 * 224; - result[i] = (v2 / 255 - mean[c]) / std[c]; + result[i] = (v2 / 255 - (mean[c] ?? 0)) / (std[c] ?? 1); } } } @@ -211,26 +120,41 @@ const getRGBData = async (jpegFilePath: string) => { const normalizeEmbedding = (embedding: Float32Array) => { let normalization = 0; - for (let index = 0; index < embedding.length; index++) { - normalization += embedding[index] * embedding[index]; - } + for (const v of embedding) normalization += v * v; + const sqrtNormalization = Math.sqrt(normalization); - for (let index = 0; index < embedding.length; index++) { - embedding[index] = embedding[index] / sqrtNormalization; - } + for (let index = 0; index < embedding.length; index++) + embedding[index] = ensure(embedding[index]) / sqrtNormalization; + return embedding; }; -let _tokenizer: Tokenizer = null; +const cachedCLIPTextSession = makeCachedInferenceSession( + "clip-text-vit-32-uint8.onnx", + 64173509 /* 61.2 MB */, +); + +let _tokenizer: Tokenizer | undefined; const getTokenizer = () => { - if (!_tokenizer) { - _tokenizer = new Tokenizer(); - } + if (!_tokenizer) _tokenizer = new Tokenizer(); return _tokenizer; }; -export const clipTextEmbedding = async (text: string) => { - const imageSession = await onnxTextSession(); +export const clipTextEmbeddingIfAvailable = async (text: string) => { + const sessionOrStatus = await Promise.race([ + cachedCLIPTextSession(), + "downloading-model", + ]); + 
+ // Don't wait for the download to complete + if (typeof sessionOrStatus == "string") { + log.info( + "Ignoring CLIP text embedding request because model download is pending", + ); + return undefined; + } + + const session = sessionOrStatus; const t1 = Date.now(); const tokenizer = getTokenizer(); const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text)); @@ -238,11 +162,11 @@ export const clipTextEmbedding = async (text: string) => { input: new ort.Tensor("int32", tokenizedText, [1, 77]), }; const t2 = Date.now(); - const results = await imageSession.run(feeds); + const results = await session.run(feeds); log.debug( () => `onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, ); - const textEmbedding = results["output"].data; + const textEmbedding = ensure(results.output).data as Float32Array; return normalizeEmbedding(textEmbedding); }; diff --git a/desktop/src/main/services/ml-face.ts b/desktop/src/main/services/ml-face.ts index 1f007c5fd8..9765252555 100644 --- a/desktop/src/main/services/ml-face.ts +++ b/desktop/src/main/services/ml-face.ts @@ -8,87 +8,30 @@ */ import * as ort from "onnxruntime-node"; import log from "../log"; -import { createInferenceSession, modelPathDownloadingIfNeeded } from "./ml"; +import { ensure } from "../utils/common"; +import { makeCachedInferenceSession } from "./ml"; -const faceDetectionModelName = "yolov5s_face_640_640_dynamic.onnx"; -const faceDetectionModelByteSize = 30762872; // 29.3 MB - -const faceEmbeddingModelName = "mobilefacenet_opset15.onnx"; -const faceEmbeddingModelByteSize = 5286998; // 5 MB - -let activeFaceDetectionModelDownload: Promise | undefined; - -const faceDetectionModelPathDownloadingIfNeeded = async () => { - try { - if (activeFaceDetectionModelDownload) { - log.info("Waiting for face detection model download to finish"); - await activeFaceDetectionModelDownload; - } else { - activeFaceDetectionModelDownload = modelPathDownloadingIfNeeded( 
- faceDetectionModelName, - faceDetectionModelByteSize, - ); - return await activeFaceDetectionModelDownload; - } - } finally { - activeFaceDetectionModelDownload = undefined; - } -}; - -let _faceDetectionSession: Promise | undefined; - -const faceDetectionSession = async () => { - if (!_faceDetectionSession) { - _faceDetectionSession = - faceDetectionModelPathDownloadingIfNeeded().then((modelPath) => - createInferenceSession(modelPath), - ); - } - return _faceDetectionSession; -}; - -let activeFaceEmbeddingModelDownload: Promise | undefined; - -const faceEmbeddingModelPathDownloadingIfNeeded = async () => { - try { - if (activeFaceEmbeddingModelDownload) { - log.info("Waiting for face embedding model download to finish"); - await activeFaceEmbeddingModelDownload; - } else { - activeFaceEmbeddingModelDownload = modelPathDownloadingIfNeeded( - faceEmbeddingModelName, - faceEmbeddingModelByteSize, - ); - return await activeFaceEmbeddingModelDownload; - } - } finally { - activeFaceEmbeddingModelDownload = undefined; - } -}; - -let _faceEmbeddingSession: Promise | undefined; - -const faceEmbeddingSession = async () => { - if (!_faceEmbeddingSession) { - _faceEmbeddingSession = - faceEmbeddingModelPathDownloadingIfNeeded().then((modelPath) => - createInferenceSession(modelPath), - ); - } - return _faceEmbeddingSession; -}; +const cachedFaceDetectionSession = makeCachedInferenceSession( + "yolov5s_face_640_640_dynamic.onnx", + 30762872 /* 29.3 MB */, +); export const detectFaces = async (input: Float32Array) => { - const session = await faceDetectionSession(); + const session = await cachedFaceDetectionSession(); const t = Date.now(); const feeds = { input: new ort.Tensor("float32", input, [1, 3, 640, 640]), }; const results = await session.run(feeds); log.debug(() => `onnx/yolo face detection took ${Date.now() - t} ms`); - return results["output"].data; + return ensure(results.output).data; }; +const cachedFaceEmbeddingSession = makeCachedInferenceSession( + 
"mobilefacenet_opset15.onnx", + 5286998 /* 5 MB */, +); + export const faceEmbedding = async (input: Float32Array) => { // Dimension of each face (alias) const mobileFaceNetFaceSize = 112; @@ -98,11 +41,12 @@ export const faceEmbedding = async (input: Float32Array) => { const n = Math.round(input.length / (z * z * 3)); const inputTensor = new ort.Tensor("float32", input, [n, z, z, 3]); - const session = await faceEmbeddingSession(); + const session = await cachedFaceEmbeddingSession(); const t = Date.now(); const feeds = { img_inputs: inputTensor }; const results = await session.run(feeds); log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`); - // TODO: What's with this type? It works in practice, but double check. - return (results.embeddings as unknown as any)["cpuData"]; // as Float32Array; + /* Need these model specific casts to extract and type the result */ + return (results.embeddings as unknown as Record) + .cpuData as Float32Array; }; diff --git a/desktop/src/main/services/ml.ts b/desktop/src/main/services/ml.ts index 60e8241e16..6b38bc74dc 100644 --- a/desktop/src/main/services/ml.ts +++ b/desktop/src/main/services/ml.ts @@ -1,5 +1,5 @@ /** - * @file AI/ML related functionality. + * @file AI/ML related functionality, generic layer. * * @see also `ml-clip.ts`, `ml-face.ts`. * @@ -18,6 +18,50 @@ import * as ort from "onnxruntime-node"; import log from "../log"; import { writeStream } from "../stream"; +/** + * Return a function that can be used to trigger a download of the specified + * model, and the creating of an ONNX inference session initialized using it. + * + * Multiple parallel calls to the returned function are fine, it ensures that + * the the model will be downloaded and the session created using it only once. + * All pending calls to it meanwhile will just await on the same promise. 
+ * + * And once the promise is resolved, the create ONNX inference session will be + * cached, so subsequent calls to the returned function will just reuse the same + * session. + * + * {@link makeCachedInferenceSession} can itself be called anytime, it doesn't + * actively trigger a download until the returned function is called. + * + * @param modelName The name of the model to download. + * + * @param modelByteSize The size in bytes that we expect the model to have. If + * the size of the downloaded model does not match the expected size, then we + * will redownload it. + * + * @returns a function. calling that function returns a promise to an ONNX + * session. + */ +export const makeCachedInferenceSession = ( + modelName: string, + modelByteSize: number, +) => { + let session: Promise | undefined; + + const download = () => + modelPathDownloadingIfNeeded(modelName, modelByteSize); + + const createSession = (modelPath: string) => + createInferenceSession(modelPath); + + const cachedInferenceSession = () => { + if (!session) session = download().then(createSession); + return session; + }; + + return cachedInferenceSession; +}; + /** * Download the model named {@link modelName} if we don't already have it. * @@ -26,7 +70,7 @@ import { writeStream } from "../stream"; * * @returns the path to the model on the local machine. 
*/ -export const modelPathDownloadingIfNeeded = async ( +const modelPathDownloadingIfNeeded = async ( modelName: string, expectedByteSize: number, ) => { @@ -49,31 +93,33 @@ export const modelPathDownloadingIfNeeded = async ( }; /** Return the path where the given {@link modelName} is meant to be saved */ -export const modelSavePath = (modelName: string) => +const modelSavePath = (modelName: string) => path.join(app.getPath("userData"), "models", modelName); -export const downloadModel = async (saveLocation: string, name: string) => { +const downloadModel = async (saveLocation: string, name: string) => { // `mkdir -p` the directory where we want to save the model. const saveDir = path.dirname(saveLocation); await fs.mkdir(saveDir, { recursive: true }); - // Download + // Download. log.info(`Downloading ML model from ${name}`); const url = `https://models.ente.io/${name}`; const res = await net.fetch(url); if (!res.ok) throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`); - // Save - await writeStream(saveLocation, res.body); + const body = res.body; + if (!body) throw new Error(`Received an null response for ${url}`); + // Save. + await writeStream(saveLocation, body); log.info(`Downloaded CLIP model ${name}`); }; /** * Crete an ONNX {@link InferenceSession} with some defaults. */ -export const createInferenceSession = async (modelPath: string) => { +const createInferenceSession = async (modelPath: string) => { return await ort.InferenceSession.create(modelPath, { - // Restrict the number of threads to 1 + // Restrict the number of threads to 1. intraOpNumThreads: 1, - // Be more conservative with RAM usage + // Be more conservative with RAM usage. 
enableCpuMemArena: false, }); }; diff --git a/desktop/src/main/services/store.ts b/desktop/src/main/services/store.ts index a484080f53..471928d76c 100644 --- a/desktop/src/main/services/store.ts +++ b/desktop/src/main/services/store.ts @@ -1,25 +1,37 @@ import { safeStorage } from "electron/main"; -import { keysStore } from "../stores/keys.store"; -import { safeStorageStore } from "../stores/safeStorage.store"; -import { uploadStatusStore } from "../stores/upload.store"; -import { watchStore } from "../stores/watch.store"; +import { safeStorageStore } from "../stores/safe-storage"; +import { uploadStatusStore } from "../stores/upload-status"; +import { watchStore } from "../stores/watch"; +/** + * Clear all stores except user preferences. + * + * This is useful to reset state when the user logs out. + */ export const clearStores = () => { - uploadStatusStore.clear(); - keysStore.clear(); safeStorageStore.clear(); + uploadStatusStore.clear(); watchStore.clear(); }; -export const saveEncryptionKey = async (encryptionKey: string) => { - const encryptedKey: Buffer = await safeStorage.encryptString(encryptionKey); +/** + * [Note: Safe storage keys] + * + * On macOS, `safeStorage` stores our data under a Keychain entry named + * " Safe Storage". 
Which resolves to: + * + * - Electron Safe Storage (dev) + * - ente Safe Storage (prod) + */ +export const saveEncryptionKey = (encryptionKey: string) => { + const encryptedKey = safeStorage.encryptString(encryptionKey); const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64"); safeStorageStore.set("encryptionKey", b64EncryptedKey); }; -export const encryptionKey = async (): Promise => { +export const encryptionKey = (): string | undefined => { const b64EncryptedKey = safeStorageStore.get("encryptionKey"); if (!b64EncryptedKey) return undefined; const keyBuffer = Buffer.from(b64EncryptedKey, "base64"); - return await safeStorage.decryptString(keyBuffer); + return safeStorage.decryptString(keyBuffer); }; diff --git a/desktop/src/main/services/upload.ts b/desktop/src/main/services/upload.ts index e3fbc16e62..f7d0436c0b 100644 --- a/desktop/src/main/services/upload.ts +++ b/desktop/src/main/services/upload.ts @@ -1,107 +1,149 @@ import StreamZip from "node-stream-zip"; -import path from "path"; -import { ElectronFile, FILE_PATH_TYPE } from "../../types/ipc"; -import { FILE_PATH_KEYS } from "../../types/main"; -import { uploadStatusStore } from "../stores/upload.store"; -import { getElectronFile, getValidPaths, getZipFileStream } from "./fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { existsSync } from "original-fs"; +import type { PendingUploads, ZipItem } from "../../types/ipc"; +import { uploadStatusStore } from "../stores/upload-status"; -export const getPendingUploads = async () => { - const filePaths = getSavedFilePaths(FILE_PATH_TYPE.FILES); - const zipPaths = getSavedFilePaths(FILE_PATH_TYPE.ZIPS); - const collectionName = uploadStatusStore.get("collectionName"); - - let files: ElectronFile[] = []; - let type: FILE_PATH_TYPE; - if (zipPaths.length) { - type = FILE_PATH_TYPE.ZIPS; - for (const zipPath of zipPaths) { - files = [ - ...files, - ...(await getElectronFilesFromGoogleZip(zipPath)), - ]; - } - const 
pendingFilePaths = new Set(filePaths); - files = files.filter((file) => pendingFilePaths.has(file.path)); - } else if (filePaths.length) { - type = FILE_PATH_TYPE.FILES; - files = await Promise.all(filePaths.map(getElectronFile)); - } - return { - files, - collectionName, - type, - }; -}; - -export const getSavedFilePaths = (type: FILE_PATH_TYPE) => { - const paths = - getValidPaths( - uploadStatusStore.get(FILE_PATH_KEYS[type]) as string[], - ) ?? []; - - setToUploadFiles(type, paths); - return paths; -}; - -export async function getZipEntryAsElectronFile( - zipName: string, - zip: StreamZip.StreamZipAsync, - entry: StreamZip.ZipEntry, -): Promise { - return { - path: path - .join(zipName, entry.name) - .split(path.sep) - .join(path.posix.sep), - name: path.basename(entry.name), - size: entry.size, - lastModified: entry.time, - stream: async () => { - return await getZipFileStream(zip, entry.name); - }, - blob: async () => { - const buffer = await zip.entryData(entry.name); - return new Blob([new Uint8Array(buffer)]); - }, - arrayBuffer: async () => { - const buffer = await zip.entryData(entry.name); - return new Uint8Array(buffer); - }, - }; -} - -export const setToUploadFiles = (type: FILE_PATH_TYPE, filePaths: string[]) => { - const key = FILE_PATH_KEYS[type]; - if (filePaths) { - uploadStatusStore.set(key, filePaths); - } else { - uploadStatusStore.delete(key); - } -}; - -export const setToUploadCollection = (collectionName: string) => { - if (collectionName) { - uploadStatusStore.set("collectionName", collectionName); - } else { - uploadStatusStore.delete("collectionName"); - } -}; - -export const getElectronFilesFromGoogleZip = async (filePath: string) => { - const zip = new StreamZip.async({ - file: filePath, - }); - const zipName = path.basename(filePath, ".zip"); +export const listZipItems = async (zipPath: string): Promise => { + const zip = new StreamZip.async({ file: zipPath }); const entries = await zip.entries(); - const files: ElectronFile[] = []; + 
const entryNames: string[] = []; for (const entry of Object.values(entries)) { const basename = path.basename(entry.name); - if (entry.isFile && basename.length > 0 && basename[0] !== ".") { - files.push(await getZipEntryAsElectronFile(zipName, zip, entry)); + // Ignore "hidden" files (files whose names begins with a dot). + if (entry.isFile && !basename.startsWith(".")) { + // `entry.name` is the path within the zip. + entryNames.push(entry.name); } } - return files; + await zip.close(); + + return entryNames.map((entryName) => [zipPath, entryName]); }; + +export const pathOrZipItemSize = async ( + pathOrZipItem: string | ZipItem, +): Promise => { + if (typeof pathOrZipItem == "string") { + const stat = await fs.stat(pathOrZipItem); + return stat.size; + } else { + const [zipPath, entryName] = pathOrZipItem; + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + if (!entry) + throw new Error( + `An entry with name ${entryName} does not exist in the zip file at ${zipPath}`, + ); + const size = entry.size; + await zip.close(); + return size; + } +}; + +export const pendingUploads = async (): Promise => { + const collectionName = uploadStatusStore.get("collectionName") ?? undefined; + + const allFilePaths = uploadStatusStore.get("filePaths") ?? []; + const filePaths = allFilePaths.filter((f) => existsSync(f)); + + const allZipItems = uploadStatusStore.get("zipItems"); + let zipItems: typeof allZipItems; + + // Migration code - May 2024. Remove after a bit. + // + // The older store formats will not have zipItems and instead will have + // zipPaths. If we find such a case, read the zipPaths and enqueue all of + // their files as zipItems in the result. + // + // This potentially can be cause us to try reuploading an already uploaded + // file, but the dedup logic will kick in at that point so no harm will come + // of it. + if (allZipItems === undefined) { + const allZipPaths = uploadStatusStore.get("filePaths") ?? 
[]; + const zipPaths = allZipPaths.filter((f) => existsSync(f)); + zipItems = []; + for (const zip of zipPaths) + zipItems = zipItems.concat(await listZipItems(zip)); + } else { + zipItems = allZipItems.filter(([z]) => existsSync(z)); + } + + if (filePaths.length == 0 && zipItems.length == 0) return undefined; + + return { + collectionName, + filePaths, + zipItems, + }; +}; + +/** + * [Note: Missing values in electron-store] + * + * Suppose we were to create a store like this: + * + * const store = new Store({ + * schema: { + * foo: { type: "string" }, + * bars: { type: "array", items: { type: "string" } }, + * }, + * }); + * + * If we fetch `store.get("foo")` or `store.get("bars")`, we get `undefined`. + * But if we try to set these back to `undefined`, say `store.set("foo", + * someUndefValue)`, we get asked to + * + * TypeError: Use `delete()` to clear values + * + * This happens even if we do bulk object updates, e.g. with a JS object that + * has undefined keys: + * + * > TypeError: Setting a value of type `undefined` for key `collectionName` is + * > not allowed as it's not supported by JSON + * + * So what should the TypeScript type for "foo" be? + * + * If it is were to not include the possibility of `undefined`, then the type + * would lie because `store.get("foo")` can indeed be `undefined. But if we were + * to include the possibility of `undefined`, then trying to `store.set("foo", + * someUndefValue)` will throw. + * + * The approach we take is to rely on false-y values (empty strings and empty + * arrays) to indicate missing values, and then converting those to `undefined` + * when reading from the store, and converting `undefined` to the corresponding + * false-y value when writing. + */ +export const setPendingUploads = ({ + collectionName, + filePaths, + zipItems, +}: PendingUploads) => { + uploadStatusStore.set({ + collectionName: collectionName ?? 
"", + filePaths: filePaths, + zipItems: zipItems, + }); +}; + +export const markUploadedFiles = (paths: string[]) => { + const existing = uploadStatusStore.get("filePaths") ?? []; + const updated = existing.filter((p) => !paths.includes(p)); + uploadStatusStore.set("filePaths", updated); +}; + +export const markUploadedZipItems = ( + items: [zipPath: string, entryName: string][], +) => { + const existing = uploadStatusStore.get("zipItems") ?? []; + const updated = existing.filter( + (z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]), + ); + uploadStatusStore.set("zipItems", updated); +}; + +export const clearPendingUploads = () => uploadStatusStore.clear(); diff --git a/desktop/src/main/services/watch.ts b/desktop/src/main/services/watch.ts index 1d466d4156..de66dcca1c 100644 --- a/desktop/src/main/services/watch.ts +++ b/desktop/src/main/services/watch.ts @@ -1,101 +1,156 @@ -import type { FSWatcher } from "chokidar"; -import ElectronLog from "electron-log"; -import { FolderWatch, WatchStoreType } from "../../types/ipc"; -import { watchStore } from "../stores/watch.store"; +import chokidar, { type FSWatcher } from "chokidar"; +import { BrowserWindow } from "electron/main"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { FolderWatch, type CollectionMapping } from "../../types/ipc"; +import log from "../log"; +import { watchStore } from "../stores/watch"; +import { posixPath } from "../utils/electron"; +import { fsIsDir } from "./fs"; -export const addWatchMapping = async ( - watcher: FSWatcher, - rootFolderName: string, - folderPath: string, - uploadStrategy: number, -) => { - ElectronLog.log(`Adding watch mapping: ${folderPath}`); - const watchMappings = getWatchMappings(); - if (isMappingPresent(watchMappings, folderPath)) { - throw new Error(`Watch mapping already exists`); +/** + * Create and return a new file system watcher. + * + * Internally this uses the watcher from the chokidar package. 
+ * + * @param mainWindow The window handle is used to notify the renderer process of + * pertinent file system events. + */ +export const createWatcher = (mainWindow: BrowserWindow) => { + const send = (eventName: string) => (path: string) => + mainWindow.webContents.send(eventName, ...eventData(path)); + + const folderPaths = folderWatches().map((watch) => watch.folderPath); + + const watcher = chokidar.watch(folderPaths, { + awaitWriteFinish: true, + }); + + watcher + .on("add", send("watchAddFile")) + .on("unlink", send("watchRemoveFile")) + .on("unlinkDir", send("watchRemoveDir")) + .on("error", (error) => log.error("Error while watching files", error)); + + return watcher; +}; + +const eventData = (platformPath: string): [string, FolderWatch] => { + const path = posixPath(platformPath); + + const watch = folderWatches().find((watch) => + path.startsWith(watch.folderPath + "/"), + ); + + if (!watch) throw new Error(`No folder watch was found for path ${path}`); + + return [path, watch]; +}; + +export const watchGet = async (watcher: FSWatcher): Promise => { + const valid: FolderWatch[] = []; + const deletedPaths: string[] = []; + for (const watch of folderWatches()) { + if (await fsIsDir(watch.folderPath)) valid.push(watch); + else deletedPaths.push(watch.folderPath); } + if (deletedPaths.length) { + await Promise.all(deletedPaths.map((p) => watchRemove(watcher, p))); + setFolderWatches(valid); + } + return valid; +}; - watcher.add(folderPath); +const folderWatches = (): FolderWatch[] => watchStore.get("mappings") ?? 
[]; - watchMappings.push({ - rootFolderName, - uploadStrategy, +const setFolderWatches = (watches: FolderWatch[]) => + watchStore.set("mappings", watches); + +export const watchAdd = async ( + watcher: FSWatcher, + folderPath: string, + collectionMapping: CollectionMapping, +) => { + const watches = folderWatches(); + + if (!(await fsIsDir(folderPath))) + throw new Error( + `Attempting to add a folder watch for a folder path ${folderPath} that is not an existing directory`, + ); + + if (watches.find((watch) => watch.folderPath == folderPath)) + throw new Error( + `A folder watch with the given folder path ${folderPath} already exists`, + ); + + watches.push({ folderPath, + collectionMapping, syncedFiles: [], ignoredFiles: [], }); - setWatchMappings(watchMappings); + setFolderWatches(watches); + + watcher.add(folderPath); + + return watches; }; -function isMappingPresent(watchMappings: FolderWatch[], folderPath: string) { - const watchMapping = watchMappings?.find( - (mapping) => mapping.folderPath === folderPath, - ); - return !!watchMapping; -} +export const watchRemove = (watcher: FSWatcher, folderPath: string) => { + const watches = folderWatches(); + const filtered = watches.filter((watch) => watch.folderPath != folderPath); + if (watches.length == filtered.length) + throw new Error( + `Attempting to remove a non-existing folder watch for folder path ${folderPath}`, + ); + setFolderWatches(filtered); + watcher.unwatch(folderPath); + return filtered; +}; -export const removeWatchMapping = async ( - watcher: FSWatcher, +export const watchUpdateSyncedFiles = ( + syncedFiles: FolderWatch["syncedFiles"], folderPath: string, ) => { - let watchMappings = getWatchMappings(); - const watchMapping = watchMappings.find( - (mapping) => mapping.folderPath === folderPath, + setFolderWatches( + folderWatches().map((watch) => { + if (watch.folderPath == folderPath) { + watch.syncedFiles = syncedFiles; + } + return watch; + }), ); - - if (!watchMapping) { - throw new 
Error(`Watch mapping does not exist`); - } - - watcher.unwatch(watchMapping.folderPath); - - watchMappings = watchMappings.filter( - (mapping) => mapping.folderPath !== watchMapping.folderPath, - ); - - setWatchMappings(watchMappings); }; -export function updateWatchMappingSyncedFiles( +export const watchUpdateIgnoredFiles = ( + ignoredFiles: FolderWatch["ignoredFiles"], folderPath: string, - files: FolderWatch["syncedFiles"], -): void { - const watchMappings = getWatchMappings(); - const watchMapping = watchMappings.find( - (mapping) => mapping.folderPath === folderPath, +) => { + setFolderWatches( + folderWatches().map((watch) => { + if (watch.folderPath == folderPath) { + watch.ignoredFiles = ignoredFiles; + } + return watch; + }), ); +}; - if (!watchMapping) { - throw Error(`Watch mapping not found`); +export const watchFindFiles = async (dirPath: string) => { + const items = await fs.readdir(dirPath, { withFileTypes: true }); + let paths: string[] = []; + for (const item of items) { + const itemPath = path.posix.join(dirPath, item.name); + if (item.isFile()) { + paths.push(itemPath); + } else if (item.isDirectory()) { + paths = [...paths, ...(await watchFindFiles(itemPath))]; + } } + return paths; +}; - watchMapping.syncedFiles = files; - setWatchMappings(watchMappings); -} - -export function updateWatchMappingIgnoredFiles( - folderPath: string, - files: FolderWatch["ignoredFiles"], -): void { - const watchMappings = getWatchMappings(); - const watchMapping = watchMappings.find( - (mapping) => mapping.folderPath === folderPath, - ); - - if (!watchMapping) { - throw Error(`Watch mapping not found`); - } - - watchMapping.ignoredFiles = files; - setWatchMappings(watchMappings); -} - -export function getWatchMappings() { - const mappings = watchStore.get("mappings") ?? 
[]; - return mappings; -} - -function setWatchMappings(watchMappings: WatchStoreType["mappings"]) { - watchStore.set("mappings", watchMappings); -} +export const watchReset = (watcher: FSWatcher) => { + watcher.unwatch(folderWatches().map((watch) => watch.folderPath)); +}; diff --git a/desktop/src/main/stores/keys.store.ts b/desktop/src/main/stores/keys.store.ts deleted file mode 100644 index 4f8618cea8..0000000000 --- a/desktop/src/main/stores/keys.store.ts +++ /dev/null @@ -1,18 +0,0 @@ -import Store, { Schema } from "electron-store"; -import type { KeysStoreType } from "../../types/main"; - -const keysStoreSchema: Schema = { - AnonymizeUserID: { - type: "object", - properties: { - id: { - type: "string", - }, - }, - }, -}; - -export const keysStore = new Store({ - name: "keys", - schema: keysStoreSchema, -}); diff --git a/desktop/src/main/stores/safeStorage.store.ts b/desktop/src/main/stores/safe-storage.ts similarity index 63% rename from desktop/src/main/stores/safeStorage.store.ts rename to desktop/src/main/stores/safe-storage.ts index da95df3be0..040af1f3ed 100644 --- a/desktop/src/main/stores/safeStorage.store.ts +++ b/desktop/src/main/stores/safe-storage.ts @@ -1,7 +1,10 @@ import Store, { Schema } from "electron-store"; -import type { SafeStorageStoreType } from "../../types/main"; -const safeStorageSchema: Schema = { +interface SafeStorageStore { + encryptionKey?: string; +} + +const safeStorageSchema: Schema = { encryptionKey: { type: "string", }, diff --git a/desktop/src/main/stores/upload-status.ts b/desktop/src/main/stores/upload-status.ts new file mode 100644 index 0000000000..8cb2410df6 --- /dev/null +++ b/desktop/src/main/stores/upload-status.ts @@ -0,0 +1,54 @@ +import Store, { Schema } from "electron-store"; + +export interface UploadStatusStore { + /** + * The collection to which we're uploading, or the root collection. + * + * Not all pending uploads will have an associated collection. 
+ */ + collectionName?: string; + /** + * Paths to regular files that are pending upload. + */ + filePaths?: string[]; + /** + * Each item is the path to a zip file and the name of an entry within it. + */ + zipItems?: [zipPath: string, entryName: string][]; + /** + * @deprecated Legacy paths to zip files, now subsumed into zipItems. + */ + zipPaths?: string[]; +} + +const uploadStatusSchema: Schema = { + collectionName: { + type: "string", + }, + filePaths: { + type: "array", + items: { + type: "string", + }, + }, + zipItems: { + type: "array", + items: { + type: "array", + items: { + type: "string", + }, + }, + }, + zipPaths: { + type: "array", + items: { + type: "string", + }, + }, +}; + +export const uploadStatusStore = new Store({ + name: "upload-status", + schema: uploadStatusSchema, +}); diff --git a/desktop/src/main/stores/upload.store.ts b/desktop/src/main/stores/upload.store.ts deleted file mode 100644 index 20b1f419d5..0000000000 --- a/desktop/src/main/stores/upload.store.ts +++ /dev/null @@ -1,25 +0,0 @@ -import Store, { Schema } from "electron-store"; -import type { UploadStoreType } from "../../types/main"; - -const uploadStoreSchema: Schema = { - filePaths: { - type: "array", - items: { - type: "string", - }, - }, - zipPaths: { - type: "array", - items: { - type: "string", - }, - }, - collectionName: { - type: "string", - }, -}; - -export const uploadStatusStore = new Store({ - name: "upload-status", - schema: uploadStoreSchema, -}); diff --git a/desktop/src/main/stores/user-preferences.ts b/desktop/src/main/stores/user-preferences.ts index a305f1a99b..f3b1929892 100644 --- a/desktop/src/main/stores/user-preferences.ts +++ b/desktop/src/main/stores/user-preferences.ts @@ -1,12 +1,12 @@ import Store, { Schema } from "electron-store"; -interface UserPreferencesSchema { - hideDockIcon: boolean; +interface UserPreferences { + hideDockIcon?: boolean; skipAppVersion?: string; muteUpdateNotificationVersion?: string; } -const userPreferencesSchema: Schema = 
{ +const userPreferencesSchema: Schema = { hideDockIcon: { type: "boolean", }, diff --git a/desktop/src/main/stores/watch.store.ts b/desktop/src/main/stores/watch.store.ts deleted file mode 100644 index 55470ce868..0000000000 --- a/desktop/src/main/stores/watch.store.ts +++ /dev/null @@ -1,47 +0,0 @@ -import Store, { Schema } from "electron-store"; -import { WatchStoreType } from "../../types/ipc"; - -const watchStoreSchema: Schema = { - mappings: { - type: "array", - items: { - type: "object", - properties: { - rootFolderName: { - type: "string", - }, - uploadStrategy: { - type: "number", - }, - folderPath: { - type: "string", - }, - syncedFiles: { - type: "array", - items: { - type: "object", - properties: { - path: { - type: "string", - }, - id: { - type: "number", - }, - }, - }, - }, - ignoredFiles: { - type: "array", - items: { - type: "string", - }, - }, - }, - }, - }, -}; - -export const watchStore = new Store({ - name: "watch-status", - schema: watchStoreSchema, -}); diff --git a/desktop/src/main/stores/watch.ts b/desktop/src/main/stores/watch.ts new file mode 100644 index 0000000000..59032c9acd --- /dev/null +++ b/desktop/src/main/stores/watch.ts @@ -0,0 +1,77 @@ +import Store, { Schema } from "electron-store"; +import { type FolderWatch } from "../../types/ipc"; +import log from "../log"; + +interface WatchStore { + mappings?: FolderWatchWithLegacyFields[]; +} + +type FolderWatchWithLegacyFields = FolderWatch & { + /** @deprecated Only retained for migration, do not use in other code */ + rootFolderName?: string; + /** @deprecated Only retained for migration, do not use in other code */ + uploadStrategy?: number; +}; + +const watchStoreSchema: Schema = { + mappings: { + type: "array", + items: { + type: "object", + properties: { + rootFolderName: { type: "string" }, + collectionMapping: { type: "string" }, + uploadStrategy: { type: "number" }, + folderPath: { type: "string" }, + syncedFiles: { + type: "array", + items: { + type: "object", + properties: { 
+ path: { type: "string" }, + uploadedFileID: { type: "number" }, + collectionID: { type: "number" }, + }, + }, + }, + ignoredFiles: { + type: "array", + items: { type: "string" }, + }, + }, + }, + }, +}; + +export const watchStore = new Store({ + name: "watch-status", + schema: watchStoreSchema, +}); + +/** + * Previous versions of the store used to store an integer to indicate the + * collection mapping, migrate these to the new schema if we encounter them. + */ +export const migrateLegacyWatchStoreIfNeeded = () => { + let needsUpdate = false; + const updatedWatches = []; + for (const watch of watchStore.get("mappings") ?? []) { + let collectionMapping = watch.collectionMapping; + // The required type defines the latest schema, but before migration + // this'll be undefined, so tell ESLint to calm down. + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!collectionMapping) { + collectionMapping = watch.uploadStrategy == 1 ? "parent" : "root"; + needsUpdate = true; + } + if (watch.rootFolderName) { + delete watch.rootFolderName; + needsUpdate = true; + } + updatedWatches.push({ ...watch, collectionMapping }); + } + if (needsUpdate) { + watchStore.set("mappings", updatedWatches); + log.info("Migrated legacy watch store data to new schema"); + } +}; diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts index 8ddb80dc6a..bae13aa121 100644 --- a/desktop/src/main/stream.ts +++ b/desktop/src/main/stream.ts @@ -1,15 +1,19 @@ /** * @file stream data to-from renderer using a custom protocol handler. 
 */ -import { protocol } from "electron/main"; +import { net, protocol } from "electron/main"; +import StreamZip from "node-stream-zip"; import { createWriteStream, existsSync } from "node:fs"; import fs from "node:fs/promises"; import { Readable } from "node:stream"; +import { ReadableStream } from "node:stream/web"; +import { pathToFileURL } from "node:url"; import log from "./log"; +import { ensure } from "./utils/common"; /** * Register a protocol handler that we use for streaming large files between the - * main process (node) and the renderer process (browser) layer. + * main (Node.js) and renderer (Chromium) processes. * * [Note: IPC streams] * @@ -17,11 +21,14 @@ import log from "./log"; * across IPC. And passing the entire contents of the file is not feasible for * large video files because of the memory pressure the copying would entail. * - * As an alternative, we register a custom protocol handler that can provided a + * As an alternative, we register a custom protocol handler that can provide a * bi-directional stream. The renderer can stream data to the node side by * streaming the request. The node side can stream to the renderer side by * streaming the response. * + * The stream is not full duplex - while both reads and writes can be streamed, + * they need to be streamed separately. + * * See also: [Note: Transferring large amount of data over IPC] * * Depends on {@link registerPrivilegedSchemes}. @@ -29,88 +36,148 @@ import log from "./log"; export const registerStreamProtocol = () => { protocol.handle("stream", async (request: Request) => { const url = request.url; - const { host, pathname } = new URL(url); - // Convert e.g. "%20" to spaces. - const path = decodeURIComponent(pathname); + // The request URL contains the command to run as the host, and the + // pathname of the file(s) as the search params. 
+ const { host, searchParams } = new URL(url); switch (host) { - /* stream://write/path/to/file */ - /* host-pathname----- */ + case "read": + return handleRead(ensure(searchParams.get("path"))); + case "read-zip": + return handleReadZip( + ensure(searchParams.get("zipPath")), + ensure(searchParams.get("entryName")), + ); case "write": - try { - await writeStream(path, request.body); - return new Response("", { status: 200 }); - } catch (e) { - log.error(`Failed to write stream for ${url}`, e); - return new Response( - `Failed to write stream: ${e.message}`, - { status: 500 }, - ); - } + return handleWrite(ensure(searchParams.get("path")), request); default: return new Response("", { status: 404 }); } }); }; +const handleRead = async (path: string) => { + try { + const res = await net.fetch(pathToFileURL(path).toString()); + if (res.ok) { + // net.fetch already seems to add "Content-Type" and "Last-Modified" + // headers, but I couldn't find documentation for this. In any case, + // since we already are stat-ting the file for the "Content-Length", + // we explicitly add the "X-Last-Modified-Ms" too, + // + // 1. Guaranteeing its presence, + // + // 2. Having it be in the exact format we want (no string <-> date + // conversions), + // + // 3. Retaining milliseconds. + + const stat = await fs.stat(path); + + // Add the file's size as the Content-Length header. + const fileSize = stat.size; + res.headers.set("Content-Length", `${fileSize}`); + + // Add the file's last modified time (as epoch milliseconds). 
+ const mtimeMs = stat.mtimeMs; + res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`); + } + return res; + } catch (e) { + log.error(`Failed to read stream at ${path}`, e); + return new Response(`Failed to read stream: ${String(e)}`, { + status: 500, + }); + } +}; + +const handleReadZip = async (zipPath: string, entryName: string) => { + try { + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + if (!entry) return new Response("", { status: 404 }); + + // This returns an "old style" NodeJS.ReadableStream. + const stream = await zip.stream(entry); + // Convert it into a new style NodeJS.Readable. + const nodeReadable = new Readable().wrap(stream); + // Then convert it into a Web stream. + const webReadableStreamAny = Readable.toWeb(nodeReadable); + // However, we get a ReadableStream now. This doesn't go into the + // `BodyInit` expected by the Response constructor, which wants a + // ReadableStream. Force a cast. + const webReadableStream = + webReadableStreamAny as ReadableStream; + + // Close the zip handle when the underlying stream closes. + stream.on("end", () => void zip.close()); + + return new Response(webReadableStream, { + headers: { + // We don't know the exact type, but it doesn't really matter, + // just set it to a generic binary content-type so that the + // browser doesn't tinker with it thinking of it as text. + "Content-Type": "application/octet-stream", + "Content-Length": `${entry.size}`, + // While it is documented that entry.time is the modification + // time, the units are not mentioned. By seeing the source code, + // we can verify that it is indeed epoch milliseconds. 
See + // `parseZipTime` in the node-stream-zip source, + // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js + "X-Last-Modified-Ms": `${entry.time}`, + }, + }); + } catch (e) { + log.error( + `Failed to read entry ${entryName} from zip file at ${zipPath}`, + e, + ); + return new Response(`Failed to read stream: ${String(e)}`, { + status: 500, + }); + } +}; + +const handleWrite = async (path: string, request: Request) => { + try { + await writeStream(path, ensure(request.body)); + return new Response("", { status: 200 }); + } catch (e) { + log.error(`Failed to write stream to ${path}`, e); + return new Response(`Failed to write stream: ${String(e)}`, { + status: 500, + }); + } +}; + /** * Write a (web) ReadableStream to a file at the given {@link filePath}. * * The returned promise resolves when the write completes. * * @param filePath The local filesystem path where the file should be written. - * @param readableStream A [web - * ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) + * + * @param readableStream A web + * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream). 
*/ export const writeStream = (filePath: string, readableStream: ReadableStream) => - writeNodeStream(filePath, convertWebReadableStreamToNode(readableStream)); + writeNodeStream(filePath, Readable.fromWeb(readableStream)); -/** - * Convert a Web ReadableStream into a Node.js ReadableStream - * - * This can be used to, for example, write a ReadableStream obtained via - * `net.fetch` into a file using the Node.js `fs` APIs - */ -const convertWebReadableStreamToNode = (readableStream: ReadableStream) => { - const reader = readableStream.getReader(); - const rs = new Readable(); - - rs._read = async () => { - try { - const result = await reader.read(); - - if (!result.done) { - rs.push(Buffer.from(result.value)); - } else { - rs.push(null); - return; - } - } catch (e) { - rs.emit("error", e); - } - }; - - return rs; -}; - -const writeNodeStream = async ( - filePath: string, - fileStream: NodeJS.ReadableStream, -) => { +const writeNodeStream = async (filePath: string, fileStream: Readable) => { const writeable = createWriteStream(filePath); - fileStream.on("error", (error) => { - writeable.destroy(error); // Close the writable stream with an error + fileStream.on("error", (err) => { + writeable.destroy(err); // Close the writable stream with an error }); fileStream.pipe(writeable); await new Promise((resolve, reject) => { writeable.on("finish", resolve); - writeable.on("error", async (e: unknown) => { + writeable.on("error", (err) => { if (existsSync(filePath)) { - await fs.unlink(filePath); + void fs.unlink(filePath); } - reject(e); + reject(err); }); }); }; diff --git a/desktop/src/main/temp.ts b/desktop/src/main/temp.ts deleted file mode 100644 index 489e5cbd47..0000000000 --- a/desktop/src/main/temp.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { app } from "electron/main"; -import { existsSync } from "node:fs"; -import fs from "node:fs/promises"; -import path from "path"; - -const CHARACTERS = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - 
-export async function getTempDirPath() { - const tempDirPath = path.join(app.getPath("temp"), "ente"); - await fs.mkdir(tempDirPath, { recursive: true }); - return tempDirPath; -} - -function generateTempName(length: number) { - let result = ""; - - const charactersLength = CHARACTERS.length; - for (let i = 0; i < length; i++) { - result += CHARACTERS.charAt( - Math.floor(Math.random() * charactersLength), - ); - } - return result; -} - -export async function generateTempFilePath(formatSuffix: string) { - let tempFilePath: string; - do { - const tempDirPath = await getTempDirPath(); - const namePrefix = generateTempName(10); - tempFilePath = path.join(tempDirPath, namePrefix + "-" + formatSuffix); - } while (existsSync(tempFilePath)); - return tempFilePath; -} diff --git a/desktop/src/main/utils/common.ts b/desktop/src/main/utils/common.ts new file mode 100644 index 0000000000..1f5016e617 --- /dev/null +++ b/desktop/src/main/utils/common.ts @@ -0,0 +1,44 @@ +/** + * @file grab bag of utility functions. + * + * These are verbatim copies of functions from web code since there isn't + * currently a common package that both of them share. + */ + +/** + * Throw an exception if the given value is `null` or `undefined`. + */ +export const ensure = (v: T | null | undefined): T => { + if (v === null) throw new Error("Required value was null"); + if (v === undefined) throw new Error("Required value was not found"); + return v; +}; + +/** + * Wait for {@link ms} milliseconds + * + * This function is a promisified `setTimeout`. It returns a promise that + * resolves after {@link ms} milliseconds. + */ +export const wait = (ms: number) => + new Promise((resolve) => setTimeout(resolve, ms)); + +/** + * Await the given {@link promise} for {@link ms} milliseconds. If it + * does not resolve within {@link ms}, then reject with a timeout error. 
+ */ +export const withTimeout = async (promise: Promise, ms: number) => { + let timeoutId: ReturnType; + const rejectOnTimeout = new Promise((_, reject) => { + timeoutId = setTimeout( + () => reject(new Error("Operation timed out")), + ms, + ); + }); + const promiseAndCancelTimeout = async () => { + const result = await promise; + clearTimeout(timeoutId); + return result; + }; + return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); +}; diff --git a/desktop/src/main/util.ts b/desktop/src/main/utils/electron.ts similarity index 51% rename from desktop/src/main/util.ts rename to desktop/src/main/utils/electron.ts index d0c6699e9a..93e8565ef2 100644 --- a/desktop/src/main/util.ts +++ b/desktop/src/main/utils/electron.ts @@ -1,14 +1,35 @@ import shellescape from "any-shell-escape"; -import { shell } from "electron"; /* TODO(MR): Why is this not in /main? */ import { app } from "electron/main"; import { exec } from "node:child_process"; import path from "node:path"; import { promisify } from "node:util"; -import log from "./log"; +import log from "../log"; /** `true` if the app is running in development mode. */ export const isDev = !app.isPackaged; +/** + * Convert a file system {@link platformPath} that uses the local system + * specific path separators into a path that uses POSIX file separators. + * + * For all paths that we persist or pass over the IPC boundary, we always use + * POSIX paths, even on Windows. + * + * Windows recognizes both forward and backslashes. This also works with drive + * names. c:\foo\bar and c:/foo/bar are both valid. + * + * > Almost all paths passed to Windows APIs are normalized. During + * > normalization, Windows performs the following steps: ... All forward + * > slashes (/) are converted into the standard Windows separator, the back + * > slash (\). + * > + * > https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats + */ +export const posixPath = (platformPath: string) => + path.sep == path.posix.sep + ? 
platformPath + : platformPath.split(path.sep).join(path.posix.sep); + /** * Run a shell command asynchronously. * @@ -33,49 +54,11 @@ export const execAsync = (command: string | string[]) => { ? shellescape(command) : command; const startTime = Date.now(); - log.debug(() => `Running shell command: ${escapedCommand}`); const result = execAsync_(escapedCommand); log.debug( - () => - `Completed in ${Math.round(Date.now() - startTime)} ms (${escapedCommand})`, + () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`, ); return result; }; const execAsync_ = promisify(exec); - -/** - * Open the given {@link dirPath} in the system's folder viewer. - * - * For example, on macOS this'll open {@link dirPath} in Finder. - */ -export const openDirectory = async (dirPath: string) => { - const res = await shell.openPath(path.normalize(dirPath)); - // shell.openPath resolves with a string containing the error message - // corresponding to the failure if a failure occurred, otherwise "". - if (res) throw new Error(`Failed to open directory ${dirPath}: res`); -}; - -/** - * Return the path where the logs for the app are saved. - * - * [Note: Electron app paths] - * - * By default, these paths are at the following locations: - * - * - macOS: `~/Library/Application Support/ente` - * - Linux: `~/.config/ente` - * - Windows: `%APPDATA%`, e.g. `C:\Users\\AppData\Local\ente` - * - Windows: C:\Users\\AppData\Local\ - * - * https://www.electronjs.org/docs/latest/api/app - * - */ -const logDirectoryPath = () => app.getPath("logs"); - -/** - * Open the app's log directory in the system's folder viewer. 
- * - * @see {@link openDirectory} - */ -export const openLogDirectory = () => openDirectory(logDirectoryPath()); diff --git a/desktop/src/main/utils/temp.ts b/desktop/src/main/utils/temp.ts new file mode 100644 index 0000000000..11f7a5d845 --- /dev/null +++ b/desktop/src/main/utils/temp.ts @@ -0,0 +1,125 @@ +import { app } from "electron/main"; +import StreamZip from "node-stream-zip"; +import { existsSync } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import type { ZipItem } from "../../types/ipc"; +import { ensure } from "./common"; + +/** + * Our very own directory within the system temp directory. Go crazy, but + * remember to clean up, especially in exception handlers. + */ +const enteTempDirPath = async () => { + const result = path.join(app.getPath("temp"), "ente"); + await fs.mkdir(result, { recursive: true }); + return result; +}; + +/** Generate a random string suitable for being used as a file name prefix */ +const randomPrefix = () => { + const ch = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + const randomChar = () => ensure(ch[Math.floor(Math.random() * ch.length)]); + + return Array(10).fill("").map(randomChar).join(""); +}; + +/** + * Return the path to a temporary file with the given {@link extension}. + * + * The function returns the path to a file in the system temp directory (in an + * Ente specific folder therein) with a random prefix and an (optional) + * {@link extension}. + * + * It ensures that there is no existing item with the same name already. + * + * Use {@link deleteTempFile} to remove this file when you're done. + */ +export const makeTempFilePath = async (extension?: string) => { + const tempDir = await enteTempDirPath(); + const suffix = extension ? "." + extension : ""; + let result: string; + do { + result = path.join(tempDir, randomPrefix() + suffix); + } while (existsSync(result)); + return result; +}; + +/** + * Delete a temporary file at the given path if it exists. 
+ * + * This is the same as a vanilla {@link fs.rm}, except it first checks that the + * given path is within the Ente specific directory in the system temp + * directory. This acts as an additional safety check. + * + * @param tempFilePath The path to the temporary file to delete. This path + * should've been previously created using {@link makeTempFilePath}. + */ +export const deleteTempFile = async (tempFilePath: string) => { + const tempDir = await enteTempDirPath(); + if (!tempFilePath.startsWith(tempDir)) + throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`); + await fs.rm(tempFilePath, { force: true }); +}; + +/** The result of {@link makeFileForDataOrPathOrZipItem}. */ +interface FileForDataOrPathOrZipItem { + /** + * The path to the file (possibly temporary). + */ + path: string; + /** + * `true` if {@link path} points to a temporary file which should be deleted + * once we are done processing. + */ + isFileTemporary: boolean; + /** + * A function that can be called to actually write the contents of the + * source `Uint8Array | string | ZipItem` into the file at {@link path}. + * + * It will do nothing in the case when the source is already a path. In the + * other two cases this function will write the data or zip item into the + * file at {@link path}. + */ + writeToTemporaryFile: () => Promise; +} + +/** + * Return the path to a file, a boolean indicating if this is a temporary path + * that needs to be deleted after processing, and a function to write the given + * {@link dataOrPathOrZipItem} into that temporary file if needed. + * + * @param dataOrPathOrZipItem The contents of the file, or the path to an + * existing file, or a (path to a zip file, name of an entry within that zip + * file) tuple. 
+ */ +export const makeFileForDataOrPathOrZipItem = async ( + dataOrPathOrZipItem: Uint8Array | string | ZipItem, +): Promise => { + let path: string; + let isFileTemporary: boolean; + let writeToTemporaryFile = async () => { + /* no-op */ + }; + + if (typeof dataOrPathOrZipItem == "string") { + path = dataOrPathOrZipItem; + isFileTemporary = false; + } else { + path = await makeTempFilePath(); + isFileTemporary = true; + if (dataOrPathOrZipItem instanceof Uint8Array) { + writeToTemporaryFile = () => + fs.writeFile(path, dataOrPathOrZipItem); + } else { + writeToTemporaryFile = async () => { + const [zipPath, entryName] = dataOrPathOrZipItem; + const zip = new StreamZip.async({ file: zipPath }); + await zip.extract(entryName, path); + await zip.close(); + }; + } + } + + return { path, isFileTemporary, writeToTemporaryFile }; +}; diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts index 2749fa50d2..f9147e2883 100644 --- a/desktop/src/preload.ts +++ b/desktop/src/preload.ts @@ -37,36 +37,37 @@ * - [main] desktop/src/main/ipc.ts contains impl */ -import { contextBridge, ipcRenderer } from "electron/renderer"; +import { contextBridge, ipcRenderer, webUtils } from "electron/renderer"; // While we can't import other code, we can import types since they're just -// needed when compiling and will not be needed / looked around for at runtime. +// needed when compiling and will not be needed or looked around for at runtime. 
import type { - AppUpdateInfo, - ElectronFile, - FILE_PATH_TYPE, + AppUpdate, + CollectionMapping, FolderWatch, + PendingUploads, + ZipItem, } from "./types/ipc"; // - General -const appVersion = (): Promise => ipcRenderer.invoke("appVersion"); +const appVersion = () => ipcRenderer.invoke("appVersion"); const logToDisk = (message: string): void => ipcRenderer.send("logToDisk", message); -const openDirectory = (dirPath: string): Promise => +const openDirectory = (dirPath: string) => ipcRenderer.invoke("openDirectory", dirPath); -const openLogDirectory = (): Promise => - ipcRenderer.invoke("openLogDirectory"); +const openLogDirectory = () => ipcRenderer.invoke("openLogDirectory"); + +const selectDirectory = () => ipcRenderer.invoke("selectDirectory"); const clearStores = () => ipcRenderer.send("clearStores"); -const encryptionKey = (): Promise => - ipcRenderer.invoke("encryptionKey"); +const encryptionKey = () => ipcRenderer.invoke("encryptionKey"); -const saveEncryptionKey = (encryptionKey: string): Promise => +const saveEncryptionKey = (encryptionKey: string) => ipcRenderer.invoke("saveEncryptionKey", encryptionKey); const onMainWindowFocus = (cb?: () => void) => { @@ -77,12 +78,12 @@ const onMainWindowFocus = (cb?: () => void) => { // - App update const onAppUpdateAvailable = ( - cb?: ((updateInfo: AppUpdateInfo) => void) | undefined, + cb?: ((update: AppUpdate) => void) | undefined, ) => { ipcRenderer.removeAllListeners("appUpdateAvailable"); if (cb) { - ipcRenderer.on("appUpdateAvailable", (_, updateInfo: AppUpdateInfo) => - cb(updateInfo), + ipcRenderer.on("appUpdateAvailable", (_, update: AppUpdate) => + cb(update), ); } }; @@ -98,223 +99,228 @@ const skipAppUpdate = (version: string) => { // - FS -const fsExists = (path: string): Promise => - ipcRenderer.invoke("fsExists", path); +const fsExists = (path: string) => ipcRenderer.invoke("fsExists", path); -const fsMkdirIfNeeded = (dirPath: string): Promise => +const fsMkdirIfNeeded = (dirPath: string) => 
ipcRenderer.invoke("fsMkdirIfNeeded", dirPath); -const fsRename = (oldPath: string, newPath: string): Promise => +const fsRename = (oldPath: string, newPath: string) => ipcRenderer.invoke("fsRename", oldPath, newPath); -const fsRmdir = (path: string): Promise => - ipcRenderer.invoke("fsRmdir", path); +const fsRmdir = (path: string) => ipcRenderer.invoke("fsRmdir", path); -const fsRm = (path: string): Promise => ipcRenderer.invoke("fsRm", path); +const fsRm = (path: string) => ipcRenderer.invoke("fsRm", path); -const fsReadTextFile = (path: string): Promise => +const fsReadTextFile = (path: string) => ipcRenderer.invoke("fsReadTextFile", path); -const fsWriteFile = (path: string, contents: string): Promise => +const fsWriteFile = (path: string, contents: string) => ipcRenderer.invoke("fsWriteFile", path, contents); -// - AUDIT below this +const fsIsDir = (dirPath: string) => ipcRenderer.invoke("fsIsDir", dirPath); // - Conversion -const convertToJPEG = ( - fileData: Uint8Array, - filename: string, -): Promise => - ipcRenderer.invoke("convertToJPEG", fileData, filename); +const convertToJPEG = (imageData: Uint8Array) => + ipcRenderer.invoke("convertToJPEG", imageData); const generateImageThumbnail = ( - inputFile: File | ElectronFile, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, -): Promise => +) => ipcRenderer.invoke( "generateImageThumbnail", - inputFile, + dataOrPathOrZipItem, maxDimension, maxSize, ); -const runFFmpegCmd = ( - cmd: string[], - inputFile: File | ElectronFile, - outputFileName: string, - dontTimeout?: boolean, -): Promise => +const ffmpegExec = ( + command: string[], + dataOrPathOrZipItem: Uint8Array | string | ZipItem, + outputFileExtension: string, + timeoutMS: number, +) => ipcRenderer.invoke( - "runFFmpegCmd", - cmd, - inputFile, - outputFileName, - dontTimeout, + "ffmpegExec", + command, + dataOrPathOrZipItem, + outputFileExtension, + timeoutMS, ); // - ML -const clipImageEmbedding = 
(jpegImageData: Uint8Array): Promise => +const clipImageEmbedding = (jpegImageData: Uint8Array) => ipcRenderer.invoke("clipImageEmbedding", jpegImageData); -const clipTextEmbedding = (text: string): Promise => - ipcRenderer.invoke("clipTextEmbedding", text); +const clipTextEmbeddingIfAvailable = (text: string) => + ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text); -const detectFaces = (input: Float32Array): Promise => +const detectFaces = (input: Float32Array) => ipcRenderer.invoke("detectFaces", input); -const faceEmbedding = (input: Float32Array): Promise => +const faceEmbedding = (input: Float32Array) => ipcRenderer.invoke("faceEmbedding", input); -// - File selection - -// TODO: Deprecated - use dialogs on the renderer process itself - -const selectDirectory = (): Promise => - ipcRenderer.invoke("selectDirectory"); - -const showUploadFilesDialog = (): Promise => - ipcRenderer.invoke("showUploadFilesDialog"); - -const showUploadDirsDialog = (): Promise => - ipcRenderer.invoke("showUploadDirsDialog"); - -const showUploadZipDialog = (): Promise<{ - zipPaths: string[]; - files: ElectronFile[]; -}> => ipcRenderer.invoke("showUploadZipDialog"); +const legacyFaceCrop = (faceID: string) => + ipcRenderer.invoke("legacyFaceCrop", faceID); // - Watch -const registerWatcherFunctions = ( - addFile: (file: ElectronFile) => Promise, - removeFile: (path: string) => Promise, - removeFolder: (folderPath: string) => Promise, -) => { - ipcRenderer.removeAllListeners("watch-add"); - ipcRenderer.removeAllListeners("watch-unlink"); - ipcRenderer.removeAllListeners("watch-unlink-dir"); - ipcRenderer.on("watch-add", (_, file: ElectronFile) => addFile(file)); - ipcRenderer.on("watch-unlink", (_, filePath: string) => - removeFile(filePath), - ); - ipcRenderer.on("watch-unlink-dir", (_, folderPath: string) => - removeFolder(folderPath), +const watchGet = () => ipcRenderer.invoke("watchGet"); + +const watchAdd = (folderPath: string, collectionMapping: CollectionMapping) => + 
ipcRenderer.invoke("watchAdd", folderPath, collectionMapping); + +const watchRemove = (folderPath: string) => + ipcRenderer.invoke("watchRemove", folderPath); + +const watchUpdateSyncedFiles = ( + syncedFiles: FolderWatch["syncedFiles"], + folderPath: string, +) => ipcRenderer.invoke("watchUpdateSyncedFiles", syncedFiles, folderPath); + +const watchUpdateIgnoredFiles = ( + ignoredFiles: FolderWatch["ignoredFiles"], + folderPath: string, +) => ipcRenderer.invoke("watchUpdateIgnoredFiles", ignoredFiles, folderPath); + +const watchOnAddFile = (f: (path: string, watch: FolderWatch) => void) => { + ipcRenderer.removeAllListeners("watchAddFile"); + ipcRenderer.on("watchAddFile", (_, path: string, watch: FolderWatch) => + f(path, watch), ); }; -const addWatchMapping = ( - collectionName: string, - folderPath: string, - uploadStrategy: number, -): Promise => - ipcRenderer.invoke( - "addWatchMapping", - collectionName, - folderPath, - uploadStrategy, +const watchOnRemoveFile = (f: (path: string, watch: FolderWatch) => void) => { + ipcRenderer.removeAllListeners("watchRemoveFile"); + ipcRenderer.on("watchRemoveFile", (_, path: string, watch: FolderWatch) => + f(path, watch), ); +}; -const removeWatchMapping = (folderPath: string): Promise => - ipcRenderer.invoke("removeWatchMapping", folderPath); +const watchOnRemoveDir = (f: (path: string, watch: FolderWatch) => void) => { + ipcRenderer.removeAllListeners("watchRemoveDir"); + ipcRenderer.on("watchRemoveDir", (_, path: string, watch: FolderWatch) => + f(path, watch), + ); +}; -const getWatchMappings = (): Promise => - ipcRenderer.invoke("getWatchMappings"); +const watchFindFiles = (folderPath: string) => + ipcRenderer.invoke("watchFindFiles", folderPath); -const updateWatchMappingSyncedFiles = ( - folderPath: string, - files: FolderWatch["syncedFiles"], -): Promise => - ipcRenderer.invoke("updateWatchMappingSyncedFiles", folderPath, files); - -const updateWatchMappingIgnoredFiles = ( - folderPath: string, - files: 
FolderWatch["ignoredFiles"], -): Promise => - ipcRenderer.invoke("updateWatchMappingIgnoredFiles", folderPath, files); - -// - FS Legacy - -const isFolder = (dirPath: string): Promise => - ipcRenderer.invoke("isFolder", dirPath); +const watchReset = async () => { + ipcRenderer.removeAllListeners("watchAddFile"); + ipcRenderer.removeAllListeners("watchRemoveFile"); + ipcRenderer.removeAllListeners("watchRemoveDir"); + await ipcRenderer.invoke("watchReset"); +}; // - Upload -const getPendingUploads = (): Promise<{ - files: ElectronFile[]; - collectionName: string; - type: string; -}> => ipcRenderer.invoke("getPendingUploads"); +const pathForFile = (file: File) => { + const path = webUtils.getPathForFile(file); + // The path that we get back from `webUtils.getPathForFile` on Windows uses + // "/" as the path separator. Convert them to POSIX separators. + // + // Note that we do not have access to the path or the os module in the + // preload script, thus this hand rolled transformation. -const setToUploadFiles = ( - type: FILE_PATH_TYPE, - filePaths: string[], -): Promise => ipcRenderer.invoke("setToUploadFiles", type, filePaths); + // However that makes TypeScript fidgety since we it cannot find navigator, + // as we haven't included "lib": ["dom"] in our tsconfig to avoid making DOM + // APIs available to our main Node.js code. We could create a separate + // tsconfig just for the preload script, but for now let's go with a cast. + // + // @ts-expect-error navigator is not defined. + const platform = (navigator as { platform: string }).platform; + return platform.toLowerCase().includes("win") + ? 
path.split("\\").join("/") + : path; +}; -const getElectronFilesFromGoogleZip = ( - filePath: string, -): Promise => - ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath); +const listZipItems = (zipPath: string) => + ipcRenderer.invoke("listZipItems", zipPath); -const setToUploadCollection = (collectionName: string): Promise => - ipcRenderer.invoke("setToUploadCollection", collectionName); +const pathOrZipItemSize = (pathOrZipItem: string | ZipItem) => + ipcRenderer.invoke("pathOrZipItemSize", pathOrZipItem); -const getDirFiles = (dirPath: string): Promise => - ipcRenderer.invoke("getDirFiles", dirPath); +const pendingUploads = () => ipcRenderer.invoke("pendingUploads"); -// These objects exposed here will become available to the JS code in our -// renderer (the web/ code) as `window.ElectronAPIs.*` -// -// There are a few related concepts at play here, and it might be worthwhile to -// read their (excellent) documentation to get an understanding; -//` -// - ContextIsolation: -// https://www.electronjs.org/docs/latest/tutorial/context-isolation -// -// - IPC https://www.electronjs.org/docs/latest/tutorial/ipc -// -// [Note: Transferring large amount of data over IPC] -// -// Electron's IPC implementation uses the HTML standard Structured Clone -// Algorithm to serialize objects passed between processes. -// https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization -// -// In particular, ArrayBuffer is eligible for structured cloning. -// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm -// -// Also, ArrayBuffer is "transferable", which means it is a zero-copy operation -// operation when it happens across threads. -// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects -// -// In our case though, we're not dealing with threads but separate processes. 
So -// the ArrayBuffer will be copied: -// > "parameters, errors and return values are **copied** when they're sent over -// the bridge". -// https://www.electronjs.org/docs/latest/api/context-bridge#methods -// -// The copy itself is relatively fast, but the problem with transfering large -// amounts of data is potentially running out of memory during the copy. For an -// alternative, see [Note: IPC streams]. +const setPendingUploads = (pendingUploads: PendingUploads) => + ipcRenderer.invoke("setPendingUploads", pendingUploads); + +const markUploadedFiles = (paths: PendingUploads["filePaths"]) => + ipcRenderer.invoke("markUploadedFiles", paths); + +const markUploadedZipItems = (items: PendingUploads["zipItems"]) => + ipcRenderer.invoke("markUploadedZipItems", items); + +const clearPendingUploads = () => ipcRenderer.invoke("clearPendingUploads"); + +/** + * These objects exposed here will become available to the JS code in our + * renderer (the web/ code) as `window.ElectronAPIs.*` + * + * There are a few related concepts at play here, and it might be worthwhile to + * read their (excellent) documentation to get an understanding; + *` + * - ContextIsolation: + * https://www.electronjs.org/docs/latest/tutorial/context-isolation + * + * - IPC https://www.electronjs.org/docs/latest/tutorial/ipc + * + * --- + * + * [Note: Transferring large amount of data over IPC] + * + * Electron's IPC implementation uses the HTML standard Structured Clone + * Algorithm to serialize objects passed between processes. + * https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization + * + * In particular, ArrayBuffer is eligible for structured cloning. + * https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm + * + * Also, ArrayBuffer is "transferable", which means it is a zero-copy operation + * operation when it happens across threads. 
+ * https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects + * + * In our case though, we're not dealing with threads but separate processes. So + * the ArrayBuffer will be copied: + * + * > "parameters, errors and return values are **copied** when they're sent over + * > the bridge". + * > + * > https://www.electronjs.org/docs/latest/api/context-bridge#methods + * + * The copy itself is relatively fast, but the problem with transfering large + * amounts of data is potentially running out of memory during the copy. + * + * For an alternative, see [Note: IPC streams]. + */ contextBridge.exposeInMainWorld("electron", { // - General + appVersion, logToDisk, openDirectory, openLogDirectory, + selectDirectory, clearStores, encryptionKey, saveEncryptionKey, onMainWindowFocus, // - App update + onAppUpdateAvailable, updateAndRestart, updateOnNextRestart, skipAppUpdate, // - FS + fs: { exists: fsExists, rename: fsRename, @@ -323,42 +329,46 @@ contextBridge.exposeInMainWorld("electron", { rm: fsRm, readTextFile: fsReadTextFile, writeFile: fsWriteFile, + isDir: fsIsDir, }, // - Conversion + convertToJPEG, generateImageThumbnail, - runFFmpegCmd, + ffmpegExec, // - ML + clipImageEmbedding, - clipTextEmbedding, + clipTextEmbeddingIfAvailable, detectFaces, faceEmbedding, - - // - File selection - selectDirectory, - showUploadFilesDialog, - showUploadDirsDialog, - showUploadZipDialog, + legacyFaceCrop, // - Watch - registerWatcherFunctions, - addWatchMapping, - removeWatchMapping, - getWatchMappings, - updateWatchMappingSyncedFiles, - updateWatchMappingIgnoredFiles, - // - FS legacy - // TODO: Move these into fs + document + rename if needed - isFolder, + watch: { + get: watchGet, + add: watchAdd, + remove: watchRemove, + updateSyncedFiles: watchUpdateSyncedFiles, + updateIgnoredFiles: watchUpdateIgnoredFiles, + onAddFile: watchOnAddFile, + onRemoveFile: watchOnRemoveFile, + onRemoveDir: watchOnRemoveDir, + findFiles: watchFindFiles, + reset: 
watchReset, + }, // - Upload - getPendingUploads, - setToUploadFiles, - getElectronFilesFromGoogleZip, - setToUploadCollection, - getDirFiles, + pathForFile, + listZipItems, + pathOrZipItemSize, + pendingUploads, + setPendingUploads, + markUploadedFiles, + markUploadedZipItems, + clearPendingUploads, }); diff --git a/desktop/src/thirdparty/clip-bpe-ts/mod.ts b/desktop/src/thirdparty/clip-bpe-ts/mod.ts index 6cdf246f75..4d00eef0e4 100644 --- a/desktop/src/thirdparty/clip-bpe-ts/mod.ts +++ b/desktop/src/thirdparty/clip-bpe-ts/mod.ts @@ -1,3 +1,5 @@ +/* eslint-disable */ + import * as htmlEntities from "html-entities"; import bpeVocabData from "./bpe_simple_vocab_16e6"; // import ftfy from "https://deno.land/x/ftfy_pyodide@v0.1.1/mod.js"; @@ -410,6 +412,7 @@ export default class { newWord.push(first + second); i += 2; } else { + // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code" newWord.push(word[i]); i += 1; } @@ -434,6 +437,7 @@ export default class { .map((b) => this.byteEncoder[b.charCodeAt(0) as number]) .join(""); bpeTokens.push( + // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code" ...this.bpe(token) .split(" ") .map((bpeToken: string) => this.encoder[bpeToken]), @@ -458,6 +462,7 @@ export default class { .join(""); text = [...text] .map((c) => this.byteDecoder[c]) + // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code" .map((v) => String.fromCharCode(v)) .join("") .replace(/<\/w>/g, " "); diff --git a/desktop/src/types/ipc.ts b/desktop/src/types/ipc.ts index 3dae605a8e..f4985bfc71 100644 --- a/desktop/src/types/ipc.ts +++ b/desktop/src/types/ipc.ts @@ -5,78 +5,40 @@ * See [Note: types.ts <-> preload.ts <-> ipc.ts] */ +export interface AppUpdate { + autoUpdatable: boolean; + version: string; +} + export interface FolderWatch { - rootFolderName: string; - uploadStrategy: number; + collectionMapping: CollectionMapping; folderPath: string; 
syncedFiles: FolderWatchSyncedFile[]; ignoredFiles: string[]; } +export type CollectionMapping = "root" | "parent"; + export interface FolderWatchSyncedFile { path: string; uploadedFileID: number; collectionID: number; } +export type ZipItem = [zipPath: string, entryName: string]; + +export interface PendingUploads { + collectionName: string | undefined; + filePaths: string[]; + zipItems: ZipItem[]; +} + /** - * Errors that have special semantics on the web side. + * See: [Note: Custom errors across Electron/Renderer boundary] * - * [Note: Custom errors across Electron/Renderer boundary] - * - * We need to use the `message` field to disambiguate between errors thrown by - * the main process when invoked from the renderer process. This is because: - * - * > Errors thrown throw `handle` in the main process are not transparent as - * > they are serialized and only the `message` property from the original error - * > is provided to the renderer process. - * > - * > - https://www.electronjs.org/docs/latest/tutorial/ipc - * > - * > Ref: https://github.com/electron/electron/issues/24427 + * Note: this is not a type, and cannot be used in preload.js; it is only meant + * for use in the main process code. */ -export const CustomErrors = { - WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED: - "Windows native image processing is not supported", - UNSUPPORTED_PLATFORM: (platform: string, arch: string) => - `Unsupported platform - ${platform} ${arch}`, - MODEL_DOWNLOAD_PENDING: - "Model download pending, skipping clip search request", +export const CustomErrorMessage = { + NotAvailable: "This feature in not available on the current OS/arch", }; - -/** - * Deprecated - Use File + webUtils.getPathForFile instead - * - * Electron used to augment the standard web - * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) object with an - * additional `path` property. This is now deprecated, and will be removed in a - * future release. 
- * https://www.electronjs.org/docs/latest/api/file-object - * - * The alternative to the `path` property is to use `webUtils.getPathForFile` - * https://www.electronjs.org/docs/latest/api/web-utils - */ -export interface ElectronFile { - name: string; - path: string; - size: number; - lastModified: number; - stream: () => Promise>; - blob: () => Promise; - arrayBuffer: () => Promise; -} - -export interface WatchStoreType { - mappings: FolderWatch[]; -} - -export enum FILE_PATH_TYPE { - /* eslint-disable no-unused-vars */ - FILES = "files", - ZIPS = "zips", -} - -export interface AppUpdateInfo { - autoUpdatable: boolean; - version: string; -} diff --git a/desktop/src/types/main.ts b/desktop/src/types/main.ts deleted file mode 100644 index 546749c54b..0000000000 --- a/desktop/src/types/main.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { FILE_PATH_TYPE } from "./ipc"; - -export interface AutoLauncherClient { - isEnabled: () => Promise; - toggleAutoLaunch: () => Promise; - wasAutoLaunched: () => Promise; -} - -export interface UploadStoreType { - filePaths: string[]; - zipPaths: string[]; - collectionName: string; -} - -export interface KeysStoreType { - AnonymizeUserID: { - id: string; - }; -} - -/* eslint-disable no-unused-vars */ -export const FILE_PATH_KEYS: { - [k in FILE_PATH_TYPE]: keyof UploadStoreType; -} = { - [FILE_PATH_TYPE.ZIPS]: "zipPaths", - [FILE_PATH_TYPE.FILES]: "filePaths", -}; - -export interface SafeStorageStoreType { - encryptionKey: string; -} diff --git a/desktop/tsconfig.json b/desktop/tsconfig.json index 700ea3fa00..7806cd93a7 100644 --- a/desktop/tsconfig.json +++ b/desktop/tsconfig.json @@ -3,71 +3,34 @@ into JavaScript that'll then be loaded and run by the main (node) process of our Electron app. */ + /* + * Recommended target, lib and other settings for code running in the + * version of Node.js bundled with Electron. + * + * Currently, with Electron 30, this is Node.js 20.11.1. 
+ * https://www.electronjs.org/blog/electron-30-0 + */ + "extends": "@tsconfig/node20/tsconfig.json", + /* TSConfig docs: https://aka.ms/tsconfig.json */ - "compilerOptions": { - /* Recommended target, lib and other settings for code running in the - version of Node.js bundled with Electron. - - Currently, with Electron 29, this is Node.js 20.9 - https://www.electronjs.org/blog/electron-29-0 - - Note that we cannot do - - "extends": "@tsconfig/node20/tsconfig.json", - - because that sets "lib": ["es2023"]. However (and I don't fully - understand what's going on here), that breaks our compilation since - tsc can then not find type definitions of things like ReadableStream. - - Adding "dom" to "lib" (e.g. `"lib": ["es2023", "dom"]`) fixes the - issue, but that doesn't sound correct - the main Electron process - isn't running in a browser context. - - It is possible that we're using some of the types incorrectly. For - now, we just omit the "lib" definition and rely on the defaults for - the "target" we've chosen. This is also what the current - electron-forge starter does: - - yarn create electron-app electron-forge-starter -- --template=webpack-typescript - - Enhancement: Can revisit this later. 
- - Refs: - - https://github.com/electron/electron/issues/27092 - - https://github.com/electron/electron/issues/16146 - */ - - "target": "es2022", - "module": "node16", - - /* Enable various workarounds to play better with CJS libraries */ - "esModuleInterop": true, - /* Speed things up by not type checking `node_modules` */ - "skipLibCheck": true, - /* Emit the generated JS into `app/` */ "outDir": "app", - /* Temporary overrides to get things to compile with the older config */ - "strict": false, - "noImplicitAny": true - - /* Below is the state we want */ - /* Enable these one by one */ - // "strict": true, - /* Require the `type` modifier when importing types */ - // "verbatimModuleSyntax": true + /* We want this, but it causes "ESM syntax is not allowed in a CommonJS + module when 'verbatimModuleSyntax' is enabled" currently */ + /* "verbatimModuleSyntax": true, */ + "strict": true, /* Stricter than strict */ - // "noImplicitReturns": true, - // "noUnusedParameters": true, - // "noUnusedLocals": true, - // "noFallthroughCasesInSwitch": true, + "noImplicitReturns": true, + "noUnusedParameters": true, + "noUnusedLocals": true, + "noFallthroughCasesInSwitch": true, /* e.g. 
makes array indexing returns undefined */ - // "noUncheckedIndexedAccess": true, - // "exactOptionalPropertyTypes": true, + "noUncheckedIndexedAccess": true, + "exactOptionalPropertyTypes": true }, /* Transpile all `.ts` files in `src/` */ "include": ["src/**/*.ts"] diff --git a/desktop/yarn.lock b/desktop/yarn.lock index a5b86f1eb3..833b623a7e 100644 --- a/desktop/yarn.lock +++ b/desktop/yarn.lock @@ -7,11 +7,6 @@ resolved "https://registry.yarnpkg.com/7zip-bin/-/7zip-bin-5.2.0.tgz#7a03314684dd6572b7dfa89e68ce31d60286854d" integrity sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A== -"@aashutoshrathi/word-wrap@^1.2.3": - version "1.2.6" - resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" - integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== - "@babel/code-frame@^7.0.0": version "7.24.2" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.2.tgz#718b4b19841809a58b29b68cde80bc5e1aa6d9ae" @@ -20,25 +15,25 @@ "@babel/highlight" "^7.24.2" picocolors "^1.0.0" -"@babel/helper-validator-identifier@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" - integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== +"@babel/helper-validator-identifier@^7.24.5": + version "7.24.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz#918b1a7fa23056603506370089bd990d8720db62" + integrity sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA== "@babel/highlight@^7.24.2": - version "7.24.2" - resolved 
"https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.2.tgz#3f539503efc83d3c59080a10e6634306e0370d26" - integrity sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA== + version "7.24.5" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.5.tgz#bc0613f98e1dd0720e99b2a9ee3760194a704b6e" + integrity sha512-8lLmua6AVh/8SLJRRVD6V8p73Hir9w5mJrhE+IPpILG31KKlI9iz5zmBYKcWPS59qSfgP9RaSBQSHHE81WKuEw== dependencies: - "@babel/helper-validator-identifier" "^7.22.20" + "@babel/helper-validator-identifier" "^7.24.5" chalk "^2.4.2" js-tokens "^4.0.0" picocolors "^1.0.0" "@babel/runtime@^7.21.0": - version "7.24.0" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.0.tgz#584c450063ffda59697021430cb47101b085951e" - integrity sha512-Chk32uHMg6TnQdvw2e9IlqPpFX/6NLuK0Ys2PqLb7/gL5uFn9mXvK715FGLlOLQrcO4qIkNHkvPGktzzXexsFw== + version "7.24.5" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c" + integrity sha512-Nms86NXrsaeU9vbBJKni6gXiEXZ4CVpYVzEjDH9Sb8vmZ3UljyA1GSOJl/6LGPO8EHLuSF9H+IxNXHPX8QHJ4g== dependencies: regenerator-runtime "^0.14.0" @@ -60,10 +55,10 @@ ajv "^6.12.0" ajv-keywords "^3.4.1" -"@electron/asar@^3.2.1": - version "3.2.9" - resolved "https://registry.yarnpkg.com/@electron/asar/-/asar-3.2.9.tgz#7b3a1fd677b485629f334dd80ced8c85353ba7e7" - integrity sha512-Vu2P3X2gcZ3MY9W7yH72X9+AMXwUQZEJBrsPIbX0JsdllLtoh62/Q8Wg370/DawIEVKOyfD6KtTLo645ezqxUA== +"@electron/asar@^3.2.7": + version "3.2.10" + resolved "https://registry.yarnpkg.com/@electron/asar/-/asar-3.2.10.tgz#615cf346b734b23cafa4e0603551010bd0e50aa8" + integrity sha512-mvBSwIBUeiRscrCeJE1LwctAriBj65eUDm0Pc11iE5gRwzkmsdbS7FnZ1XUWjpSeQWL1L5g12Fc/SchPM9DUOw== dependencies: commander "^5.0.0" glob "^7.1.6" @@ -84,10 +79,10 @@ optionalDependencies: global-agent "^3.0.0" -"@electron/notarize@2.2.1": - version "2.2.1" - resolved 
"https://registry.yarnpkg.com/@electron/notarize/-/notarize-2.2.1.tgz#d0aa6bc43cba830c41bfd840b85dbe0e273f59fe" - integrity sha512-aL+bFMIkpR0cmmj5Zgy0LMKEpgy43/hw5zadEArgmAMWWlKc5buwFvFT9G/o/YJkvXAJm5q3iuTuLaiaXW39sg== +"@electron/notarize@2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@electron/notarize/-/notarize-2.3.0.tgz#9659cf6c92563dd69411afce229f52f9f7196227" + integrity sha512-EiTBU0BwE7HZZjAG1fFWQaiQpCuPrVGn7jPss1kUjD6eTTdXXd29RiZqEqkgN7xqt/Pgn4g3I7Saqovanrfj3w== dependencies: debug "^4.1.1" fs-extra "^9.0.1" @@ -105,18 +100,38 @@ minimist "^1.2.6" plist "^3.0.5" -"@electron/universal@1.5.1": - version "1.5.1" - resolved "https://registry.yarnpkg.com/@electron/universal/-/universal-1.5.1.tgz#f338bc5bcefef88573cf0ab1d5920fac10d06ee5" - integrity sha512-kbgXxyEauPJiQQUNG2VgUeyfQNFk6hBF11ISN2PNI6agUgPl55pv4eQmaqHzTAzchBvqZ2tQuRVaPStGf0mxGw== +"@electron/rebuild@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@electron/rebuild/-/rebuild-3.6.0.tgz#60211375a5f8541a71eb07dd2f97354ad0b2b96f" + integrity sha512-zF4x3QupRU3uNGaP5X1wjpmcjfw1H87kyqZ00Tc3HvriV+4gmOGuvQjGNkrJuXdsApssdNyVwLsy+TaeTGGcVw== dependencies: - "@electron/asar" "^3.2.1" - "@malept/cross-spawn-promise" "^1.1.0" + "@malept/cross-spawn-promise" "^2.0.0" + chalk "^4.0.0" + debug "^4.1.1" + detect-libc "^2.0.1" + fs-extra "^10.0.0" + got "^11.7.0" + node-abi "^3.45.0" + node-api-version "^0.2.0" + node-gyp "^9.0.0" + ora "^5.1.0" + read-binary-file-arch "^1.0.6" + semver "^7.3.5" + tar "^6.0.5" + yargs "^17.0.1" + +"@electron/universal@2.0.1": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@electron/universal/-/universal-2.0.1.tgz#7b070ab355e02957388f3dbd68e2c3cd08c448ae" + integrity sha512-fKpv9kg4SPmt+hY7SVBnIYULE9QJl8L3sCfcBsnqbJwwBwAeTLokJ9TRt9y7bK0JAzIW2y78TVVjvnQEms/yyA== + dependencies: + "@electron/asar" "^3.2.7" + "@malept/cross-spawn-promise" "^2.0.0" debug "^4.3.1" - dir-compare "^3.0.0" - fs-extra "^9.0.1" - minimatch "^3.0.4" - plist 
"^3.0.4" + dir-compare "^4.2.0" + fs-extra "^11.1.1" + minimatch "^9.0.3" + plist "^3.1.0" "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" @@ -150,6 +165,11 @@ resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.0.tgz#a5417ae8427873f1dd08b70b3574b453e67b5f7f" integrity sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g== +"@gar/promisify@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" + integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw== + "@humanwhocodes/config-array@^0.11.14": version "0.11.14" resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.14.tgz#d78e481a039f7566ecc9660b4ea7fe6b1fec442b" @@ -165,26 +185,21 @@ integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== "@humanwhocodes/object-schema@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz#d9fae00a2d5cb40f92cfe64b47ad749fbc38f917" - integrity sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw== + version "2.0.3" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz#4a2868d75d6d6963e423bcf90b7fd1be343409d3" + integrity sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA== -"@isaacs/cliui@^8.0.2": - version "8.0.2" - resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" - integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== +"@isaacs/fs-minipass@^4.0.0": + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz#2d59ae3ab4b38fb4270bfa23d30f8e2e86c7fe32" + integrity sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w== dependencies: - string-width "^5.1.2" - string-width-cjs "npm:string-width@^4.2.0" - strip-ansi "^7.0.1" - strip-ansi-cjs "npm:strip-ansi@^6.0.1" - wrap-ansi "^8.1.0" - wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + minipass "^7.0.4" -"@malept/cross-spawn-promise@^1.1.0": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@malept/cross-spawn-promise/-/cross-spawn-promise-1.1.1.tgz#504af200af6b98e198bce768bc1730c6936ae01d" - integrity sha512-RTBGWL5FWQcg9orDOCcp4LvItNzUPcyEU9bwaeJX0rJ1IQxzucC48Y0/sQLp/g6t99IQgAlGIaesJS+gTn7tVQ== +"@malept/cross-spawn-promise@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@malept/cross-spawn-promise/-/cross-spawn-promise-2.0.0.tgz#d0772de1aa680a0bfb9ba2f32b4c828c7857cb9d" + integrity sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg== dependencies: cross-spawn "^7.0.1" @@ -219,6 +234,22 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@npmcli/fs@^2.1.0": + version "2.1.2" + resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-2.1.2.tgz#a9e2541a4a2fec2e69c29b35e6060973da79b865" + integrity sha512-yOJKRvohFOaLqipNtwYB9WugyZKhC/DZC4VYPmpaCzDBrA8YpK3qHZ8/HGscMnE4GqbkLNuVcCnxkeQEdGt6LQ== + dependencies: + "@gar/promisify" "^1.1.3" + semver "^7.3.5" + +"@npmcli/move-file@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-2.0.1.tgz#26f6bdc379d87f75e55739bab89db525b06100e4" + integrity sha512-mJd2Z5TjYWq/ttPLLGqArdtnC74J6bOzg4rMDnN+p1xTacZ2yPRCk2y0oSWQtygLR9YVQXgOcONrwtnk3JupxQ== + dependencies: + mkdirp "^1.0.4" + rimraf "^3.0.2" + "@pkgjs/parseargs@^0.11.0": version "0.11.0" resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" @@ -246,6 
+277,11 @@ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== +"@tsconfig/node20@^20.1.4": + version "20.1.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node20/-/node20-20.1.4.tgz#3457d42eddf12d3bde3976186ab0cd22b85df928" + integrity sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg== + "@types/auto-launch@^5.0": version "5.0.5" resolved "https://registry.yarnpkg.com/@types/auto-launch/-/auto-launch-5.0.5.tgz#439ed36aaaea501e2e2cfbddd8a20c366c34863b" @@ -352,15 +388,15 @@ "@types/node" "*" "@typescript-eslint/eslint-plugin@^7": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.6.0.tgz#1f5df5cda490a0bcb6fbdd3382e19f1241024242" - integrity sha512-gKmTNwZnblUdnTIJu3e9kmeRRzV2j1a/LUO27KNNAnIC5zjy1aSvXSRp4rVNlmAoHlQ7HzX42NbKpcSr4jF80A== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz#c78e309fe967cb4de05b85cdc876fb95f8e01b6f" + integrity sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg== dependencies: "@eslint-community/regexpp" "^4.10.0" - "@typescript-eslint/scope-manager" "7.6.0" - "@typescript-eslint/type-utils" "7.6.0" - "@typescript-eslint/utils" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/type-utils" "7.8.0" + "@typescript-eslint/utils" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" graphemer "^1.4.0" ignore "^5.3.1" @@ -369,46 +405,46 @@ ts-api-utils "^1.3.0" "@typescript-eslint/parser@^7": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.6.0.tgz#0aca5de3045d68b36e88903d15addaf13d040a95" - integrity 
sha512-usPMPHcwX3ZoPWnBnhhorc14NJw9J4HpSXQX4urF2TPKG0au0XhJoZyX62fmvdHONUkmyUe74Hzm1//XA+BoYg== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.8.0.tgz#1e1db30c8ab832caffee5f37e677dbcb9357ddc8" + integrity sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ== dependencies: - "@typescript-eslint/scope-manager" "7.6.0" - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/typescript-estree" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" -"@typescript-eslint/scope-manager@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.6.0.tgz#1e9972f654210bd7500b31feadb61a233f5b5e9d" - integrity sha512-ngttyfExA5PsHSx0rdFgnADMYQi+Zkeiv4/ZxGYUWd0nLs63Ha0ksmp8VMxAIC0wtCFxMos7Lt3PszJssG/E6w== +"@typescript-eslint/scope-manager@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz#bb19096d11ec6b87fb6640d921df19b813e02047" + integrity sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g== dependencies: - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" -"@typescript-eslint/type-utils@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.6.0.tgz#644f75075f379827d25fe0713e252ccd4e4a428c" - integrity sha512-NxAfqAPNLG6LTmy7uZgpK8KcuiS2NZD/HlThPXQRGwz6u7MDBWRVliEEl1Gj6U7++kVJTpehkhZzCJLMK66Scw== +"@typescript-eslint/type-utils@7.8.0": + version "7.8.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz#9de166f182a6e4d1c5da76e94880e91831e3e26f" + integrity sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A== dependencies: - "@typescript-eslint/typescript-estree" "7.6.0" - "@typescript-eslint/utils" "7.6.0" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/utils" "7.8.0" debug "^4.3.4" ts-api-utils "^1.3.0" -"@typescript-eslint/types@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.6.0.tgz#53dba7c30c87e5f10a731054266dd905f1fbae38" - integrity sha512-h02rYQn8J+MureCvHVVzhl69/GAfQGPQZmOMjG1KfCl7o3HtMSlPaPUAPu6lLctXI5ySRGIYk94clD/AUMCUgQ== +"@typescript-eslint/types@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.8.0.tgz#1fd2577b3ad883b769546e2d1ef379f929a7091d" + integrity sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw== -"@typescript-eslint/typescript-estree@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.6.0.tgz#112a3775563799fd3f011890ac8322f80830ac17" - integrity sha512-+7Y/GP9VuYibecrCQWSKgl3GvUM5cILRttpWtnAu8GNL9j11e4tbuGZmZjJ8ejnKYyBRb2ddGQ3rEFCq3QjMJw== +"@typescript-eslint/typescript-estree@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz#b028a9226860b66e623c1ee55cc2464b95d2987c" + integrity sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg== dependencies: - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" @@ -416,25 +452,25 @@ semver "^7.6.0" ts-api-utils "^1.3.0" -"@typescript-eslint/utils@7.6.0": - 
version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.6.0.tgz#e400d782280b6f724c8a1204269d984c79202282" - integrity sha512-x54gaSsRRI+Nwz59TXpCsr6harB98qjXYzsRxGqvA5Ue3kQH+FxS7FYU81g/omn22ML2pZJkisy6Q+ElK8pBCA== +"@typescript-eslint/utils@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.8.0.tgz#57a79f9c0c0740ead2f622e444cfaeeb9fd047cd" + integrity sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ== dependencies: "@eslint-community/eslint-utils" "^4.4.0" "@types/json-schema" "^7.0.15" "@types/semver" "^7.5.8" - "@typescript-eslint/scope-manager" "7.6.0" - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/typescript-estree" "7.6.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" semver "^7.6.0" -"@typescript-eslint/visitor-keys@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.6.0.tgz#d1ce13145844379021e1f9bd102c1d78946f4e76" - integrity sha512-4eLB7t+LlNUmXzfOu1VAIAdkjbu5xNSerURS9X/S5TUKWFRpXRQZbmtPqgKmYx8bj3J0irtQXSiWAOY82v+cgw== +"@typescript-eslint/visitor-keys@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz#7285aab991da8bee411a42edbd5db760d22fdd91" + integrity sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA== dependencies: - "@typescript-eslint/types" "7.6.0" + "@typescript-eslint/types" "7.8.0" eslint-visitor-keys "^3.4.3" "@ungap/structured-clone@^1.2.0": @@ -447,6 +483,11 @@ resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.8.10.tgz#a1337ca426aa61cef9fe15b5b28e340a72f6fa99" integrity sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw== +abbrev@^1.0.0: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" + integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== + acorn-jsx@^5.3.2: version "5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" @@ -457,13 +498,28 @@ acorn@^8.9.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== -agent-base@6: +agent-base@6, agent-base@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== dependencies: debug "4" +agentkeepalive@^4.2.1: + version "4.5.0" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923" + integrity sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew== + dependencies: + humanize-ms "^1.2.1" + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + ajv-formats@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-2.1.1.tgz#6e669400659eb74973bbf2e33327180a0996b520" @@ -487,25 +543,20 @@ ajv@^6.10.0, ajv@^6.12.0, ajv@^6.12.4: uri-js "^4.2.2" ajv@^8.0.0, ajv@^8.6.3: - version "8.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" - integrity 
sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== + version "8.13.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.13.0.tgz#a3939eaec9fb80d217ddf0c3376948c023f28c91" + integrity sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA== dependencies: - fast-deep-equal "^3.1.1" + fast-deep-equal "^3.1.3" json-schema-traverse "^1.0.0" require-from-string "^2.0.2" - uri-js "^4.2.2" + uri-js "^4.4.1" ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-regex@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== - ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -520,11 +571,6 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" -ansi-styles@^6.1.0: - version "6.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" - integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== - any-shell-escape@^0.1: version "0.1.1" resolved "https://registry.yarnpkg.com/any-shell-escape/-/any-shell-escape-0.1.1.tgz#d55ab972244c71a9a5e1ab0879f30bf110806959" @@ -543,25 +589,26 @@ app-builder-bin@4.0.0: resolved "https://registry.yarnpkg.com/app-builder-bin/-/app-builder-bin-4.0.0.tgz#1df8e654bd1395e4a319d82545c98667d7eed2f0" integrity sha512-xwdG0FJPQMe0M0UA4Tz0zEB8rBJTRA5a476ZawAqiBkMv16GRK5xpXThOjMaEOFnZ6zabejjG4J3da0SXG63KA== -app-builder-lib@24.13.3: - 
version "24.13.3" - resolved "https://registry.yarnpkg.com/app-builder-lib/-/app-builder-lib-24.13.3.tgz#36e47b65fecb8780bb73bff0fee4e0480c28274b" - integrity sha512-FAzX6IBit2POXYGnTCT8YHFO/lr5AapAII6zzhQO3Rw4cEDOgK+t1xhLc5tNcKlicTHlo9zxIwnYCX9X2DLkig== +app-builder-lib@25.0.0-alpha.6: + version "25.0.0-alpha.6" + resolved "https://registry.yarnpkg.com/app-builder-lib/-/app-builder-lib-25.0.0-alpha.6.tgz#3edb49843b249a1dd52b32a80f9787677bc5a32b" + integrity sha512-kXveR7MFTJXBwb2xB2geKWeWP+YGcJ3IzWRgTEV96zwyo4IxzE5xRXcndSQQglmlzw/VoM5Mx322E9ErYbMCVg== dependencies: "@develar/schema-utils" "~2.6.5" - "@electron/notarize" "2.2.1" + "@electron/notarize" "2.3.0" "@electron/osx-sign" "1.0.5" - "@electron/universal" "1.5.1" + "@electron/rebuild" "3.6.0" + "@electron/universal" "2.0.1" "@malept/flatpak-bundler" "^0.4.0" "@types/fs-extra" "9.0.13" async-exit-hook "^2.0.1" bluebird-lst "^1.0.9" - builder-util "24.13.1" - builder-util-runtime "9.2.4" + builder-util "25.0.0-alpha.6" + builder-util-runtime "9.2.5-alpha.2" chromium-pickle-js "^0.2.0" debug "^4.3.4" ejs "^3.1.8" - electron-publish "24.13.1" + electron-publish "25.0.0-alpha.6" form-data "^4.0.0" fs-extra "^10.1.0" hosted-git-info "^4.1.0" @@ -581,6 +628,19 @@ applescript@^1.0.0: resolved "https://registry.yarnpkg.com/applescript/-/applescript-1.0.0.tgz#bb87af568cad034a4e48c4bdaf6067a3a2701317" integrity sha512-yvtNHdWvtbYEiIazXAdp/NY+BBb65/DAseqlNiJQjOx9DynuzOYDbVLBJvuc0ve0VL9x6B3OHF6eH52y9hCBtQ== +"aproba@^1.0.3 || ^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc" + integrity sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ== + +are-we-there-yet@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz#679df222b278c64f2cdba1175cdc00b0d96164bd" + integrity 
sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg== + dependencies: + delegates "^1.0.0" + readable-stream "^3.6.0" + argparse@^1.0.7: version "1.0.10" resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" @@ -659,6 +719,15 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522" integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + bluebird-lst@^1.0.9: version "1.0.9" resolved "https://registry.yarnpkg.com/bluebird-lst/-/bluebird-lst-1.0.9.tgz#a64a0e4365658b9ab5fe875eb9dfb694189bb41c" @@ -703,17 +772,12 @@ buffer-crc32@~0.2.3: resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== -buffer-equal@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal/-/buffer-equal-1.0.1.tgz#2f7651be5b1b3f057fcd6e7ee16cf34767077d90" - integrity sha512-QoV3ptgEaQpvVwbXdSO39iqPQTCxSF7A5U99AxbHYqUdCizL/lH2Z0A2y6nbZucxMEOtNyZfG2s6gsVugGpKkg== - buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== -buffer@^5.1.0: +buffer@^5.1.0, buffer@^5.5.0: version "5.7.1" resolved 
"https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -729,24 +793,24 @@ builder-util-runtime@9.2.3: debug "^4.3.4" sax "^1.2.4" -builder-util-runtime@9.2.4: - version "9.2.4" - resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-9.2.4.tgz#13cd1763da621e53458739a1e63f7fcba673c42a" - integrity sha512-upp+biKpN/XZMLim7aguUyW8s0FUpDvOtK6sbanMFDAMBzpHDqdhgVYm6zc9HJ6nWo7u2Lxk60i2M6Jd3aiNrA== +builder-util-runtime@9.2.5-alpha.2: + version "9.2.5-alpha.2" + resolved "https://registry.yarnpkg.com/builder-util-runtime/-/builder-util-runtime-9.2.5-alpha.2.tgz#b0a1737996717d7ae0b71e5efdf0bfbd1dd2c21e" + integrity sha512-/Ln2ddejGj2HNMJ+X66mKHRcOvmRzUO/dSi8t4hSV64J7IA+DE+mqDb+zogIE2gin7p7YwcGiOkKny4nwPPPXg== dependencies: debug "^4.3.4" sax "^1.2.4" -builder-util@24.13.1: - version "24.13.1" - resolved "https://registry.yarnpkg.com/builder-util/-/builder-util-24.13.1.tgz#4a4c4f9466b016b85c6990a0ea15aa14edec6816" - integrity sha512-NhbCSIntruNDTOVI9fdXz0dihaqX2YuE1D6zZMrwiErzH4ELZHE6mdiB40wEgZNprDia+FghRFgKoAqMZRRjSA== +builder-util@25.0.0-alpha.6: + version "25.0.0-alpha.6" + resolved "https://registry.yarnpkg.com/builder-util/-/builder-util-25.0.0-alpha.6.tgz#4ac5e13d9e6c750987efc9cd9c1eace58622a30b" + integrity sha512-ghT1XcP6JI926AArlBcPHRRKYCsVWbT/ywnXPwW5X1ani2jmnddPpnwm92xRvCPWGBmeXd2diF69FV5rBJxhRQ== dependencies: "7zip-bin" "~5.2.0" "@types/debug" "^4.1.6" app-builder-bin "4.0.0" bluebird-lst "^1.0.9" - builder-util-runtime "9.2.4" + builder-util-runtime "9.2.5-alpha.2" chalk "^4.1.2" cross-spawn "^7.0.3" debug "^4.3.4" @@ -759,6 +823,30 @@ builder-util@24.13.1: stat-mode "^1.0.0" temp-file "^3.4.0" +cacache@^16.1.0: + version "16.1.3" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-16.1.3.tgz#a02b9f34ecfaf9a78c9f4bc16fceb94d5d67a38e" + integrity 
sha512-/+Emcj9DAXxX4cwlLmRI9c166RuL3w30zp4R7Joiv2cQTtTtA+jeuCAjH3ZlGnYS3tKENSrKhAzVVP9GVyzeYQ== + dependencies: + "@npmcli/fs" "^2.1.0" + "@npmcli/move-file" "^2.0.0" + chownr "^2.0.0" + fs-minipass "^2.1.0" + glob "^8.0.1" + infer-owner "^1.0.4" + lru-cache "^7.7.1" + minipass "^3.1.6" + minipass-collect "^1.0.2" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + mkdirp "^1.0.4" + p-map "^4.0.0" + promise-inflight "^1.0.1" + rimraf "^3.0.2" + ssri "^9.0.0" + tar "^6.1.11" + unique-filename "^2.0.0" + cacheable-lookup@^5.0.3: version "5.0.4" resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" @@ -796,7 +884,7 @@ chalk@^2.4.2: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -824,6 +912,11 @@ chownr@^2.0.0: resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== +chownr@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-3.0.0.tgz#9855e64ecd240a9cc4267ce8a4aa5d24a1da15e4" + integrity sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g== + chromium-pickle-js@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/chromium-pickle-js/-/chromium-pickle-js-0.2.0.tgz#04a106672c18b085ab774d983dfa3ea138f22205" @@ -834,6 +927,23 @@ ci-info@^3.2.0: resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" integrity 
sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ== +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +cli-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== + dependencies: + restore-cursor "^3.1.0" + +cli-spinners@^2.5.0: + version "2.9.2" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" + integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== + cli-truncate@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" @@ -858,6 +968,11 @@ clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== + color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" @@ -882,6 +997,11 @@ color-name@~1.1.4: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +color-support@^1.1.3: + version "1.1.3" + resolved 
"https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" + integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== + combined-stream@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" @@ -958,6 +1078,11 @@ config-file-ts@^0.2.4: glob "^10.3.10" typescript "^5.3.3" +console-control-strings@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" + integrity sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ== + core-util-is@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" @@ -993,7 +1118,7 @@ debounce-fn@^4.0.0: dependencies: mimic-fn "^3.0.0" -debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -1012,6 +1137,13 @@ deep-is@^0.1.3: resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== +defaults@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== + dependencies: + clone "^1.0.2" + defer-to-connect@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" @@ -1026,7 +1158,7 @@ define-data-property@^1.0.1: es-errors "^1.3.0" gopd "^1.0.1" -define-properties@^1.1.3: +define-properties@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== @@ -1040,11 +1172,21 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== +delegates@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" + integrity sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ== + detect-indent@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-7.0.1.tgz#cbb060a12842b9c4d333f1cac4aa4da1bb66bc25" integrity sha512-Mc7QhQ8s+cLrnUfU/Ji94vG/r8M26m8f++vyres4ZoojaRDpZ1eSIh/EpzLNwlWuvzSZ3UbDFspjFvTDXe6e/g== +detect-libc@^2.0.1: + version "2.0.3" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.3.tgz#f0cd503b40f9939b894697d19ad50895e30cf700" + integrity sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw== + detect-newline@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-4.0.1.tgz#fcefdb5713e1fb8cb2839b8b6ee22e6716ab8f23" @@ -1055,13 +1197,13 @@ detect-node@^2.0.4: resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" integrity 
sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== -dir-compare@^3.0.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/dir-compare/-/dir-compare-3.3.0.tgz#2c749f973b5c4b5d087f11edaae730db31788416" - integrity sha512-J7/et3WlGUCxjdnD3HAAzQ6nsnc0WL6DD7WcwJb7c39iH1+AWfg+9OqzJNaI6PkBwBvm1mhZNL9iY/nRiZXlPg== +dir-compare@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/dir-compare/-/dir-compare-4.2.0.tgz#d1d4999c14fbf55281071fdae4293b3b9ce86f19" + integrity sha512-2xMCmOoMrdQIPHdsTawECdNPwlVFB9zGcz3kuhmBO6U3oU+UQjsue0i8ayLKpgBcm+hcXPMVSGUN9d+pvJ6+VQ== dependencies: - buffer-equal "^1.0.0" - minimatch "^3.0.4" + minimatch "^3.0.5" + p-limit "^3.1.0 " dir-glob@^3.0.1: version "3.0.1" @@ -1070,14 +1212,14 @@ dir-glob@^3.0.1: dependencies: path-type "^4.0.0" -dmg-builder@24.13.3: - version "24.13.3" - resolved "https://registry.yarnpkg.com/dmg-builder/-/dmg-builder-24.13.3.tgz#95d5b99c587c592f90d168a616d7ec55907c7e55" - integrity sha512-rcJUkMfnJpfCboZoOOPf4L29TRtEieHNOeAbYPWPxlaBw/Z1RKrRA86dOI9rwaI4tQSc/RD82zTNHprfUHXsoQ== +dmg-builder@25.0.0-alpha.6: + version "25.0.0-alpha.6" + resolved "https://registry.yarnpkg.com/dmg-builder/-/dmg-builder-25.0.0-alpha.6.tgz#1a13008de0543c3080595534ab294cde2a5e57e8" + integrity sha512-GStVExwsuumGN6rPGJksA5bLN5n5QEQd5iQrGKyBSxuwR1+LWidFkM+anroXnANIyTwbppk2S7+808vHjT/Eyw== dependencies: - app-builder-lib "24.13.3" - builder-util "24.13.1" - builder-util-runtime "9.2.4" + app-builder-lib "25.0.0-alpha.6" + builder-util "25.0.0-alpha.6" + builder-util-runtime "9.2.5-alpha.2" fs-extra "^10.1.0" iconv-lite "^0.6.2" js-yaml "^4.1.0" @@ -1127,15 +1269,10 @@ dotenv@^9.0.2: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-9.0.2.tgz#dacc20160935a37dea6364aa1bef819fb9b6ab05" integrity sha512-I9OvvrHp4pIARv4+x9iuewrWycX6CcZtoAu1XrzPxc5UygMJXJZYmBsynku8IkrJwgypE5DGNjDPmPRhDCptUg== -eastasianwidth@^0.2.0: - version "0.2.0" - resolved 
"https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" - integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== - ejs@^3.1.8: - version "3.1.9" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.9.tgz#03c9e8777fe12686a9effcef22303ca3d8eeb361" - integrity sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== + version "3.1.10" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.10.tgz#69ab8358b14e896f80cc39e62087b88500c3ac3b" + integrity sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA== dependencies: jake "^10.8.5" @@ -1149,16 +1286,16 @@ electron-builder-notarize@^1.5: js-yaml "^3.14.0" read-pkg-up "^7.0.0" -electron-builder@^24: - version "24.13.3" - resolved "https://registry.yarnpkg.com/electron-builder/-/electron-builder-24.13.3.tgz#c506dfebd36d9a50a83ee8aa32d803d83dbe4616" - integrity sha512-yZSgVHft5dNVlo31qmJAe4BVKQfFdwpRw7sFp1iQglDRCDD6r22zfRJuZlhtB5gp9FHUxCMEoWGq10SkCnMAIg== +electron-builder@25.0.0-alpha.6: + version "25.0.0-alpha.6" + resolved "https://registry.yarnpkg.com/electron-builder/-/electron-builder-25.0.0-alpha.6.tgz#a72f96f7029539ac28f92ce5c83f872ba3b6e7c1" + integrity sha512-qXzzdID2W9hhx3TXddwXv1C5HsqjF6bKnftUtywAB/gtDwu+neifPZvnXDNHI4ZamRrZpJJH59esfkqkc2KNSQ== dependencies: - app-builder-lib "24.13.3" - builder-util "24.13.1" - builder-util-runtime "9.2.4" + app-builder-lib "25.0.0-alpha.6" + builder-util "25.0.0-alpha.6" + builder-util-runtime "9.2.5-alpha.2" chalk "^4.1.2" - dmg-builder "24.13.3" + dmg-builder "25.0.0-alpha.6" fs-extra "^10.1.0" is-ci "^3.0.0" lazy-val "^1.0.5" @@ -1179,14 +1316,14 @@ electron-notarize@^1.1.1: debug "^4.1.1" fs-extra "^9.0.1" -electron-publish@24.13.1: - version "24.13.1" - resolved "https://registry.yarnpkg.com/electron-publish/-/electron-publish-24.13.1.tgz#57289b2f7af18737dc2ad134668cdd4a1b574a0c" - 
integrity sha512-2ZgdEqJ8e9D17Hwp5LEq5mLQPjqU3lv/IALvgp+4W8VeNhryfGhYEQC/PgDPMrnWUp+l60Ou5SJLsu+k4mhQ8A== +electron-publish@25.0.0-alpha.6: + version "25.0.0-alpha.6" + resolved "https://registry.yarnpkg.com/electron-publish/-/electron-publish-25.0.0-alpha.6.tgz#8af3cb6e2435c00b8c71de43c330483808df5924" + integrity sha512-Hin+6j+jiXBc5g6Wlv9JB5Xu7MADBHxZAndt4WE7luCw7b3+OJdQeDvD/uYiCLpiG8cc2NLxu4MyBSVu86MrJA== dependencies: "@types/fs-extra" "^9.0.11" - builder-util "24.13.1" - builder-util-runtime "9.2.4" + builder-util "25.0.0-alpha.6" + builder-util-runtime "9.2.5-alpha.2" chalk "^4.1.2" fs-extra "^10.1.0" lazy-val "^1.0.5" @@ -1214,10 +1351,10 @@ electron-updater@^6.1: semver "^7.3.8" tiny-typed-emitter "^2.1.0" -electron@^29: - version "29.3.0" - resolved "https://registry.yarnpkg.com/electron/-/electron-29.3.0.tgz#8e65cb08e9c0952c66d3196e1b5c811c43b8c5b0" - integrity sha512-ZxFKm0/v48GSoBuO3DdnMlCYXefEUKUHLMsKxyXY4nZGgzbBKpF/X8haZa2paNj23CLfsCKBOtfc2vsEQiOOsA== +electron@^30: + version "30.0.2" + resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733" + integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ== dependencies: "@electron/get" "^2.0.0" "@types/node" "^20.9.0" @@ -1228,10 +1365,12 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -emoji-regex@^9.2.2: - version "9.2.2" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== +encoding@^0.1.13: + version "0.1.13" + resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" + integrity 
sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== + dependencies: + iconv-lite "^0.6.2" end-of-stream@^1.1.0: version "1.4.4" @@ -1384,6 +1523,11 @@ esutils@^2.0.2: resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== +exponential-backoff@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/exponential-backoff/-/exponential-backoff-3.1.1.tgz#64ac7526fe341ab18a39016cd22c787d01e00bf6" + integrity sha512-dX7e/LHVJ6W3DE1MHWi9S1EYzDESENfLrYohG2G++ovZrYOkm4Knwa0mc1cn84xJOR4KEU0WSchhLbd0UklbHw== + extract-zip@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" @@ -1495,17 +1639,18 @@ find-up@^5.0.0: path-exists "^4.0.0" flat-cache@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + version "3.2.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.2.0.tgz#2c0c2d5040c99b1632771a9d105725c0115363ee" + integrity sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw== dependencies: - flatted "^3.1.0" + flatted "^3.2.9" + keyv "^4.5.3" rimraf "^3.0.2" -flatted@^3.1.0: - version "3.2.6" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.6.tgz#022e9218c637f9f3fc9c35ab9c9193f05add60b2" - integrity sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ== +flatted@^3.2.9: + version "3.3.1" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.1.tgz#21db470729a6734d4997002f439cb308987f567a" + integrity 
sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw== foreground-child@^3.1.0: version "3.1.1" @@ -1533,6 +1678,15 @@ fs-extra@^10.0.0, fs-extra@^10.1.0: jsonfile "^6.0.1" universalify "^2.0.0" +fs-extra@^11.1.1: + version "11.2.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b" + integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs-extra@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0" @@ -1552,7 +1706,7 @@ fs-extra@^9.0.0, fs-extra@^9.0.1: jsonfile "^6.0.1" universalify "^2.0.0" -fs-minipass@^2.0.0: +fs-minipass@^2.0.0, fs-minipass@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== @@ -1574,6 +1728,20 @@ function-bind@^1.1.2: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== +gauge@^4.0.3: + version "4.0.4" + resolved "https://registry.yarnpkg.com/gauge/-/gauge-4.0.4.tgz#52ff0652f2bbf607a989793d53b751bef2328dce" + integrity sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg== + dependencies: + aproba "^1.0.3 || ^2.0.0" + color-support "^1.1.3" + console-control-strings "^1.1.0" + has-unicode "^2.0.1" + signal-exit "^3.0.7" + string-width "^4.2.3" + strip-ansi "^6.0.1" + wide-align "^1.1.5" + get-caller-file@^2.0.5: version "2.0.5" resolved 
"https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" @@ -1621,18 +1789,18 @@ glob-parent@^6.0.2: dependencies: is-glob "^4.0.3" -glob@^10.3.10: - version "10.3.10" - resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b" - integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== +glob@^10.3.10, glob@^10.3.7: + version "10.3.12" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.12.tgz#3a65c363c2e9998d220338e88a5f6ac97302960b" + integrity sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg== dependencies: foreground-child "^3.1.0" - jackspeak "^2.3.5" + jackspeak "^2.3.6" minimatch "^9.0.1" - minipass "^5.0.0 || ^6.0.2 || ^7.0.0" - path-scurry "^1.10.1" + minipass "^7.0.4" + path-scurry "^1.10.2" -glob@^7.0.0, glob@^7.1.3, glob@^7.1.6: +glob@^7.0.0, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -1644,6 +1812,17 @@ glob@^7.0.0, glob@^7.1.3, glob@^7.1.6: once "^1.3.0" path-is-absolute "^1.0.0" +glob@^8.0.1: + version "8.1.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-8.1.0.tgz#d388f656593ef708ee3e34640fdfb99a9fd1c33e" + integrity sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^5.0.1" + once "^1.3.0" + global-agent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/global-agent/-/global-agent-3.0.0.tgz#ae7cd31bd3583b93c5a16437a1afe27cc33a1ab6" @@ -1664,11 +1843,12 @@ globals@^13.19.0: type-fest "^0.20.2" globalthis@^1.0.1: - version "1.0.3" - resolved 
"https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" - integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== + version "1.0.4" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.4.tgz#7430ed3a975d97bfb59bcce41f5cabbafa651236" + integrity sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ== dependencies: - define-properties "^1.1.3" + define-properties "^1.2.1" + gopd "^1.0.1" globby@^11.1.0: version "11.1.0" @@ -1700,7 +1880,7 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -got@^11.8.5: +got@^11.7.0, got@^11.8.5: version "11.8.6" resolved "https://registry.yarnpkg.com/got/-/got-11.8.6.tgz#276e827ead8772eddbcfc97170590b841823233a" integrity sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g== @@ -1717,7 +1897,7 @@ got@^11.8.5: p-cancelable "^2.0.0" responselike "^2.0.0" -graceful-fs@^4.1.6, graceful-fs@^4.2.0: +graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.6: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -1754,6 +1934,11 @@ has-symbols@^1.0.3: resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== +has-unicode@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" + integrity sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ== + hasown@^2.0.0: version "2.0.2" resolved 
"https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" @@ -1778,7 +1963,7 @@ html-entities@^2.5: resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.5.2.tgz#201a3cf95d3a15be7099521620d19dfb4f65359f" integrity sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA== -http-cache-semantics@^4.0.0: +http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== @@ -1815,6 +2000,13 @@ https-proxy-agent@^5.0.0, https-proxy-agent@^5.0.1: agent-base "6" debug "4" +humanize-ms@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" + integrity sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ== + dependencies: + ms "^2.0.0" + iconv-corefoundation@^1.1.7: version "1.1.7" resolved "https://registry.yarnpkg.com/iconv-corefoundation/-/iconv-corefoundation-1.1.7.tgz#31065e6ab2c9272154c8b0821151e2c88f1b002a" @@ -1853,6 +2045,16 @@ imurmurhash@^0.1.4: resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +infer-owner@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" + 
integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== + inflight@^1.0.4: version "1.0.6" resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" @@ -1861,7 +2063,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@^2.0.3: +inherits@2, inherits@^2.0.3, inherits@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -1871,6 +2073,14 @@ interpret@^1.0.0: resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== +ip-address@^9.0.5: + version "9.0.5" + resolved "https://registry.yarnpkg.com/ip-address/-/ip-address-9.0.5.tgz#117a960819b08780c3bd1f14ef3c1cc1d3f3ea5a" + integrity sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g== + dependencies: + jsbn "1.1.0" + sprintf-js "^1.1.3" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -1914,6 +2124,16 @@ is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: dependencies: is-extglob "^2.1.1" +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + +is-lambda@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5" + integrity 
sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ== + is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" @@ -1934,6 +2154,11 @@ is-plain-obj@^4.1.0: resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-4.1.0.tgz#d65025edec3657ce032fd7db63c97883eaed71f0" integrity sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg== +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + isbinaryfile@^4.0.8: version "4.0.10" resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-4.0.10.tgz#0c5b5e30c2557a2f06febd37b7322946aaee42b3" @@ -1949,12 +2174,12 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== -jackspeak@^2.3.5: - version "2.3.6" - resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8" - integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== +jackspeak@2.1.1, jackspeak@^2.3.6: + version "2.1.1" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.1.1.tgz#2a42db4cfbb7e55433c28b6f75d8b796af9669cd" + integrity sha512-juf9stUEwUaILepraGOWIJTLwg48bUnBmRqd2ln2Os1sW987zeoj/hzhbvRB95oMuS2ZTpjULmdwHNX4rzZIZw== dependencies: - "@isaacs/cliui" "^8.0.2" + cliui "^8.0.1" optionalDependencies: "@pkgjs/parseargs" "^0.11.0" @@ -1993,6 +2218,11 @@ js-yaml@^4.1.0: dependencies: argparse "^2.0.1" +jsbn@1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/jsbn/-/jsbn-1.1.0.tgz#b01307cb29b618a1ed26ec79e911f803c4da0040" + integrity sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A== + json-buffer@3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" @@ -2049,7 +2279,7 @@ jsonfile@^6.0.1: optionalDependencies: graceful-fs "^4.1.6" -keyv@^4.0.0: +keyv@^4.0.0, keyv@^4.5.3: version "4.5.4" resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== @@ -2116,11 +2346,24 @@ lodash@^4.17.15, lodash@^4.17.21: resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== +log-symbols@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + dependencies: + chalk "^4.1.0" + is-unicode-supported "^0.1.0" + lowercase-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lru-cache@^10.2.0: + version "10.2.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.2.tgz#48206bc114c1252940c41b25b41af5b545aca878" + integrity sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ== + lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ 
-2128,10 +2371,32 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" -"lru-cache@^9.1.1 || ^10.0.0": - version "10.2.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.0.tgz#0bd445ca57363465900f4d1f9bd8db343a4d95c3" - integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q== +lru-cache@^7.7.1: + version "7.18.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.18.3.tgz#f793896e0fd0e954a59dfdd82f0773808df6aa89" + integrity sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA== + +make-fetch-happen@^10.0.3: + version "10.2.1" + resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-10.2.1.tgz#f5e3835c5e9817b617f2770870d9492d28678164" + integrity sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w== + dependencies: + agentkeepalive "^4.2.1" + cacache "^16.1.0" + http-cache-semantics "^4.1.0" + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.0" + is-lambda "^1.0.1" + lru-cache "^7.7.1" + minipass "^3.1.6" + minipass-collect "^1.0.2" + minipass-fetch "^2.0.3" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + negotiator "^0.6.3" + promise-retry "^2.0.1" + socks-proxy-agent "^7.0.0" + ssri "^9.0.0" matcher@^3.0.0: version "3.0.0" @@ -2204,14 +2469,7 @@ minimatch@^5.0.1, minimatch@^5.1.1: dependencies: brace-expansion "^2.0.1" -minimatch@^9.0.1: - version "9.0.3" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" - integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== - dependencies: - brace-expansion "^2.0.1" - -minimatch@^9.0.4: +minimatch@^9.0.1, minimatch@^9.0.3, minimatch@^9.0.4: version "9.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" integrity 
sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== @@ -2223,7 +2481,46 @@ minimist@^1.2.3, minimist@^1.2.6: resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== -minipass@^3.0.0: +minipass-collect@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617" + integrity sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA== + dependencies: + minipass "^3.0.0" + +minipass-fetch@^2.0.3: + version "2.1.2" + resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-2.1.2.tgz#95560b50c472d81a3bc76f20ede80eaed76d8add" + integrity sha512-LT49Zi2/WMROHYoqGgdlQIZh8mLPZmOrN2NdJjMXxYe4nkN6FUyuPuOAOedNJDrx0IRGg9+4guZewtp8hE6TxA== + dependencies: + minipass "^3.1.6" + minipass-sized "^1.0.3" + minizlib "^2.1.2" + optionalDependencies: + encoding "^0.1.13" + +minipass-flush@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" + integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== + dependencies: + minipass "^3.0.0" + +minipass-pipeline@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c" + integrity sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== + dependencies: + minipass "^3.0.0" + +minipass-sized@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/minipass-sized/-/minipass-sized-1.0.3.tgz#70ee5a7c5052070afacfbc22977ea79def353b70" + integrity 
sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g== + dependencies: + minipass "^3.0.0" + +minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6: version "3.3.6" resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== @@ -2235,12 +2532,12 @@ minipass@^5.0.0: resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4: version "7.0.4" resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c" integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ== -minizlib@^2.1.1: +minizlib@^2.1.1, minizlib@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== @@ -2248,6 +2545,14 @@ minizlib@^2.1.1: minipass "^3.0.0" yallist "^4.0.0" +minizlib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-3.0.1.tgz#46d5329d1eb3c83924eff1d3b858ca0a31581012" + integrity sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg== + dependencies: + minipass "^7.0.4" + rimraf "^5.0.5" + mkdirp@^0.5.1: version "0.5.6" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" @@ -2255,36 +2560,89 @@ mkdirp@^0.5.1: dependencies: minimist "^1.2.6" -mkdirp@^1.0.3: +mkdirp@^1.0.3, mkdirp@^1.0.4: version "1.0.4" resolved 
"https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + ms@2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== +ms@^2.0.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + natural-compare@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== +negotiator@^0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + next-electron-server@^1: version "1.0.0" resolved "https://registry.yarnpkg.com/next-electron-server/-/next-electron-server-1.0.0.tgz#03e133ed64a5ef671b6c6409f908c4901b1828cb" integrity sha512-fTUaHwT0Jry2fbdUSIkAiIqgDAInI5BJFF4/j90/okvZCYlyx6yxpXB30KpzmOG6TN/ESwyvsFJVvS2WHT8PAA== +node-abi@^3.45.0: + version "3.62.0" + resolved "https://registry.yarnpkg.com/node-abi/-/node-abi-3.62.0.tgz#017958ed120f89a3a14a7253da810f5d724e3f36" + integrity 
sha512-CPMcGa+y33xuL1E0TcNIu4YyaZCxnnvkVaEXrsosR3FxN+fV8xvb7Mzpb7IgKler10qeMkE6+Dp8qJhpzdq35g== + dependencies: + semver "^7.3.5" + node-addon-api@^1.6.3: version "1.7.2" resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-1.7.2.tgz#3df30b95720b53c24e59948b49532b662444f54d" integrity sha512-ibPK3iA+vaY1eEjESkQkM0BbCqFOaZMiXRTtdB0u7b4djtY6JnsjvPdUHVMg6xQt3B8fpTTWHI9A+ADjM9frzg== +node-api-version@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/node-api-version/-/node-api-version-0.2.0.tgz#5177441da2b1046a4d4547ab9e0972eed7b1ac1d" + integrity sha512-fthTTsi8CxaBXMaBAD7ST2uylwvsnYxh2PfaScwpMhos6KlSFajXQPcM4ogNE1q2s3Lbz9GCGqeIHC+C6OZnKg== + dependencies: + semver "^7.3.5" + +node-gyp@^9.0.0: + version "9.4.1" + resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-9.4.1.tgz#8a1023e0d6766ecb52764cc3a734b36ff275e185" + integrity sha512-OQkWKbjQKbGkMf/xqI1jjy3oCTgMKJac58G2+bjZb3fza6gW2YrCSdMQYaoTb70crvE//Gngr4f0AgVHmqHvBQ== + dependencies: + env-paths "^2.2.0" + exponential-backoff "^3.1.1" + glob "^7.1.4" + graceful-fs "^4.2.6" + make-fetch-happen "^10.0.3" + nopt "^6.0.0" + npmlog "^6.0.0" + rimraf "^3.0.2" + semver "^7.3.5" + tar "^6.1.2" + which "^2.0.2" + node-stream-zip@^1.15: version "1.15.0" resolved "https://registry.yarnpkg.com/node-stream-zip/-/node-stream-zip-1.15.0.tgz#158adb88ed8004c6c49a396b50a6a5de3bca33ea" integrity sha512-LN4fydt9TqhZhThkZIVQnF9cwjU3qmUH9h78Mx/K7d3VvfRqqwthLwJEUOEL0QPZ0XQmNN7be5Ggit5+4dq3Bw== +nopt@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/nopt/-/nopt-6.0.0.tgz#245801d8ebf409c6df22ab9d95b65e1309cdb16d" + integrity sha512-ZwLpbTgdhuZUnZzjd7nb1ZV+4DoiC6/sfiVKok72ym/4Tlf+DFdlHYmT2JPmcNNWV6Pi3SDf1kT+A4r9RTuT9g== + dependencies: + abbrev "^1.0.0" + normalize-package-data@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" @@ -2305,6 +2663,16 @@ normalize-url@^6.0.1: 
resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== +npmlog@^6.0.0: + version "6.0.2" + resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830" + integrity sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg== + dependencies: + are-we-there-yet "^3.0.0" + console-control-strings "^1.1.0" + gauge "^4.0.3" + set-blocking "^2.0.0" + object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" @@ -2317,36 +2685,52 @@ once@^1.3.0, once@^1.3.1, once@^1.4.0: dependencies: wrappy "1" -onetime@^5.1.2: +onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" -onnxruntime-common@1.17.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.17.0.tgz#b2534ce021b1c1b19182bec39aaea8d547d2013e" - integrity sha512-Vq1remJbCPITjDMJ04DA7AklUTnbYUp4vbnm6iL7ukSt+7VErH0NGYfekRSTjxxurEtX7w41PFfnQlE6msjPJw== +onnxruntime-common@1.17.3: + version "1.17.3" + resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.17.3.tgz#aadc456477873a540ee3d611ae9cd4f3de7c43e5" + integrity sha512-IkbaDelNVX8cBfHFgsNADRIq2TlXMFWW+nG55mwWvQT4i0NZb32Jf35Pf6h9yjrnK78RjcnlNYaI37w394ovMw== onnxruntime-node@^1.17: - version "1.17.0" - resolved "https://registry.yarnpkg.com/onnxruntime-node/-/onnxruntime-node-1.17.0.tgz#38af0ba527cb44c1afb639bdcb4e549edba029a1" - integrity 
sha512-pRxdqSP3a6wtiFVkVX1V3/gsEMwBRUA9D2oYmcN3cjF+j+ILS+SIY2L7KxdWapsG6z64i5rUn8ijFZdIvbojBg== + version "1.17.3" + resolved "https://registry.yarnpkg.com/onnxruntime-node/-/onnxruntime-node-1.17.3.tgz#53b8b7ef68bf3834bba9d7be592e4c2d718d2018" + integrity sha512-NtbN1pfApTSEjVq46LrJ396aPP2Gjhy+oYZi5Bu1leDXAEvVap/BQ8CZELiLs7z0UnXy3xjJW23HiB4P3//FIw== dependencies: - onnxruntime-common "1.17.0" + onnxruntime-common "1.17.3" + tar "^7.0.1" optionator@^0.9.3: - version "0.9.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" - integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== + version "0.9.4" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" + integrity sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g== dependencies: - "@aashutoshrathi/word-wrap" "^1.2.3" deep-is "^0.1.3" fast-levenshtein "^2.0.6" levn "^0.4.1" prelude-ls "^1.2.1" type-check "^0.4.0" + word-wrap "^1.2.5" + +ora@^5.1.0: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" p-cancelable@^2.0.0: version "2.1.1" @@ -2360,7 +2744,7 @@ p-limit@^2.0.0, p-limit@^2.2.0: dependencies: p-try "^2.0.0" -p-limit@^3.0.2: +p-limit@^3.0.2, "p-limit@^3.1.0 ": version "3.1.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== @@ -2388,6 
+2772,13 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + p-try@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" @@ -2440,12 +2831,12 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-scurry@^1.10.1: - version "1.10.1" - resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.1.tgz#9ba6bf5aa8500fe9fd67df4f0d9483b2b0bfc698" - integrity sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ== +path-scurry@^1.10.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.2.tgz#8f6357eb1239d5fa1da8b9f70e9c080675458ba7" + integrity sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA== dependencies: - lru-cache "^9.1.1 || ^10.0.0" + lru-cache "^10.2.0" minipass "^5.0.0 || ^6.0.2 || ^7.0.0" path-type@^4.0.0: @@ -2475,7 +2866,7 @@ pkg-up@^3.1.0: dependencies: find-up "^3.0.0" -plist@^3.0.4, plist@^3.0.5: +plist@^3.0.4, plist@^3.0.5, plist@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/plist/-/plist-3.1.0.tgz#797a516a93e62f5bde55e0b9cc9c967f860893c9" integrity sha512-uysumyrvkUX0rX/dEVqt8gC3sTBzd4zoWfLeS29nb53imdaXVvLINYXTI2GNqzaMuvacNx4uJQ8+b3zXR0pkgQ== @@ -2512,6 +2903,11 @@ progress@^2.0.3: resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" integrity 
sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== +promise-inflight@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" + integrity sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g== + promise-retry@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/promise-retry/-/promise-retry-2.0.1.tgz#ff747a13620ab57ba688f5fc67855410c370da22" @@ -2543,6 +2939,13 @@ quick-lru@^5.1.1: resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== +read-binary-file-arch@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/read-binary-file-arch/-/read-binary-file-arch-1.0.6.tgz#959c4637daa932280a9b911b1a6766a7e44288fc" + integrity sha512-BNg9EN3DD3GsDXX7Aa8O4p92sryjkmzYYgmgTAc6CA4uGLEDzFfxOxugu21akOxpcXHiEgsYkC6nPsQvLLLmEg== + dependencies: + debug "^4.3.4" + read-config-file@6.3.2: version "6.3.2" resolved "https://registry.yarnpkg.com/read-config-file/-/read-config-file-6.3.2.tgz#556891aa6ffabced916ed57457cb192e61880411" @@ -2574,7 +2977,7 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -readable-stream@^3.0.2: +readable-stream@^3.0.2, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== @@ -2638,6 +3041,14 @@ responselike@^2.0.0: dependencies: lowercase-keys "^2.0.0" +restore-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + integrity 
sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== + dependencies: + onetime "^5.1.0" + signal-exit "^3.0.2" + retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" @@ -2655,6 +3066,13 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" +rimraf@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.5.tgz#9be65d2d6e683447d2e9013da2bf451139a61ccf" + integrity sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A== + dependencies: + glob "^10.3.7" + roarr@^2.15.3: version "2.15.4" resolved "https://registry.yarnpkg.com/roarr/-/roarr-2.15.4.tgz#f5fe795b7b838ccfe35dc608e0282b9eba2e7afd" @@ -2732,6 +3150,11 @@ serialize-error@^7.0.1: dependencies: type-fest "^0.13.1" +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== + shebang-command@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" @@ -2766,6 +3189,11 @@ shx@^0.3: minimist "^1.2.3" shelljs "^0.8.5" +signal-exit@^3.0.2, signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + signal-exit@^4.0.1: version "4.1.0" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" @@ -2797,11 +3225,28 @@ slice-ansi@^3.0.0: astral-regex "^2.0.0" is-fullwidth-code-point "^3.0.0" -smart-buffer@^4.0.2: +smart-buffer@^4.0.2, smart-buffer@^4.2.0: version "4.2.0" resolved 
"https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae" integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== +socks-proxy-agent@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz#dc069ecf34436621acb41e3efa66ca1b5fed15b6" + integrity sha512-Fgl0YPZ902wEsAyiQ+idGd1A7rSFx/ayC1CQVMw5P+EQx2V0SgpGtf6OKFhVjPflPUl9YMmEOnmfjCdMUsygww== + dependencies: + agent-base "^6.0.2" + debug "^4.3.3" + socks "^2.6.2" + +socks@^2.6.2: + version "2.8.3" + resolved "https://registry.yarnpkg.com/socks/-/socks-2.8.3.tgz#1ebd0f09c52ba95a09750afe3f3f9f724a800cb5" + integrity sha512-l5x7VUUWbjVFbafGLxPWkYsHIhEvmF85tbIeFZWc8ZPtoMyybuEhL7Jye/ooC4/d48FgOjSJXgsF/AJPYCW8Zw== + dependencies: + ip-address "^9.0.5" + smart-buffer "^4.2.0" + sort-object-keys@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/sort-object-keys/-/sort-object-keys-1.1.3.tgz#bff833fe85cab147b34742e45863453c1e190b45" @@ -2865,7 +3310,7 @@ spdx-license-ids@^3.0.0: resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz#887da8aa73218e51a1d917502d79863161a93f9c" integrity sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg== -sprintf-js@^1.1.2: +sprintf-js@^1.1.2, sprintf-js@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== @@ -2875,12 +3320,19 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== +ssri@^9.0.0: + version "9.0.1" + resolved 
"https://registry.yarnpkg.com/ssri/-/ssri-9.0.1.tgz#544d4c357a8d7b71a19700074b6883fcb4eae057" + integrity sha512-o57Wcn66jMQvfHG1FlYbWeZWW/dHZhJXjpIcTfXldXEk5nz5lStPo3mK0OJQfGR3RbZUlbISexbljkJzuEj/8Q== + dependencies: + minipass "^3.1.1" + stat-mode@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/stat-mode/-/stat-mode-1.0.0.tgz#68b55cb61ea639ff57136f36b216a291800d1465" integrity sha512-jH9EhtKIjuXZ2cWxmXS8ZP80XyC3iasQxMDV8jzhNJpfDb7VbQLVW4Wvsxz9QZvzV+G4YoSfBUVKDOyxLzi/sg== -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: +"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -2889,15 +3341,6 @@ stat-mode@^1.0.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string-width@^5.0.1, string-width@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" - integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== - dependencies: - eastasianwidth "^0.2.0" - emoji-regex "^9.2.2" - strip-ansi "^7.0.1" - string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" @@ -2905,20 +3348,13 @@ string_decoder@^1.1.1: dependencies: safe-buffer "~5.2.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: ansi-regex "^5.0.1" -strip-ansi@^7.0.1: - version "7.1.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" - integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== - dependencies: - ansi-regex "^6.0.1" - strip-json-comments@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" @@ -2965,7 +3401,7 @@ synckit@0.9.0: "@pkgr/core" "^0.1.0" tslib "^2.6.2" -tar@^6.1.12: +tar@^6.0.5, tar@^6.1.11, tar@^6.1.12, tar@^6.1.2: version "6.2.1" resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== @@ -2977,6 +3413,18 @@ tar@^6.1.12: mkdirp "^1.0.3" yallist "^4.0.0" +tar@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-7.0.1.tgz#8f6ccebcd91b69e9767a6fc4892799e8b0e606d5" + integrity sha512-IjMhdQMZFpKsHEQT3woZVxBtCQY+0wk3CVxdRkGXEgyGa0dNS/ehPvOMr2nmfC7x5Zj2N+l6yZUpmICjLGS35w== + dependencies: + "@isaacs/fs-minipass" "^4.0.0" + chownr "^3.0.0" + minipass "^5.0.0" + minizlib "^3.0.1" + mkdirp "^3.0.1" + yallist "^5.0.0" + temp-file@^3.4.0: version "3.4.0" resolved "https://registry.yarnpkg.com/temp-file/-/temp-file-3.4.0.tgz#766ea28911c683996c248ef1a20eea04d51652c7" @@ -3031,12 +3479,7 @@ ts-api-utils@^1.3.0: resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.3.0.tgz#4b490e27129f1e8e686b45cc4ab63714dc60eea1" integrity sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ== -tslib@^2.1.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity 
sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - -tslib@^2.6.2: +tslib@^2.1.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== @@ -3079,15 +3522,29 @@ typedarray@^0.0.6: integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== typescript@^5, typescript@^5.3.3: - version "5.4.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.3.tgz#5c6fedd4c87bee01cd7a528a30145521f8e0feff" - integrity sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg== + version "5.4.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611" + integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== undici-types@~5.26.4: version "5.26.5" resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +unique-filename@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-2.0.1.tgz#e785f8675a9a7589e0ac77e0b5c34d2eaeac6da2" + integrity sha512-ODWHtkkdx3IAR+veKxFV+VBkUMcN+FaqzUUd7IZzt+0zhDZFPFxhlqwPF3YQvMHx1TD0tdgYl+kuPnJ8E6ql7A== + dependencies: + unique-slug "^3.0.0" + +unique-slug@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-3.0.0.tgz#6d347cf57c8a7a7a6044aabd0e2d74e4d76dc7c9" + integrity sha512-8EyMynh679x/0gqE9fT9oilG+qEt+ibFyqjuVTsZn1+CMxH+XLlpvr2UZx4nVcCwTpx81nICr2JQFkM+HPLq4w== + dependencies: + imurmurhash "^0.1.4" + universalify@^0.1.0: version "0.1.2" resolved 
"https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" @@ -3103,7 +3560,7 @@ untildify@^3.0.2: resolved "https://registry.yarnpkg.com/untildify/-/untildify-3.0.3.tgz#1e7b42b140bcfd922b22e70ca1265bfe3634c7c9" integrity sha512-iSk/J8efr8uPT/Z4eSUywnqyrQU7DSdMfdqK4iWEaUVVmcP5JcnpRqmVMwcwcnmI1ATFNgC5V90u09tBynNFKA== -uri-js@^4.2.2: +uri-js@^4.2.2, uri-js@^4.4.1: version "4.4.1" resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== @@ -3137,19 +3594,38 @@ verror@^1.10.0: core-util-is "1.0.2" extsprintf "^1.2.0" -which@^2.0.1: +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== + dependencies: + defaults "^1.0.3" + +which@^2.0.1, which@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" +wide-align@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3" + integrity sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg== + dependencies: + string-width "^1.0.2 || 2 || 3 || 4" + winreg@1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/winreg/-/winreg-1.2.4.tgz#ba065629b7a925130e15779108cf540990e98d1b" integrity sha512-IHpzORub7kYlb8A43Iig3reOvlcBJGX9gZ0WycHhghHtA65X0LYnMRuJs+aH1abVnMJztQkvQNlltnbPi5aGIA== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: +word-wrap@^1.2.5: + version "1.2.5" + resolved 
"https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" + integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== + +wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -3158,15 +3634,6 @@ winreg@1.2.4: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" - integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== - dependencies: - ansi-styles "^6.1.0" - string-width "^5.0.1" - strip-ansi "^7.0.1" - wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" @@ -3187,12 +3654,17 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== +yallist@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533" + integrity sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw== + yargs-parser@^21.1.1: version "21.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== -yargs@^17.6.2, yargs@^17.7.2: +yargs@^17.0.1, yargs@^17.6.2, yargs@^17.7.2: version "17.7.2" resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== diff --git a/docs/docs/auth/migration-guides/authy/index.md b/docs/docs/auth/migration-guides/authy/index.md index 48ce3965d9..1a92285472 100644 --- a/docs/docs/auth/migration-guides/authy/index.md +++ b/docs/docs/auth/migration-guides/authy/index.md @@ -18,7 +18,7 @@ A guide written by Green, an ente.io lover Migrating from Authy can be tiring, as you cannot export your 2FA codes through the app, meaning that you would have to reconfigure 2FA for all of your accounts for your new 2FA authenticator. However, easier ways exist to export your codes -out of Authy. This guide will cover two of the most used methods for mograting +out of Authy. This guide will cover two of the most used methods for migrating from Authy to Ente Authenticator. > [!CAUTION] diff --git a/docs/docs/photos/faq/general.md b/docs/docs/photos/faq/general.md index c20bebbc41..b95b7c1d9d 100644 --- a/docs/docs/photos/faq/general.md +++ b/docs/docs/photos/faq/general.md @@ -110,10 +110,10 @@ or "dog playing at the beach". Check the sections within the upload progress bar for "Failed Uploads," "Ignored Uploads," and "Unsuccessful Uploads." -## How do i keep NAS and Ente photos synced? +## How do I keep NAS and Ente photos synced? Please try using our CLI to pull data into your NAS -https://github.com/ente-io/ente/tree/main/cli#readme . +https://github.com/ente-io/ente/tree/main/cli#readme. ## Is there a way to view all albums on the map view? diff --git a/docs/docs/self-hosting/guides/custom-server/index.md b/docs/docs/self-hosting/guides/custom-server/index.md index bf695af308..8e16004a12 100644 --- a/docs/docs/self-hosting/guides/custom-server/index.md +++ b/docs/docs/self-hosting/guides/custom-server/index.md @@ -25,10 +25,26 @@ configure the endpoint the app should be connecting to. 
> You can download the CLI from > [here](https://github.com/ente-io/ente/releases?q=tag%3Acli-v0) -Define a config.yaml and put it either in the same directory as CLI or path -defined in env variable `ENTE_CLI_CONFIG_PATH` +Define a config.yaml and put it either in the same directory as where you run +the CLI from ("current working directory"), or in the path defined in env +variable `ENTE_CLI_CONFIG_PATH`: ```yaml endpoint: api: "http://localhost:8080" ``` + +(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) + +## Web appps and Photos desktop app + +You will need to build the app from source and use the +`NEXT_PUBLIC_ENTE_ENDPOINT` environment variable to tell it which server to +connect to. For example: + +```sh +NEXT_PUBLIC_ENTE_ENDPOINT=http://localhost:8080 yarn dev:photos +``` + +For more details, see [hosting the web +app](https://help.ente.io/self-hosting/guides/web-app). diff --git a/mobile/README.md b/mobile/README.md index 662e714033..fc17f6b26e 100644 --- a/mobile/README.md +++ b/mobile/README.md @@ -46,7 +46,7 @@ You can alternatively install the build from PlayStore or F-Droid. ## 🧑‍💻 Building from source -1. [Install Flutter v3.19.5](https://flutter.dev/docs/get-started/install). +1. [Install Flutter v3.19.3](https://flutter.dev/docs/get-started/install). 2. 
Pull in all submodules with `git submodule update --init --recursive` diff --git a/mobile/fastlane/metadata/ios/ru/name.txt b/mobile/fastlane/metadata/ios/ru/name.txt index 44e95b9fc5..45bf4920f4 100644 --- a/mobile/fastlane/metadata/ios/ru/name.txt +++ b/mobile/fastlane/metadata/ios/ru/name.txt @@ -1 +1 @@ -ente фотографии +ente Фото diff --git a/mobile/ios/Podfile.lock b/mobile/ios/Podfile.lock index 102c04e6ae..7315149574 100644 --- a/mobile/ios/Podfile.lock +++ b/mobile/ios/Podfile.lock @@ -10,19 +10,19 @@ PODS: - Flutter - file_saver (0.0.1): - Flutter - - Firebase/CoreOnly (10.22.0): - - FirebaseCore (= 10.22.0) - - Firebase/Messaging (10.22.0): + - Firebase/CoreOnly (10.24.0): + - FirebaseCore (= 10.24.0) + - Firebase/Messaging (10.24.0): - Firebase/CoreOnly - - FirebaseMessaging (~> 10.22.0) - - firebase_core (2.29.0): - - Firebase/CoreOnly (= 10.22.0) + - FirebaseMessaging (~> 10.24.0) + - firebase_core (2.30.0): + - Firebase/CoreOnly (= 10.24.0) - Flutter - - firebase_messaging (14.7.19): - - Firebase/Messaging (= 10.22.0) + - firebase_messaging (14.8.1): + - Firebase/Messaging (= 10.24.0) - firebase_core - Flutter - - FirebaseCore (10.22.0): + - FirebaseCore (10.24.0): - FirebaseCoreInternal (~> 10.0) - GoogleUtilities/Environment (~> 7.12) - GoogleUtilities/Logger (~> 7.12) @@ -33,7 +33,7 @@ PODS: - GoogleUtilities/Environment (~> 7.8) - GoogleUtilities/UserDefaults (~> 7.8) - PromisesObjC (~> 2.1) - - FirebaseMessaging (10.22.0): + - FirebaseMessaging (10.24.0): - FirebaseCore (~> 10.0) - FirebaseInstallations (~> 10.0) - GoogleDataTransport (~> 9.3) @@ -108,8 +108,6 @@ PODS: - FlutterMacOS - integration_test (0.0.1): - Flutter - - isar_flutter_libs (1.0.0): - - Flutter - libwebp (1.3.2): - libwebp/demux (= 1.3.2) - libwebp/mux (= 1.3.2) @@ -175,7 +173,7 @@ PODS: - SDWebImage (5.19.1): - SDWebImage/Core (= 5.19.1) - SDWebImage/Core (5.19.1) - - SDWebImageWebPCoder (0.14.5): + - SDWebImageWebPCoder (0.14.6): - libwebp (~> 1.0) - SDWebImage/Core (~> 
5.17) - Sentry/HybridSDK (8.21.0): @@ -193,14 +191,14 @@ PODS: - sqflite (0.0.3): - Flutter - FlutterMacOS - - sqlite3 (3.45.1): - - sqlite3/common (= 3.45.1) - - sqlite3/common (3.45.1) - - sqlite3/fts5 (3.45.1): + - "sqlite3 (3.45.3+1)": + - "sqlite3/common (= 3.45.3+1)" + - "sqlite3/common (3.45.3+1)" + - "sqlite3/fts5 (3.45.3+1)": - sqlite3/common - - sqlite3/perf-threadsafe (3.45.1): + - "sqlite3/perf-threadsafe (3.45.3+1)": - sqlite3/common - - sqlite3/rtree (3.45.1): + - "sqlite3/rtree (3.45.3+1)": - sqlite3/common - sqlite3_flutter_libs (0.0.1): - Flutter @@ -246,7 +244,6 @@ DEPENDENCIES: - image_editor_common (from `.symlinks/plugins/image_editor_common/ios`) - in_app_purchase_storekit (from `.symlinks/plugins/in_app_purchase_storekit/darwin`) - integration_test (from `.symlinks/plugins/integration_test/ios`) - - isar_flutter_libs (from `.symlinks/plugins/isar_flutter_libs/ios`) - local_auth_darwin (from `.symlinks/plugins/local_auth_darwin/darwin`) - local_auth_ios (from `.symlinks/plugins/local_auth_ios/ios`) - media_extension (from `.symlinks/plugins/media_extension/ios`) @@ -341,8 +338,6 @@ EXTERNAL SOURCES: :path: ".symlinks/plugins/in_app_purchase_storekit/darwin" integration_test: :path: ".symlinks/plugins/integration_test/ios" - isar_flutter_libs: - :path: ".symlinks/plugins/isar_flutter_libs/ios" local_auth_darwin: :path: ".symlinks/plugins/local_auth_darwin/darwin" local_auth_ios: @@ -404,13 +399,13 @@ SPEC CHECKSUMS: connectivity_plus: ddd7f30999e1faaef5967c23d5b6d503d10434db device_info_plus: c6fb39579d0f423935b0c9ce7ee2f44b71b9fce6 file_saver: 503e386464dbe118f630e17b4c2e1190fa0cf808 - Firebase: 797fd7297b7e1be954432743a0b3f90038e45a71 - firebase_core: aaadbddb3cb2ee3792b9804f9dbb63e5f6f7b55c - firebase_messaging: e65050bf9b187511d80ea3a4de7cf5573d2c7543 - FirebaseCore: 0326ec9b05fbed8f8716cddbf0e36894a13837f7 + Firebase: 91fefd38712feb9186ea8996af6cbdef41473442 + firebase_core: 66b99b4fb4e5d7cc4e88d4c195fe986681f3466a + firebase_messaging: 
0eb0425d28b4f4af147cdd4adcaf7c0100df28ed + FirebaseCore: 11dc8a16dfb7c5e3c3f45ba0e191a33ac4f50894 FirebaseCoreInternal: bcb5acffd4ea05e12a783ecf835f2210ce3dc6af FirebaseInstallations: 8f581fca6478a50705d2bd2abd66d306e0f5736e - FirebaseMessaging: 9f71037fd9db3376a4caa54e5a3949d1027b4b6e + FirebaseMessaging: 4d52717dd820707cc4eadec5eb981b4832ec8d5d fk_user_agent: 1f47ec39291e8372b1d692b50084b0d54103c545 Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7 flutter_email_sender: 02d7443217d8c41483223627972bfdc09f74276b @@ -427,7 +422,6 @@ SPEC CHECKSUMS: image_editor_common: d6f6644ae4a6de80481e89fe6d0a8c49e30b4b43 in_app_purchase_storekit: 0e4b3c2e43ba1e1281f4f46dd71b0593ce529892 integration_test: 13825b8a9334a850581300559b8839134b124670 - isar_flutter_libs: b69f437aeab9c521821c3f376198c4371fa21073 libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009 local_auth_darwin: c7e464000a6a89e952235699e32b329457608d98 local_auth_ios: 5046a18c018dd973247a0564496c8898dbb5adf9 @@ -452,14 +446,14 @@ SPEC CHECKSUMS: receive_sharing_intent: 6837b01768e567fe8562182397bf43d63d8c6437 screen_brightness_ios: 715ca807df953bf676d339f11464e438143ee625 SDWebImage: 40b0b4053e36c660a764958bff99eed16610acbb - SDWebImageWebPCoder: c94f09adbca681822edad9e532ac752db713eabf + SDWebImageWebPCoder: e38c0a70396191361d60c092933e22c20d5b1380 Sentry: ebc12276bd17613a114ab359074096b6b3725203 sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5 SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5 shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695 sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec - sqlite3: 73b7fc691fdc43277614250e04d183740cb15078 + sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a sqlite3_flutter_libs: af0e8fe9bce48abddd1ffdbbf839db0302d72d80 Toast: 1f5ea13423a1e6674c4abdac5be53587ae481c4e uni_links: d97da20c7701486ba192624d99bffaaffcfc298a diff --git a/mobile/ios/Runner.xcodeproj/project.pbxproj 
b/mobile/ios/Runner.xcodeproj/project.pbxproj index 89c4926296..c88f9da380 100644 --- a/mobile/ios/Runner.xcodeproj/project.pbxproj +++ b/mobile/ios/Runner.xcodeproj/project.pbxproj @@ -308,7 +308,6 @@ "${BUILT_PRODUCTS_DIR}/image_editor_common/image_editor_common.framework", "${BUILT_PRODUCTS_DIR}/in_app_purchase_storekit/in_app_purchase_storekit.framework", "${BUILT_PRODUCTS_DIR}/integration_test/integration_test.framework", - "${BUILT_PRODUCTS_DIR}/isar_flutter_libs/isar_flutter_libs.framework", "${BUILT_PRODUCTS_DIR}/libwebp/libwebp.framework", "${BUILT_PRODUCTS_DIR}/local_auth_darwin/local_auth_darwin.framework", "${BUILT_PRODUCTS_DIR}/local_auth_ios/local_auth_ios.framework", @@ -390,7 +389,6 @@ "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/image_editor_common.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/in_app_purchase_storekit.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/integration_test.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/isar_flutter_libs.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/libwebp.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/local_auth_darwin.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/local_auth_ios.framework", diff --git a/mobile/ios/Runner/Info.plist b/mobile/ios/Runner/Info.plist index 037996520e..9afb874e52 100644 --- a/mobile/ios/Runner/Info.plist +++ b/mobile/ios/Runner/Info.plist @@ -105,5 +105,14 @@ UIApplicationSupportsIndirectInputEvents + NSBonjourServices + + _googlecast._tcp + _F5BCEC64._googlecast._tcp + + + NSLocalNetworkUsageDescription + ${PRODUCT_NAME} uses the local network to discover Cast-enabled devices on your WiFi + network. 
diff --git a/mobile/lib/app.dart b/mobile/lib/app.dart index bc406d7c33..e7e299ed94 100644 --- a/mobile/lib/app.dart +++ b/mobile/lib/app.dart @@ -64,6 +64,9 @@ class _EnteAppState extends State with WidgetsBindingObserver { } void _checkForWidgetLaunch() { + if (Platform.isIOS) { + return; + } hw.HomeWidget.initiallyLaunchedFromHomeWidget().then( (uri) => HomeWidgetService.instance.onLaunchFromWidget(uri, context), ); diff --git a/mobile/lib/core/constants.dart b/mobile/lib/core/constants.dart index 68becdf0be..ff784206c5 100644 --- a/mobile/lib/core/constants.dart +++ b/mobile/lib/core/constants.dart @@ -16,6 +16,7 @@ const int jan011981Time = 347155200000000; const int galleryLoadStartTime = -8000000000000000; // Wednesday, March 6, 1748 const int galleryLoadEndTime = 9223372036854775807; // 2^63 -1 const int batchSize = 1000; +const int batchSizeCopy = 100; const photoGridSizeDefault = 4; const photoGridSizeMin = 2; const photoGridSizeMax = 6; @@ -38,13 +39,6 @@ const dragSensitivity = 8; const supportEmail = 'support@ente.io'; -// Default values for various feature flags -class FFDefault { - static const bool enableStripe = true; - static const bool disableCFWorker = false; - static const bool enablePasskey = false; -} - // this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part. 
const multipartPartSize = 20 * 1024 * 1024; const multipartPartSizeInternal = 8 * 1024 * 1024; diff --git a/mobile/lib/db/embeddings_db.dart b/mobile/lib/db/embeddings_db.dart index a339d4d0d2..0eb1d3f6d7 100644 --- a/mobile/lib/db/embeddings_db.dart +++ b/mobile/lib/db/embeddings_db.dart @@ -1,79 +1,167 @@ import "dart:io"; +import "dart:typed_data"; -import "package:isar/isar.dart"; +import "package:path/path.dart"; import 'package:path_provider/path_provider.dart'; import "package:photos/core/event_bus.dart"; import "package:photos/events/embedding_updated_event.dart"; import "package:photos/models/embedding.dart"; +import "package:sqlite_async/sqlite_async.dart"; class EmbeddingsDB { - late final Isar _isar; - EmbeddingsDB._privateConstructor(); static final EmbeddingsDB instance = EmbeddingsDB._privateConstructor(); + static const databaseName = "ente.embeddings.db"; + static const tableName = "embeddings"; + static const columnFileID = "file_id"; + static const columnModel = "model"; + static const columnEmbedding = "embedding"; + static const columnUpdationTime = "updation_time"; + + static Future? 
_dbFuture; + + Future get _database async { + _dbFuture ??= _initDatabase(); + return _dbFuture!; + } + Future init() async { final dir = await getApplicationDocumentsDirectory(); - _isar = await Isar.open( - [EmbeddingSchema], - directory: dir.path, - ); - await _clearDeprecatedStore(dir); + await _clearDeprecatedStores(dir); + } + + Future _initDatabase() async { + final Directory documentsDirectory = + await getApplicationDocumentsDirectory(); + final String path = join(documentsDirectory.path, databaseName); + final migrations = SqliteMigrations() + ..add( + SqliteMigration( + 1, + (tx) async { + await tx.execute( + 'CREATE TABLE $tableName ($columnFileID INTEGER NOT NULL, $columnModel INTEGER NOT NULL, $columnEmbedding BLOB NOT NULL, $columnUpdationTime INTEGER, UNIQUE ($columnFileID, $columnModel))', + ); + }, + ), + ); + final database = SqliteDatabase(path: path); + await migrations.migrate(database); + return database; } Future clearTable() async { - await _isar.writeTxn(() => _isar.clear()); + final db = await _database; + await db.execute('DELETE * FROM $tableName'); } Future> getAll(Model model) async { - return _isar.embeddings.filter().modelEqualTo(model).findAll(); + final db = await _database; + final results = await db.getAll('SELECT * FROM $tableName'); + return _convertToEmbeddings(results); } - Future put(Embedding embedding) { - return _isar.writeTxn(() async { - await _isar.embeddings.putByIndex(Embedding.index, embedding); - Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + Future put(Embedding embedding) async { + final db = await _database; + await db.execute( + 'INSERT OR REPLACE INTO $tableName ($columnFileID, $columnModel, $columnEmbedding, $columnUpdationTime) VALUES (?, ?, ?, ?)', + _getRowFromEmbedding(embedding), + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } - Future putMany(List embeddings) { - return _isar.writeTxn(() async { - await _isar.embeddings.putAllByIndex(Embedding.index, embeddings); - 
Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + Future putMany(List embeddings) async { + final db = await _database; + final inputs = embeddings.map((e) => _getRowFromEmbedding(e)).toList(); + await db.executeBatch( + 'INSERT OR REPLACE INTO $tableName ($columnFileID, $columnModel, $columnEmbedding, $columnUpdationTime) values(?, ?, ?, ?)', + inputs, + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } Future> getUnsyncedEmbeddings() async { - return await _isar.embeddings.filter().updationTimeEqualTo(null).findAll(); + final db = await _database; + final results = await db.getAll( + 'SELECT * FROM $tableName WHERE $columnUpdationTime IS NULL', + ); + return _convertToEmbeddings(results); } Future deleteEmbeddings(List fileIDs) async { - await _isar.writeTxn(() async { - final embeddings = []; - for (final fileID in fileIDs) { - embeddings.addAll( - await _isar.embeddings.filter().fileIDEqualTo(fileID).findAll(), - ); - } - await _isar.embeddings.deleteAll(embeddings.map((e) => e.id).toList()); - Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + final db = await _database; + await db.execute( + 'DELETE FROM $tableName WHERE $columnFileID IN (${fileIDs.join(", ")})', + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } Future deleteAllForModel(Model model) async { - await _isar.writeTxn(() async { - final embeddings = - await _isar.embeddings.filter().modelEqualTo(model).findAll(); - await _isar.embeddings.deleteAll(embeddings.map((e) => e.id).toList()); - Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + final db = await _database; + await db.execute( + 'DELETE FROM $tableName WHERE $columnModel = ?', + [modelToInt(model)!], + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } - Future _clearDeprecatedStore(Directory dir) async { - final deprecatedStore = Directory(dir.path + "/object-box-store"); - if (await deprecatedStore.exists()) { - await deprecatedStore.delete(recursive: true); + List _convertToEmbeddings(List> results) { + final List embeddings = 
[]; + for (final result in results) { + embeddings.add(_getEmbeddingFromRow(result)); + } + return embeddings; + } + + Embedding _getEmbeddingFromRow(Map row) { + final fileID = row[columnFileID]; + final model = intToModel(row[columnModel])!; + final bytes = row[columnEmbedding] as Uint8List; + final list = Float32List.view(bytes.buffer); + return Embedding(fileID: fileID, model: model, embedding: list); + } + + List _getRowFromEmbedding(Embedding embedding) { + return [ + embedding.fileID, + modelToInt(embedding.model)!, + Float32List.fromList(embedding.embedding).buffer.asUint8List(), + embedding.updationTime, + ]; + } + + Future _clearDeprecatedStores(Directory dir) async { + final deprecatedObjectBox = Directory(dir.path + "/object-box-store"); + if (await deprecatedObjectBox.exists()) { + await deprecatedObjectBox.delete(recursive: true); + } + final deprecatedIsar = File(dir.path + "/default.isar"); + if (await deprecatedIsar.exists()) { + await deprecatedIsar.delete(); + } + } + + int? modelToInt(Model model) { + switch (model) { + case Model.onnxClip: + return 1; + case Model.ggmlClip: + return 2; + default: + return null; + } + } + + Model? intToModel(int model) { + switch (model) { + case 1: + return Model.onnxClip; + case 2: + return Model.ggmlClip; + default: + return null; } } } diff --git a/mobile/lib/db/files_db.dart b/mobile/lib/db/files_db.dart index 202faaaad9..7022100b73 100644 --- a/mobile/lib/db/files_db.dart +++ b/mobile/lib/db/files_db.dart @@ -16,7 +16,6 @@ import "package:photos/services/filter/db_filters.dart"; import 'package:photos/utils/file_uploader_util.dart'; import 'package:sqflite/sqflite.dart'; import 'package:sqflite_migration/sqflite_migration.dart'; -import 'package:sqlite3/sqlite3.dart' as sqlite3; import 'package:sqlite_async/sqlite_async.dart' as sqlite_async; class FilesDB { @@ -103,20 +102,15 @@ class FilesDB { // only have a single app-wide reference to the database static Future? _dbFuture; - static Future? 
_ffiDBFuture; static Future? _sqliteAsyncDBFuture; + @Deprecated("Use sqliteAsyncDB instead (sqlite_async)") Future get database async { // lazily instantiate the db the first time it is accessed _dbFuture ??= _initDatabase(); return _dbFuture!; } - Future get ffiDB async { - _ffiDBFuture ??= _initFFIDatabase(); - return _ffiDBFuture!; - } - Future get sqliteAsyncDB async { _sqliteAsyncDBFuture ??= _initSqliteAsyncDatabase(); return _sqliteAsyncDBFuture!; @@ -131,14 +125,6 @@ class FilesDB { return await openDatabaseWithMigration(path, dbConfig); } - Future _initFFIDatabase() async { - final Directory documentsDirectory = - await getApplicationDocumentsDirectory(); - final String path = join(documentsDirectory.path, _databaseName); - _logger.info("DB path " + path); - return sqlite3.sqlite3.open(path); - } - Future _initSqliteAsyncDatabase() async { final Directory documentsDirectory = await getApplicationDocumentsDirectory(); @@ -469,6 +455,7 @@ class FilesDB { } Future insert(EnteFile file) async { + _logger.info("Inserting $file"); final db = await instance.database; return db.insert( filesTable, @@ -478,11 +465,10 @@ class FilesDB { } Future getFile(int generatedID) async { - final db = await instance.database; - final results = await db.query( - filesTable, - where: '$columnGeneratedID = ?', - whereArgs: [generatedID], + final db = await instance.sqliteAsyncDB; + final results = await db.getAll( + 'SELECT * FROM $filesTable WHERE $columnGeneratedID = ?', + [generatedID], ); if (results.isEmpty) { return null; @@ -491,11 +477,10 @@ class FilesDB { } Future getUploadedFile(int uploadedID, int collectionID) async { - final db = await instance.database; - final results = await db.query( - filesTable, - where: '$columnUploadedFileID = ? AND $columnCollectionID = ?', - whereArgs: [ + final db = await instance.sqliteAsyncDB; + final results = await db.getAll( + 'SELECT * FROM $filesTable WHERE $columnUploadedFileID = ? 
AND $columnCollectionID = ?', + [ uploadedID, collectionID, ], @@ -506,29 +491,12 @@ class FilesDB { return convertToFiles(results)[0]; } - Future getAnyUploadedFile(int uploadedID) async { - final db = await instance.database; - final results = await db.query( - filesTable, - where: '$columnUploadedFileID = ?', - whereArgs: [ - uploadedID, - ], - ); - if (results.isEmpty) { - return null; - } - return convertToFiles(results)[0]; - } - Future> getUploadedFileIDs(int collectionID) async { - final db = await instance.database; - final results = await db.query( - filesTable, - columns: [columnUploadedFileID], - where: - '$columnCollectionID = ? AND ($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1)', - whereArgs: [ + final db = await instance.sqliteAsyncDB; + final results = await db.getAll( + 'SELECT $columnUploadedFileID FROM $filesTable' + ' WHERE $columnCollectionID = ? AND ($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1)', + [ collectionID, ], ); @@ -539,13 +507,36 @@ class FilesDB { return ids; } - Future getBackedUpIDs() async { + Future<(Set, Map)> getUploadAndHash( + int collectionID, + ) async { final db = await instance.database; final results = await db.query( filesTable, - columns: [columnLocalID, columnUploadedFileID, columnFileSize], + columns: [columnUploadedFileID, columnHash], where: - '$columnLocalID IS NOT NULL AND ($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1)', + '$columnCollectionID = ? 
AND ($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1)', + whereArgs: [ + collectionID, + ], + ); + final ids = {}; + final hash = {}; + for (final result in results) { + ids.add(result[columnUploadedFileID] as int); + if (result[columnHash] != null) { + hash[result[columnHash] as String] = + result[columnUploadedFileID] as int; + } + } + return (ids, hash); + } + + Future getBackedUpIDs() async { + final db = await instance.sqliteAsyncDB; + final results = await db.getAll( + 'SELECT $columnLocalID, $columnUploadedFileID, $columnFileSize FROM $filesTable' + ' WHERE $columnLocalID IS NOT NULL AND ($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1)', ); final Set localIDs = {}; final Set uploadedIDs = {}; @@ -681,13 +672,12 @@ class FilesDB { } Future> getAllFilesCollection(int collectionID) async { - final db = await instance.database; + final db = await instance.sqliteAsyncDB; const String whereClause = '$columnCollectionID = ?'; final List whereArgs = [collectionID]; - final results = await db.query( - filesTable, - where: whereClause, - whereArgs: whereArgs, + final results = await db.getAll( + 'SELECT * FROM $filesTable WHERE $whereClause', + whereArgs, ); final files = convertToFiles(results); return files; @@ -697,14 +687,13 @@ class FilesDB { int collectionID, int addedTime, ) async { - final db = await instance.database; + final db = await instance.sqliteAsyncDB; const String whereClause = '$columnCollectionID = ? 
AND $columnAddedTime > ?'; final List whereArgs = [collectionID, addedTime]; - final results = await db.query( - filesTable, - where: whereClause, - whereArgs: whereArgs, + final results = await db.getAll( + 'SELECT * FROM $filesTable WHERE $whereClause', + whereArgs, ); final files = convertToFiles(results); return files; @@ -726,20 +715,22 @@ class FilesDB { inParam += "'" + id.toString() + "',"; } inParam = inParam.substring(0, inParam.length - 1); - final db = await instance.database; + final db = await instance.sqliteAsyncDB; final order = (asc ?? false ? 'ASC' : 'DESC'); final String whereClause = '$columnCollectionID IN ($inParam) AND $columnCreationTime >= ? AND ' '$columnCreationTime <= ? AND $columnOwnerID = ?'; final List whereArgs = [startTime, endTime, userID]; - final results = await db.query( - filesTable, - where: whereClause, - whereArgs: whereArgs, - orderBy: - '$columnCreationTime ' + order + ', $columnModificationTime ' + order, - limit: limit, + String query = 'SELECT * FROM $filesTable WHERE $whereClause ORDER BY ' + '$columnCreationTime $order, $columnModificationTime $order'; + if (limit != null) { + query += ' LIMIT ?'; + whereArgs.add(limit); + } + final results = await db.getAll( + query, + whereArgs, ); final files = convertToFiles(results); final dedupeResult = @@ -757,7 +748,7 @@ class FilesDB { if (durations.isEmpty) { return []; } - final db = await instance.database; + final db = await instance.sqliteAsyncDB; String whereClause = "( "; for (int index = 0; index < durations.length; index++) { whereClause += "($columnCreationTime >= " + @@ -772,44 +763,10 @@ class FilesDB { } } whereClause += ")"; - final results = await db.query( - filesTable, - where: whereClause, - orderBy: '$columnCreationTime ' + order, - ); - final files = convertToFiles(results); - return applyDBFilters( - files, - DBFilterOptions(ignoredCollectionIDs: ignoredCollectionIDs), - ); - } - - Future> getFilesCreatedWithinDurationsSync( - List> durations, - Set 
ignoredCollectionIDs, { - int? visibility, - String order = 'ASC', - }) async { - if (durations.isEmpty) { - return []; - } - final db = await instance.ffiDB; - String whereClause = "( "; - for (int index = 0; index < durations.length; index++) { - whereClause += "($columnCreationTime >= " + - durations[index][0].toString() + - " AND $columnCreationTime < " + - durations[index][1].toString() + - ")"; - if (index != durations.length - 1) { - whereClause += " OR "; - } else if (visibility != null) { - whereClause += ' AND $columnMMdVisibility = $visibility'; - } - } - whereClause += ")"; - final results = db.select( - 'select * from $filesTable where $whereClause order by $columnCreationTime $order', + final query = + 'SELECT * FROM $filesTable WHERE $whereClause ORDER BY $columnCreationTime $order'; + final results = await db.getAll( + query, ); final files = convertToFiles(results); return applyDBFilters( @@ -1041,6 +998,29 @@ class FilesDB { return convertToFiles(rows); } + Future> + getUserOwnedFilesWithSameHashForGivenListOfFiles( + List files, + int userID, + ) async { + final db = await sqliteAsyncDB; + final List hashes = []; + for (final file in files) { + if (file.hash != null && file.hash != '') { + hashes.add(file.hash!); + } + } + if (hashes.isEmpty) { + return {}; + } + final inParam = hashes.map((e) => "'$e'").join(','); + final rows = await db.execute(''' + SELECT * FROM $filesTable WHERE $columnHash IN ($inParam) AND $columnOwnerID = $userID; + '''); + final matchedFiles = convertToFiles(rows); + return Map.fromIterable(matchedFiles, key: (e) => e.hash); + } + Future> getUploadedFilesWithHashes( FileHashData hashData, FileType fileType, diff --git a/mobile/lib/db/upload_locks_db.dart b/mobile/lib/db/upload_locks_db.dart index 0bd2f5a549..408d65b723 100644 --- a/mobile/lib/db/upload_locks_db.dart +++ b/mobile/lib/db/upload_locks_db.dart @@ -4,7 +4,6 @@ import 'dart:io'; import 'package:path/path.dart'; import 
'package:path_provider/path_provider.dart'; import "package:photos/module/upload/model/multipart.dart"; -import "package:photos/module/upload/service/multipart.dart"; import 'package:sqflite/sqflite.dart'; import "package:sqflite_migration/sqflite_migration.dart"; @@ -313,8 +312,9 @@ class UploadLocksDB { int fileSize, String fileKey, String fileNonce, - String keyNonce, - ) async { + String keyNonce, { + required int partSize, + }) async { final db = await UploadLocksDB.instance.database; final objectKey = urls.objectKey; @@ -331,8 +331,7 @@ class UploadLocksDB { _trackUploadTable.columnEncryptedFileKey: fileKey, _trackUploadTable.columnFileEncryptionNonce: fileNonce, _trackUploadTable.columnKeyEncryptionNonce: keyNonce, - _trackUploadTable.columnPartSize: - MultiPartUploader.multipartPartSizeForUpload, + _trackUploadTable.columnPartSize: partSize, _trackUploadTable.columnLastAttemptedAt: DateTime.now().millisecondsSinceEpoch, }, diff --git a/mobile/lib/gateways/cast_gw.dart b/mobile/lib/gateways/cast_gw.dart index fb342c1a90..63735d6782 100644 --- a/mobile/lib/gateways/cast_gw.dart +++ b/mobile/lib/gateways/cast_gw.dart @@ -12,10 +12,14 @@ class CastGateway { ); return response.data["publicKey"]; } catch (e) { - if (e is DioError && - e.response != null && - e.response!.statusCode == 404) { - return null; + if (e is DioError && e.response != null) { + if (e.response!.statusCode == 404) { + return null; + } else if (e.response!.statusCode == 403) { + throw CastIPMismatchException(); + } else { + rethrow; + } } rethrow; } @@ -48,3 +52,7 @@ class CastGateway { } } } + +class CastIPMismatchException implements Exception { + CastIPMismatchException(); +} diff --git a/mobile/lib/generated/intl/messages_en.dart b/mobile/lib/generated/intl/messages_en.dart index eef309aa5d..aab7f47bd8 100644 --- a/mobile/lib/generated/intl/messages_en.dart +++ b/mobile/lib/generated/intl/messages_en.dart @@ -132,7 +132,7 @@ class MessageLookup extends MessageLookupByLibrary { "Please 
talk to ${providerName} support if you were charged"; static String m38(endDate) => - "Free trial valid till ${endDate}.\nYou can choose a paid plan afterwards."; + "Free trial valid till ${endDate}.\nYou can purchase a paid plan afterwards."; static String m39(toEmail) => "Please email us at ${toEmail}"; @@ -357,6 +357,13 @@ class MessageLookup extends MessageLookupByLibrary { "Authentication failed, please try again"), "authenticationSuccessful": MessageLookupByLibrary.simpleMessage("Authentication successful!"), + "autoCastDialogBody": MessageLookupByLibrary.simpleMessage( + "You\'ll see available Cast devices here."), + "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage( + "Make sure Local Network permissions are turned on for the Ente Photos app, in Settings."), + "autoPair": MessageLookupByLibrary.simpleMessage("Auto pair"), + "autoPairDesc": MessageLookupByLibrary.simpleMessage( + "Auto pair works only with devices that support Chromecast."), "available": MessageLookupByLibrary.simpleMessage("Available"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("Backed up folders"), @@ -387,6 +394,10 @@ class MessageLookup extends MessageLookupByLibrary { "cannotAddMorePhotosAfterBecomingViewer": m9, "cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage("Cannot delete shared files"), + "castIPMismatchBody": MessageLookupByLibrary.simpleMessage( + "Please make sure you are on the same network as the TV."), + "castIPMismatchTitle": + MessageLookupByLibrary.simpleMessage("Failed to cast album"), "castInstruction": MessageLookupByLibrary.simpleMessage( "Visit cast.ente.io on the device you want to pair.\n\nEnter the code below to play the album on your TV."), "centerPoint": MessageLookupByLibrary.simpleMessage("Center point"), @@ -460,6 +471,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Confirm recovery key"), "confirmYourRecoveryKey": MessageLookupByLibrary.simpleMessage("Confirm your 
recovery key"), + "connectToDevice": + MessageLookupByLibrary.simpleMessage("Connect to device"), "contactFamilyAdmin": m12, "contactSupport": MessageLookupByLibrary.simpleMessage("Contact support"), @@ -721,6 +734,8 @@ class MessageLookup extends MessageLookupByLibrary { "filesBackedUpFromDevice": m22, "filesBackedUpInAlbum": m23, "filesDeleted": MessageLookupByLibrary.simpleMessage("Files deleted"), + "filesSavedToGallery": + MessageLookupByLibrary.simpleMessage("Files saved to gallery"), "flip": MessageLookupByLibrary.simpleMessage("Flip"), "forYourMemories": MessageLookupByLibrary.simpleMessage("for your memories"), @@ -902,6 +917,8 @@ class MessageLookup extends MessageLookupByLibrary { "manageParticipants": MessageLookupByLibrary.simpleMessage("Manage"), "manageSubscription": MessageLookupByLibrary.simpleMessage("Manage subscription"), + "manualPairDesc": MessageLookupByLibrary.simpleMessage( + "Pair with PIN works with any screen you wish to view your album on."), "map": MessageLookupByLibrary.simpleMessage("Map"), "maps": MessageLookupByLibrary.simpleMessage("Maps"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), @@ -936,6 +953,8 @@ class MessageLookup extends MessageLookupByLibrary { "no": MessageLookupByLibrary.simpleMessage("No"), "noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage("No albums shared by you yet"), + "noDeviceFound": + MessageLookupByLibrary.simpleMessage("No device found"), "noDeviceLimit": MessageLookupByLibrary.simpleMessage("None"), "noDeviceThatCanBeDeleted": MessageLookupByLibrary.simpleMessage( "You\'ve no files on this device that can be deleted"), @@ -982,6 +1001,9 @@ class MessageLookup extends MessageLookupByLibrary { "orPickAnExistingOne": MessageLookupByLibrary.simpleMessage("Or pick an existing one"), "pair": MessageLookupByLibrary.simpleMessage("Pair"), + "pairWithPin": MessageLookupByLibrary.simpleMessage("Pair with PIN"), + "pairingComplete": + MessageLookupByLibrary.simpleMessage("Pairing 
complete"), "passkey": MessageLookupByLibrary.simpleMessage("Passkey"), "passkeyAuthTitle": MessageLookupByLibrary.simpleMessage("Passkey verification"), @@ -1328,6 +1350,10 @@ class MessageLookup extends MessageLookupByLibrary { "sparkleSuccess": MessageLookupByLibrary.simpleMessage("✨ Success"), "startBackup": MessageLookupByLibrary.simpleMessage("Start backup"), "status": MessageLookupByLibrary.simpleMessage("Status"), + "stopCastingBody": MessageLookupByLibrary.simpleMessage( + "Do you want to stop casting?"), + "stopCastingTitle": + MessageLookupByLibrary.simpleMessage("Stop casting"), "storage": MessageLookupByLibrary.simpleMessage("Storage"), "storageBreakupFamily": MessageLookupByLibrary.simpleMessage("Family"), "storageBreakupYou": MessageLookupByLibrary.simpleMessage("You"), diff --git a/mobile/lib/generated/intl/messages_nl.dart b/mobile/lib/generated/intl/messages_nl.dart index 5f2b0903b5..f6987973c3 100644 --- a/mobile/lib/generated/intl/messages_nl.dart +++ b/mobile/lib/generated/intl/messages_nl.dart @@ -21,7 +21,7 @@ class MessageLookup extends MessageLookupByLibrary { String get localeName => 'nl'; static String m0(count) => - "${Intl.plural(count, zero: 'Add collaborator', one: 'Add collaborator', other: 'Add collaborators')}"; + "${Intl.plural(count, zero: 'Voeg samenwerker toe', one: 'Voeg samenwerker toe', other: 'Voeg samenwerkers toe')}"; static String m2(count) => "${Intl.plural(count, one: 'Bestand toevoegen', other: 'Bestanden toevoegen')}"; @@ -30,7 +30,7 @@ class MessageLookup extends MessageLookupByLibrary { "Jouw ${storageAmount} add-on is geldig tot ${endDate}"; static String m1(count) => - "${Intl.plural(count, zero: 'Add viewer', one: 'Add viewer', other: 'Add viewers')}"; + "${Intl.plural(count, one: 'Voeg kijker toe', other: 'Voeg kijkers toe')}"; static String m4(emailOrName) => "Toegevoegd door ${emailOrName}"; @@ -64,6 +64,8 @@ class MessageLookup extends MessageLookupByLibrary { static String m13(provider) => "Neem contact met 
ons op via support@ente.io om uw ${provider} abonnement te beheren."; + static String m69(endpoint) => "Verbonden met ${endpoint}"; + static String m14(count) => "${Intl.plural(count, one: 'Verwijder ${count} bestand', other: 'Verwijder ${count} bestanden')}"; @@ -85,7 +87,7 @@ class MessageLookup extends MessageLookupByLibrary { static String m20(newEmail) => "E-mailadres gewijzigd naar ${newEmail}"; static String m21(email) => - "${email} heeft geen ente account.\n\nStuur ze een uitnodiging om foto\'s te delen."; + "${email} heeft geen Ente account.\n\nStuur ze een uitnodiging om foto\'s te delen."; static String m22(count, formattedNumber) => "${Intl.plural(count, one: '1 bestand', other: '${formattedNumber} bestanden')} in dit album zijn veilig geback-upt"; @@ -102,7 +104,7 @@ class MessageLookup extends MessageLookupByLibrary { static String m26(endDate) => "Gratis proefversie geldig tot ${endDate}"; static String m27(count) => - "U heeft nog steeds toegang tot ${Intl.plural(count, one: 'het', other: 'ze')} op ente zolang u een actief abonnement heeft"; + "Je hebt nog steeds toegang tot ${Intl.plural(count, one: 'het', other: 'ze')} op Ente zolang je een actief abonnement hebt"; static String m28(sizeInMBorGB) => "Maak ${sizeInMBorGB} vrij"; @@ -164,7 +166,7 @@ class MessageLookup extends MessageLookupByLibrary { "Hey, kunt u bevestigen dat dit uw ente.io verificatie-ID is: ${verificationID}"; static String m50(referralCode, referralStorageInGB) => - "ente verwijzingscode: ${referralCode} \n\nPas het toe bij Instellingen → Algemeen → Verwijzingen om ${referralStorageInGB} GB gratis te krijgen nadat je je hebt aangemeld voor een betaald abonnement\n\nhttps://ente.io"; + "Ente verwijzingscode: ${referralCode} \n\nPas het toe bij Instellingen → Algemeen → Verwijzingen om ${referralStorageInGB} GB gratis te krijgen nadat je je hebt aangemeld voor een betaald abonnement\n\nhttps://ente.io"; static String m51(numberOfPeople) => "${Intl.plural(numberOfPeople, zero: 
'Deel met specifieke mensen', one: 'Gedeeld met 1 persoon', other: 'Gedeeld met ${numberOfPeople} mensen')}"; @@ -175,10 +177,10 @@ class MessageLookup extends MessageLookupByLibrary { "Deze ${fileType} zal worden verwijderd van jouw apparaat."; static String m54(fileType) => - "Deze ${fileType} staat zowel in ente als op jouw apparaat."; + "Deze ${fileType} staat zowel in Ente als op jouw apparaat."; static String m55(fileType) => - "Deze ${fileType} zal worden verwijderd uit ente."; + "Deze ${fileType} zal worden verwijderd uit Ente."; static String m56(storageAmountInGB) => "${storageAmountInGB} GB"; @@ -187,7 +189,7 @@ class MessageLookup extends MessageLookupByLibrary { "${usedAmount} ${usedStorageUnit} van ${totalAmount} ${totalStorageUnit} gebruikt"; static String m58(id) => - "Uw ${id} is al aan een ander ente account gekoppeld.\nAls u uw ${id} wilt gebruiken met dit account, neem dan contact op met onze klantenservice"; + "Jouw ${id} is al aan een ander Ente account gekoppeld.\nAls je jouw ${id} wilt gebruiken met dit account, neem dan contact op met onze klantenservice"; static String m59(endDate) => "Uw abonnement loopt af op ${endDate}"; @@ -218,7 +220,7 @@ class MessageLookup extends MessageLookupByLibrary { final messages = _notInlinedMessages(_notInlinedMessages); static Map _notInlinedMessages(_) => { "aNewVersionOfEnteIsAvailable": MessageLookupByLibrary.simpleMessage( - "Er is een nieuwe versie van ente beschikbaar."), + "Er is een nieuwe versie van Ente beschikbaar."), "about": MessageLookupByLibrary.simpleMessage("Over"), "account": MessageLookupByLibrary.simpleMessage("Account"), "accountWelcomeBack": @@ -249,7 +251,7 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Voeg geselecteerde toe"), "addToAlbum": MessageLookupByLibrary.simpleMessage("Toevoegen aan album"), - "addToEnte": MessageLookupByLibrary.simpleMessage("Toevoegen aan ente"), + "addToEnte": MessageLookupByLibrary.simpleMessage("Toevoegen 
aan Ente"), "addToHiddenAlbum": MessageLookupByLibrary.simpleMessage( "Toevoegen aan verborgen album"), "addViewer": MessageLookupByLibrary.simpleMessage("Voeg kijker toe"), @@ -366,6 +368,14 @@ class MessageLookup extends MessageLookupByLibrary { "Verificatie mislukt, probeer het opnieuw"), "authenticationSuccessful": MessageLookupByLibrary.simpleMessage("Verificatie geslaagd!"), + "autoCastDialogBody": MessageLookupByLibrary.simpleMessage( + "Je zult de beschikbare Cast apparaten hier zien."), + "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage( + "Zorg ervoor dat lokale netwerkrechten zijn ingeschakeld voor de Ente Photos app, in Instellingen."), + "autoPair": + MessageLookupByLibrary.simpleMessage("Automatisch koppelen"), + "autoPairDesc": MessageLookupByLibrary.simpleMessage( + "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen."), "available": MessageLookupByLibrary.simpleMessage("Beschikbaar"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("Back-up mappen"), @@ -397,6 +407,10 @@ class MessageLookup extends MessageLookupByLibrary { "cannotAddMorePhotosAfterBecomingViewer": m9, "cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage( "Kan gedeelde bestanden niet verwijderen"), + "castIPMismatchBody": MessageLookupByLibrary.simpleMessage( + "Zorg ervoor dat je op hetzelfde netwerk zit als de tv."), + "castIPMismatchTitle": + MessageLookupByLibrary.simpleMessage("Album casten mislukt"), "castInstruction": MessageLookupByLibrary.simpleMessage( "Bezoek cast.ente.io op het apparaat dat u wilt koppelen.\n\nVoer de code hieronder in om het album op uw TV af te spelen."), "centerPoint": MessageLookupByLibrary.simpleMessage("Middelpunt"), @@ -421,6 +435,8 @@ class MessageLookup extends MessageLookupByLibrary { "claimedStorageSoFar": m10, "cleanUncategorized": MessageLookupByLibrary.simpleMessage("Ongecategoriseerd opschonen"), + "cleanUncategorizedDescription": MessageLookupByLibrary.simpleMessage( + "Verwijder 
alle bestanden van Ongecategoriseerd die aanwezig zijn in andere albums"), "clearCaches": MessageLookupByLibrary.simpleMessage("Cache legen"), "clearIndexes": MessageLookupByLibrary.simpleMessage("Index wissen"), "click": MessageLookupByLibrary.simpleMessage("• Click"), @@ -438,7 +454,7 @@ class MessageLookup extends MessageLookupByLibrary { "codeUsedByYou": MessageLookupByLibrary.simpleMessage("Code gebruikt door jou"), "collabLinkSectionDescription": MessageLookupByLibrary.simpleMessage( - "Maak een link waarmee mensen foto\'s in jouw gedeelde album kunnen toevoegen en bekijken zonder dat ze daarvoor een ente app of account nodig hebben. Handig voor het verzamelen van foto\'s van evenementen."), + "Maak een link waarmee mensen foto\'s in jouw gedeelde album kunnen toevoegen en bekijken zonder dat ze daarvoor een Ente app of account nodig hebben. Handig voor het verzamelen van foto\'s van evenementen."), "collaborativeLink": MessageLookupByLibrary.simpleMessage("Gezamenlijke link"), "collaborativeLinkCreatedFor": m11, @@ -469,6 +485,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"), "confirmYourRecoveryKey": MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"), + "connectToDevice": MessageLookupByLibrary.simpleMessage( + "Verbinding maken met apparaat"), "contactFamilyAdmin": m12, "contactSupport": MessageLookupByLibrary.simpleMessage("Contacteer klantenservice"), @@ -501,7 +519,7 @@ class MessageLookup extends MessageLookupByLibrary { "createAlbumActionHint": MessageLookupByLibrary.simpleMessage( "Lang indrukken om foto\'s te selecteren en klik + om een album te maken"), "createCollaborativeLink": - MessageLookupByLibrary.simpleMessage("Create collaborative link"), + MessageLookupByLibrary.simpleMessage("Maak een gezamenlijke link"), "createCollage": MessageLookupByLibrary.simpleMessage("Creëer collage"), "createNewAccount": MessageLookupByLibrary.simpleMessage("Nieuw account 
aanmaken"), @@ -516,6 +534,7 @@ class MessageLookup extends MessageLookupByLibrary { "currentUsageIs": MessageLookupByLibrary.simpleMessage("Huidig gebruik is "), "custom": MessageLookupByLibrary.simpleMessage("Aangepast"), + "customEndpoint": m69, "darkTheme": MessageLookupByLibrary.simpleMessage("Donker"), "dayToday": MessageLookupByLibrary.simpleMessage("Vandaag"), "dayYesterday": MessageLookupByLibrary.simpleMessage("Gisteren"), @@ -538,7 +557,7 @@ class MessageLookup extends MessageLookupByLibrary { "Hiermee worden alle lege albums verwijderd. Dit is handig wanneer je rommel in je albumlijst wilt verminderen."), "deleteAll": MessageLookupByLibrary.simpleMessage("Alles Verwijderen"), "deleteConfirmDialogBody": MessageLookupByLibrary.simpleMessage( - "Dit account is gekoppeld aan andere ente apps, als je er gebruik van maakt.\\n\\nJe geüploade gegevens worden in alle ente apps gepland voor verwijdering, en je account wordt permanent verwijderd voor alle ente diensten."), + "Dit account is gekoppeld aan andere Ente apps, als je er gebruik van maakt. 
Je geüploade gegevens worden in alle Ente apps gepland voor verwijdering, en je account wordt permanent verwijderd voor alle Ente diensten."), "deleteEmailRequest": MessageLookupByLibrary.simpleMessage( "Stuur een e-mail naar account-deletion@ente.io vanaf het door jou geregistreerde e-mailadres."), "deleteEmptyAlbums": @@ -550,7 +569,7 @@ class MessageLookup extends MessageLookupByLibrary { "deleteFromDevice": MessageLookupByLibrary.simpleMessage("Verwijder van apparaat"), "deleteFromEnte": - MessageLookupByLibrary.simpleMessage("Verwijder van ente"), + MessageLookupByLibrary.simpleMessage("Verwijder van Ente"), "deleteItemCount": m14, "deleteLocation": MessageLookupByLibrary.simpleMessage("Verwijder locatie"), @@ -571,7 +590,7 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Gedeeld album verwijderen?"), "deleteSharedAlbumDialogBody": MessageLookupByLibrary.simpleMessage( "Het album wordt verwijderd voor iedereen\n\nJe verliest de toegang tot gedeelde foto\'s in dit album die eigendom zijn van anderen"), - "descriptions": MessageLookupByLibrary.simpleMessage("Descriptions"), + "descriptions": MessageLookupByLibrary.simpleMessage("Beschrijvingen"), "deselectAll": MessageLookupByLibrary.simpleMessage("Alles deselecteren"), "designedToOutlive": MessageLookupByLibrary.simpleMessage( @@ -579,12 +598,16 @@ class MessageLookup extends MessageLookupByLibrary { "details": MessageLookupByLibrary.simpleMessage("Details"), "devAccountChanged": MessageLookupByLibrary.simpleMessage( "Het ontwikkelaarsaccount dat we gebruiken om te publiceren in de App Store is veranderd. 
Daarom moet je opnieuw inloggen.\n\nOnze excuses voor het ongemak, helaas was dit onvermijdelijk."), + "developerSettings": + MessageLookupByLibrary.simpleMessage("Ontwikkelaarsinstellingen"), + "developerSettingsWarning": MessageLookupByLibrary.simpleMessage( + "Weet je zeker dat je de ontwikkelaarsinstellingen wilt wijzigen?"), "deviceCodeHint": MessageLookupByLibrary.simpleMessage("Voer de code in"), "deviceFilesAutoUploading": MessageLookupByLibrary.simpleMessage( - "Bestanden toegevoegd aan dit album van dit apparaat zullen automatisch geüpload worden naar ente."), + "Bestanden toegevoegd aan dit album van dit apparaat zullen automatisch geüpload worden naar Ente."), "deviceLockExplanation": MessageLookupByLibrary.simpleMessage( - "Schakel de schermvergrendeling van het apparaat uit wanneer ente op de voorgrond is en er een back-up aan de gang is. Dit is normaal gesproken niet nodig, maar kan grote uploads en initiële imports van grote mappen sneller laten verlopen."), + "Schakel de schermvergrendeling van het apparaat uit wanneer Ente op de voorgrond is en er een back-up aan de gang is. 
Dit is normaal gesproken niet nodig, maar kan grote uploads en initiële imports van grote mappen sneller laten verlopen."), "deviceNotFound": MessageLookupByLibrary.simpleMessage("Apparaat niet gevonden"), "didYouKnow": MessageLookupByLibrary.simpleMessage("Wist u dat?"), @@ -648,15 +671,17 @@ class MessageLookup extends MessageLookupByLibrary { "encryption": MessageLookupByLibrary.simpleMessage("Encryptie"), "encryptionKeys": MessageLookupByLibrary.simpleMessage("Encryptiesleutels"), + "endpointUpdatedMessage": MessageLookupByLibrary.simpleMessage( + "Eindpunt met succes bijgewerkt"), "endtoendEncryptedByDefault": MessageLookupByLibrary.simpleMessage( "Standaard end-to-end versleuteld"), "enteCanEncryptAndPreserveFilesOnlyIfYouGrant": MessageLookupByLibrary.simpleMessage( - "ente kan bestanden alleen versleutelen en bewaren als u toegang tot ze geeft"), + "Ente kan bestanden alleen versleutelen en bewaren als u toegang tot ze geeft"), "entePhotosPerm": MessageLookupByLibrary.simpleMessage( - "ente heeft toestemming nodig om je foto\'s te bewaren"), + "Ente heeft toestemming nodig om je foto\'s te bewaren"), "enteSubscriptionPitch": MessageLookupByLibrary.simpleMessage( - "ente bewaart uw herinneringen, zodat ze altijd beschikbaar voor u zijn, zelfs als u uw apparaat verliest."), + "Ente bewaart uw herinneringen, zodat ze altijd beschikbaar voor u zijn, zelfs als u uw apparaat verliest."), "enteSubscriptionShareWithFamily": MessageLookupByLibrary.simpleMessage( "Je familie kan ook aan je abonnement worden toegevoegd."), "enterAlbumName": @@ -716,7 +741,7 @@ class MessageLookup extends MessageLookupByLibrary { "failedToVerifyPaymentStatus": MessageLookupByLibrary.simpleMessage( "Betalingsstatus verifiëren mislukt"), "familyPlanOverview": MessageLookupByLibrary.simpleMessage( - "Voeg 5 gezinsleden toe aan uw bestaande abonnement zonder extra te betalen.\n\nElk lid krijgt zijn eigen privé ruimte en kan elkaars bestanden niet zien, tenzij ze zijn 
gedeeld.\n\nFamilieplannen zijn beschikbaar voor klanten die een betaald ente abonnement hebben.\n\nAbonneer u nu om aan de slag te gaan!"), + "Voeg 5 gezinsleden toe aan je bestaande abonnement zonder extra te betalen.\n\nElk lid krijgt zijn eigen privé ruimte en kan elkaars bestanden niet zien tenzij ze zijn gedeeld.\n\nFamilieplannen zijn beschikbaar voor klanten die een betaald Ente abonnement hebben.\n\nAbonneer nu om aan de slag te gaan!"), "familyPlanPortalTitle": MessageLookupByLibrary.simpleMessage("Familie"), "familyPlans": @@ -739,6 +764,8 @@ class MessageLookup extends MessageLookupByLibrary { "filesBackedUpInAlbum": m23, "filesDeleted": MessageLookupByLibrary.simpleMessage("Bestanden verwijderd"), + "filesSavedToGallery": MessageLookupByLibrary.simpleMessage( + "Bestand opgeslagen in galerij"), "flip": MessageLookupByLibrary.simpleMessage("Omdraaien"), "forYourMemories": MessageLookupByLibrary.simpleMessage("voor uw herinneringen"), @@ -777,6 +804,7 @@ class MessageLookup extends MessageLookupByLibrary { "Wij gebruiken geen tracking. Het zou helpen als je ons vertelt waar je ons gevonden hebt!"), "hearUsWhereTitle": MessageLookupByLibrary.simpleMessage( "Hoe hoorde je over Ente? 
(optioneel)"), + "help": MessageLookupByLibrary.simpleMessage("Hulp"), "hidden": MessageLookupByLibrary.simpleMessage("Verborgen"), "hide": MessageLookupByLibrary.simpleMessage("Verbergen"), "hiding": MessageLookupByLibrary.simpleMessage("Verbergen..."), @@ -792,7 +820,7 @@ class MessageLookup extends MessageLookupByLibrary { "iOSOkButton": MessageLookupByLibrary.simpleMessage("Oké"), "ignoreUpdate": MessageLookupByLibrary.simpleMessage("Negeren"), "ignoredFolderUploadReason": MessageLookupByLibrary.simpleMessage( - "Sommige bestanden in dit album worden genegeerd voor de upload omdat ze eerder van ente zijn verwijderd."), + "Sommige bestanden in dit album worden genegeerd voor uploaden omdat ze eerder van Ente zijn verwijderd."), "importing": MessageLookupByLibrary.simpleMessage("Importeren...."), "incorrectCode": MessageLookupByLibrary.simpleMessage("Onjuiste code"), "incorrectPasswordTitle": @@ -811,16 +839,20 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Installeer handmatig"), "invalidEmailAddress": MessageLookupByLibrary.simpleMessage("Ongeldig e-mailadres"), + "invalidEndpoint": + MessageLookupByLibrary.simpleMessage("Ongeldig eindpunt"), + "invalidEndpointMessage": MessageLookupByLibrary.simpleMessage( + "Sorry, het eindpunt dat je hebt ingevoerd is ongeldig. Voer een geldig eindpunt in en probeer het opnieuw."), "invalidKey": MessageLookupByLibrary.simpleMessage("Ongeldige sleutel"), "invalidRecoveryKey": MessageLookupByLibrary.simpleMessage( "De herstelsleutel die je hebt ingevoerd is niet geldig. 
Zorg ervoor dat deze 24 woorden bevat en controleer de spelling van elk van deze woorden.\n\nAls je een oudere herstelcode hebt ingevoerd, zorg ervoor dat deze 64 tekens lang is, en controleer ze allemaal."), "invite": MessageLookupByLibrary.simpleMessage("Uitnodigen"), "inviteToEnte": - MessageLookupByLibrary.simpleMessage("Uitnodigen voor ente"), + MessageLookupByLibrary.simpleMessage("Uitnodigen voor Ente"), "inviteYourFriends": MessageLookupByLibrary.simpleMessage("Vrienden uitnodigen"), "inviteYourFriendsToEnte": MessageLookupByLibrary.simpleMessage( - "Vrienden uitnodigen voor ente"), + "Vrienden uitnodigen voor Ente"), "itLooksLikeSomethingWentWrongPleaseRetryAfterSome": MessageLookupByLibrary.simpleMessage( "Het lijkt erop dat er iets fout is gegaan. Probeer het later opnieuw. Als de fout zich blijft voordoen, neem dan contact op met ons supportteam."), @@ -830,7 +862,7 @@ class MessageLookup extends MessageLookupByLibrary { "Bestanden tonen het aantal resterende dagen voordat ze permanent worden verwijderd"), "itemsWillBeRemovedFromAlbum": MessageLookupByLibrary.simpleMessage( "Geselecteerde items zullen worden verwijderd uit dit album"), - "joinDiscord": MessageLookupByLibrary.simpleMessage("Join Discord"), + "joinDiscord": MessageLookupByLibrary.simpleMessage("Join de Discord"), "keepPhotos": MessageLookupByLibrary.simpleMessage("Foto\'s behouden"), "kiloMeterUnit": MessageLookupByLibrary.simpleMessage("km"), "kindlyHelpUsWithThisInformation": MessageLookupByLibrary.simpleMessage( @@ -888,7 +920,7 @@ class MessageLookup extends MessageLookupByLibrary { "locationName": MessageLookupByLibrary.simpleMessage("Locatie naam"), "locationTagFeatureDescription": MessageLookupByLibrary.simpleMessage( "Een locatie tag groept alle foto\'s die binnen een bepaalde straal van een foto zijn genomen"), - "locations": MessageLookupByLibrary.simpleMessage("Locations"), + "locations": MessageLookupByLibrary.simpleMessage("Locaties"), "lockButtonLabel": 
MessageLookupByLibrary.simpleMessage("Vergrendel"), "lockScreenEnablePreSteps": MessageLookupByLibrary.simpleMessage( "Om vergrendelscherm in te schakelen, moet u een toegangscode of schermvergrendeling instellen in uw systeeminstellingen."), @@ -902,7 +934,7 @@ class MessageLookup extends MessageLookupByLibrary { "Dit zal logboeken verzenden om ons te helpen uw probleem op te lossen. Houd er rekening mee dat bestandsnamen zullen worden meegenomen om problemen met specifieke bestanden bij te houden."), "longPressAnEmailToVerifyEndToEndEncryption": MessageLookupByLibrary.simpleMessage( - "Long press an email to verify end to end encryption."), + "Druk lang op een e-mail om de versleuteling te verifiëren."), "longpressOnAnItemToViewInFullscreen": MessageLookupByLibrary.simpleMessage( "Houd een bestand lang ingedrukt om te bekijken op volledig scherm"), "lostDevice": @@ -922,6 +954,8 @@ class MessageLookup extends MessageLookupByLibrary { "manageParticipants": MessageLookupByLibrary.simpleMessage("Beheren"), "manageSubscription": MessageLookupByLibrary.simpleMessage("Abonnement beheren"), + "manualPairDesc": MessageLookupByLibrary.simpleMessage( + "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien."), "map": MessageLookupByLibrary.simpleMessage("Kaart"), "maps": MessageLookupByLibrary.simpleMessage("Kaarten"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), @@ -953,11 +987,13 @@ class MessageLookup extends MessageLookupByLibrary { "Kan geen verbinding maken met Ente, controleer uw netwerkinstellingen en neem contact op met ondersteuning als de fout zich blijft voordoen."), "never": MessageLookupByLibrary.simpleMessage("Nooit"), "newAlbum": MessageLookupByLibrary.simpleMessage("Nieuw album"), - "newToEnte": MessageLookupByLibrary.simpleMessage("Nieuw bij ente"), + "newToEnte": MessageLookupByLibrary.simpleMessage("Nieuw bij Ente"), "newest": MessageLookupByLibrary.simpleMessage("Nieuwste"), "no": 
MessageLookupByLibrary.simpleMessage("Nee"), "noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage( "Nog geen albums gedeeld door jou"), + "noDeviceFound": + MessageLookupByLibrary.simpleMessage("Geen apparaat gevonden"), "noDeviceLimit": MessageLookupByLibrary.simpleMessage("Geen"), "noDeviceThatCanBeDeleted": MessageLookupByLibrary.simpleMessage( "Je hebt geen bestanden op dit apparaat die verwijderd kunnen worden"), @@ -1007,6 +1043,12 @@ class MessageLookup extends MessageLookupByLibrary { "orPickAnExistingOne": MessageLookupByLibrary.simpleMessage("Of kies een bestaande"), "pair": MessageLookupByLibrary.simpleMessage("Koppelen"), + "pairWithPin": MessageLookupByLibrary.simpleMessage("Koppelen met PIN"), + "pairingComplete": + MessageLookupByLibrary.simpleMessage("Koppeling voltooid"), + "passkey": MessageLookupByLibrary.simpleMessage("Passkey"), + "passkeyAuthTitle": + MessageLookupByLibrary.simpleMessage("Passkey verificatie"), "password": MessageLookupByLibrary.simpleMessage("Wachtwoord"), "passwordChangedSuccessfully": MessageLookupByLibrary.simpleMessage( "Wachtwoord succesvol aangepast"), @@ -1018,6 +1060,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Betaalgegevens"), "paymentFailed": MessageLookupByLibrary.simpleMessage("Betaling mislukt"), + "paymentFailedMessage": MessageLookupByLibrary.simpleMessage( + "Helaas is je betaling mislukt. 
Neem contact op met support zodat we je kunnen helpen!"), "paymentFailedTalkToProvider": m37, "pendingItems": MessageLookupByLibrary.simpleMessage("Bestanden in behandeling"), @@ -1206,7 +1250,7 @@ class MessageLookup extends MessageLookupByLibrary { "scanThisBarcodeWithnyourAuthenticatorApp": MessageLookupByLibrary.simpleMessage( "Scan deze barcode met\nje authenticator app"), - "search": MessageLookupByLibrary.simpleMessage("Search"), + "search": MessageLookupByLibrary.simpleMessage("Zoeken"), "searchAlbumsEmptySection": MessageLookupByLibrary.simpleMessage("Albums"), "searchByAlbumNameHint": @@ -1254,7 +1298,7 @@ class MessageLookup extends MessageLookupByLibrary { "selectYourPlan": MessageLookupByLibrary.simpleMessage("Kies uw abonnement"), "selectedFilesAreNotOnEnte": MessageLookupByLibrary.simpleMessage( - "Geselecteerde bestanden staan niet op ente"), + "Geselecteerde bestanden staan niet op Ente"), "selectedFoldersWillBeEncryptedAndBackedUp": MessageLookupByLibrary.simpleMessage( "Geselecteerde mappen worden versleuteld en geback-upt"), @@ -1268,6 +1312,8 @@ class MessageLookup extends MessageLookupByLibrary { "sendInvite": MessageLookupByLibrary.simpleMessage("Stuur een uitnodiging"), "sendLink": MessageLookupByLibrary.simpleMessage("Stuur link"), + "serverEndpoint": + MessageLookupByLibrary.simpleMessage("Server eindpunt"), "sessionExpired": MessageLookupByLibrary.simpleMessage("Sessie verlopen"), "setAPassword": @@ -1291,15 +1337,15 @@ class MessageLookup extends MessageLookupByLibrary { "Deel alleen met de mensen die u wilt"), "shareTextConfirmOthersVerificationID": m49, "shareTextRecommendUsingEnte": MessageLookupByLibrary.simpleMessage( - "Download ente zodat we gemakkelijk foto\'s en video\'s van originele kwaliteit kunnen delen\n\nhttps://ente.io"), + "Download Ente zodat we gemakkelijk foto\'s en video\'s in originele kwaliteit kunnen delen\n\nhttps://ente.io"), "shareTextReferralCode": m50, "shareWithNonenteUsers": 
MessageLookupByLibrary.simpleMessage( - "Delen met niet-ente gebruikers"), + "Delen met niet-Ente gebruikers"), "shareWithPeopleSectionTitle": m51, "shareYourFirstAlbum": MessageLookupByLibrary.simpleMessage("Deel jouw eerste album"), "sharedAlbumSectionDescription": MessageLookupByLibrary.simpleMessage( - "Maak gedeelde en collaboratieve albums met andere ente gebruikers, inclusief gebruikers met gratis abonnementen."), + "Maak gedeelde en collaboratieve albums met andere Ente gebruikers, inclusief gebruikers met gratis abonnementen."), "sharedByMe": MessageLookupByLibrary.simpleMessage("Gedeeld door mij"), "sharedByYou": MessageLookupByLibrary.simpleMessage("Gedeeld door jou"), "sharedPhotoNotifications": @@ -1329,7 +1375,7 @@ class MessageLookup extends MessageLookupByLibrary { "skip": MessageLookupByLibrary.simpleMessage("Overslaan"), "social": MessageLookupByLibrary.simpleMessage("Sociale media"), "someItemsAreInBothEnteAndYourDevice": MessageLookupByLibrary.simpleMessage( - "Sommige bestanden bevinden zich in zowel ente als op uw apparaat."), + "Sommige bestanden bevinden zich zowel in Ente als op jouw apparaat."), "someOfTheFilesYouAreTryingToDeleteAre": MessageLookupByLibrary.simpleMessage( "Sommige bestanden die u probeert te verwijderen zijn alleen beschikbaar op uw apparaat en kunnen niet hersteld worden als deze verwijderd worden"), @@ -1360,6 +1406,10 @@ class MessageLookup extends MessageLookupByLibrary { "sparkleSuccess": MessageLookupByLibrary.simpleMessage("✨ Succes"), "startBackup": MessageLookupByLibrary.simpleMessage("Back-up starten"), "status": MessageLookupByLibrary.simpleMessage("Status"), + "stopCastingBody": + MessageLookupByLibrary.simpleMessage("Wil je stoppen met casten?"), + "stopCastingTitle": + MessageLookupByLibrary.simpleMessage("Casten stoppen"), "storage": MessageLookupByLibrary.simpleMessage("Opslagruimte"), "storageBreakupFamily": MessageLookupByLibrary.simpleMessage("Familie"), "storageBreakupYou": 
MessageLookupByLibrary.simpleMessage("Jij"), @@ -1495,9 +1545,8 @@ class MessageLookup extends MessageLookupByLibrary { "Tot 50% korting, tot 4 december."), "usableReferralStorageInfo": MessageLookupByLibrary.simpleMessage( "Bruikbare opslag is beperkt door je huidige abonnement. Buitensporige geclaimde opslag zal automatisch bruikbaar worden wanneer je je abonnement upgrade."), - "usePublicLinksForPeopleNotOnEnte": - MessageLookupByLibrary.simpleMessage( - "Gebruik publieke links voor mensen die niet op ente zitten"), + "usePublicLinksForPeopleNotOnEnte": MessageLookupByLibrary.simpleMessage( + "Gebruik publieke links voor mensen die geen Ente account hebben"), "useRecoveryKey": MessageLookupByLibrary.simpleMessage("Herstelcode gebruiken"), "useSelectedPhoto": @@ -1513,6 +1562,8 @@ class MessageLookup extends MessageLookupByLibrary { "verifyEmail": MessageLookupByLibrary.simpleMessage("Bevestig e-mail"), "verifyEmailID": m65, "verifyIDLabel": MessageLookupByLibrary.simpleMessage("Verifiëren"), + "verifyPasskey": + MessageLookupByLibrary.simpleMessage("Bevestig passkey"), "verifyPassword": MessageLookupByLibrary.simpleMessage("Bevestig wachtwoord"), "verifying": MessageLookupByLibrary.simpleMessage("Verifiëren..."), @@ -1533,6 +1584,8 @@ class MessageLookup extends MessageLookupByLibrary { "viewer": MessageLookupByLibrary.simpleMessage("Kijker"), "visitWebToManage": MessageLookupByLibrary.simpleMessage( "Bezoek alstublieft web.ente.io om uw abonnement te beheren"), + "waitingForVerification": + MessageLookupByLibrary.simpleMessage("Wachten op verificatie..."), "waitingForWifi": MessageLookupByLibrary.simpleMessage("Wachten op WiFi..."), "weAreOpenSource": diff --git a/mobile/lib/generated/intl/messages_pt.dart b/mobile/lib/generated/intl/messages_pt.dart index 0747cfa826..ef6dc5e54e 100644 --- a/mobile/lib/generated/intl/messages_pt.dart +++ b/mobile/lib/generated/intl/messages_pt.dart @@ -280,7 +280,7 @@ class MessageLookup extends MessageLookupByLibrary { 
"allowAddingPhotos": MessageLookupByLibrary.simpleMessage("Permitir adicionar fotos"), "allowDownloads": - MessageLookupByLibrary.simpleMessage("Permitir transferências"), + MessageLookupByLibrary.simpleMessage("Permitir downloads"), "allowPeopleToAddPhotos": MessageLookupByLibrary.simpleMessage( "Permitir que pessoas adicionem fotos"), "androidBiometricHint": @@ -311,7 +311,7 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Aplicar código"), "appstoreSubscription": MessageLookupByLibrary.simpleMessage("Assinatura da AppStore"), - "archive": MessageLookupByLibrary.simpleMessage("Arquivado"), + "archive": MessageLookupByLibrary.simpleMessage("Arquivar"), "archiveAlbum": MessageLookupByLibrary.simpleMessage("Arquivar álbum"), "archiving": MessageLookupByLibrary.simpleMessage("Arquivando..."), "areYouSureThatYouWantToLeaveTheFamily": @@ -365,6 +365,14 @@ class MessageLookup extends MessageLookupByLibrary { "Falha na autenticação. Por favor, tente novamente"), "authenticationSuccessful": MessageLookupByLibrary.simpleMessage("Autenticação bem-sucedida!"), + "autoCastDialogBody": MessageLookupByLibrary.simpleMessage( + "Você verá dispositivos disponíveis para transmitir aqui."), + "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage( + "Certifique-se de que as permissões de Rede local estão ativadas para o aplicativo de Fotos Ente, em Configurações."), + "autoPair": + MessageLookupByLibrary.simpleMessage("Pareamento automático"), + "autoPairDesc": MessageLookupByLibrary.simpleMessage( + "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast."), "available": MessageLookupByLibrary.simpleMessage("Disponível"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("Backup de pastas concluído"), @@ -397,6 +405,10 @@ class MessageLookup extends MessageLookupByLibrary { "cannotAddMorePhotosAfterBecomingViewer": m9, "cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage( "Não é 
possível excluir arquivos compartilhados"), + "castIPMismatchBody": MessageLookupByLibrary.simpleMessage( + "Certifique-se de estar na mesma rede que a TV."), + "castIPMismatchTitle": + MessageLookupByLibrary.simpleMessage("Falha ao transmitir álbum"), "castInstruction": MessageLookupByLibrary.simpleMessage( "Visite cast.ente.io no dispositivo que você deseja parear.\n\ndigite o código abaixo para reproduzir o álbum em sua TV."), "centerPoint": MessageLookupByLibrary.simpleMessage("Ponto central"), @@ -470,6 +482,8 @@ class MessageLookup extends MessageLookupByLibrary { "Confirme a chave de recuperação"), "confirmYourRecoveryKey": MessageLookupByLibrary.simpleMessage( "Confirme sua chave de recuperação"), + "connectToDevice": + MessageLookupByLibrary.simpleMessage("Conectar ao dispositivo"), "contactFamilyAdmin": m12, "contactSupport": MessageLookupByLibrary.simpleMessage("Contate o suporte"), @@ -551,7 +565,7 @@ class MessageLookup extends MessageLookupByLibrary { "deleteFromDevice": MessageLookupByLibrary.simpleMessage("Excluir do dispositivo"), "deleteFromEnte": - MessageLookupByLibrary.simpleMessage("Excluir do ente"), + MessageLookupByLibrary.simpleMessage("Excluir do Ente"), "deleteItemCount": m14, "deleteLocation": MessageLookupByLibrary.simpleMessage("Excluir Local"), "deletePhotos": MessageLookupByLibrary.simpleMessage("Excluir fotos"), @@ -667,7 +681,7 @@ class MessageLookup extends MessageLookupByLibrary { "enterCode": MessageLookupByLibrary.simpleMessage("Coloque o código"), "enterCodeDescription": MessageLookupByLibrary.simpleMessage( "Digite o código fornecido pelo seu amigo para reivindicar o armazenamento gratuito para vocês dois"), - "enterEmail": MessageLookupByLibrary.simpleMessage("Digite o email"), + "enterEmail": MessageLookupByLibrary.simpleMessage("Insira o e-mail"), "enterFileName": MessageLookupByLibrary.simpleMessage("Digite o nome do arquivo"), "enterNewPasswordToEncrypt": MessageLookupByLibrary.simpleMessage( @@ -738,6 +752,8 @@ class 
MessageLookup extends MessageLookupByLibrary { "filesBackedUpInAlbum": m23, "filesDeleted": MessageLookupByLibrary.simpleMessage("Arquivos excluídos"), + "filesSavedToGallery": + MessageLookupByLibrary.simpleMessage("Arquivos salvos na galeria"), "flip": MessageLookupByLibrary.simpleMessage("Inverter"), "forYourMemories": MessageLookupByLibrary.simpleMessage("para suas memórias"), @@ -821,7 +837,7 @@ class MessageLookup extends MessageLookupByLibrary { "A chave de recuperação que você digitou não é válida. Certifique-se de que contém 24 palavras e verifique a ortografia de cada uma.\n\nSe você inseriu um código de recuperação mais antigo, verifique se ele tem 64 caracteres e verifique cada um deles."), "invite": MessageLookupByLibrary.simpleMessage("Convidar"), "inviteToEnte": - MessageLookupByLibrary.simpleMessage("Convidar para o ente"), + MessageLookupByLibrary.simpleMessage("Convidar para o Ente"), "inviteYourFriends": MessageLookupByLibrary.simpleMessage("Convide seus amigos"), "inviteYourFriendsToEnte": @@ -929,6 +945,8 @@ class MessageLookup extends MessageLookupByLibrary { "manageParticipants": MessageLookupByLibrary.simpleMessage("Gerenciar"), "manageSubscription": MessageLookupByLibrary.simpleMessage("Gerenciar assinatura"), + "manualPairDesc": MessageLookupByLibrary.simpleMessage( + "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado."), "map": MessageLookupByLibrary.simpleMessage("Mapa"), "maps": MessageLookupByLibrary.simpleMessage("Mapas"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), @@ -964,6 +982,8 @@ class MessageLookup extends MessageLookupByLibrary { "no": MessageLookupByLibrary.simpleMessage("Não"), "noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage( "Nenhum álbum compartilhado por você ainda"), + "noDeviceFound": MessageLookupByLibrary.simpleMessage( + "Nenhum dispositivo encontrado"), "noDeviceLimit": MessageLookupByLibrary.simpleMessage("Nenhum"), "noDeviceThatCanBeDeleted": 
MessageLookupByLibrary.simpleMessage( "Você não tem nenhum arquivo neste dispositivo que pode ser excluído"), @@ -1012,6 +1032,9 @@ class MessageLookup extends MessageLookupByLibrary { "orPickAnExistingOne": MessageLookupByLibrary.simpleMessage("Ou escolha um existente"), "pair": MessageLookupByLibrary.simpleMessage("Parear"), + "pairWithPin": MessageLookupByLibrary.simpleMessage("Parear com PIN"), + "pairingComplete": + MessageLookupByLibrary.simpleMessage("Pareamento concluído"), "passkey": MessageLookupByLibrary.simpleMessage("Chave de acesso"), "passkeyAuthTitle": MessageLookupByLibrary.simpleMessage( "Autenticação via Chave de acesso"), @@ -1217,7 +1240,7 @@ class MessageLookup extends MessageLookupByLibrary { "scanThisBarcodeWithnyourAuthenticatorApp": MessageLookupByLibrary.simpleMessage( "Escaneie este código de barras com\nseu aplicativo autenticador"), - "search": MessageLookupByLibrary.simpleMessage("Search"), + "search": MessageLookupByLibrary.simpleMessage("Pesquisar"), "searchAlbumsEmptySection": MessageLookupByLibrary.simpleMessage("Álbuns"), "searchByAlbumNameHint": @@ -1380,6 +1403,10 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("✨ Bem-sucedido"), "startBackup": MessageLookupByLibrary.simpleMessage("Iniciar backup"), "status": MessageLookupByLibrary.simpleMessage("Estado"), + "stopCastingBody": MessageLookupByLibrary.simpleMessage( + "Você quer parar a transmissão?"), + "stopCastingTitle": + MessageLookupByLibrary.simpleMessage("Parar transmissão"), "storage": MessageLookupByLibrary.simpleMessage("Armazenamento"), "storageBreakupFamily": MessageLookupByLibrary.simpleMessage("Família"), "storageBreakupYou": MessageLookupByLibrary.simpleMessage("Você"), @@ -1425,7 +1452,7 @@ class MessageLookup extends MessageLookupByLibrary { "thankYouForSubscribing": MessageLookupByLibrary.simpleMessage("Obrigado por assinar!"), "theDownloadCouldNotBeCompleted": MessageLookupByLibrary.simpleMessage( - "Não foi 
possível concluir a transferência"), + "Não foi possível concluir o download"), "theRecoveryKeyYouEnteredIsIncorrect": MessageLookupByLibrary.simpleMessage( "A chave de recuperação inserida está incorreta"), @@ -1525,7 +1552,7 @@ class MessageLookup extends MessageLookupByLibrary { "verificationId": MessageLookupByLibrary.simpleMessage("ID de Verificação"), "verify": MessageLookupByLibrary.simpleMessage("Verificar"), - "verifyEmail": MessageLookupByLibrary.simpleMessage("Verificar email"), + "verifyEmail": MessageLookupByLibrary.simpleMessage("Verificar e-mail"), "verifyEmailID": m65, "verifyIDLabel": MessageLookupByLibrary.simpleMessage("Verificar"), "verifyPasskey": diff --git a/mobile/lib/generated/intl/messages_zh.dart b/mobile/lib/generated/intl/messages_zh.dart index 7f4c0accf8..80cc135697 100644 --- a/mobile/lib/generated/intl/messages_zh.dart +++ b/mobile/lib/generated/intl/messages_zh.dart @@ -320,6 +320,13 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("身份验证失败,请重试"), "authenticationSuccessful": MessageLookupByLibrary.simpleMessage("验证成功"), + "autoCastDialogBody": + MessageLookupByLibrary.simpleMessage("您将在此处看到可用的 Cast 设备。"), + "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage( + "请确保已在“设置”中为 Ente Photos 应用打开本地网络权限。"), + "autoPair": MessageLookupByLibrary.simpleMessage("自动配对"), + "autoPairDesc": + MessageLookupByLibrary.simpleMessage("自动配对仅适用于支持 Chromecast 的设备。"), "available": MessageLookupByLibrary.simpleMessage("可用"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("已备份的文件夹"), "backup": MessageLookupByLibrary.simpleMessage("备份"), @@ -344,6 +351,9 @@ class MessageLookup extends MessageLookupByLibrary { "cannotAddMorePhotosAfterBecomingViewer": m9, "cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage("无法删除共享文件"), + "castIPMismatchBody": + MessageLookupByLibrary.simpleMessage("请确保您的设备与电视处于同一网络。"), + "castIPMismatchTitle": MessageLookupByLibrary.simpleMessage("投放相册失败"), 
"castInstruction": MessageLookupByLibrary.simpleMessage( "在您要配对的设备上访问 cast.ente.io。\n输入下面的代码即可在电视上播放相册。"), "centerPoint": MessageLookupByLibrary.simpleMessage("中心点"), @@ -400,6 +410,7 @@ class MessageLookup extends MessageLookupByLibrary { "confirmRecoveryKey": MessageLookupByLibrary.simpleMessage("确认恢复密钥"), "confirmYourRecoveryKey": MessageLookupByLibrary.simpleMessage("确认您的恢复密钥"), + "connectToDevice": MessageLookupByLibrary.simpleMessage("连接到设备"), "contactFamilyAdmin": m12, "contactSupport": MessageLookupByLibrary.simpleMessage("联系支持"), "contactToManageSubscription": m13, @@ -610,6 +621,8 @@ class MessageLookup extends MessageLookupByLibrary { "filesBackedUpFromDevice": m22, "filesBackedUpInAlbum": m23, "filesDeleted": MessageLookupByLibrary.simpleMessage("文件已删除"), + "filesSavedToGallery": + MessageLookupByLibrary.simpleMessage("多个文件已保存到相册"), "flip": MessageLookupByLibrary.simpleMessage("上下翻转"), "forYourMemories": MessageLookupByLibrary.simpleMessage("为您的回忆"), "forgotPassword": MessageLookupByLibrary.simpleMessage("忘记密码"), @@ -765,6 +778,8 @@ class MessageLookup extends MessageLookupByLibrary { "manageLink": MessageLookupByLibrary.simpleMessage("管理链接"), "manageParticipants": MessageLookupByLibrary.simpleMessage("管理"), "manageSubscription": MessageLookupByLibrary.simpleMessage("管理订阅"), + "manualPairDesc": MessageLookupByLibrary.simpleMessage( + "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。"), "map": MessageLookupByLibrary.simpleMessage("地图"), "maps": MessageLookupByLibrary.simpleMessage("地图"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), @@ -797,6 +812,7 @@ class MessageLookup extends MessageLookupByLibrary { "no": MessageLookupByLibrary.simpleMessage("否"), "noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage("您尚未共享任何相册"), + "noDeviceFound": MessageLookupByLibrary.simpleMessage("未发现设备"), "noDeviceLimit": MessageLookupByLibrary.simpleMessage("无"), "noDeviceThatCanBeDeleted": MessageLookupByLibrary.simpleMessage("您在此设备上没有可被删除的文件"), @@ -837,6 +853,8 @@ 
class MessageLookup extends MessageLookupByLibrary { "orPickAnExistingOne": MessageLookupByLibrary.simpleMessage("或者选择一个现有的"), "pair": MessageLookupByLibrary.simpleMessage("配对"), + "pairWithPin": MessageLookupByLibrary.simpleMessage("用 PIN 配对"), + "pairingComplete": MessageLookupByLibrary.simpleMessage("配对完成"), "passkey": MessageLookupByLibrary.simpleMessage("通行密钥"), "passkeyAuthTitle": MessageLookupByLibrary.simpleMessage("通行密钥认证"), "password": MessageLookupByLibrary.simpleMessage("密码"), @@ -988,7 +1006,7 @@ class MessageLookup extends MessageLookupByLibrary { "scanCode": MessageLookupByLibrary.simpleMessage("扫描二维码/条码"), "scanThisBarcodeWithnyourAuthenticatorApp": MessageLookupByLibrary.simpleMessage("用您的身份验证器应用\n扫描此条码"), - "search": MessageLookupByLibrary.simpleMessage("Search"), + "search": MessageLookupByLibrary.simpleMessage("搜索"), "searchAlbumsEmptySection": MessageLookupByLibrary.simpleMessage("相册"), "searchByAlbumNameHint": MessageLookupByLibrary.simpleMessage("相册名称"), "searchByExamples": MessageLookupByLibrary.simpleMessage( @@ -1115,6 +1133,8 @@ class MessageLookup extends MessageLookupByLibrary { "sparkleSuccess": MessageLookupByLibrary.simpleMessage("✨ 成功"), "startBackup": MessageLookupByLibrary.simpleMessage("开始备份"), "status": MessageLookupByLibrary.simpleMessage("状态"), + "stopCastingBody": MessageLookupByLibrary.simpleMessage("您想停止投放吗?"), + "stopCastingTitle": MessageLookupByLibrary.simpleMessage("停止投放"), "storage": MessageLookupByLibrary.simpleMessage("存储空间"), "storageBreakupFamily": MessageLookupByLibrary.simpleMessage("家庭"), "storageBreakupYou": MessageLookupByLibrary.simpleMessage("您"), diff --git a/mobile/lib/generated/l10n.dart b/mobile/lib/generated/l10n.dart index 3fa9c2209a..4c7679154f 100644 --- a/mobile/lib/generated/l10n.dart +++ b/mobile/lib/generated/l10n.dart @@ -4034,10 +4034,10 @@ class S { ); } - /// `Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.` + /// `Free trial valid till {endDate}.\nYou can purchase a 
paid plan afterwards.` String playStoreFreeTrialValidTill(Object endDate) { return Intl.message( - 'Free trial valid till $endDate.\nYou can choose a paid plan afterwards.', + 'Free trial valid till $endDate.\nYou can purchase a paid plan afterwards.', name: 'playStoreFreeTrialValidTill', desc: '', args: [endDate], @@ -5945,6 +5945,16 @@ class S { ); } + /// `Files saved to gallery` + String get filesSavedToGallery { + return Intl.message( + 'Files saved to gallery', + name: 'filesSavedToGallery', + desc: '', + args: [], + ); + } + /// `Failed to save file to gallery` String get fileFailedToSaveToGallery { return Intl.message( @@ -8378,6 +8388,26 @@ class S { ); } + /// `Auto pair` + String get autoPair { + return Intl.message( + 'Auto pair', + name: 'autoPair', + desc: '', + args: [], + ); + } + + /// `Pair with PIN` + String get pairWithPin { + return Intl.message( + 'Pair with PIN', + name: 'pairWithPin', + desc: '', + args: [], + ); + } + /// `Device not found` String get deviceNotFound { return Intl.message( @@ -8563,6 +8593,116 @@ class S { args: [], ); } + + /// `Auto pair works only with devices that support Chromecast.` + String get autoPairDesc { + return Intl.message( + 'Auto pair works only with devices that support Chromecast.', + name: 'autoPairDesc', + desc: '', + args: [], + ); + } + + /// `Pair with PIN works with any screen you wish to view your album on.` + String get manualPairDesc { + return Intl.message( + 'Pair with PIN works with any screen you wish to view your album on.', + name: 'manualPairDesc', + desc: '', + args: [], + ); + } + + /// `Connect to device` + String get connectToDevice { + return Intl.message( + 'Connect to device', + name: 'connectToDevice', + desc: '', + args: [], + ); + } + + /// `You'll see available Cast devices here.` + String get autoCastDialogBody { + return Intl.message( + 'You\'ll see available Cast devices here.', + name: 'autoCastDialogBody', + desc: '', + args: [], + ); + } + + /// `Make sure Local Network 
permissions are turned on for the Ente Photos app, in Settings.` + String get autoCastiOSPermission { + return Intl.message( + 'Make sure Local Network permissions are turned on for the Ente Photos app, in Settings.', + name: 'autoCastiOSPermission', + desc: '', + args: [], + ); + } + + /// `No device found` + String get noDeviceFound { + return Intl.message( + 'No device found', + name: 'noDeviceFound', + desc: '', + args: [], + ); + } + + /// `Stop casting` + String get stopCastingTitle { + return Intl.message( + 'Stop casting', + name: 'stopCastingTitle', + desc: '', + args: [], + ); + } + + /// `Do you want to stop casting?` + String get stopCastingBody { + return Intl.message( + 'Do you want to stop casting?', + name: 'stopCastingBody', + desc: '', + args: [], + ); + } + + /// `Failed to cast album` + String get castIPMismatchTitle { + return Intl.message( + 'Failed to cast album', + name: 'castIPMismatchTitle', + desc: '', + args: [], + ); + } + + /// `Please make sure you are on the same network as the TV.` + String get castIPMismatchBody { + return Intl.message( + 'Please make sure you are on the same network as the TV.', + name: 'castIPMismatchBody', + desc: '', + args: [], + ); + } + + /// `Pairing complete` + String get pairingComplete { + return Intl.message( + 'Pairing complete', + name: 'pairingComplete', + desc: '', + args: [], + ); + } } class AppLocalizationDelegate extends LocalizationsDelegate { diff --git a/mobile/lib/l10n/intl_en.arb b/mobile/lib/l10n/intl_en.arb index 7115c69508..6bc8b59269 100644 --- a/mobile/lib/l10n/intl_en.arb +++ b/mobile/lib/l10n/intl_en.arb @@ -569,7 +569,7 @@ "freeTrialValidTill": "Free trial valid till {endDate}", "validTill": "Valid till {endDate}", "addOnValidTill": "Your {storageAmount} add-on is valid till {endDate}", - "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.", + "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can purchase a 
paid plan afterwards.", "subWillBeCancelledOn": "Your subscription will be cancelled on {endDate}", "subscription": "Subscription", "paymentDetails": "Payment details", @@ -835,6 +835,7 @@ "close": "Close", "setAs": "Set as", "fileSavedToGallery": "File saved to gallery", + "filesSavedToGallery": "Files saved to gallery", "fileFailedToSaveToGallery": "Failed to save file to gallery", "download": "Download", "pressAndHoldToPlayVideo": "Press and hold to play video", @@ -1195,6 +1196,8 @@ "verifyPasskey": "Verify passkey", "playOnTv": "Play album on TV", "pair": "Pair", + "autoPair": "Auto pair", + "pairWithPin": "Pair with PIN", "deviceNotFound": "Device not found", "castInstruction": "Visit cast.ente.io on the device you want to pair.\n\nEnter the code below to play the album on your TV.", "deviceCodeHint": "Enter the code", @@ -1212,5 +1215,16 @@ "endpointUpdatedMessage": "Endpoint updated successfully", "customEndpoint": "Connected to {endpoint}", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "autoPairDesc": "Auto pair works only with devices that support Chromecast.", + "manualPairDesc": "Pair with PIN works with any screen you wish to view your album on.", + "connectToDevice": "Connect to device", + "autoCastDialogBody": "You'll see available Cast devices here.", + "autoCastiOSPermission": "Make sure Local Network permissions are turned on for the Ente Photos app, in Settings.", + "noDeviceFound": "No device found", + "stopCastingTitle": "Stop casting", + "stopCastingBody": "Do you want to stop casting?", + "castIPMismatchTitle": "Failed to cast album", + "castIPMismatchBody": "Please make sure you are on the same network as the TV.", + "pairingComplete": "Pairing complete" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_nl.arb b/mobile/lib/l10n/intl_nl.arb index 98302c20c6..a8f854a430 100644 --- a/mobile/lib/l10n/intl_nl.arb +++ b/mobile/lib/l10n/intl_nl.arb @@ -23,7 +23,7 @@ "sendEmail": 
"E-mail versturen", "deleteRequestSLAText": "Je verzoek wordt binnen 72 uur verwerkt.", "deleteEmailRequest": "Stuur een e-mail naar account-deletion@ente.io vanaf het door jou geregistreerde e-mailadres.", - "entePhotosPerm": "ente heeft toestemming nodig om je foto's te bewaren", + "entePhotosPerm": "Ente heeft toestemming nodig om je foto's te bewaren", "ok": "Oké", "createAccount": "Account aanmaken", "createNewAccount": "Nieuw account aanmaken", @@ -225,17 +225,17 @@ }, "description": "Number of participants in an album, including the album owner." }, - "collabLinkSectionDescription": "Maak een link waarmee mensen foto's in jouw gedeelde album kunnen toevoegen en bekijken zonder dat ze daarvoor een ente app of account nodig hebben. Handig voor het verzamelen van foto's van evenementen.", + "collabLinkSectionDescription": "Maak een link waarmee mensen foto's in jouw gedeelde album kunnen toevoegen en bekijken zonder dat ze daarvoor een Ente app of account nodig hebben. Handig voor het verzamelen van foto's van evenementen.", "collectPhotos": "Foto's verzamelen", "collaborativeLink": "Gezamenlijke link", - "shareWithNonenteUsers": "Delen met niet-ente gebruikers", + "shareWithNonenteUsers": "Delen met niet-Ente gebruikers", "createPublicLink": "Maak publieke link", "sendLink": "Stuur link", "copyLink": "Kopieer link", "linkHasExpired": "Link is vervallen", "publicLinkEnabled": "Publieke link ingeschakeld", "shareALink": "Deel een link", - "sharedAlbumSectionDescription": "Maak gedeelde en collaboratieve albums met andere ente gebruikers, inclusief gebruikers met gratis abonnementen.", + "sharedAlbumSectionDescription": "Maak gedeelde en collaboratieve albums met andere Ente gebruikers, inclusief gebruikers met gratis abonnementen.", "shareWithPeopleSectionTitle": "{numberOfPeople, plural, =0 {Deel met specifieke mensen} =1 {Gedeeld met 1 persoon} other {Gedeeld met {numberOfPeople} mensen}}", "@shareWithPeopleSectionTitle": { "placeholders": { @@ -259,12 +259,12 
@@ }, "verificationId": "Verificatie ID", "verifyEmailID": "Verifieer {email}", - "emailNoEnteAccount": "{email} heeft geen ente account.\n\nStuur ze een uitnodiging om foto's te delen.", + "emailNoEnteAccount": "{email} heeft geen Ente account.\n\nStuur ze een uitnodiging om foto's te delen.", "shareMyVerificationID": "Hier is mijn verificatie-ID: {verificationID} voor ente.io.", "shareTextConfirmOthersVerificationID": "Hey, kunt u bevestigen dat dit uw ente.io verificatie-ID is: {verificationID}", "somethingWentWrong": "Er ging iets mis", "sendInvite": "Stuur een uitnodiging", - "shareTextRecommendUsingEnte": "Download ente zodat we gemakkelijk foto's en video's van originele kwaliteit kunnen delen\n\nhttps://ente.io", + "shareTextRecommendUsingEnte": "Download Ente zodat we gemakkelijk foto's en video's in originele kwaliteit kunnen delen\n\nhttps://ente.io", "done": "Voltooid", "applyCodeTitle": "Code toepassen", "enterCodeDescription": "Voer de code van de vriend in om gratis opslag voor jullie beiden te claimen", @@ -281,7 +281,7 @@ "claimMore": "Claim meer!", "theyAlsoGetXGb": "Zij krijgen ook {storageAmountInGB} GB", "freeStorageOnReferralSuccess": "{storageAmountInGB} GB telkens als iemand zich aanmeldt voor een betaald abonnement en je code toepast", - "shareTextReferralCode": "ente verwijzingscode: {referralCode} \n\nPas het toe bij Instellingen → Algemeen → Verwijzingen om {referralStorageInGB} GB gratis te krijgen nadat je je hebt aangemeld voor een betaald abonnement\n\nhttps://ente.io", + "shareTextReferralCode": "Ente verwijzingscode: {referralCode} \n\nPas het toe bij Instellingen → Algemeen → Verwijzingen om {referralStorageInGB} GB gratis te krijgen nadat je je hebt aangemeld voor een betaald abonnement\n\nhttps://ente.io", "claimFreeStorage": "Claim gratis opslag", "inviteYourFriends": "Vrienden uitnodigen", "failedToFetchReferralDetails": "Kan geen verwijzingsgegevens ophalen. 
Probeer het later nog eens.", @@ -304,6 +304,7 @@ } }, "faq": "Veelgestelde vragen", + "help": "Hulp", "oopsSomethingWentWrong": "Oeps, er is iets misgegaan", "peopleUsingYourCode": "Mensen die jouw code gebruiken", "eligible": "gerechtigd", @@ -333,7 +334,7 @@ "removeParticipantBody": "{userEmail} zal worden verwijderd uit dit gedeelde album\n\nAlle door hen toegevoegde foto's worden ook uit het album verwijderd", "keepPhotos": "Foto's behouden", "deletePhotos": "Foto's verwijderen", - "inviteToEnte": "Uitnodigen voor ente", + "inviteToEnte": "Uitnodigen voor Ente", "removePublicLink": "Verwijder publieke link", "disableLinkMessage": "Dit verwijdert de openbare link voor toegang tot \"{albumName}\".", "sharing": "Delen...", @@ -349,10 +350,10 @@ "videoSmallCase": "video", "photoSmallCase": "foto", "singleFileDeleteHighlight": "Het wordt uit alle albums verwijderd.", - "singleFileInBothLocalAndRemote": "Deze {fileType} staat zowel in ente als op jouw apparaat.", - "singleFileInRemoteOnly": "Deze {fileType} zal worden verwijderd uit ente.", + "singleFileInBothLocalAndRemote": "Deze {fileType} staat zowel in Ente als op jouw apparaat.", + "singleFileInRemoteOnly": "Deze {fileType} zal worden verwijderd uit Ente.", "singleFileDeleteFromDevice": "Deze {fileType} zal worden verwijderd van jouw apparaat.", - "deleteFromEnte": "Verwijder van ente", + "deleteFromEnte": "Verwijder van Ente", "yesDelete": "Ja, verwijderen", "movedToTrash": "Naar prullenbak verplaatst", "deleteFromDevice": "Verwijder van apparaat", @@ -444,7 +445,7 @@ "backupOverMobileData": "Back-up maken via mobiele data", "backupVideos": "Back-up video's", "disableAutoLock": "Automatisch vergrendelen uitschakelen", - "deviceLockExplanation": "Schakel de schermvergrendeling van het apparaat uit wanneer ente op de voorgrond is en er een back-up aan de gang is. 
Dit is normaal gesproken niet nodig, maar kan grote uploads en initiële imports van grote mappen sneller laten verlopen.", + "deviceLockExplanation": "Schakel de schermvergrendeling van het apparaat uit wanneer Ente op de voorgrond is en er een back-up aan de gang is. Dit is normaal gesproken niet nodig, maar kan grote uploads en initiële imports van grote mappen sneller laten verlopen.", "about": "Over", "weAreOpenSource": "We zijn open source!", "privacy": "Privacy", @@ -464,7 +465,7 @@ "authToInitiateAccountDeletion": "Gelieve te verifiëren om het verwijderen van je account te starten", "areYouSureYouWantToLogout": "Weet je zeker dat je wilt uitloggen?", "yesLogout": "Ja, log uit", - "aNewVersionOfEnteIsAvailable": "Er is een nieuwe versie van ente beschikbaar.", + "aNewVersionOfEnteIsAvailable": "Er is een nieuwe versie van Ente beschikbaar.", "update": "Update", "installManually": "Installeer handmatig", "criticalUpdateAvailable": "Belangrijke update beschikbaar", @@ -553,11 +554,11 @@ "systemTheme": "Systeem", "freeTrial": "Gratis proefversie", "selectYourPlan": "Kies uw abonnement", - "enteSubscriptionPitch": "ente bewaart uw herinneringen, zodat ze altijd beschikbaar voor u zijn, zelfs als u uw apparaat verliest.", + "enteSubscriptionPitch": "Ente bewaart uw herinneringen, zodat ze altijd beschikbaar voor u zijn, zelfs als u uw apparaat verliest.", "enteSubscriptionShareWithFamily": "Je familie kan ook aan je abonnement worden toegevoegd.", "currentUsageIs": "Huidig gebruik is ", "@currentUsageIs": { - "description": "This text is followed by storage usaged", + "description": "This text is followed by storage usage", "examples": { "0": "Current usage is 1.2 GB" }, @@ -619,7 +620,7 @@ "appleId": "Apple ID", "playstoreSubscription": "PlayStore abonnement", "appstoreSubscription": "PlayStore abonnement", - "subAlreadyLinkedErrMessage": "Uw {id} is al aan een ander ente account gekoppeld.\nAls u uw {id} wilt gebruiken met dit account, neem dan contact op met 
onze klantenservice", + "subAlreadyLinkedErrMessage": "Jouw {id} is al aan een ander Ente account gekoppeld.\nAls je jouw {id} wilt gebruiken met dit account, neem dan contact op met onze klantenservice", "visitWebToManage": "Bezoek alstublieft web.ente.io om uw abonnement te beheren", "couldNotUpdateSubscription": "Kon abonnement niet wijzigen", "pleaseContactSupportAndWeWillBeHappyToHelp": "Neem alstublieft contact op met support@ente.io en we helpen u graag!", @@ -640,7 +641,7 @@ "thankYou": "Bedankt", "failedToVerifyPaymentStatus": "Betalingsstatus verifiëren mislukt", "pleaseWaitForSometimeBeforeRetrying": "Gelieve even te wachten voordat u opnieuw probeert", - "paymentFailedWithReason": "Helaas is uw betaling mislukt vanwege {reason}", + "paymentFailedMessage": "Helaas is je betaling mislukt. Neem contact op met support zodat we je kunnen helpen!", "youAreOnAFamilyPlan": "U bent onderdeel van een familie abonnement!", "contactFamilyAdmin": "Neem contact op met {familyAdminEmail} om uw abonnement te beheren", "leaveFamily": "Familie abonnement verlaten", @@ -664,7 +665,7 @@ "everywhere": "overal", "androidIosWebDesktop": "Android, iOS, Web, Desktop", "mobileWebDesktop": "Mobiel, Web, Desktop", - "newToEnte": "Nieuw bij ente", + "newToEnte": "Nieuw bij Ente", "pleaseLoginAgain": "Log opnieuw in", "devAccountChanged": "Het ontwikkelaarsaccount dat we gebruiken om te publiceren in de App Store is veranderd. 
Daarom moet je opnieuw inloggen.\n\nOnze excuses voor het ongemak, helaas was dit onvermijdelijk.", "yourSubscriptionHasExpired": "Uw abonnement is verlopen", @@ -677,12 +678,12 @@ }, "backupFailed": "Back-up mislukt", "couldNotBackUpTryLater": "We konden uw gegevens niet back-uppen.\nWe zullen het later opnieuw proberen.", - "enteCanEncryptAndPreserveFilesOnlyIfYouGrant": "ente kan bestanden alleen versleutelen en bewaren als u toegang tot ze geeft", + "enteCanEncryptAndPreserveFilesOnlyIfYouGrant": "Ente kan bestanden alleen versleutelen en bewaren als u toegang tot ze geeft", "pleaseGrantPermissions": "Geef alstublieft toestemming", "grantPermission": "Toestemming verlenen", "privateSharing": "Privé delen", "shareOnlyWithThePeopleYouWant": "Deel alleen met de mensen die u wilt", - "usePublicLinksForPeopleNotOnEnte": "Gebruik publieke links voor mensen die niet op ente zitten", + "usePublicLinksForPeopleNotOnEnte": "Gebruik publieke links voor mensen die geen Ente account hebben", "allowPeopleToAddPhotos": "Mensen toestaan foto's toe te voegen", "shareAnAlbumNow": "Deel nu een album", "collectEventPhotos": "Foto's van gebeurtenissen verzamelen", @@ -694,7 +695,7 @@ }, "onDevice": "Op het apparaat", "@onEnte": { - "description": "The text displayed above albums backed up to ente", + "description": "The text displayed above albums backed up to Ente", "type": "text" }, "onEnte": "Op ente", @@ -740,7 +741,7 @@ "saveCollage": "Sla collage op", "collageSaved": "Collage opgeslagen in gallerij", "collageLayout": "Layout", - "addToEnte": "Toevoegen aan ente", + "addToEnte": "Toevoegen aan Ente", "addToAlbum": "Toevoegen aan album", "delete": "Verwijderen", "hide": "Verbergen", @@ -805,9 +806,9 @@ "photosAddedByYouWillBeRemovedFromTheAlbum": "Foto's toegevoegd door u zullen worden verwijderd uit het album", "youveNoFilesInThisAlbumThatCanBeDeleted": "Je hebt geen bestanden in dit album die verwijderd kunnen worden", "youDontHaveAnyArchivedItems": "U heeft geen 
gearchiveerde bestanden.", - "ignoredFolderUploadReason": "Sommige bestanden in dit album worden genegeerd voor de upload omdat ze eerder van ente zijn verwijderd.", + "ignoredFolderUploadReason": "Sommige bestanden in dit album worden genegeerd voor uploaden omdat ze eerder van Ente zijn verwijderd.", "resetIgnoredFiles": "Reset genegeerde bestanden", - "deviceFilesAutoUploading": "Bestanden toegevoegd aan dit album van dit apparaat zullen automatisch geüpload worden naar ente.", + "deviceFilesAutoUploading": "Bestanden toegevoegd aan dit album van dit apparaat zullen automatisch geüpload worden naar Ente.", "turnOnBackupForAutoUpload": "Schakel back-up in om bestanden die toegevoegd zijn aan deze map op dit apparaat automatisch te uploaden.", "noHiddenPhotosOrVideos": "Geen verborgen foto's of video's", "toHideAPhotoOrVideo": "Om een foto of video te verbergen", @@ -834,6 +835,7 @@ "close": "Sluiten", "setAs": "Instellen als", "fileSavedToGallery": "Bestand opgeslagen in galerij", + "filesSavedToGallery": "Bestand opgeslagen in galerij", "fileFailedToSaveToGallery": "Opslaan van bestand naar galerij mislukt", "download": "Downloaden", "pressAndHoldToPlayVideo": "Ingedrukt houden om video af te spelen", @@ -885,7 +887,7 @@ "@freeUpSpaceSaving": { "description": "Text to tell user how much space they can free up by deleting items from the device" }, - "freeUpAccessPostDelete": "U heeft nog steeds toegang tot {count, plural, one {het} other {ze}} op ente zolang u een actief abonnement heeft", + "freeUpAccessPostDelete": "Je hebt nog steeds toegang tot {count, plural, one {het} other {ze}} op Ente zolang je een actief abonnement hebt", "@freeUpAccessPostDelete": { "placeholders": { "count": { @@ -936,7 +938,7 @@ "renameFile": "Bestandsnaam wijzigen", "enterFileName": "Geef bestandsnaam op", "filesDeleted": "Bestanden verwijderd", - "selectedFilesAreNotOnEnte": "Geselecteerde bestanden staan niet op ente", + "selectedFilesAreNotOnEnte": "Geselecteerde bestanden staan 
niet op Ente", "thisActionCannotBeUndone": "Deze actie kan niet ongedaan gemaakt worden", "emptyTrash": "Prullenbak leegmaken?", "permDeleteWarning": "Alle bestanden in de prullenbak zullen permanent worden verwijderd\n\nDeze actie kan niet ongedaan worden gemaakt", @@ -945,7 +947,7 @@ "permanentlyDeleteFromDevice": "Permanent verwijderen van apparaat?", "someOfTheFilesYouAreTryingToDeleteAre": "Sommige bestanden die u probeert te verwijderen zijn alleen beschikbaar op uw apparaat en kunnen niet hersteld worden als deze verwijderd worden", "theyWillBeDeletedFromAllAlbums": "Ze zullen uit alle albums worden verwijderd.", - "someItemsAreInBothEnteAndYourDevice": "Sommige bestanden bevinden zich in zowel ente als op uw apparaat.", + "someItemsAreInBothEnteAndYourDevice": "Sommige bestanden bevinden zich zowel in Ente als op jouw apparaat.", "selectedItemsWillBeDeletedFromAllAlbumsAndMoved": "Geselecteerde bestanden worden verwijderd uit alle albums en verplaatst naar de prullenbak.", "theseItemsWillBeDeletedFromYourDevice": "Deze bestanden zullen worden verwijderd van uw apparaat.", "itLooksLikeSomethingWentWrongPleaseRetryAfterSome": "Het lijkt erop dat er iets fout is gegaan. Probeer het later opnieuw. 
Als de fout zich blijft voordoen, neem dan contact op met ons supportteam.", @@ -1051,7 +1053,7 @@ }, "setRadius": "Radius instellen", "familyPlanPortalTitle": "Familie", - "familyPlanOverview": "Voeg 5 gezinsleden toe aan uw bestaande abonnement zonder extra te betalen.\n\nElk lid krijgt zijn eigen privé ruimte en kan elkaars bestanden niet zien, tenzij ze zijn gedeeld.\n\nFamilieplannen zijn beschikbaar voor klanten die een betaald ente abonnement hebben.\n\nAbonneer u nu om aan de slag te gaan!", + "familyPlanOverview": "Voeg 5 gezinsleden toe aan je bestaande abonnement zonder extra te betalen.\n\nElk lid krijgt zijn eigen privé ruimte en kan elkaars bestanden niet zien tenzij ze zijn gedeeld.\n\nFamilieplannen zijn beschikbaar voor klanten die een betaald Ente abonnement hebben.\n\nAbonneer nu om aan de slag te gaan!", "androidBiometricHint": "Identiteit verifiëren", "@androidBiometricHint": { "description": "Hint message advising the user how to authenticate with biometrics. It is used on Android side. Maximum 60 characters." @@ -1129,7 +1131,7 @@ "noAlbumsSharedByYouYet": "Nog geen albums gedeeld door jou", "sharedWithYou": "Gedeeld met jou", "sharedByYou": "Gedeeld door jou", - "inviteYourFriendsToEnte": "Vrienden uitnodigen voor ente", + "inviteYourFriendsToEnte": "Vrienden uitnodigen voor Ente", "failedToDownloadVideo": "Downloaden van video mislukt", "hiding": "Verbergen...", "unhiding": "Zichtbaar maken...", @@ -1139,7 +1141,7 @@ "addToHiddenAlbum": "Toevoegen aan verborgen album", "moveToHiddenAlbum": "Verplaatsen naar verborgen album", "fileTypes": "Bestandstype", - "deleteConfirmDialogBody": "Dit account is gekoppeld aan andere ente apps, als je er gebruik van maakt.\\n\\nJe geüploade gegevens worden in alle ente apps gepland voor verwijdering, en je account wordt permanent verwijderd voor alle ente diensten.", + "deleteConfirmDialogBody": "Dit account is gekoppeld aan andere Ente apps, als je er gebruik van maakt. 
Je geüploade gegevens worden in alle Ente apps gepland voor verwijdering, en je account wordt permanent verwijderd voor alle Ente diensten.", "hearUsWhereTitle": "Hoe hoorde je over Ente? (optioneel)", "hearUsExplanation": "Wij gebruiken geen tracking. Het zou helpen als je ons vertelt waar je ons gevonden hebt!", "viewAddOnButton": "Add-ons bekijken", @@ -1187,17 +1189,42 @@ "changeLocationOfSelectedItems": "Locatie van geselecteerde items wijzigen?", "editsToLocationWillOnlyBeSeenWithinEnte": "Bewerkte locatie wordt alleen gezien binnen Ente", "cleanUncategorized": "Ongecategoriseerd opschonen", + "cleanUncategorizedDescription": "Verwijder alle bestanden van Ongecategoriseerd die aanwezig zijn in andere albums", + "waitingForVerification": "Wachten op verificatie...", + "passkey": "Passkey", + "passkeyAuthTitle": "Passkey verificatie", + "verifyPasskey": "Bevestig passkey", "playOnTv": "Album afspelen op TV", "pair": "Koppelen", + "autoPair": "Automatisch koppelen", + "pairWithPin": "Koppelen met PIN", "deviceNotFound": "Apparaat niet gevonden", "castInstruction": "Bezoek cast.ente.io op het apparaat dat u wilt koppelen.\n\nVoer de code hieronder in om het album op uw TV af te spelen.", "deviceCodeHint": "Voer de code in", - "joinDiscord": "Join Discord", - "locations": "Locations", - "descriptions": "Descriptions", - "addViewers": "{count, plural, zero {Add viewer} one {Add viewer} other {Add viewers}}", - "addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}", - "longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.", - "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "joinDiscord": "Join de Discord", + "locations": "Locaties", + "descriptions": "Beschrijvingen", + "addViewers": "{count, plural, one {Voeg kijker toe} other {Voeg kijkers toe}}", + "addCollaborators": "{count, plural, zero {Voeg samenwerker toe} one {Voeg 
samenwerker toe} other {Voeg samenwerkers toe}}", + "longPressAnEmailToVerifyEndToEndEncryption": "Druk lang op een e-mail om de versleuteling te verifiëren.", + "developerSettingsWarning": "Weet je zeker dat je de ontwikkelaarsinstellingen wilt wijzigen?", + "developerSettings": "Ontwikkelaarsinstellingen", + "serverEndpoint": "Server eindpunt", + "invalidEndpoint": "Ongeldig eindpunt", + "invalidEndpointMessage": "Sorry, het eindpunt dat je hebt ingevoerd is ongeldig. Voer een geldig eindpunt in en probeer het opnieuw.", + "endpointUpdatedMessage": "Eindpunt met succes bijgewerkt", + "customEndpoint": "Verbonden met {endpoint}", + "createCollaborativeLink": "Maak een gezamenlijke link", + "search": "Zoeken", + "autoPairDesc": "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen.", + "manualPairDesc": "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien.", + "connectToDevice": "Verbinding maken met apparaat", + "autoCastDialogBody": "Je zult de beschikbare Cast apparaten hier zien.", + "autoCastiOSPermission": "Zorg ervoor dat lokale netwerkrechten zijn ingeschakeld voor de Ente Photos app, in Instellingen.", + "noDeviceFound": "Geen apparaat gevonden", + "stopCastingTitle": "Casten stoppen", + "stopCastingBody": "Wil je stoppen met casten?", + "castIPMismatchTitle": "Album casten mislukt", + "castIPMismatchBody": "Zorg ervoor dat je op hetzelfde netwerk zit als de tv.", + "pairingComplete": "Koppeling voltooid" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_pt.arb b/mobile/lib/l10n/intl_pt.arb index 765ce3e1d3..08d932cdaa 100644 --- a/mobile/lib/l10n/intl_pt.arb +++ b/mobile/lib/l10n/intl_pt.arb @@ -47,7 +47,7 @@ "noRecoveryKey": "Nenhuma chave de recuperação?", "sorry": "Desculpe", "noRecoveryKeyNoDecryption": "Devido à natureza do nosso protocolo de criptografia de ponta a ponta, seus dados não podem ser descriptografados sem sua senha ou chave de recuperação", - "verifyEmail": "Verificar email", + 
"verifyEmail": "Verificar e-mail", "toResetVerifyEmail": "Para redefinir a sua senha, por favor verifique o seu email primeiro.", "checkInboxAndSpamFolder": "Verifique sua caixa de entrada (e ‘spam’) para concluir a verificação", "tapToEnterCode": "Toque para inserir código", @@ -156,7 +156,7 @@ "addANewEmail": "Adicionar um novo email", "orPickAnExistingOne": "Ou escolha um existente", "collaboratorsCanAddPhotosAndVideosToTheSharedAlbum": "Os colaboradores podem adicionar fotos e vídeos ao álbum compartilhado.", - "enterEmail": "Digite o email", + "enterEmail": "Insira o e-mail", "albumOwner": "Proprietário", "@albumOwner": { "description": "Role of the album owner" @@ -186,7 +186,7 @@ "passwordLock": "Bloqueio de senha", "disableDownloadWarningTitle": "Observe", "disableDownloadWarningBody": "Os espectadores ainda podem tirar screenshots ou salvar uma cópia de suas fotos usando ferramentas externas", - "allowDownloads": "Permitir transferências", + "allowDownloads": "Permitir downloads", "linkDeviceLimit": "Limite do dispositivo", "noDeviceLimit": "Nenhum", "@noDeviceLimit": { @@ -334,12 +334,12 @@ "removeParticipantBody": "{userEmail} será removido deste álbum compartilhado\n\nQuaisquer fotos adicionadas por eles também serão removidas do álbum", "keepPhotos": "Manter fotos", "deletePhotos": "Excluir fotos", - "inviteToEnte": "Convidar para o ente", + "inviteToEnte": "Convidar para o Ente", "removePublicLink": "Remover link público", "disableLinkMessage": "Isso removerá o link público para acessar \"{albumName}\".", "sharing": "Compartilhando...", "youCannotShareWithYourself": "Você não pode compartilhar consigo mesmo", - "archive": "Arquivado", + "archive": "Arquivar", "createAlbumActionHint": "Pressione e segure para selecionar fotos e clique em + para criar um álbum", "importing": "Importando....", "failedToLoadAlbums": "Falha ao carregar álbuns", @@ -353,7 +353,7 @@ "singleFileInBothLocalAndRemote": "Este {fileType} está tanto no Ente quanto no seu 
dispositivo.", "singleFileInRemoteOnly": "Este {fileType} será excluído do Ente.", "singleFileDeleteFromDevice": "Este {fileType} será excluído do seu dispositivo.", - "deleteFromEnte": "Excluir do ente", + "deleteFromEnte": "Excluir do Ente", "yesDelete": "Sim, excluir", "movedToTrash": "Movido para a lixeira", "deleteFromDevice": "Excluir do dispositivo", @@ -473,7 +473,7 @@ "ignoreUpdate": "Ignorar", "downloading": "Baixando...", "cannotDeleteSharedFiles": "Não é possível excluir arquivos compartilhados", - "theDownloadCouldNotBeCompleted": "Não foi possível concluir a transferência", + "theDownloadCouldNotBeCompleted": "Não foi possível concluir o download", "retry": "Tentar novamente", "backedUpFolders": "Backup de pastas concluído", "backup": "Backup", @@ -835,6 +835,7 @@ "close": "Fechar", "setAs": "Definir como", "fileSavedToGallery": "Vídeo salvo na galeria", + "filesSavedToGallery": "Arquivos salvos na galeria", "fileFailedToSaveToGallery": "Falha ao salvar o arquivo na galeria", "download": "Baixar", "pressAndHoldToPlayVideo": "Pressione e segure para reproduzir o vídeo", @@ -1195,6 +1196,8 @@ "verifyPasskey": "Verificar chave de acesso", "playOnTv": "Reproduzir álbum na TV", "pair": "Parear", + "autoPair": "Pareamento automático", + "pairWithPin": "Parear com PIN", "deviceNotFound": "Dispositivo não encontrado", "castInstruction": "Visite cast.ente.io no dispositivo que você deseja parear.\n\ndigite o código abaixo para reproduzir o álbum em sua TV.", "deviceCodeHint": "Insira o código", @@ -1212,5 +1215,16 @@ "endpointUpdatedMessage": "Endpoint atualizado com sucesso", "customEndpoint": "Conectado a {endpoint}", "createCollaborativeLink": "Criar link colaborativo", - "search": "Search" + "search": "Pesquisar", + "autoPairDesc": "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast.", + "manualPairDesc": "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado.", + "connectToDevice": "Conectar ao 
dispositivo", + "autoCastDialogBody": "Você verá dispositivos disponíveis para transmitir aqui.", + "autoCastiOSPermission": "Certifique-se de que as permissões de Rede local estão ativadas para o aplicativo de Fotos Ente, em Configurações.", + "noDeviceFound": "Nenhum dispositivo encontrado", + "stopCastingTitle": "Parar transmissão", + "stopCastingBody": "Você quer parar a transmissão?", + "castIPMismatchTitle": "Falha ao transmitir álbum", + "castIPMismatchBody": "Certifique-se de estar na mesma rede que a TV.", + "pairingComplete": "Pareamento concluído" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_zh.arb b/mobile/lib/l10n/intl_zh.arb index 4a991c7735..9a854a4f0f 100644 --- a/mobile/lib/l10n/intl_zh.arb +++ b/mobile/lib/l10n/intl_zh.arb @@ -835,6 +835,7 @@ "close": "关闭", "setAs": "设置为", "fileSavedToGallery": "文件已保存到相册", + "filesSavedToGallery": "多个文件已保存到相册", "fileFailedToSaveToGallery": "无法将文件保存到相册", "download": "下载", "pressAndHoldToPlayVideo": "按住以播放视频", @@ -1195,6 +1196,8 @@ "verifyPasskey": "验证通行密钥", "playOnTv": "在电视上播放相册", "pair": "配对", + "autoPair": "自动配对", + "pairWithPin": "用 PIN 配对", "deviceNotFound": "未发现设备", "castInstruction": "在您要配对的设备上访问 cast.ente.io。\n输入下面的代码即可在电视上播放相册。", "deviceCodeHint": "输入代码", @@ -1212,5 +1215,16 @@ "endpointUpdatedMessage": "端点更新成功", "customEndpoint": "已连接至 {endpoint}", "createCollaborativeLink": "创建协作链接", - "search": "Search" + "search": "搜索", + "autoPairDesc": "自动配对仅适用于支持 Chromecast 的设备。", + "manualPairDesc": "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。", + "connectToDevice": "连接到设备", + "autoCastDialogBody": "您将在此处看到可用的 Cast 设备。", + "autoCastiOSPermission": "请确保已在“设置”中为 Ente Photos 应用打开本地网络权限。", + "noDeviceFound": "未发现设备", + "stopCastingTitle": "停止投放", + "stopCastingBody": "您想停止投放吗?", + "castIPMismatchTitle": "投放相册失败", + "castIPMismatchBody": "请确保您的设备与电视处于同一网络。", + "pairingComplete": "配对完成" } \ No newline at end of file diff --git a/mobile/lib/main.dart b/mobile/lib/main.dart index 97338f55f9..52c9c715ae 100644 --- 
a/mobile/lib/main.dart +++ b/mobile/lib/main.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:io'; +import "dart:isolate"; import "package:adaptive_theme/adaptive_theme.dart"; import 'package:background_fetch/background_fetch.dart'; @@ -21,12 +22,12 @@ import 'package:photos/core/network/network.dart'; import 'package:photos/db/upload_locks_db.dart'; import 'package:photos/ente_theme_data.dart'; import "package:photos/l10n/l10n.dart"; +import "package:photos/service_locator.dart"; import 'package:photos/services/app_lifecycle_service.dart'; import 'package:photos/services/billing_service.dart'; import 'package:photos/services/collections_service.dart'; import "package:photos/services/entity_service.dart"; import 'package:photos/services/favorites_service.dart'; -import 'package:photos/services/feature_flag_service.dart'; import 'package:photos/services/home_widget_service.dart'; import 'package:photos/services/local_file_update_service.dart'; import 'package:photos/services/local_sync_service.dart'; @@ -178,6 +179,7 @@ Future _init(bool isBackground, {String via = ''}) async { _isProcessRunning = true; _logger.info("Initializing... 
inBG =$isBackground via: $via"); final SharedPreferences preferences = await SharedPreferences.getInstance(); + await _logFGHeartBeatInfo(); unawaited(_scheduleHeartBeat(preferences, isBackground)); AppLifecycleService.instance.init(preferences); @@ -191,6 +193,7 @@ Future _init(bool isBackground, {String via = ''}) async { CryptoUtil.init(); await Configuration.instance.init(); await NetworkClient.instance.init(); + ServiceLocator.instance.init(preferences, NetworkClient.instance.enteDio); await UserService.instance.init(); await EntityService.instance.init(); LocationService.instance.init(preferences); @@ -224,7 +227,7 @@ Future _init(bool isBackground, {String via = ''}) async { ); }); } - unawaited(FeatureFlagService.instance.init()); + unawaited(SemanticSearchService.instance.init()); MachineLearningController.instance.init(); // Can not including existing tf/ml binaries as they are not being built @@ -328,10 +331,15 @@ Future _killBGTask([String? taskId]) async { DateTime.now().microsecondsSinceEpoch, ); final prefs = await SharedPreferences.getInstance(); + await prefs.remove(kLastBGTaskHeartBeatTime); if (taskId != null) { BackgroundFetch.finish(taskId); } + + ///Band aid for background process not getting killed. Should migrate to using + ///workmanager instead of background_fetch. + Isolate.current.kill(); } Future _firebaseMessagingBackgroundHandler(RemoteMessage message) async { @@ -371,7 +379,7 @@ Future _logFGHeartBeatInfo() async { final String lastRun = lastFGTaskHeartBeatTime == 0 ? 'never' : DateTime.fromMicrosecondsSinceEpoch(lastFGTaskHeartBeatTime).toString(); - _logger.info('isAlreaduunningFG: $isRunningInFG, last Beat: $lastRun'); + _logger.info('isAlreadyRunningFG: $isRunningInFG, last Beat: $lastRun'); } void _scheduleSuicide(Duration duration, [String? 
taskID]) { diff --git a/mobile/lib/models/embedding.dart b/mobile/lib/models/embedding.dart index 1f78687b91..c8f742caa9 100644 --- a/mobile/lib/models/embedding.dart +++ b/mobile/lib/models/embedding.dart @@ -1,17 +1,7 @@ import "dart:convert"; -import "package:isar/isar.dart"; - -part 'embedding.g.dart'; - -@collection class Embedding { - static const index = 'unique_file_model_embedding'; - - Id id = Isar.autoIncrement; final int fileID; - @enumerated - @Index(name: index, composite: [CompositeIndex('fileID')], unique: true, replace: true) final Model model; final List embedding; int? updationTime; diff --git a/mobile/lib/models/embedding.g.dart b/mobile/lib/models/embedding.g.dart deleted file mode 100644 index ca041a0d0a..0000000000 --- a/mobile/lib/models/embedding.g.dart +++ /dev/null @@ -1,1059 +0,0 @@ -// GENERATED CODE - DO NOT MODIFY BY HAND - -part of 'embedding.dart'; - -// ************************************************************************** -// IsarCollectionGenerator -// ************************************************************************** - -// coverage:ignore-file -// ignore_for_file: duplicate_ignore, non_constant_identifier_names, constant_identifier_names, invalid_use_of_protected_member, unnecessary_cast, prefer_const_constructors, lines_longer_than_80_chars, require_trailing_commas, inference_failure_on_function_invocation, unnecessary_parenthesis, unnecessary_raw_strings, unnecessary_null_checks, join_return_with_assignment, prefer_final_locals, avoid_js_rounded_ints, avoid_positional_boolean_parameters, always_specify_types - -extension GetEmbeddingCollection on Isar { - IsarCollection get embeddings => this.collection(); -} - -const EmbeddingSchema = CollectionSchema( - name: r'Embedding', - id: -8064100183150254587, - properties: { - r'embedding': PropertySchema( - id: 0, - name: r'embedding', - type: IsarType.doubleList, - ), - r'fileID': PropertySchema( - id: 1, - name: r'fileID', - type: IsarType.long, - ), - r'model': 
PropertySchema( - id: 2, - name: r'model', - type: IsarType.byte, - enumMap: _EmbeddingmodelEnumValueMap, - ), - r'updationTime': PropertySchema( - id: 3, - name: r'updationTime', - type: IsarType.long, - ) - }, - estimateSize: _embeddingEstimateSize, - serialize: _embeddingSerialize, - deserialize: _embeddingDeserialize, - deserializeProp: _embeddingDeserializeProp, - idName: r'id', - indexes: { - r'unique_file_model_embedding': IndexSchema( - id: 6248303800853228628, - name: r'unique_file_model_embedding', - unique: true, - replace: true, - properties: [ - IndexPropertySchema( - name: r'model', - type: IndexType.value, - caseSensitive: false, - ), - IndexPropertySchema( - name: r'fileID', - type: IndexType.value, - caseSensitive: false, - ) - ], - ) - }, - links: {}, - embeddedSchemas: {}, - getId: _embeddingGetId, - getLinks: _embeddingGetLinks, - attach: _embeddingAttach, - version: '3.1.0+1', -); - -int _embeddingEstimateSize( - Embedding object, - List offsets, - Map> allOffsets, -) { - var bytesCount = offsets.last; - bytesCount += 3 + object.embedding.length * 8; - return bytesCount; -} - -void _embeddingSerialize( - Embedding object, - IsarWriter writer, - List offsets, - Map> allOffsets, -) { - writer.writeDoubleList(offsets[0], object.embedding); - writer.writeLong(offsets[1], object.fileID); - writer.writeByte(offsets[2], object.model.index); - writer.writeLong(offsets[3], object.updationTime); -} - -Embedding _embeddingDeserialize( - Id id, - IsarReader reader, - List offsets, - Map> allOffsets, -) { - final object = Embedding( - embedding: reader.readDoubleList(offsets[0]) ?? [], - fileID: reader.readLong(offsets[1]), - model: _EmbeddingmodelValueEnumMap[reader.readByteOrNull(offsets[2])] ?? - Model.onnxClip, - updationTime: reader.readLongOrNull(offsets[3]), - ); - object.id = id; - return object; -} - -P _embeddingDeserializeProp

( - IsarReader reader, - int propertyId, - int offset, - Map> allOffsets, -) { - switch (propertyId) { - case 0: - return (reader.readDoubleList(offset) ?? []) as P; - case 1: - return (reader.readLong(offset)) as P; - case 2: - return (_EmbeddingmodelValueEnumMap[reader.readByteOrNull(offset)] ?? - Model.onnxClip) as P; - case 3: - return (reader.readLongOrNull(offset)) as P; - default: - throw IsarError('Unknown property with id $propertyId'); - } -} - -const _EmbeddingmodelEnumValueMap = { - 'onnxClip': 0, - 'ggmlClip': 1, -}; -const _EmbeddingmodelValueEnumMap = { - 0: Model.onnxClip, - 1: Model.ggmlClip, -}; - -Id _embeddingGetId(Embedding object) { - return object.id; -} - -List> _embeddingGetLinks(Embedding object) { - return []; -} - -void _embeddingAttach(IsarCollection col, Id id, Embedding object) { - object.id = id; -} - -extension EmbeddingByIndex on IsarCollection { - Future getByModelFileID(Model model, int fileID) { - return getByIndex(r'unique_file_model_embedding', [model, fileID]); - } - - Embedding? 
getByModelFileIDSync(Model model, int fileID) { - return getByIndexSync(r'unique_file_model_embedding', [model, fileID]); - } - - Future deleteByModelFileID(Model model, int fileID) { - return deleteByIndex(r'unique_file_model_embedding', [model, fileID]); - } - - bool deleteByModelFileIDSync(Model model, int fileID) { - return deleteByIndexSync(r'unique_file_model_embedding', [model, fileID]); - } - - Future> getAllByModelFileID( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return getAllByIndex(r'unique_file_model_embedding', values); - } - - List getAllByModelFileIDSync( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return getAllByIndexSync(r'unique_file_model_embedding', values); - } - - Future deleteAllByModelFileID( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return deleteAllByIndex(r'unique_file_model_embedding', values); - } - - int deleteAllByModelFileIDSync( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return deleteAllByIndexSync(r'unique_file_model_embedding', values); - } - - Future putByModelFileID(Embedding object) { - return 
putByIndex(r'unique_file_model_embedding', object); - } - - Id putByModelFileIDSync(Embedding object, {bool saveLinks = true}) { - return putByIndexSync(r'unique_file_model_embedding', object, - saveLinks: saveLinks); - } - - Future> putAllByModelFileID(List objects) { - return putAllByIndex(r'unique_file_model_embedding', objects); - } - - List putAllByModelFileIDSync(List objects, - {bool saveLinks = true}) { - return putAllByIndexSync(r'unique_file_model_embedding', objects, - saveLinks: saveLinks); - } -} - -extension EmbeddingQueryWhereSort - on QueryBuilder { - QueryBuilder anyId() { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(const IdWhereClause.any()); - }); - } - - QueryBuilder anyModelFileID() { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause( - const IndexWhereClause.any(indexName: r'unique_file_model_embedding'), - ); - }); - } -} - -extension EmbeddingQueryWhere - on QueryBuilder { - QueryBuilder idEqualTo(Id id) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IdWhereClause.between( - lower: id, - upper: id, - )); - }); - } - - QueryBuilder idNotEqualTo(Id id) { - return QueryBuilder.apply(this, (query) { - if (query.whereSort == Sort.asc) { - return query - .addWhereClause( - IdWhereClause.lessThan(upper: id, includeUpper: false), - ) - .addWhereClause( - IdWhereClause.greaterThan(lower: id, includeLower: false), - ); - } else { - return query - .addWhereClause( - IdWhereClause.greaterThan(lower: id, includeLower: false), - ) - .addWhereClause( - IdWhereClause.lessThan(upper: id, includeUpper: false), - ); - } - }); - } - - QueryBuilder idGreaterThan(Id id, - {bool include = false}) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause( - IdWhereClause.greaterThan(lower: id, includeLower: include), - ); - }); - } - - QueryBuilder idLessThan(Id id, - {bool include = false}) { - return QueryBuilder.apply(this, (query) { - return 
query.addWhereClause( - IdWhereClause.lessThan(upper: id, includeUpper: include), - ); - }); - } - - QueryBuilder idBetween( - Id lowerId, - Id upperId, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IdWhereClause.between( - lower: lowerId, - includeLower: includeLower, - upper: upperId, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder modelEqualToAnyFileID( - Model model) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.equalTo( - indexName: r'unique_file_model_embedding', - value: [model], - )); - }); - } - - QueryBuilder - modelNotEqualToAnyFileID(Model model) { - return QueryBuilder.apply(this, (query) { - if (query.whereSort == Sort.asc) { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [], - upper: [model], - includeUpper: false, - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - includeLower: false, - upper: [], - )); - } else { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - includeLower: false, - upper: [], - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [], - upper: [model], - includeUpper: false, - )); - } - }); - } - - QueryBuilder - modelGreaterThanAnyFileID( - Model model, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - includeLower: include, - upper: [], - )); - }); - } - - QueryBuilder modelLessThanAnyFileID( - Model model, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - 
lower: [], - upper: [model], - includeUpper: include, - )); - }); - } - - QueryBuilder modelBetweenAnyFileID( - Model lowerModel, - Model upperModel, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [lowerModel], - includeLower: includeLower, - upper: [upperModel], - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder modelFileIDEqualTo( - Model model, int fileID) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.equalTo( - indexName: r'unique_file_model_embedding', - value: [model, fileID], - )); - }); - } - - QueryBuilder - modelEqualToFileIDNotEqualTo(Model model, int fileID) { - return QueryBuilder.apply(this, (query) { - if (query.whereSort == Sort.asc) { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - upper: [model, fileID], - includeUpper: false, - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, fileID], - includeLower: false, - upper: [model], - )); - } else { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, fileID], - includeLower: false, - upper: [model], - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - upper: [model, fileID], - includeUpper: false, - )); - } - }); - } - - QueryBuilder - modelEqualToFileIDGreaterThan( - Model model, - int fileID, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, fileID], - includeLower: include, - upper: [model], - )); - }); - } - - QueryBuilder - modelEqualToFileIDLessThan( - Model model, - 
int fileID, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - upper: [model, fileID], - includeUpper: include, - )); - }); - } - - QueryBuilder - modelEqualToFileIDBetween( - Model model, - int lowerFileID, - int upperFileID, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, lowerFileID], - includeLower: includeLower, - upper: [model, upperFileID], - includeUpper: includeUpper, - )); - }); - } -} - -extension EmbeddingQueryFilter - on QueryBuilder { - QueryBuilder - embeddingElementEqualTo( - double value, { - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'embedding', - value: value, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingElementGreaterThan( - double value, { - bool include = false, - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'embedding', - value: value, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingElementLessThan( - double value, { - bool include = false, - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'embedding', - value: value, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingElementBetween( - double lower, - double upper, { - bool includeLower = true, - bool includeUpper = true, - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return 
query.addFilterCondition(FilterCondition.between( - property: r'embedding', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingLengthEqualTo(int length) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - length, - true, - length, - true, - ); - }); - } - - QueryBuilder embeddingIsEmpty() { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - 0, - true, - 0, - true, - ); - }); - } - - QueryBuilder - embeddingIsNotEmpty() { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - 0, - false, - 999999, - true, - ); - }); - } - - QueryBuilder - embeddingLengthLessThan( - int length, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - 0, - true, - length, - include, - ); - }); - } - - QueryBuilder - embeddingLengthGreaterThan( - int length, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - length, - include, - 999999, - true, - ); - }); - } - - QueryBuilder - embeddingLengthBetween( - int lower, - int upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - lower, - includeLower, - upper, - includeUpper, - ); - }); - } - - QueryBuilder fileIDEqualTo( - int value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'fileID', - value: value, - )); - }); - } - - QueryBuilder fileIDGreaterThan( - int value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'fileID', - value: value, - )); - }); - } - - QueryBuilder fileIDLessThan( - int 
value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'fileID', - value: value, - )); - }); - } - - QueryBuilder fileIDBetween( - int lower, - int upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'fileID', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder idEqualTo( - Id value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'id', - value: value, - )); - }); - } - - QueryBuilder idGreaterThan( - Id value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'id', - value: value, - )); - }); - } - - QueryBuilder idLessThan( - Id value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'id', - value: value, - )); - }); - } - - QueryBuilder idBetween( - Id lower, - Id upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'id', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder modelEqualTo( - Model value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'model', - value: value, - )); - }); - } - - QueryBuilder modelGreaterThan( - Model value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return 
query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'model', - value: value, - )); - }); - } - - QueryBuilder modelLessThan( - Model value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'model', - value: value, - )); - }); - } - - QueryBuilder modelBetween( - Model lower, - Model upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'model', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder - updationTimeIsNull() { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(const FilterCondition.isNull( - property: r'updationTime', - )); - }); - } - - QueryBuilder - updationTimeIsNotNull() { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(const FilterCondition.isNotNull( - property: r'updationTime', - )); - }); - } - - QueryBuilder updationTimeEqualTo( - int? value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'updationTime', - value: value, - )); - }); - } - - QueryBuilder - updationTimeGreaterThan( - int? value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'updationTime', - value: value, - )); - }); - } - - QueryBuilder - updationTimeLessThan( - int? value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'updationTime', - value: value, - )); - }); - } - - QueryBuilder updationTimeBetween( - int? lower, - int? 
upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'updationTime', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } -} - -extension EmbeddingQueryObject - on QueryBuilder {} - -extension EmbeddingQueryLinks - on QueryBuilder {} - -extension EmbeddingQuerySortBy on QueryBuilder { - QueryBuilder sortByFileID() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.asc); - }); - } - - QueryBuilder sortByFileIDDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.desc); - }); - } - - QueryBuilder sortByModel() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'model', Sort.asc); - }); - } - - QueryBuilder sortByModelDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'model', Sort.desc); - }); - } - - QueryBuilder sortByUpdationTime() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.asc); - }); - } - - QueryBuilder sortByUpdationTimeDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.desc); - }); - } -} - -extension EmbeddingQuerySortThenBy - on QueryBuilder { - QueryBuilder thenByFileID() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.asc); - }); - } - - QueryBuilder thenByFileIDDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.desc); - }); - } - - QueryBuilder thenById() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'id', Sort.asc); - }); - } - - QueryBuilder thenByIdDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'id', Sort.desc); - }); - } - - QueryBuilder thenByModel() { - return QueryBuilder.apply(this, (query) { - return 
query.addSortBy(r'model', Sort.asc); - }); - } - - QueryBuilder thenByModelDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'model', Sort.desc); - }); - } - - QueryBuilder thenByUpdationTime() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.asc); - }); - } - - QueryBuilder thenByUpdationTimeDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.desc); - }); - } -} - -extension EmbeddingQueryWhereDistinct - on QueryBuilder { - QueryBuilder distinctByEmbedding() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'embedding'); - }); - } - - QueryBuilder distinctByFileID() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'fileID'); - }); - } - - QueryBuilder distinctByModel() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'model'); - }); - } - - QueryBuilder distinctByUpdationTime() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'updationTime'); - }); - } -} - -extension EmbeddingQueryProperty - on QueryBuilder { - QueryBuilder idProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'id'); - }); - } - - QueryBuilder, QQueryOperations> embeddingProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'embedding'); - }); - } - - QueryBuilder fileIDProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'fileID'); - }); - } - - QueryBuilder modelProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'model'); - }); - } - - QueryBuilder updationTimeProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'updationTime'); - }); - } -} diff --git a/mobile/lib/models/file/file.dart b/mobile/lib/models/file/file.dart index 75a40c99b0..d96a81e1ce 100644 --- 
a/mobile/lib/models/file/file.dart +++ b/mobile/lib/models/file/file.dart @@ -9,7 +9,7 @@ import 'package:photos/core/constants.dart'; import 'package:photos/models/file/file_type.dart'; import 'package:photos/models/location/location.dart'; import "package:photos/models/metadata/file_magic.dart"; -import 'package:photos/services/feature_flag_service.dart'; +import "package:photos/service_locator.dart"; import 'package:photos/utils/date_time_util.dart'; import 'package:photos/utils/exif_util.dart'; import 'package:photos/utils/file_uploader_util.dart'; @@ -244,8 +244,7 @@ class EnteFile { String get downloadUrl { final endpoint = Configuration.instance.getHttpEndpoint(); - if (endpoint != kDefaultProductionEndpoint || - FeatureFlagService.instance.disableCFWorker()) { + if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) { return endpoint + "/files/download/" + uploadedFileID.toString(); } else { return "https://files.ente.io/?fileID=" + uploadedFileID.toString(); @@ -258,8 +257,7 @@ class EnteFile { String get thumbnailUrl { final endpoint = Configuration.instance.getHttpEndpoint(); - if (endpoint != kDefaultProductionEndpoint || - FeatureFlagService.instance.disableCFWorker()) { + if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) { return endpoint + "/files/preview/" + uploadedFileID.toString(); } else { return "https://thumbnails.ente.io/?fileID=" + uploadedFileID.toString(); @@ -310,7 +308,7 @@ class EnteFile { @override String toString() { return '''File(generatedID: $generatedID, localID: $localID, title: $title, - uploadedFileId: $uploadedFileID, modificationTime: $modificationTime, + type: $fileType, uploadedFileId: $uploadedFileID, modificationTime: $modificationTime, ownerID: $ownerID, collectionID: $collectionID, updationTime: $updationTime)'''; } diff --git a/mobile/lib/models/gallery_type.dart b/mobile/lib/models/gallery_type.dart index ba0eb397f7..40426f7015 100644 --- 
a/mobile/lib/models/gallery_type.dart +++ b/mobile/lib/models/gallery_type.dart @@ -32,12 +32,12 @@ extension GalleyTypeExtension on GalleryType { case GalleryType.locationTag: case GalleryType.quickLink: case GalleryType.uncategorized: + case GalleryType.sharedCollection: return true; case GalleryType.hiddenSection: case GalleryType.hiddenOwnedCollection: case GalleryType.trash: - case GalleryType.sharedCollection: return false; } } diff --git a/mobile/lib/module/upload/service/multipart.dart b/mobile/lib/module/upload/service/multipart.dart index 6ce2c8a410..496f573bbe 100644 --- a/mobile/lib/module/upload/service/multipart.dart +++ b/mobile/lib/module/upload/service/multipart.dart @@ -2,6 +2,7 @@ import "dart:io"; import "dart:typed_data"; import "package:dio/dio.dart"; +import "package:ente_feature_flag/ente_feature_flag.dart"; import "package:logging/logging.dart"; import "package:photos/core/constants.dart"; import "package:photos/db/upload_locks_db.dart"; @@ -9,14 +10,13 @@ import "package:photos/models/encryption_result.dart"; import "package:photos/module/upload/model/multipart.dart"; import "package:photos/module/upload/model/xml.dart"; import "package:photos/services/collections_service.dart"; -import "package:photos/services/feature_flag_service.dart"; import "package:photos/utils/crypto_util.dart"; class MultiPartUploader { final Dio _enteDio; final Dio _s3Dio; final UploadLocksDB _db; - final FeatureFlagService _featureFlagService; + final FlagService _featureFlagService; late final Logger _logger = Logger("MultiPartUploader"); MultiPartUploader( @@ -50,8 +50,8 @@ class MultiPartUploader { ); } - static int get multipartPartSizeForUpload { - if (FeatureFlagService.instance.isInternalUserOrDebugBuild()) { + int get multipartPartSizeForUpload { + if (_featureFlagService.internalUser) { return multipartPartSizeInternal; } return multipartPartSize; @@ -60,7 +60,7 @@ class MultiPartUploader { Future calculatePartCount(int fileSize) async { // Multipart 
upload is only enabled for internal users // and debug builds till it's battle tested. - if (!FeatureFlagService.instance.isInternalUserOrDebugBuild()) return 1; + if (!_featureFlagService.internalUser) return 1; final partCount = (fileSize / multipartPartSizeForUpload).ceil(); return partCount; @@ -69,7 +69,7 @@ class MultiPartUploader { Future getMultipartUploadURLs(int count) async { try { assert( - _featureFlagService.isInternalUserOrDebugBuild(), + _featureFlagService.internalUser, "Multipart upload should not be enabled for external users.", ); final response = await _enteDio.get( @@ -114,6 +114,7 @@ class MultiPartUploader { CryptoUtil.bin2base64(encryptedResult.encryptedData!), CryptoUtil.bin2base64(fileNonce), CryptoUtil.bin2base64(encryptedResult.nonce!), + partSize: multipartPartSizeForUpload, ); } diff --git a/mobile/lib/service_locator.dart b/mobile/lib/service_locator.dart new file mode 100644 index 0000000000..397703761e --- /dev/null +++ b/mobile/lib/service_locator.dart @@ -0,0 +1,36 @@ +import "package:dio/dio.dart"; +import "package:ente_cast/ente_cast.dart"; +import "package:ente_cast_normal/ente_cast_normal.dart"; +import "package:ente_feature_flag/ente_feature_flag.dart"; +import "package:shared_preferences/shared_preferences.dart"; + +class ServiceLocator { + late final SharedPreferences prefs; + late final Dio enteDio; + + // instance + ServiceLocator._privateConstructor(); + + static final ServiceLocator instance = ServiceLocator._privateConstructor(); + + init(SharedPreferences prefs, Dio enteDio) { + this.prefs = prefs; + this.enteDio = enteDio; + } +} + +FlagService? _flagService; + +FlagService get flagService { + _flagService ??= FlagService( + ServiceLocator.instance.prefs, + ServiceLocator.instance.enteDio, + ); + return _flagService!; +} + +CastService? 
_castService; +CastService get castService { + _castService ??= CastServiceImpl(); + return _castService!; +} diff --git a/mobile/lib/services/collections_service.dart b/mobile/lib/services/collections_service.dart index ae8ae150fb..5b16bc70fb 100644 --- a/mobile/lib/services/collections_service.dart +++ b/mobile/lib/services/collections_service.dart @@ -28,6 +28,7 @@ import 'package:photos/models/collection/collection.dart'; import 'package:photos/models/collection/collection_file_item.dart'; import 'package:photos/models/collection/collection_items.dart'; import 'package:photos/models/file/file.dart'; +import "package:photos/models/files_split.dart"; import "package:photos/models/metadata/collection_magic.dart"; import 'package:photos/services/app_lifecycle_service.dart'; import "package:photos/services/favorites_service.dart"; @@ -187,6 +188,23 @@ class CollectionsService { return result; } + bool allowUpload(int collectionID) { + final Collection? c = _collectionIDToCollections[collectionID]; + if (c == null) { + _logger.info('discardUpload: collectionMissing $collectionID'); + return false; + } + if (c.isDeleted) { + _logger.info('discardUpload: collectionDeleted $collectionID'); + return false; + } + if (!c.isOwner(_config.getUserID()!)) { + _logger.info('discardUpload: notOwner $collectionID'); + return false; + } + return true; + } + Future> getArchivedCollection() async { final allCollections = getCollectionsForUI(); return allCollections @@ -1148,11 +1166,53 @@ class CollectionsService { return collection; } - Future addToCollection(int collectionID, List files) async { - final containsUploadedFile = files.firstWhereOrNull( - (element) => element.uploadedFileID != null, - ) != - null; + Future addOrCopyToCollection( + int dstCollectionID, + List files, + ) async { + final splitResult = FilesSplit.split(files, _config.getUserID()!); + if (splitResult.pendingUploads.isNotEmpty) { + throw ArgumentError('File should be already uploaded'); + } + if 
(splitResult.ownedByCurrentUser.isNotEmpty) { + await _addToCollection(dstCollectionID, splitResult.ownedByCurrentUser); + } + if (splitResult.ownedByOtherUsers.isNotEmpty) { + late final List filesToCopy; + late final List filesToAdd; + (filesToAdd, filesToCopy) = (await _splitFilesToAddAndCopy( + splitResult.ownedByOtherUsers, + )); + + if (filesToAdd.isNotEmpty) { + _logger.info( + "found existing ${filesToAdd.length} files with same hash, adding symlinks", + ); + await _addToCollection(dstCollectionID, filesToAdd); + } + // group files by collectionID + final Map> filesByCollection = {}; + for (final file in filesToCopy) { + if (filesByCollection.containsKey(file.collectionID!)) { + filesByCollection[file.collectionID!]!.add(file.copyWith()); + } else { + filesByCollection[file.collectionID!] = [file.copyWith()]; + } + } + for (final entry in filesByCollection.entries) { + final srcCollectionID = entry.key; + final files = entry.value; + await _copyToCollection( + files, + dstCollectionID: dstCollectionID, + srcCollectionID: srcCollectionID, + ); + } + } + } + + Future _addToCollection(int collectionID, List files) async { + final containsUploadedFile = files.any((e) => e.isUploaded); if (containsUploadedFile) { final existingFileIDsInCollection = await FilesDB.instance.getUploadedFileIDs(collectionID); @@ -1166,6 +1226,13 @@ class CollectionsService { _logger.info("nothing to add to the collection"); return; } + final anyFileOwnedByOther = + files.any((e) => e.ownerID != null && e.ownerID != _config.getUserID()); + if (anyFileOwnedByOther) { + throw ArgumentError( + 'Cannot add files owned by other users, they should be copied', + ); + } final params = {}; params["collectionID"] = collectionID; @@ -1263,6 +1330,126 @@ class CollectionsService { } } + Future _copyToCollection( + List files, { + required int dstCollectionID, + required int srcCollectionID, + }) async { + _validateCopyInput(dstCollectionID, srcCollectionID, files); + final batchedFiles = 
files.chunks(batchSizeCopy); + final params = {}; + params["dstCollectionID"] = dstCollectionID; + params["srcCollectionID"] = srcCollectionID; + for (final batch in batchedFiles) { + params["files"] = []; + for (final batchFile in batch) { + final fileKey = getFileKey(batchFile); + _logger.info( + "srcCollection : $srcCollectionID file: ${batchFile.uploadedFileID} key: ${CryptoUtil.bin2base64(fileKey)} ", + ); + final encryptedKeyData = + CryptoUtil.encryptSync(fileKey, getCollectionKey(dstCollectionID)); + batchFile.encryptedKey = + CryptoUtil.bin2base64(encryptedKeyData.encryptedData!); + batchFile.keyDecryptionNonce = + CryptoUtil.bin2base64(encryptedKeyData.nonce!); + params["files"].add( + CollectionFileItem( + batchFile.uploadedFileID!, + batchFile.encryptedKey!, + batchFile.keyDecryptionNonce!, + ).toMap(), + ); + } + + try { + final res = await _enteDio.post( + "/files/copy", + data: params, + ); + final oldToCopiedFileIDMap = Map.from( + (res.data["oldToNewFileIDMap"] as Map).map( + (key, value) => MapEntry(int.parse(key), value as int), + ), + ); + for (final file in batch) { + final int uploadIDForOriginalFIle = file.uploadedFileID!; + if (oldToCopiedFileIDMap.containsKey(uploadIDForOriginalFIle)) { + file.generatedID = null; + file.collectionID = dstCollectionID; + file.uploadedFileID = oldToCopiedFileIDMap[uploadIDForOriginalFIle]; + file.ownerID = _config.getUserID(); + oldToCopiedFileIDMap.remove(uploadIDForOriginalFIle); + } else { + throw Exception("Failed to copy file ${file.uploadedFileID}"); + } + } + if (oldToCopiedFileIDMap.isNotEmpty) { + throw Exception( + "Failed to map following uploadKey ${oldToCopiedFileIDMap.keys}", + ); + } + await _filesDB.insertMultiple(batch); + Bus.instance + .fire(CollectionUpdatedEvent(dstCollectionID, batch, "copiedTo")); + } catch (e) { + rethrow; + } + } + } + + Future<(List, List)> _splitFilesToAddAndCopy( + List othersFile, + ) async { + final hashToUserFile = + await 
_filesDB.getUserOwnedFilesWithSameHashForGivenListOfFiles( + othersFile, + _config.getUserID()!, + ); + final List filesToCopy = []; + final List filesToAdd = []; + for (final EnteFile file in othersFile) { + if (hashToUserFile.containsKey(file.hash ?? '')) { + final userFile = hashToUserFile[file.hash]!; + if (userFile.fileType == file.fileType) { + filesToAdd.add(userFile); + } else { + filesToCopy.add(file); + } + } else { + filesToCopy.add(file); + } + } + return (filesToAdd, filesToCopy); + } + + void _validateCopyInput( + int destCollectionID, + int srcCollectionID, + List files, + ) { + final dstCollection = _collectionIDToCollections[destCollectionID]; + final srcCollection = _collectionIDToCollections[srcCollectionID]; + if (dstCollection == null || !dstCollection.isOwner(_config.getUserID()!)) { + throw ArgumentError( + 'Destination collection not found ${dstCollection == null} or not owned by user ', + ); + } + if (srcCollection == null) { + throw ArgumentError('Source collection not found'); + } + // verify that all fileIds belong to srcCollection and isn't owned by current user + for (final f in files) { + if (f.collectionID != srcCollectionID || + f.ownerID == _config.getUserID()) { + _logger.warning( + 'file $f does not belong to srcCollection $srcCollection or is owned by current user ${f.ownerID}', + ); + throw ArgumentError(''); + } + } + } + Future linkLocalFileToExistingUploadedFileInAnotherCollection( int destCollectionID, { required EnteFile localFileToUpload, @@ -1481,10 +1668,13 @@ class CollectionsService { for (final file in batch) { params["fileIDs"].add(file.uploadedFileID); } - await _enteDio.post( + final resp = await _enteDio.post( "/collections/v3/remove-files", data: params, ); + if (resp.statusCode != 200) { + throw Exception("Failed to remove files from collection"); + } await _filesDB.removeFromCollection(collectionID, params["fileIDs"]); Bus.instance diff --git a/mobile/lib/services/favorites_service.dart 
b/mobile/lib/services/favorites_service.dart index 5388a6e671..fef4a323a8 100644 --- a/mobile/lib/services/favorites_service.dart +++ b/mobile/lib/services/favorites_service.dart @@ -24,6 +24,7 @@ class FavoritesService { late FilesDB _filesDB; int? _cachedFavoritesCollectionID; final Set _cachedFavUploadedIDs = {}; + final Map _cachedFavFileHases = {}; final Set _cachedPendingLocalIDs = {}; late StreamSubscription _collectionUpdatesSubscription; @@ -60,9 +61,12 @@ class FavoritesService { Future _warmUpCache() async { final favCollection = await _getFavoritesCollection(); if (favCollection != null) { - final uploadedIDs = - await FilesDB.instance.getUploadedFileIDs(favCollection.id); + Set uploadedIDs; + Map fileHashes; + (uploadedIDs, fileHashes) = + await FilesDB.instance.getUploadAndHash(favCollection.id); _cachedFavUploadedIDs.addAll(uploadedIDs); + _cachedFavFileHases.addAll(fileHashes); } } @@ -87,6 +91,9 @@ class FavoritesService { return false; } if (file.uploadedFileID != null) { + if (file.ownerID != _config.getUserID() && file.hash != null) { + return _cachedFavFileHases.containsKey(file.hash!); + } return _cachedFavUploadedIDs.contains(file.uploadedFileID); } else if (file.localID != null) { return _cachedPendingLocalIDs.contains(file.localID); @@ -99,6 +106,9 @@ class FavoritesService { if (collection == null || file.uploadedFileID == null) { return false; } + if (file.ownerID != _config.getUserID() && file.hash != null) { + return _cachedFavFileHases.containsKey(file.hash!); + } return _filesDB.doesFileExistInCollection( file.uploadedFileID!, collection.id, @@ -110,10 +120,14 @@ class FavoritesService { required bool favFlag, }) { final Set updatedIDs = {}; + final Map hashes = {}; final Set localIDs = {}; for (var file in files) { if (file.uploadedFileID != null) { updatedIDs.add(file.uploadedFileID!); + if (file.hash != null) { + hashes[file.hash!] 
= file.uploadedFileID!; + } } else if (file.localID != null || file.localID != "") { /* Note: Favorite un-uploaded files For such files, as we don't have uploaded IDs yet, we will cache @@ -124,8 +138,12 @@ class FavoritesService { } if (favFlag) { _cachedFavUploadedIDs.addAll(updatedIDs); + _cachedFavFileHases.addAll(hashes); } else { _cachedFavUploadedIDs.removeAll(updatedIDs); + for (var hash in hashes.keys) { + _cachedFavFileHases.remove(hash); + } } } @@ -137,7 +155,7 @@ class FavoritesService { await _filesDB.insert(file); Bus.instance.fire(CollectionUpdatedEvent(collectionID, files, "addTFav")); } else { - await _collectionsService.addToCollection(collectionID, files); + await _collectionsService.addOrCopyToCollection(collectionID, files); } _updateFavoriteFilesCache(files, favFlag: true); RemoteSyncService.instance.sync(silently: true).ignore(); @@ -153,11 +171,11 @@ class FavoritesService { throw AssertionError("Can only favorite uploaded items"); } if (files.any((f) => f.ownerID != currentUserID)) { - throw AssertionError("Can not favortie files owned by others"); + throw AssertionError("Can not favorite files owned by others"); } final collectionID = await _getOrCreateFavoriteCollectionID(); if (favFlag) { - await _collectionsService.addToCollection(collectionID, files); + await _collectionsService.addOrCopyToCollection(collectionID, files); } else { final Collection? favCollection = await _getFavoritesCollection(); await _collectionActions.moveFilesFromCurrentCollection( @@ -169,17 +187,30 @@ class FavoritesService { _updateFavoriteFilesCache(files, favFlag: favFlag); } - Future removeFromFavorites(BuildContext context, EnteFile file) async { - final fileID = file.uploadedFileID; - if (fileID == null) { + Future removeFromFavorites( + BuildContext context, + EnteFile file, + ) async { + final inUploadID = file.uploadedFileID; + if (inUploadID == null) { // Do nothing, ignore } else { final Collection? 
favCollection = await _getFavoritesCollection(); // The file might be part of another collection. For unfav, we need to // move file from the fav collection to the . + if (file.ownerID != _config.getUserID() && + _cachedFavFileHases.containsKey(file.hash!)) { + final EnteFile? favFile = await FilesDB.instance.getUploadedFile( + _cachedFavFileHases[file.hash!]!, + favCollection!.id, + ); + if (favFile != null) { + file = favFile; + } + } if (file.collectionID != favCollection!.id) { final EnteFile? favFile = await FilesDB.instance.getUploadedFile( - fileID, + file.uploadedFileID!, favCollection.id, ); if (favFile != null) { diff --git a/mobile/lib/services/feature_flag_service.dart b/mobile/lib/services/feature_flag_service.dart deleted file mode 100644 index 2891b03f6d..0000000000 --- a/mobile/lib/services/feature_flag_service.dart +++ /dev/null @@ -1,142 +0,0 @@ -import 'dart:convert'; -import 'dart:io'; - -import 'package:flutter/foundation.dart'; -import 'package:logging/logging.dart'; -import 'package:photos/core/configuration.dart'; -import 'package:photos/core/constants.dart'; -import 'package:photos/core/network/network.dart'; -import 'package:shared_preferences/shared_preferences.dart'; - -class FeatureFlagService { - FeatureFlagService._privateConstructor(); - - static final FeatureFlagService instance = - FeatureFlagService._privateConstructor(); - static const _featureFlagsKey = "feature_flags_key"; - static final _internalUserIDs = const String.fromEnvironment( - "internal_user_ids", - defaultValue: "1,2,3,4,191,125,1580559962388044,1580559962392434,10000025", - ).split(",").map((element) { - return int.parse(element); - }).toSet(); - - final _logger = Logger("FeatureFlagService"); - FeatureFlags? _featureFlags; - late SharedPreferences _prefs; - - Future init() async { - _prefs = await SharedPreferences.getInstance(); - // Fetch feature flags from network in async manner. 
- // Intention of delay is to give more CPU cycles to other tasks - Future.delayed( - const Duration(seconds: 5), - () { - fetchFeatureFlags(); - }, - ); - } - - FeatureFlags _getFeatureFlags() { - _featureFlags ??= - FeatureFlags.fromJson(_prefs.getString(_featureFlagsKey)!); - // if nothing is cached, use defaults as temporary fallback - if (_featureFlags == null) { - return FeatureFlags.defaultFlags; - } - return _featureFlags!; - } - - bool disableCFWorker() { - try { - return _getFeatureFlags().disableCFWorker; - } catch (e) { - _logger.severe(e); - return FFDefault.disableCFWorker; - } - } - - bool enableStripe() { - if (Platform.isIOS) { - return false; - } - try { - return _getFeatureFlags().enableStripe; - } catch (e) { - _logger.severe(e); - return FFDefault.enableStripe; - } - } - - bool enablePasskey() { - try { - if (isInternalUserOrDebugBuild()) { - return true; - } - return _getFeatureFlags().enablePasskey; - } catch (e) { - _logger.info('error in enablePasskey check', e); - return FFDefault.enablePasskey; - } - } - - bool isInternalUserOrDebugBuild() { - final String? 
email = Configuration.instance.getEmail(); - final userID = Configuration.instance.getUserID(); - return (email != null && email.endsWith("@ente.io")) || - _internalUserIDs.contains(userID) || - kDebugMode; - } - - Future fetchFeatureFlags() async { - try { - final response = await NetworkClient.instance - .getDio() - .get("https://static.ente.io/feature_flags.json"); - final flagsResponse = FeatureFlags.fromMap(response.data); - await _prefs.setString(_featureFlagsKey, flagsResponse.toJson()); - _featureFlags = flagsResponse; - } catch (e) { - _logger.severe("Failed to sync feature flags ", e); - } - } -} - -class FeatureFlags { - static FeatureFlags defaultFlags = FeatureFlags( - disableCFWorker: FFDefault.disableCFWorker, - enableStripe: FFDefault.enableStripe, - enablePasskey: FFDefault.enablePasskey, - ); - - final bool disableCFWorker; - final bool enableStripe; - final bool enablePasskey; - - FeatureFlags({ - required this.disableCFWorker, - required this.enableStripe, - required this.enablePasskey, - }); - - Map toMap() { - return { - "disableCFWorker": disableCFWorker, - "enableStripe": enableStripe, - "enablePasskey": enablePasskey, - }; - } - - String toJson() => json.encode(toMap()); - - factory FeatureFlags.fromJson(String source) => - FeatureFlags.fromMap(json.decode(source)); - - factory FeatureFlags.fromMap(Map json) { - return FeatureFlags( - disableCFWorker: json["disableCFWorker"] ?? FFDefault.disableCFWorker, - enableStripe: json["enableStripe"] ?? FFDefault.enableStripe, - enablePasskey: json["enablePasskey"] ?? 
FFDefault.enablePasskey, - ); - } -} diff --git a/mobile/lib/services/filter/collection_ignore.dart b/mobile/lib/services/filter/collection_ignore.dart index d9e27f0948..f7f50a7df2 100644 --- a/mobile/lib/services/filter/collection_ignore.dart +++ b/mobile/lib/services/filter/collection_ignore.dart @@ -1,26 +1,38 @@ import 'package:photos/models/file/file.dart'; import "package:photos/services/filter/filter.dart"; -// CollectionsIgnoreFilter will filter out files that are in present in the -// given collections. This is useful for filtering out files that are in archive -// or hidden collections from home page and other places -class CollectionsIgnoreFilter extends Filter { +// CollectionsOrHashIgnoreFilter will filter out all files that are in present in the +// given collections collectionIDs. This is useful for filtering out files that are in archive +// or hidden collections from home page and other places. Based on flag, it will also filter out +// shared files if the user already as another file with the same hash. +class CollectionsAndSavedFileFilter extends Filter { final Set collectionIDs; + final bool ignoreSavedFiles; + final int ownerID; Set? _ignoredUploadIDs; + Set ownedFileHashes = {}; - CollectionsIgnoreFilter(this.collectionIDs, List files) : super() { + CollectionsAndSavedFileFilter( + this.collectionIDs, + this.ownerID, + List files, + this.ignoreSavedFiles, + ) : super() { init(files); } void init(List files) { _ignoredUploadIDs = {}; - if (collectionIDs.isEmpty) return; for (var file in files) { - if (file.collectionID != null && - file.isUploaded && - collectionIDs.contains(file.collectionID!)) { - _ignoredUploadIDs!.add(file.uploadedFileID!); + if (file.collectionID != null && file.isUploaded) { + if (collectionIDs.contains(file.collectionID!)) { + _ignoredUploadIDs!.add(file.uploadedFileID!); + } else if (ignoreSavedFiles && + file.ownerID == ownerID && + (file.hash ?? 
'').isNotEmpty) { + ownedFileHashes.add(file.hash!); + } } } } @@ -37,6 +49,16 @@ class CollectionsIgnoreFilter extends Filter { } return true; } - return !_ignoredUploadIDs!.contains(file.uploadedFileID!); + if (_ignoredUploadIDs!.contains(file.uploadedFileID!)) { + return false; // this file should be filtered out + } + if (ignoreSavedFiles && + file.ownerID != ownerID && + (file.hash ?? '').isNotEmpty) { + // if the file is shared and the user already has a file with the same hash + // then filter it out by returning false + return !ownedFileHashes.contains(file.hash!); + } + return true; } } diff --git a/mobile/lib/services/filter/db_filters.dart b/mobile/lib/services/filter/db_filters.dart index ad062fe1c5..0c969dc525 100644 --- a/mobile/lib/services/filter/db_filters.dart +++ b/mobile/lib/services/filter/db_filters.dart @@ -1,3 +1,4 @@ +import "package:photos/core/configuration.dart"; import 'package:photos/models/file/file.dart'; import "package:photos/services/filter/collection_ignore.dart"; import "package:photos/services/filter/dedupe_by_upload_id.dart"; @@ -12,11 +13,14 @@ class DBFilterOptions { Set? ignoredCollectionIDs; bool dedupeUploadID; bool hideIgnoredForUpload; + // If true, shared files that are already saved in the users account will be ignored. + bool ignoreSavedFiles; DBFilterOptions({ this.ignoredCollectionIDs, this.hideIgnoredForUpload = false, this.dedupeUploadID = true, + this.ignoreSavedFiles = false, }); static DBFilterOptions dedupeOption = DBFilterOptions( @@ -42,12 +46,18 @@ Future> applyDBFilters( if (options.dedupeUploadID) { filters.add(DedupeUploadIDFilter()); } - if (options.ignoredCollectionIDs != null && - options.ignoredCollectionIDs!.isNotEmpty) { - final collectionIgnoreFilter = - CollectionsIgnoreFilter(options.ignoredCollectionIDs!, files); + + if ((options.ignoredCollectionIDs ?? 
{}).isNotEmpty || + options.ignoreSavedFiles) { + final collectionIgnoreFilter = CollectionsAndSavedFileFilter( + options.ignoredCollectionIDs ?? {}, + Configuration.instance.getUserID() ?? 0, + files, + options.ignoreSavedFiles, + ); filters.add(collectionIgnoreFilter); } + final List filterFiles = []; for (final file in files) { if (filters.every((f) => f.filter(file))) { diff --git a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart index f7d17f8b86..420b8c97f7 100644 --- a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart +++ b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart @@ -19,7 +19,7 @@ class EmbeddingStore { static final EmbeddingStore instance = EmbeddingStore._privateConstructor(); - static const kEmbeddingsSyncTimeKey = "sync_time_embeddings_v2"; + static const kEmbeddingsSyncTimeKey = "sync_time_embeddings_v3"; final _logger = Logger("EmbeddingStore"); final _dio = NetworkClient.instance.enteDio; diff --git a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart index d1074053a2..337ca913ff 100644 --- a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart +++ b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart @@ -72,8 +72,8 @@ class SemanticSearchService { _mlFramework = _currentModel == Model.onnxClip ? 
ONNX(shouldDownloadOverMobileData) : GGML(shouldDownloadOverMobileData); - await EmbeddingsDB.instance.init(); await EmbeddingStore.instance.init(); + await EmbeddingsDB.instance.init(); await _loadEmbeddings(); Bus.instance.on().listen((event) { _embeddingLoaderDebouncer.run(() async { diff --git a/mobile/lib/services/memories_service.dart b/mobile/lib/services/memories_service.dart index de68e2dabc..6461131289 100644 --- a/mobile/lib/services/memories_service.dart +++ b/mobile/lib/services/memories_service.dart @@ -107,7 +107,7 @@ class MemoriesService extends ChangeNotifier { } final ignoredCollections = CollectionsService.instance.archivedOrHiddenCollectionIds(); - final files = await _filesDB.getFilesCreatedWithinDurationsSync( + final files = await _filesDB.getFilesCreatedWithinDurations( durations, ignoredCollections, visibility: visibleVisibility, diff --git a/mobile/lib/services/remote_sync_service.dart b/mobile/lib/services/remote_sync_service.dart index 4c52227581..eab8478a6c 100644 --- a/mobile/lib/services/remote_sync_service.dart +++ b/mobile/lib/services/remote_sync_service.dart @@ -23,9 +23,9 @@ import "package:photos/models/file/extensions/file_props.dart"; import 'package:photos/models/file/file.dart'; import 'package:photos/models/file/file_type.dart'; import 'package:photos/models/upload_strategy.dart'; +import "package:photos/service_locator.dart"; import 'package:photos/services/app_lifecycle_service.dart'; import 'package:photos/services/collections_service.dart'; -import "package:photos/services/feature_flag_service.dart"; import 'package:photos/services/ignored_files_service.dart'; import 'package:photos/services/local_file_update_service.dart'; import "package:photos/services/notification_service.dart"; @@ -185,7 +185,7 @@ class RemoteSyncService { rethrow; } else { _logger.severe("Error executing remote sync ", e, s); - if (FeatureFlagService.instance.isInternalUserOrDebugBuild()) { + if (flagService.internalUser) { rethrow; } } diff 
--git a/mobile/lib/services/update_service.dart b/mobile/lib/services/update_service.dart index 28c5732c8d..da01de828d 100644 --- a/mobile/lib/services/update_service.dart +++ b/mobile/lib/services/update_service.dart @@ -16,7 +16,7 @@ class UpdateService { static final UpdateService instance = UpdateService._privateConstructor(); static const kUpdateAvailableShownTimeKey = "update_available_shown_time_key"; static const changeLogVersionKey = "update_change_log_key"; - static const currentChangeLogVersion = 18; + static const currentChangeLogVersion = 19; LatestVersionInfo? _latestVersion; final _logger = Logger("UpdateService"); @@ -73,9 +73,13 @@ class UpdateService { return _latestVersion; } - Future shouldShowUpdateNoification() async { + Future shouldShowUpdateNotification() async { final shouldUpdate = await this.shouldUpdate(); + if (!shouldUpdate) { + return false; + } + final lastNotificationShownTime = _prefs.getInt(kUpdateAvailableShownTimeKey) ?? 0; final now = DateTime.now().microsecondsSinceEpoch; @@ -87,7 +91,7 @@ class UpdateService { } Future showUpdateNotification() async { - if (await shouldShowUpdateNoification()) { + if (await shouldShowUpdateNotification()) { // ignore: unawaited_futures NotificationService.instance.showNotification( "Update available", diff --git a/mobile/lib/ui/account/recovery_page.dart b/mobile/lib/ui/account/recovery_page.dart index 4b3d499955..881b0792dd 100644 --- a/mobile/lib/ui/account/recovery_page.dart +++ b/mobile/lib/ui/account/recovery_page.dart @@ -59,9 +59,9 @@ class _RecoveryPageState extends State { Navigator.of(context).pushReplacement( MaterialPageRoute( builder: (BuildContext context) { - return WillPopScope( - onWillPop: () async => false, - child: const PasswordEntryPage( + return const PopScope( + canPop: false, + child: PasswordEntryPage( mode: PasswordEntryMode.reset, ), ); diff --git a/mobile/lib/ui/actions/collection/collection_file_actions.dart 
b/mobile/lib/ui/actions/collection/collection_file_actions.dart index 8315c235b0..81b79825ac 100644 --- a/mobile/lib/ui/actions/collection/collection_file_actions.dart +++ b/mobile/lib/ui/actions/collection/collection_file_actions.dart @@ -184,7 +184,8 @@ extension CollectionFileActions on CollectionActions { } } if (files.isNotEmpty) { - await CollectionsService.instance.addToCollection(collectionID, files); + await CollectionsService.instance + .addOrCopyToCollection(collectionID, files); } unawaited(RemoteSyncService.instance.sync(silently: true)); await dialog?.hide(); diff --git a/mobile/lib/ui/actions/collection/collection_sharing_actions.dart b/mobile/lib/ui/actions/collection/collection_sharing_actions.dart index dc28197bd8..7993c43423 100644 --- a/mobile/lib/ui/actions/collection/collection_sharing_actions.dart +++ b/mobile/lib/ui/actions/collection/collection_sharing_actions.dart @@ -140,7 +140,7 @@ class CollectionActions { req, ); logger.finest("adding files to share to new album"); - await collectionsService.addToCollection(collection.id, files); + await collectionsService.addOrCopyToCollection(collection.id, files); logger.finest("creating public link for the newly created album"); await CollectionsService.instance.createShareUrl(collection); await dialog.hide(); diff --git a/mobile/lib/ui/cast/auto.dart b/mobile/lib/ui/cast/auto.dart new file mode 100644 index 0000000000..34c97b34de --- /dev/null +++ b/mobile/lib/ui/cast/auto.dart @@ -0,0 +1,133 @@ +import "dart:io"; + +import "package:ente_cast/ente_cast.dart"; +import "package:flutter/material.dart"; +import "package:photos/generated/l10n.dart"; +import "package:photos/service_locator.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/common/loading_widget.dart"; +import "package:photos/utils/dialog_util.dart"; + +class AutoCastDialog extends StatefulWidget { + // async method that takes string as input + // and returns void + final void Function(String) onConnect; + 
AutoCastDialog( + this.onConnect, { + Key? key, + }) : super(key: key) {} + + @override + State createState() => _AutoCastDialogState(); +} + +class _AutoCastDialogState extends State { + final bool doesUserExist = true; + final Set _isDeviceTapInProgress = {}; + + @override + Widget build(BuildContext context) { + final textStyle = getEnteTextTheme(context); + final AlertDialog alert = AlertDialog( + title: Text( + S.of(context).connectToDevice, + style: textStyle.largeBold, + ), + content: Column( + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisSize: MainAxisSize.min, + children: [ + Text( + S.of(context).autoCastDialogBody, + style: textStyle.bodyMuted, + ), + if (Platform.isIOS) + Text( + S.of(context).autoCastiOSPermission, + style: textStyle.bodyMuted, + ), + const SizedBox(height: 16), + FutureBuilder>( + future: castService.searchDevices(), + builder: (_, snapshot) { + if (snapshot.hasError) { + return Center( + child: Text( + 'Error: ${snapshot.error.toString()}', + ), + ); + } else if (!snapshot.hasData) { + return const EnteLoadingWidget(); + } + + if (snapshot.data!.isEmpty) { + return Center(child: Text(S.of(context).noDeviceFound)); + } + + return Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: snapshot.data!.map((result) { + final device = result.$2; + final name = result.$1; + return GestureDetector( + onTap: () async { + if (_isDeviceTapInProgress.contains(device)) { + return; + } + setState(() { + _isDeviceTapInProgress.add(device); + }); + try { + await _connectToYourApp(context, device); + } catch (e) { + if (mounted) { + setState(() { + _isDeviceTapInProgress.remove(device); + }); + showGenericErrorDialog(context: context, error: e) + .ignore(); + } + } + }, + child: Padding( + padding: const EdgeInsets.symmetric(vertical: 8.0), + child: Row( + children: [ + Expanded(child: Text(name)), + if (_isDeviceTapInProgress.contains(device)) + const EnteLoadingWidget(), + ], + ), + ), + ); + }).toList(), + ); + }, + ), + ], 
+ ), + ); + return alert; + } + + Future _connectToYourApp( + BuildContext context, + Object castDevice, + ) async { + await castService.connectDevice( + context, + castDevice, + onMessage: (message) { + if (message.containsKey(CastMessageType.pairCode)) { + final code = message[CastMessageType.pairCode]!['code']; + widget.onConnect(code); + } + if (mounted) { + setState(() { + _isDeviceTapInProgress.remove(castDevice); + }); + } + }, + ); + } +} diff --git a/mobile/lib/ui/cast/choose.dart b/mobile/lib/ui/cast/choose.dart new file mode 100644 index 0000000000..bd4c9876de --- /dev/null +++ b/mobile/lib/ui/cast/choose.dart @@ -0,0 +1,76 @@ +import "package:flutter/material.dart"; +import "package:photos/generated/l10n.dart"; +import "package:photos/l10n/l10n.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/components/buttons/button_widget.dart"; +import "package:photos/ui/components/models/button_type.dart"; + +class CastChooseDialog extends StatefulWidget { + const CastChooseDialog({ + Key? 
key, + }) : super(key: key); + + @override + State createState() => _CastChooseDialogState(); +} + +class _CastChooseDialogState extends State { + final bool doesUserExist = true; + + @override + Widget build(BuildContext context) { + final textStyle = getEnteTextTheme(context); + final AlertDialog alert = AlertDialog( + title: Text( + context.l10n.playOnTv, + style: textStyle.largeBold, + ), + content: Column( + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisSize: MainAxisSize.min, + children: [ + const SizedBox(height: 8), + Text( + S.of(context).autoPairDesc, + style: textStyle.bodyMuted, + ), + const SizedBox(height: 12), + ButtonWidget( + labelText: S.of(context).autoPair, + icon: Icons.cast_outlined, + buttonType: ButtonType.neutral, + buttonSize: ButtonSize.large, + shouldStickToDarkTheme: true, + buttonAction: ButtonAction.first, + shouldSurfaceExecutionStates: false, + isInAlert: true, + onTap: () async { + Navigator.of(context).pop(ButtonAction.first); + }, + ), + const SizedBox(height: 36), + Text( + S.of(context).manualPairDesc, + style: textStyle.bodyMuted, + ), + const SizedBox(height: 12), + ButtonWidget( + labelText: S.of(context).pairWithPin, + buttonType: ButtonType.neutral, + // icon for pairing with TV manually + icon: Icons.tv_outlined, + buttonSize: ButtonSize.large, + isInAlert: true, + onTap: () async { + Navigator.of(context).pop(ButtonAction.second); + }, + shouldStickToDarkTheme: true, + buttonAction: ButtonAction.second, + shouldSurfaceExecutionStates: false, + ), + ], + ), + ); + return alert; + } +} diff --git a/mobile/lib/ui/common/linear_progress_dialog.dart b/mobile/lib/ui/common/linear_progress_dialog.dart index 3bd2f70fea..375eebe48c 100644 --- a/mobile/lib/ui/common/linear_progress_dialog.dart +++ b/mobile/lib/ui/common/linear_progress_dialog.dart @@ -27,8 +27,8 @@ class LinearProgressDialogState extends State { @override Widget build(BuildContext context) { - return WillPopScope( - onWillPop: () async => false, + return 
PopScope( + canPop: false, child: AlertDialog( title: Text( widget.message, diff --git a/mobile/lib/ui/common/popup_item.dart b/mobile/lib/ui/common/popup_item.dart new file mode 100644 index 0000000000..5f32104af0 --- /dev/null +++ b/mobile/lib/ui/common/popup_item.dart @@ -0,0 +1,38 @@ +import 'package:flutter/material.dart'; + +class EntePopupMenuItem extends PopupMenuItem { + final String label; + final IconData? icon; + final Widget? iconWidget; + + EntePopupMenuItem( + this.label, { + required T value, + this.icon, + this.iconWidget, + Key? key, + }) : assert( + icon != null || iconWidget != null, + 'Either icon or iconWidget must be provided.', + ), + assert( + !(icon != null && iconWidget != null), + 'Only one of icon or iconWidget can be provided.', + ), + super( + value: value, + key: key, + child: Row( + children: [ + if (iconWidget != null) + iconWidget + else if (icon != null) + Icon(icon), + const Padding( + padding: EdgeInsets.all(8), + ), + Text(label), + ], + ), // Initially empty, will be populated in build + ); +} diff --git a/mobile/lib/ui/common/progress_dialog.dart b/mobile/lib/ui/common/progress_dialog.dart index 61f8d4ca17..f08d7cdbcf 100644 --- a/mobile/lib/ui/common/progress_dialog.dart +++ b/mobile/lib/ui/common/progress_dialog.dart @@ -155,8 +155,8 @@ class ProgressDialog { barrierColor: _barrierColor, builder: (BuildContext context) { _dismissingContext = context; - return WillPopScope( - onWillPop: () async => _barrierDismissible, + return PopScope( + canPop: _barrierDismissible, child: Dialog( backgroundColor: _backgroundColor, insetAnimationCurve: _insetAnimCurve, diff --git a/mobile/lib/ui/components/bottom_action_bar/selection_action_button_widget.dart b/mobile/lib/ui/components/bottom_action_bar/selection_action_button_widget.dart index 60db98cf4f..5ca6a25dcc 100644 --- a/mobile/lib/ui/components/bottom_action_bar/selection_action_button_widget.dart +++ 
b/mobile/lib/ui/components/bottom_action_bar/selection_action_button_widget.dart @@ -132,14 +132,15 @@ class __BodyState extends State<_Body> { return maxWidth; } +//Todo: this doesn't give the correct width of the word, make it right double computeWidthOfWord(String text, TextStyle style) { final textPainter = TextPainter( text: TextSpan(text: text, style: style), maxLines: 1, textDirection: TextDirection.ltr, - textScaleFactor: MediaQuery.of(context).textScaleFactor, + textScaler: MediaQuery.textScalerOf(context), )..layout(); - - return textPainter.size.width; +//buffer of 8 added as width is shorter than actual text width + return textPainter.size.width + 8; } } diff --git a/mobile/lib/ui/home/home_gallery_widget.dart b/mobile/lib/ui/home/home_gallery_widget.dart index 195eb7b952..5d9f9c09dc 100644 --- a/mobile/lib/ui/home/home_gallery_widget.dart +++ b/mobile/lib/ui/home/home_gallery_widget.dart @@ -41,6 +41,7 @@ class HomeGalleryWidget extends StatelessWidget { hideIgnoredForUpload: true, dedupeUploadID: true, ignoredCollectionIDs: collectionsToHide, + ignoreSavedFiles: true, ); if (hasSelectedAllForBackup) { result = await FilesDB.instance.getAllLocalAndUploadedFiles( diff --git a/mobile/lib/ui/notification/update/change_log_page.dart b/mobile/lib/ui/notification/update/change_log_page.dart index 289d84590d..90430fae25 100644 --- a/mobile/lib/ui/notification/update/change_log_page.dart +++ b/mobile/lib/ui/notification/update/change_log_page.dart @@ -1,5 +1,3 @@ -import "dart:async"; - import 'package:flutter/material.dart'; import "package:photos/generated/l10n.dart"; import 'package:photos/services/update_service.dart'; @@ -9,7 +7,6 @@ import 'package:photos/ui/components/divider_widget.dart'; import 'package:photos/ui/components/models/button_type.dart'; import 'package:photos/ui/components/title_bar_title_widget.dart'; import 'package:photos/ui/notification/update/change_log_entry.dart'; -import "package:url_launcher/url_launcher_string.dart"; class 
ChangeLogPage extends StatefulWidget { const ChangeLogPage({ @@ -81,31 +78,31 @@ class _ChangeLogPageState extends State { const SizedBox( height: 8, ), - ButtonWidget( - buttonType: ButtonType.trailingIconSecondary, - buttonSize: ButtonSize.large, - labelText: S.of(context).joinDiscord, - icon: Icons.discord_outlined, - iconColor: enteColorScheme.primary500, - onTap: () async { - unawaited( - launchUrlString( - "https://discord.com/invite/z2YVKkycX3", - mode: LaunchMode.externalApplication, - ), - ); - }, - ), // ButtonWidget( // buttonType: ButtonType.trailingIconSecondary, // buttonSize: ButtonSize.large, - // labelText: S.of(context).rateTheApp, - // icon: Icons.favorite_rounded, + // labelText: S.of(context).joinDiscord, + // icon: Icons.discord_outlined, // iconColor: enteColorScheme.primary500, // onTap: () async { - // await UpdateService.instance.launchReviewUrl(); + // unawaited( + // launchUrlString( + // "https://discord.com/invite/z2YVKkycX3", + // mode: LaunchMode.externalApplication, + // ), + // ); // }, // ), + ButtonWidget( + buttonType: ButtonType.trailingIconSecondary, + buttonSize: ButtonSize.large, + labelText: S.of(context).rateTheApp, + icon: Icons.favorite_rounded, + iconColor: enteColorScheme.primary500, + onTap: () async { + await UpdateService.instance.launchReviewUrl(); + }, + ), const SizedBox(height: 8), ], ), @@ -122,18 +119,20 @@ class _ChangeLogPageState extends State { final List items = []; items.addAll([ ChangeLogEntry( - "Improved Performance for Large Galleries ✨", - 'We\'ve made significant improvements to how quickly galleries load and' - ' with less stutter, especially for those with a lot of photos and videos.', + "Cast albums to TV ✨", + "View a slideshow of your albums on any big screen! 
Open an album and click on the Cast button to get started.", ), ChangeLogEntry( - "Enhanced Functionality for Video Backups", - 'Even if video backups are disabled, you can now manually upload individual videos.', + "Organize shared photos", + "You can now add shared items to your favorites or to any of your personal albums. Ente will create a copy that is fully owned by you and can be organized to your liking.", ), ChangeLogEntry( - "Bug Fixes", - 'Many a bugs were squashed in this release.\n' - '\nIf you run into any, please write to team@ente.io, or let us know on Discord! 🙏', + "Download multiple items", + "You can now download multiple items to your gallery at once. Select the items you want to download and click on the download button.", + ), + ChangeLogEntry( + "Performance improvements", + "This release also brings in major changes that should improve responsiveness. If you discover room for improvement, please let us know!", ), ]); diff --git a/mobile/lib/ui/payment/payment_web_page.dart b/mobile/lib/ui/payment/payment_web_page.dart index cbe55f671e..c6c0c83d01 100644 --- a/mobile/lib/ui/payment/payment_web_page.dart +++ b/mobile/lib/ui/payment/payment_web_page.dart @@ -52,8 +52,15 @@ class _PaymentWebPageState extends State { if (initPaymentUrl == null) { return const EnteLoadingWidget(); } - return WillPopScope( - onWillPop: (() async => _buildPageExitWidget(context)), + return PopScope( + canPop: false, + onPopInvoked: (didPop) async { + if (didPop) return; + final shouldPop = await _buildPageExitWidget(context); + if (shouldPop) { + Navigator.of(context).pop(); + } + }, child: Scaffold( appBar: AppBar( title: Text(S.of(context).subscription), diff --git a/mobile/lib/ui/payment/subscription.dart b/mobile/lib/ui/payment/subscription.dart index 0327c3ab53..c30a1c67dd 100644 --- a/mobile/lib/ui/payment/subscription.dart +++ b/mobile/lib/ui/payment/subscription.dart @@ -1,6 +1,6 @@ import 'package:flutter/cupertino.dart'; import 
'package:photos/core/configuration.dart'; -import 'package:photos/services/feature_flag_service.dart'; +import "package:photos/service_locator.dart"; import 'package:photos/services/update_service.dart'; import "package:photos/ui/payment/store_subscription_page.dart"; import 'package:photos/ui/payment/stripe_subscription_page.dart'; @@ -9,8 +9,7 @@ StatefulWidget getSubscriptionPage({bool isOnBoarding = false}) { if (UpdateService.instance.isIndependentFlavor()) { return StripeSubscriptionPage(isOnboarding: isOnBoarding); } - if (FeatureFlagService.instance.enableStripe() && - _isUserCreatedPostStripeSupport()) { + if (flagService.enableStripe && _isUserCreatedPostStripeSupport()) { return StripeSubscriptionPage(isOnboarding: isOnBoarding); } else { return StoreSubscriptionPage(isOnboarding: isOnBoarding); diff --git a/mobile/lib/ui/settings/app_update_dialog.dart b/mobile/lib/ui/settings/app_update_dialog.dart index 8038b7fa58..c9e612201a 100644 --- a/mobile/lib/ui/settings/app_update_dialog.dart +++ b/mobile/lib/ui/settings/app_update_dialog.dart @@ -83,8 +83,8 @@ class _AppUpdateDialogState extends State { ); final shouldForceUpdate = UpdateService.instance.shouldForceUpdate(widget.latestVersionInfo!); - return WillPopScope( - onWillPop: () async => !shouldForceUpdate, + return PopScope( + canPop: !shouldForceUpdate, child: AlertDialog( key: const ValueKey("updateAppDialog"), title: Column( diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart index 0ad5bce31e..3306ea36f7 100644 --- a/mobile/lib/ui/settings/machine_learning_settings_page.dart +++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart @@ -5,7 +5,7 @@ import "package:intl/intl.dart"; import "package:photos/core/event_bus.dart"; import 'package:photos/events/embedding_updated_event.dart'; import "package:photos/generated/l10n.dart"; -import "package:photos/services/feature_flag_service.dart"; +import 
"package:photos/service_locator.dart"; import 'package:photos/services/machine_learning/semantic_search/frameworks/ml_framework.dart'; import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart'; import "package:photos/theme/ente_theme.dart"; @@ -151,7 +151,7 @@ class _MachineLearningSettingsPageState const SizedBox( height: 12, ), - FeatureFlagService.instance.isInternalUserOrDebugBuild() + flagService.internalUser ? MenuItemWidget( leadingIcon: Icons.delete_sweep_outlined, captionedTextWidget: CaptionedTextWidget( diff --git a/mobile/lib/ui/settings/security_section_widget.dart b/mobile/lib/ui/settings/security_section_widget.dart index dce7e97ec5..eb93d85f62 100644 --- a/mobile/lib/ui/settings/security_section_widget.dart +++ b/mobile/lib/ui/settings/security_section_widget.dart @@ -10,7 +10,7 @@ import 'package:photos/events/two_factor_status_change_event.dart'; import "package:photos/generated/l10n.dart"; import "package:photos/l10n/l10n.dart"; import "package:photos/models/user_details.dart"; -import "package:photos/services/feature_flag_service.dart"; +import 'package:photos/service_locator.dart'; import 'package:photos/services/local_authentication_service.dart'; import "package:photos/services/passkey_service.dart"; import 'package:photos/services/user_service.dart'; @@ -70,8 +70,6 @@ class _SecuritySectionWidgetState extends State { final Completer completer = Completer(); final List children = []; if (_config.hasConfiguredAccount()) { - final bool isInternalUser = - FeatureFlagService.instance.isInternalUserOrDebugBuild(); children.addAll( [ sectionOptionSpacing, @@ -103,8 +101,8 @@ class _SecuritySectionWidgetState extends State { }, ), ), - if (isInternalUser) sectionOptionSpacing, - if (isInternalUser) + if (flagService.passKeyEnabled) sectionOptionSpacing, + if (flagService.passKeyEnabled) MenuItemWidget( captionedTextWidget: CaptionedTextWidget( title: context.l10n.passkey, diff --git 
a/mobile/lib/ui/settings_page.dart b/mobile/lib/ui/settings_page.dart index 51db275958..d5ba1254f6 100644 --- a/mobile/lib/ui/settings_page.dart +++ b/mobile/lib/ui/settings_page.dart @@ -7,7 +7,7 @@ import 'package:photos/core/configuration.dart'; import 'package:photos/core/event_bus.dart'; import 'package:photos/events/opened_settings_event.dart'; import "package:photos/generated/l10n.dart"; -import 'package:photos/services/feature_flag_service.dart'; +import "package:photos/service_locator.dart"; import "package:photos/services/storage_bonus_service.dart"; import 'package:photos/theme/colors.dart'; import 'package:photos/theme/ente_theme.dart'; @@ -140,8 +140,7 @@ class SettingsPage extends StatelessWidget { const AboutSectionWidget(), ]); - if (hasLoggedIn && - FeatureFlagService.instance.isInternalUserOrDebugBuild()) { + if (hasLoggedIn && flagService.internalUser) { contents.addAll([sectionSpacing, const DebugSectionWidget()]); } contents.add(const AppVersionWidget()); diff --git a/mobile/lib/ui/tabs/home_widget.dart b/mobile/lib/ui/tabs/home_widget.dart index 6745aaaa6f..ddad5073b0 100644 --- a/mobile/lib/ui/tabs/home_widget.dart +++ b/mobile/lib/ui/tabs/home_widget.dart @@ -195,7 +195,7 @@ class _HomeWidgetState extends State { }, ); _initDeepLinks(); - UpdateService.instance.shouldShowUpdateNoification().then((value) { + UpdateService.instance.shouldShowUpdateNotification().then((value) { Future.delayed(Duration.zero, () { if (value) { showDialog( @@ -315,7 +315,23 @@ class _HomeWidgetState extends State { final enableDrawer = LocalSyncService.instance.hasCompletedFirstImport(); final action = AppLifecycleService.instance.mediaExtensionAction.action; return UserDetailsStateWidget( - child: WillPopScope( + child: PopScope( + canPop: false, + onPopInvoked: (didPop) async { + if (didPop) return; + if (_selectedTabIndex == 0) { + if (isSettingsOpen) { + Navigator.pop(context); + } else if (Platform.isAndroid && action == IntentAction.main) { + 
unawaited(MoveToBackground.moveTaskToBack()); + } else { + Navigator.pop(context); + } + } else { + Bus.instance + .fire(TabChangedEvent(0, TabChangedEventSource.backButton)); + } + }, child: Scaffold( drawerScrimColor: getEnteColorScheme(context).strokeFainter, drawerEnableOpenDragGesture: false, @@ -341,24 +357,6 @@ class _HomeWidgetState extends State { ), resizeToAvoidBottomInset: false, ), - onWillPop: () async { - if (_selectedTabIndex == 0) { - if (isSettingsOpen) { - Navigator.pop(context); - return false; - } - if (Platform.isAndroid && action == IntentAction.main) { - unawaited(MoveToBackground.moveTaskToBack()); - return false; - } else { - return true; - } - } else { - Bus.instance - .fire(TabChangedEvent(0, TabChangedEventSource.backButton)); - return false; - } - }, ), ); } diff --git a/mobile/lib/ui/tools/app_lock.dart b/mobile/lib/ui/tools/app_lock.dart index 1fbc1678e5..c27555df0a 100644 --- a/mobile/lib/ui/tools/app_lock.dart +++ b/mobile/lib/ui/tools/app_lock.dart @@ -137,9 +137,9 @@ class _AppLockState extends State with WidgetsBindingObserver { } Widget get _lockScreen { - return WillPopScope( + return PopScope( + canPop: false, child: this.widget.lockScreen, - onWillPop: () => Future.value(false), ); } diff --git a/mobile/lib/ui/tools/debug/app_storage_viewer.dart b/mobile/lib/ui/tools/debug/app_storage_viewer.dart index 055457e085..50ec16c256 100644 --- a/mobile/lib/ui/tools/debug/app_storage_viewer.dart +++ b/mobile/lib/ui/tools/debug/app_storage_viewer.dart @@ -7,7 +7,7 @@ import 'package:path_provider/path_provider.dart'; import 'package:photos/core/cache/video_cache_manager.dart'; import 'package:photos/core/configuration.dart'; import "package:photos/generated/l10n.dart"; -import 'package:photos/services/feature_flag_service.dart'; +import "package:photos/service_locator.dart"; import 'package:photos/theme/ente_theme.dart'; import 'package:photos/ui/components/buttons/icon_button_widget.dart'; import 
'package:photos/ui/components/captioned_text_widget.dart'; @@ -34,7 +34,7 @@ class _AppStorageViewerState extends State { @override void initState() { - internalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild(); + internalUser = flagService.internalUser; addPath(); super.initState(); } diff --git a/mobile/lib/ui/tools/editor/image_editor_page.dart b/mobile/lib/ui/tools/editor/image_editor_page.dart index ca36db002a..4830df9523 100644 --- a/mobile/lib/ui/tools/editor/image_editor_page.dart +++ b/mobile/lib/ui/tools/editor/image_editor_page.dart @@ -63,14 +63,14 @@ class _ImageEditorPageState extends State { @override Widget build(BuildContext context) { - return WillPopScope( - onWillPop: () async { + return PopScope( + canPop: false, + onPopInvoked: (didPop) async { if (_hasBeenEdited()) { await _showExitConfirmationDialog(context); } else { replacePage(context, DetailPage(widget.detailPageConfig)); } - return false; }, child: Scaffold( appBar: AppBar( diff --git a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart index dff39ef60a..e805927a64 100644 --- a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart +++ b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart @@ -3,6 +3,7 @@ import "dart:async"; import 'package:fast_base58/fast_base58.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; +import "package:logging/logging.dart"; import "package:modal_bottom_sheet/modal_bottom_sheet.dart"; import 'package:photos/core/configuration.dart'; import "package:photos/generated/l10n.dart"; @@ -29,6 +30,8 @@ import 'package:photos/ui/sharing/manage_links_widget.dart'; import "package:photos/ui/tools/collage/collage_creator_page.dart"; import "package:photos/ui/viewer/location/update_location_data_widget.dart"; import 'package:photos/utils/delete_file_util.dart'; +import "package:photos/utils/dialog_util.dart"; +import 
"package:photos/utils/file_download_util.dart"; import 'package:photos/utils/magic_util.dart'; import 'package:photos/utils/navigation_util.dart'; import "package:photos/utils/share_util.dart"; @@ -55,6 +58,7 @@ class FileSelectionActionsWidget extends StatefulWidget { class _FileSelectionActionsWidgetState extends State { + static final _logger = Logger("FileSelectionActionsWidget"); late int currentUserID; late FilesSplit split; late CollectionActions collectionActions; @@ -69,6 +73,7 @@ class _FileSelectionActionsWidgetState @override void initState() { currentUserID = Configuration.instance.getUserID()!; + split = FilesSplit.split([], currentUserID); widget.selectedFiles.addListener(_selectFileChangeListener); collectionActions = CollectionActions(CollectionsService.instance); @@ -111,6 +116,8 @@ class _FileSelectionActionsWidgetState !widget.selectedFiles.files.any( (element) => element.fileType == FileType.video, ); + final showDownloadOption = + widget.selectedFiles.files.any((element) => element.localID == null); //To animate adding and removing of [SelectedActionButton], add all items //and set [shouldShow] to false for items that should not be shown and true @@ -146,7 +153,7 @@ class _FileSelectionActionsWidgetState SelectionActionButton( icon: Icons.cloud_upload_outlined, labelText: S.of(context).addToEnte, - onTap: anyOwnedFiles ? _addToAlbum : null, + onTap: _addToAlbum, ), ); } else { @@ -154,8 +161,7 @@ class _FileSelectionActionsWidgetState SelectionActionButton( icon: Icons.add_outlined, labelText: S.of(context).addToAlbum, - onTap: anyOwnedFiles ? 
_addToAlbum : null, - shouldShow: ownedAndPendingUploadFilesCount > 0, + onTap: _addToAlbum, ), ); } @@ -362,6 +368,16 @@ class _FileSelectionActionsWidgetState ); } + if (showDownloadOption) { + items.add( + SelectionActionButton( + labelText: S.of(context).download, + icon: Icons.cloud_download_outlined, + onTap: () => _download(widget.selectedFiles.files.toList()), + ), + ); + } + items.add( SelectionActionButton( labelText: S.of(context).share, @@ -374,41 +390,36 @@ class _FileSelectionActionsWidgetState ), ); - if (items.isNotEmpty) { - final scrollController = ScrollController(); - // h4ck: https://github.com/flutter/flutter/issues/57920#issuecomment-893970066 - return MediaQuery( - data: MediaQuery.of(context).removePadding(removeBottom: true), - child: SafeArea( - child: Scrollbar( - radius: const Radius.circular(1), - thickness: 2, - controller: scrollController, - thumbVisibility: true, - child: SingleChildScrollView( - physics: const BouncingScrollPhysics( - decelerationRate: ScrollDecelerationRate.fast, - ), - scrollDirection: Axis.horizontal, - child: Container( - padding: const EdgeInsets.only(bottom: 24), - child: Row( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - const SizedBox(width: 4), - ...items, - const SizedBox(width: 4), - ], - ), + final scrollController = ScrollController(); + // h4ck: https://github.com/flutter/flutter/issues/57920#issuecomment-893970066 + return MediaQuery( + data: MediaQuery.of(context).removePadding(removeBottom: true), + child: SafeArea( + child: Scrollbar( + radius: const Radius.circular(1), + thickness: 2, + controller: scrollController, + thumbVisibility: true, + child: SingleChildScrollView( + physics: const BouncingScrollPhysics( + decelerationRate: ScrollDecelerationRate.fast, + ), + scrollDirection: Axis.horizontal, + child: Container( + padding: const EdgeInsets.only(bottom: 24), + child: Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + const SizedBox(width: 4), + ...items, 
+ const SizedBox(width: 4), + ], ), ), ), ), - ); - } else { - // TODO: Return "Select All" here - return const SizedBox.shrink(); - } + ), + ); } Future _moveFiles() async { @@ -434,10 +445,6 @@ class _FileSelectionActionsWidgetState } Future _addToAlbum() async { - if (split.ownedByOtherUsers.isNotEmpty) { - widget.selectedFiles - .unSelectAll(split.ownedByOtherUsers.toSet(), skipNotify: true); - } showCollectionActionSheet(context, selectedFiles: widget.selectedFiles); } @@ -642,4 +649,29 @@ class _FileSelectionActionsWidgetState widget.selectedFiles.clearAll(); } } + + Future _download(List files) async { + final dialog = createProgressDialog( + context, + S.of(context).downloading, + isDismissible: true, + ); + await dialog.show(); + try { + final futures = []; + for (final file in files) { + if (file.localID == null) { + futures.add(downloadToGallery(file)); + } + } + await Future.wait(futures); + await dialog.hide(); + widget.selectedFiles.clearAll(); + showToast(context, S.of(context).filesSavedToGallery); + } catch (e) { + _logger.warning("Failed to save files", e); + await dialog.hide(); + await showGenericErrorDialog(context: context, error: e); + } + } } diff --git a/mobile/lib/ui/viewer/file/file_app_bar.dart b/mobile/lib/ui/viewer/file/file_app_bar.dart index 126f3093d6..2f2c8d0614 100644 --- a/mobile/lib/ui/viewer/file/file_app_bar.dart +++ b/mobile/lib/ui/viewer/file/file_app_bar.dart @@ -1,32 +1,24 @@ import 'dart:io'; -import 'package:flutter/cupertino.dart'; import 'package:flutter/material.dart'; import 'package:logging/logging.dart'; import 'package:media_extension/media_extension.dart'; -import 'package:path/path.dart' as file_path; -import 'package:photo_manager/photo_manager.dart'; -import 'package:photos/core/event_bus.dart'; -import 'package:photos/db/files_db.dart'; -import 'package:photos/events/local_photos_updated_event.dart'; import "package:photos/generated/l10n.dart"; import "package:photos/l10n/l10n.dart"; import 
"package:photos/models/file/extensions/file_props.dart"; import 'package:photos/models/file/file.dart'; import 'package:photos/models/file/file_type.dart'; import 'package:photos/models/file/trash_file.dart'; -import 'package:photos/models/ignored_file.dart'; import "package:photos/models/metadata/common_keys.dart"; import 'package:photos/models/selected_files.dart'; import 'package:photos/services/collections_service.dart'; import 'package:photos/services/hidden_service.dart'; -import 'package:photos/services/ignored_files_service.dart'; -import 'package:photos/services/local_sync_service.dart'; import 'package:photos/ui/collections/collection_action_sheet.dart'; import 'package:photos/ui/viewer/file/custom_app_bar.dart'; import "package:photos/ui/viewer/file_details/favorite_widget.dart"; import "package:photos/ui/viewer/file_details/upload_icon_widget.dart"; import 'package:photos/utils/dialog_util.dart'; +import "package:photos/utils/file_download_util.dart"; import 'package:photos/utils/file_util.dart'; import "package:photos/utils/magic_util.dart"; import 'package:photos/utils/toast_util.dart'; @@ -53,47 +45,70 @@ class FileAppBar extends StatefulWidget { class FileAppBarState extends State { final _logger = Logger("FadingAppBar"); + final List _actions = []; + + @override + void didUpdateWidget(FileAppBar oldWidget) { + super.didUpdateWidget(oldWidget); + if (oldWidget.file.generatedID != widget.file.generatedID) { + _getActions(); + } + } @override Widget build(BuildContext context) { + _logger.fine("building app bar ${widget.file.generatedID?.toString()}"); + + //When the widget is initialized, the actions are not available. + //Cannot call _getActions() in initState. 
+ if (_actions.isEmpty) { + _getActions(); + } + + final isTrashedFile = widget.file is TrashFile; + final shouldShowActions = widget.shouldShowActions && !isTrashedFile; return CustomAppBar( ValueListenableBuilder( valueListenable: widget.enableFullScreenNotifier, - builder: (context, bool isFullScreen, _) { + builder: (context, bool isFullScreen, child) { return IgnorePointer( ignoring: isFullScreen, child: AnimatedOpacity( opacity: isFullScreen ? 0 : 1, duration: const Duration(milliseconds: 150), - child: Container( - decoration: BoxDecoration( - gradient: LinearGradient( - begin: Alignment.topCenter, - end: Alignment.bottomCenter, - colors: [ - Colors.black.withOpacity(0.72), - Colors.black.withOpacity(0.6), - Colors.transparent, - ], - stops: const [0, 0.2, 1], - ), - ), - child: _buildAppBar(), - ), + child: child, ), ); }, + child: Container( + decoration: BoxDecoration( + gradient: LinearGradient( + begin: Alignment.topCenter, + end: Alignment.bottomCenter, + colors: [ + Colors.black.withOpacity(0.72), + Colors.black.withOpacity(0.6), + Colors.transparent, + ], + stops: const [0, 0.2, 1], + ), + ), + child: AppBar( + iconTheme: const IconThemeData( + color: Colors.white, + ), //same for both themes + actions: shouldShowActions ? _actions : [], + elevation: 0, + backgroundColor: const Color(0x00000000), + ), + ), ), Size.fromHeight(Platform.isAndroid ? 
84 : 96), ); } - AppBar _buildAppBar() { - _logger.fine("building app bar ${widget.file.generatedID?.toString()}"); - - final List actions = []; - final isTrashedFile = widget.file is TrashFile; - final shouldShowActions = widget.shouldShowActions && !isTrashedFile; + List _getActions() { + _actions.clear(); final bool isOwnedByUser = widget.file.isOwner; final bool isFileUploaded = widget.file.isUploaded; bool isFileHidden = false; @@ -104,7 +119,7 @@ class FileAppBarState extends State { false; } if (widget.file.isLiveOrMotionPhoto) { - actions.add( + _actions.add( IconButton( icon: const Icon(Icons.album_outlined), onPressed: () { @@ -117,16 +132,11 @@ class FileAppBarState extends State { ); } // only show fav option for files owned by the user - if (isOwnedByUser && !isFileHidden && isFileUploaded) { - actions.add( - Padding( - padding: const EdgeInsets.all(8), - child: FavoriteWidget(widget.file), - ), - ); + if (!isFileHidden && isFileUploaded) { + _actions.add(FavoriteWidget(widget.file)); } if (!isFileUploaded) { - actions.add( + _actions.add( UploadIconWidget( file: widget.file, key: ValueKey(widget.file.tag), @@ -144,7 +154,7 @@ class FileAppBarState extends State { Icon( Platform.isAndroid ? Icons.download - : CupertinoIcons.cloud_download, + : Icons.cloud_download_outlined, color: Theme.of(context).iconTheme.color, ), const Padding( @@ -241,7 +251,7 @@ class FileAppBarState extends State { } } if (items.isNotEmpty) { - actions.add( + _actions.add( PopupMenuButton( itemBuilder: (context) { return items; @@ -262,13 +272,7 @@ class FileAppBarState extends State { ), ); } - return AppBar( - iconTheme: - const IconThemeData(color: Colors.white), //same for both themes - actions: shouldShowActions ? 
actions : [], - elevation: 0, - backgroundColor: const Color(0x00000000), - ); + return _actions; } Future _handleHideRequest(BuildContext context) async { @@ -315,98 +319,16 @@ class FileAppBarState extends State { ); await dialog.show(); try { - final FileType type = file.fileType; - final bool downloadLivePhotoOnDroid = - type == FileType.livePhoto && Platform.isAndroid; - AssetEntity? savedAsset; - final File? fileToSave = await getFile(file); - //Disabling notifications for assets changing to insert the file into - //files db before triggering a sync. - await PhotoManager.stopChangeNotify(); - if (type == FileType.image) { - savedAsset = await PhotoManager.editor - .saveImageWithPath(fileToSave!.path, title: file.title!); - } else if (type == FileType.video) { - savedAsset = await PhotoManager.editor - .saveVideo(fileToSave!, title: file.title!); - } else if (type == FileType.livePhoto) { - final File? liveVideoFile = - await getFileFromServer(file, liveVideo: true); - if (liveVideoFile == null) { - throw AssertionError("Live video can not be null"); - } - if (downloadLivePhotoOnDroid) { - await _saveLivePhotoOnDroid(fileToSave!, liveVideoFile, file); - } else { - savedAsset = await PhotoManager.editor.darwin.saveLivePhoto( - imageFile: fileToSave!, - videoFile: liveVideoFile, - title: file.title!, - ); - } - } - - if (savedAsset != null) { - file.localID = savedAsset.id; - await FilesDB.instance.insert(file); - Bus.instance.fire( - LocalPhotosUpdatedEvent( - [file], - source: "download", - ), - ); - } else if (!downloadLivePhotoOnDroid && savedAsset == null) { - _logger.severe('Failed to save assert of type $type'); - } + await downloadToGallery(file); showToast(context, S.of(context).fileSavedToGallery); await dialog.hide(); } catch (e) { _logger.warning("Failed to save file", e); await dialog.hide(); await showGenericErrorDialog(context: context, error: e); - } finally { - await PhotoManager.startChangeNotify(); - 
LocalSyncService.instance.checkAndSync().ignore(); } } - Future _saveLivePhotoOnDroid( - File image, - File video, - EnteFile enteFile, - ) async { - debugPrint("Downloading LivePhoto on Droid"); - AssetEntity? savedAsset = await (PhotoManager.editor - .saveImageWithPath(image.path, title: enteFile.title!)); - if (savedAsset == null) { - throw Exception("Failed to save image of live photo"); - } - IgnoredFile ignoreVideoFile = IgnoredFile( - savedAsset.id, - savedAsset.title ?? '', - savedAsset.relativePath ?? 'remoteDownload', - "remoteDownload", - ); - await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); - final videoTitle = file_path.basenameWithoutExtension(enteFile.title!) + - file_path.extension(video.path); - savedAsset = (await (PhotoManager.editor.saveVideo( - video, - title: videoTitle, - ))); - if (savedAsset == null) { - throw Exception("Failed to save video of live photo"); - } - - ignoreVideoFile = IgnoredFile( - savedAsset.id, - savedAsset.title ?? videoTitle, - savedAsset.relativePath ?? 
'remoteDownload', - "remoteDownload", - ); - await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); - } - Future _setAs(EnteFile file) async { final dialog = createProgressDialog(context, S.of(context).pleaseWait); await dialog.show(); diff --git a/mobile/lib/ui/viewer/file/video_widget.dart b/mobile/lib/ui/viewer/file/video_widget.dart index c9c07df5ce..7f9218e9af 100644 --- a/mobile/lib/ui/viewer/file/video_widget.dart +++ b/mobile/lib/ui/viewer/file/video_widget.dart @@ -9,7 +9,7 @@ import 'package:photos/core/constants.dart'; import "package:photos/generated/l10n.dart"; import "package:photos/models/file/extensions/file_props.dart"; import 'package:photos/models/file/file.dart'; -import "package:photos/services/feature_flag_service.dart"; +import "package:photos/service_locator.dart"; import 'package:photos/services/files_service.dart'; import "package:photos/ui/actions/file/file_actions.dart"; import 'package:photos/ui/viewer/file/thumbnail_widget.dart'; @@ -161,8 +161,7 @@ class _VideoWidgetState extends State { } }).onError( (error, stackTrace) { - if (mounted && - FeatureFlagService.instance.isInternalUserOrDebugBuild()) { + if (mounted && flagService.internalUser) { if (error is Exception) { showErrorDialogForException( context: context, diff --git a/mobile/lib/ui/viewer/file_details/favorite_widget.dart b/mobile/lib/ui/viewer/file_details/favorite_widget.dart index 15fb7397cc..f9d6434908 100644 --- a/mobile/lib/ui/viewer/file_details/favorite_widget.dart +++ b/mobile/lib/ui/viewer/file_details/favorite_widget.dart @@ -3,11 +3,11 @@ import "dart:async"; import "package:flutter/material.dart"; import "package:like_button/like_button.dart"; import "package:logging/logging.dart"; +import "package:photos/core/configuration.dart"; import "package:photos/generated/l10n.dart"; import 'package:photos/models/file/file.dart'; import "package:photos/services/favorites_service.dart"; -import "package:photos/ui/common/progress_dialog.dart"; -import 
"package:photos/utils/dialog_util.dart"; +import "package:photos/ui/common/loading_widget.dart"; import "package:photos/utils/toast_util.dart"; class FavoriteWidget extends StatefulWidget { @@ -18,13 +18,13 @@ class FavoriteWidget extends StatefulWidget { super.key, }); - // State createState() => _ShareCollectionPageState(); @override State createState() => _FavoriteWidgetState(); } class _FavoriteWidgetState extends State { late Logger _logger; + bool _isLoading = false; @override void initState() { @@ -42,61 +42,72 @@ class _FavoriteWidgetState extends State { future: _fetchData(), builder: (context, snapshot) { final bool isLiked = snapshot.data ?? false; - return LikeButton( - size: 24, - isLiked: isLiked, - onTap: (oldValue) async { - final isLiked = !oldValue; - bool hasError = false; - if (isLiked) { - final shouldBlockUser = widget.file.uploadedFileID == null; - late ProgressDialog dialog; - if (shouldBlockUser) { - dialog = createProgressDialog( - context, - S.of(context).addingToFavorites, - ); - await dialog.show(); - } - try { - await FavoritesService.instance.addToFavorites( - context, - widget.file, - ); - } catch (e, s) { - _logger.severe(e, s); - hasError = true; - showToast(context, S.of(context).sorryCouldNotAddToFavorites); - } finally { - if (shouldBlockUser) { - await dialog.hide(); - } - } - } else { - try { - await FavoritesService.instance - .removeFromFavorites(context, widget.file); - } catch (e, s) { - _logger.severe(e, s); - hasError = true; - showToast( - context, - S.of(context).sorryCouldNotRemoveFromFavorites, - ); - } - } - return hasError ? oldValue : isLiked; - }, - likeBuilder: (isLiked) { - return Icon( - isLiked ? Icons.favorite_rounded : Icons.favorite_border_rounded, - color: isLiked - ? Colors.pinkAccent - : Colors.white, //same for both themes - size: 24, - ); - }, - ); + return _isLoading + ? 
const EnteLoadingWidget( + size: 14, + padding: 2, + ) // Add this line + : LikeButton( + size: 24, + isLiked: isLiked, + padding: const EdgeInsets.all(2), + onTap: (oldValue) async { + if (widget.file.uploadedFileID == null || + widget.file.ownerID != + Configuration.instance.getUserID()!) { + setState(() { + _isLoading = true; // Add this line + }); + } + final isLiked = !oldValue; + bool hasError = false; + if (isLiked) { + try { + await FavoritesService.instance.addToFavorites( + context, + widget.file.copyWith(), + ); + } catch (e, s) { + _logger.severe(e, s); + hasError = true; + showToast( + context, + S.of(context).sorryCouldNotAddToFavorites, + ); + } + } else { + try { + await FavoritesService.instance + .removeFromFavorites(context, widget.file.copyWith()); + } catch (e, s) { + _logger.severe(e, s); + hasError = true; + showToast( + context, + S.of(context).sorryCouldNotRemoveFromFavorites, + ); + } + } + setState(() { + _isLoading = false; // Add this line + }); + return hasError ? oldValue : isLiked; + }, + likeBuilder: (isLiked) { + debugPrint( + "File Upload ID ${widget.file.uploadedFileID} & collection ${widget.file.collectionID}", + ); + return Icon( + isLiked + ? Icons.favorite_rounded + : Icons.favorite_border_rounded, + color: isLiked + ? 
Colors.pinkAccent + : Colors.white, //same for both themes + size: 24, + ); + }, + ); }, ); } diff --git a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart index 1026bd7fd4..d2b7a6ec3d 100644 --- a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart +++ b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart @@ -19,11 +19,14 @@ import 'package:photos/models/device_collection.dart'; import 'package:photos/models/gallery_type.dart'; import "package:photos/models/metadata/common_keys.dart"; import 'package:photos/models/selected_files.dart'; +import 'package:photos/service_locator.dart'; import 'package:photos/services/collections_service.dart'; -import "package:photos/services/feature_flag_service.dart"; import 'package:photos/services/sync_service.dart'; import 'package:photos/services/update_service.dart'; import 'package:photos/ui/actions/collection/collection_sharing_actions.dart'; +import "package:photos/ui/cast/auto.dart"; +import "package:photos/ui/cast/choose.dart"; +import "package:photos/ui/common/popup_item.dart"; import 'package:photos/ui/components/action_sheet_widget.dart'; import 'package:photos/ui/components/buttons/button_widget.dart'; import 'package:photos/ui/components/models/button_type.dart'; @@ -87,16 +90,16 @@ class _GalleryAppBarWidgetState extends State { String? 
_appBarTitle; late CollectionActions collectionActions; bool isQuickLink = false; - late bool isInternalUser; late GalleryType galleryType; + final ValueNotifier castNotifier = ValueNotifier(0); + @override void initState() { super.initState(); _selectedFilesListener = () { setState(() {}); }; - isInternalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild(); collectionActions = CollectionActions(CollectionsService.instance); widget.selectedFiles.addListener(_selectedFilesListener); _userAuthEventSubscription = @@ -319,263 +322,138 @@ class _GalleryAppBarWidgetState extends State { ), ); } - final List> items = []; - if (galleryType.canRename()) { - items.add( - PopupMenuItem( + + if (widget.collection != null && castService.isSupported) { + actions.add( + Tooltip( + message: "Cast album", + child: IconButton( + icon: ValueListenableBuilder( + valueListenable: castNotifier, + builder: (context, value, child) { + return castService.getActiveSessions().isNotEmpty + ? const Icon(Icons.cast_connected_rounded) + : const Icon(Icons.cast_outlined); + }, + ), + onPressed: () async { + await _castChoiceDialog(); + }, + ), + ), + ); + } + final List> items = []; + items.addAll([ + if (galleryType.canRename()) + EntePopupMenuItem( + isQuickLink + ? S.of(context).convertToAlbum + : S.of(context).renameAlbum, value: AlbumPopupAction.rename, - child: Row( - children: [ - Icon(isQuickLink ? Icons.photo_album_outlined : Icons.edit), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isQuickLink - ? S.of(context).convertToAlbum - : S.of(context).renameAlbum, - ), - ], - ), + icon: isQuickLink ? 
Icons.photo_album_outlined : Icons.edit, ), - ); - } - if (galleryType.canSetCover()) { - items.add( - PopupMenuItem( + if (galleryType.canSetCover()) + EntePopupMenuItem( + S.of(context).setCover, value: AlbumPopupAction.setCover, - child: Row( - children: [ - const Icon(Icons.image_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).setCover), - ], - ), + icon: Icons.image_outlined, ), - ); - } - if (galleryType.showMap()) { - items.add( - PopupMenuItem( + if (galleryType.showMap()) + EntePopupMenuItem( + S.of(context).map, value: AlbumPopupAction.map, - child: Row( - children: [ - const Icon(Icons.map_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).map), - ], - ), + icon: Icons.map_outlined, ), - ); - } - - if (galleryType.canSort()) { - items.add( - PopupMenuItem( + if (galleryType.canSort()) + EntePopupMenuItem( + S.of(context).sortAlbumsBy, value: AlbumPopupAction.sort, - child: Row( - children: [ - const Icon(Icons.sort_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - S.of(context).sortAlbumsBy, - ), - ], - ), + icon: Icons.sort_outlined, ), - ); - } - - if (galleryType == GalleryType.uncategorized) { - items.add( - PopupMenuItem( + if (galleryType == GalleryType.uncategorized) + EntePopupMenuItem( + S.of(context).cleanUncategorized, value: AlbumPopupAction.cleanUncategorized, - child: Row( - children: [ - const Icon(Icons.crop_original_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).cleanUncategorized), - ], - ), + icon: Icons.crop_original_outlined, ), - ); - } - if (galleryType.canPin()) { - items.add( - PopupMenuItem( + if (galleryType.canPin()) + EntePopupMenuItem( + widget.collection!.isPinned + ? S.of(context).unpinAlbum + : S.of(context).pinAlbum, value: AlbumPopupAction.pinAlbum, - child: Row( - children: [ - widget.collection!.isPinned - ? 
const Icon(CupertinoIcons.pin_slash) - : Transform.rotate( - angle: 45 * math.pi / 180, // rotate by 45 degrees - child: const Icon(CupertinoIcons.pin), - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - widget.collection!.isPinned - ? S.of(context).unpinAlbum - : S.of(context).pinAlbum, - ), - ], - ), + iconWidget: widget.collection!.isPinned + ? const Icon(CupertinoIcons.pin_slash) + : Transform.rotate( + angle: 45 * math.pi / 180, // rotate by 45 degrees + child: const Icon(CupertinoIcons.pin), + ), ), - ); - } + ]); final bool isArchived = widget.collection?.isArchived() ?? false; final bool isHidden = widget.collection?.isHidden() ?? false; - // Do not show archive option for favorite collection. If collection is - // already archived, allow user to unarchive that collection. - if (isArchived || (galleryType.canArchive() && !isHidden)) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.ownedArchive, - child: Row( - children: [ - Icon(isArchived ? Icons.unarchive : Icons.archive_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isArchived - ? S.of(context).unarchiveAlbum - : S.of(context).archiveAlbum, - ), - ], - ), - ), - ); - } - if (!isArchived && galleryType.canHide()) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.ownedHide, - child: Row( - children: [ - Icon( - isHidden - ? Icons.visibility_outlined - : Icons.visibility_off_outlined, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isHidden ? S.of(context).unhide : S.of(context).hide, - ), - ], - ), - ), - ); - } - if (widget.collection != null && isInternalUser) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.playOnTv, - child: Row( - children: [ - const Icon(Icons.tv_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(context.l10n.playOnTv), - ], - ), - ), - ); - } - if (galleryType.canDelete()) { - items.add( - PopupMenuItem( - value: isQuickLink - ? 
AlbumPopupAction.removeLink - : AlbumPopupAction.delete, - child: Row( - children: [ - Icon( - isQuickLink - ? Icons.remove_circle_outline - : Icons.delete_outline, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isQuickLink - ? S.of(context).removeLink - : S.of(context).deleteAlbum, - ), - ], + items.addAll( + [ + // Do not show archive option for favorite collection. If collection is + // already archived, allow user to unarchive that collection. + if (isArchived || (galleryType.canArchive() && !isHidden)) + EntePopupMenuItem( + value: AlbumPopupAction.ownedArchive, + isArchived + ? S.of(context).unarchiveAlbum + : S.of(context).archiveAlbum, + icon: isArchived ? Icons.unarchive : Icons.archive_outlined, ), - ), - ); - } - - if (galleryType == GalleryType.sharedCollection) { - final bool hasShareeArchived = widget.collection!.hasShareeArchived(); - items.add( - PopupMenuItem( - value: AlbumPopupAction.sharedArchive, - child: Row( - children: [ - Icon( - hasShareeArchived ? Icons.unarchive : Icons.archive_outlined, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - hasShareeArchived - ? S.of(context).unarchiveAlbum - : S.of(context).archiveAlbum, - ), - ], + if (!isArchived && galleryType.canHide()) + EntePopupMenuItem( + value: AlbumPopupAction.ownedHide, + isHidden ? S.of(context).unhide : S.of(context).hide, + icon: isHidden + ? 
Icons.visibility_outlined + : Icons.visibility_off_outlined, ), - ), - ); - items.add( - PopupMenuItem( - value: AlbumPopupAction.leave, - child: Row( - children: [ - const Icon(Icons.logout), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).leaveAlbum), - ], + if (widget.collection != null) + EntePopupMenuItem( + value: AlbumPopupAction.playOnTv, + context.l10n.playOnTv, + icon: Icons.tv_outlined, ), - ), - ); - } - if (galleryType == GalleryType.localFolder) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.freeUpSpace, - child: Row( - children: [ - const Icon(Icons.delete_sweep_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).freeUpDeviceSpace), - ], + if (galleryType.canDelete()) + EntePopupMenuItem( + isQuickLink ? S.of(context).removeLink : S.of(context).deleteAlbum, + value: isQuickLink + ? AlbumPopupAction.removeLink + : AlbumPopupAction.delete, + icon: isQuickLink + ? Icons.remove_circle_outline + : Icons.delete_outline, ), - ), - ); - } + if (galleryType == GalleryType.sharedCollection) + EntePopupMenuItem( + widget.collection!.hasShareeArchived() + ? S.of(context).unarchiveAlbum + : S.of(context).archiveAlbum, + value: AlbumPopupAction.sharedArchive, + icon: widget.collection!.hasShareeArchived() + ? 
Icons.unarchive + : Icons.archive_outlined, + ), + if (galleryType == GalleryType.sharedCollection) + EntePopupMenuItem( + S.of(context).leaveAlbum, + value: AlbumPopupAction.leave, + icon: Icons.logout, + ), + if (galleryType == GalleryType.localFolder) + EntePopupMenuItem( + S.of(context).freeUpDeviceSpace, + value: AlbumPopupAction.freeUpSpace, + icon: Icons.delete_sweep_outlined, + ), + ], + ); if (items.isNotEmpty) { actions.add( PopupMenuButton( @@ -603,7 +481,7 @@ class _GalleryAppBarWidgetState extends State { } else if (value == AlbumPopupAction.leave) { await _leaveAlbum(context); } else if (value == AlbumPopupAction.playOnTv) { - await castAlbum(); + await _castChoiceDialog(); } else if (value == AlbumPopupAction.freeUpSpace) { await _deleteBackedUpFiles(context); } else if (value == AlbumPopupAction.setCover) { @@ -838,10 +716,62 @@ class _GalleryAppBarWidgetState extends State { setState(() {}); } - Future castAlbum() async { + Future _castChoiceDialog() async { final gw = CastGateway(NetworkClient.instance.enteDio); + if (castService.getActiveSessions().isNotEmpty) { + await showChoiceDialog( + context, + title: S.of(context).stopCastingTitle, + firstButtonLabel: S.of(context).yes, + secondButtonLabel: S.of(context).no, + body: S.of(context).stopCastingBody, + firstButtonOnTap: () async { + gw.revokeAllTokens().ignore(); + await castService.closeActiveCasts(); + }, + ); + castNotifier.value++; + return; + } + // stop any existing cast session gw.revokeAllTokens().ignore(); + if (!Platform.isAndroid) { + await _pairWithPin(gw, ''); + } else { + final result = await showDialog( + context: context, + barrierDismissible: true, + builder: (BuildContext context) { + return const CastChooseDialog(); + }, + ); + if (result == null) { + return; + } + // wait to allow the dialog to close + await Future.delayed(const Duration(milliseconds: 100)); + if (result == ButtonAction.first) { + await showDialog( + context: context, + barrierDismissible: true, + builder: 
(BuildContext bContext) { + return AutoCastDialog( + (device) async { + await _castPair(bContext, gw, device); + Navigator.pop(bContext); + }, + ); + }, + ); + } + if (result == ButtonAction.second) { + await _pairWithPin(gw, ''); + } + } + } + + Future _pairWithPin(CastGateway gw, String code) async { await showTextInputDialog( context, title: context.l10n.playOnTv, @@ -849,28 +779,63 @@ class _GalleryAppBarWidgetState extends State { submitButtonLabel: S.of(context).pair, textInputType: TextInputType.streetAddress, hintText: context.l10n.deviceCodeHint, + showOnlyLoadingState: true, + alwaysShowSuccessState: false, + initialValue: code, onSubmit: (String text) async { - try { - final code = text.trim(); - final String? publicKey = await gw.getPublicKey(code); - if (publicKey == null) { - showToast(context, S.of(context).deviceNotFound); - return; - } - final String castToken = const Uuid().v4().toString(); - final castPayload = CollectionsService.instance - .getCastData(castToken, widget.collection!, publicKey); - await gw.publishCastPayload( - code, - castPayload, - widget.collection!.id, - castToken, - ); - } catch (e, s) { - _logger.severe("Failed to cast album", e, s); - await showGenericErrorDialog(context: context, error: e); + final bool paired = await _castPair(context, gw, text); + if (!paired) { + Future.delayed(Duration.zero, () => _pairWithPin(gw, code)); } }, ); } + + String lastCode = ''; + Future _castPair( + BuildContext bContext, + CastGateway gw, + String code, + ) async { + try { + if (lastCode == code) { + return false; + } + lastCode = code; + _logger.info("Casting album to device with code $code"); + final String? 
publicKey = await gw.getPublicKey(code); + if (publicKey == null) { + showToast(context, S.of(context).deviceNotFound); + + return false; + } + final String castToken = const Uuid().v4().toString(); + final castPayload = CollectionsService.instance + .getCastData(castToken, widget.collection!, publicKey); + await gw.publishCastPayload( + code, + castPayload, + widget.collection!.id, + castToken, + ); + _logger.info("cast album completed"); + // showToast(bContext, S.of(context).pairingComplete); + castNotifier.value++; + return true; + } catch (e, s) { + lastCode = ''; + _logger.severe("Failed to cast album", e, s); + if (e is CastIPMismatchException) { + await showErrorDialog( + context, + S.of(context).castIPMismatchTitle, + S.of(context).castIPMismatchBody, + ); + } else { + await showGenericErrorDialog(context: bContext, error: e); + } + castNotifier.value++; + return false; + } + } } diff --git a/mobile/lib/utils/dialog_util.dart b/mobile/lib/utils/dialog_util.dart index f9bd733ae5..f6e9eb021c 100644 --- a/mobile/lib/utils/dialog_util.dart +++ b/mobile/lib/utils/dialog_util.dart @@ -5,7 +5,7 @@ import "package:flutter/services.dart"; import "package:photos/generated/l10n.dart"; import 'package:photos/models/button_result.dart'; import 'package:photos/models/typedefs.dart'; -import "package:photos/services/feature_flag_service.dart"; +import "package:photos/service_locator.dart"; import 'package:photos/theme/colors.dart'; import 'package:photos/ui/common/loading_widget.dart'; import 'package:photos/ui/common/progress_dialog.dart'; @@ -91,8 +91,7 @@ String parseErrorForUI( } } // return generic error if the user is not internal and the error is not in debug mode - if (!(FeatureFlagService.instance.isInternalUserOrDebugBuild() && - kDebugMode)) { + if (!(flagService.internalUser && kDebugMode)) { return genericError; } String errorInfo = ""; diff --git a/mobile/lib/utils/diff_fetcher.dart b/mobile/lib/utils/diff_fetcher.dart index 63a25099d8..e48c1e19ab 100644 
--- a/mobile/lib/utils/diff_fetcher.dart +++ b/mobile/lib/utils/diff_fetcher.dart @@ -27,8 +27,9 @@ class DiffFetcher { final bool hasMore = response.data["hasMore"] as bool; final startTime = DateTime.now(); late Set existingUploadIDs; - if(diff.isNotEmpty) { - existingUploadIDs = await FilesDB.instance.getUploadedFileIDs(collectionID); + if (diff.isNotEmpty) { + existingUploadIDs = + await FilesDB.instance.getUploadedFileIDs(collectionID); } final deletedFiles = []; final updatedFiles = []; @@ -96,8 +97,7 @@ class DiffFetcher { updatedFiles.add(file); } _logger.info('[Collection-$collectionID] parsed ${diff.length} ' - 'diff items ( ${updatedFiles.length} updated) in ${DateTime.now() - .difference(startTime).inMilliseconds}ms'); + 'diff items ( ${updatedFiles.length} updated) in ${DateTime.now().difference(startTime).inMilliseconds}ms'); return Diff(updatedFiles, deletedFiles, hasMore, latestUpdatedAtTime); } catch (e, s) { _logger.severe(e, s); diff --git a/mobile/lib/utils/file_download_util.dart b/mobile/lib/utils/file_download_util.dart index f99a435276..a8847e3fdb 100644 --- a/mobile/lib/utils/file_download_util.dart +++ b/mobile/lib/utils/file_download_util.dart @@ -4,14 +4,23 @@ import "package:computer/computer.dart"; import 'package:dio/dio.dart'; import "package:flutter/foundation.dart"; import 'package:logging/logging.dart'; +import 'package:path/path.dart' as file_path; +import "package:photo_manager/photo_manager.dart"; import 'package:photos/core/configuration.dart'; +import "package:photos/core/event_bus.dart"; import 'package:photos/core/network/network.dart'; +import "package:photos/db/files_db.dart"; +import "package:photos/events/local_photos_updated_event.dart"; import 'package:photos/models/file/file.dart'; import "package:photos/models/file/file_type.dart"; +import "package:photos/models/ignored_file.dart"; import 'package:photos/services/collections_service.dart'; +import "package:photos/services/ignored_files_service.dart"; +import 
"package:photos/services/local_sync_service.dart"; import 'package:photos/utils/crypto_util.dart'; import "package:photos/utils/data_util.dart"; import "package:photos/utils/fake_progress.dart"; +import "package:photos/utils/file_util.dart"; final _logger = Logger("file_download_util"); @@ -115,6 +124,97 @@ Future getFileKeyUsingBgWorker(EnteFile file) async { ); } +Future downloadToGallery(EnteFile file) async { + try { + final FileType type = file.fileType; + final bool downloadLivePhotoOnDroid = + type == FileType.livePhoto && Platform.isAndroid; + AssetEntity? savedAsset; + final File? fileToSave = await getFile(file); + //Disabling notifications for assets changing to insert the file into + //files db before triggering a sync. + await PhotoManager.stopChangeNotify(); + if (type == FileType.image) { + savedAsset = await PhotoManager.editor + .saveImageWithPath(fileToSave!.path, title: file.title!); + } else if (type == FileType.video) { + savedAsset = + await PhotoManager.editor.saveVideo(fileToSave!, title: file.title!); + } else if (type == FileType.livePhoto) { + final File? 
liveVideoFile = + await getFileFromServer(file, liveVideo: true); + if (liveVideoFile == null) { + throw AssertionError("Live video can not be null"); + } + if (downloadLivePhotoOnDroid) { + await _saveLivePhotoOnDroid(fileToSave!, liveVideoFile, file); + } else { + savedAsset = await PhotoManager.editor.darwin.saveLivePhoto( + imageFile: fileToSave!, + videoFile: liveVideoFile, + title: file.title!, + ); + } + } + + if (savedAsset != null) { + file.localID = savedAsset.id; + await FilesDB.instance.insert(file); + Bus.instance.fire( + LocalPhotosUpdatedEvent( + [file], + source: "download", + ), + ); + } else if (!downloadLivePhotoOnDroid && savedAsset == null) { + _logger.severe('Failed to save assert of type $type'); + } + } catch (e) { + _logger.severe("Failed to save file", e); + rethrow; + } finally { + await PhotoManager.startChangeNotify(); + LocalSyncService.instance.checkAndSync().ignore(); + } +} + +Future _saveLivePhotoOnDroid( + File image, + File video, + EnteFile enteFile, +) async { + debugPrint("Downloading LivePhoto on Droid"); + AssetEntity? savedAsset = await (PhotoManager.editor + .saveImageWithPath(image.path, title: enteFile.title!)); + if (savedAsset == null) { + throw Exception("Failed to save image of live photo"); + } + IgnoredFile ignoreVideoFile = IgnoredFile( + savedAsset.id, + savedAsset.title ?? '', + savedAsset.relativePath ?? 'remoteDownload', + "remoteDownload", + ); + await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); + final videoTitle = file_path.basenameWithoutExtension(enteFile.title!) + + file_path.extension(video.path); + savedAsset = (await (PhotoManager.editor.saveVideo( + video, + title: videoTitle, + ))); + if (savedAsset == null) { + throw Exception("Failed to save video of live photo"); + } + + ignoreVideoFile = IgnoredFile( + savedAsset.id, + savedAsset.title ?? videoTitle, + savedAsset.relativePath ?? 
'remoteDownload', + "remoteDownload", + ); + await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); +} + Uint8List _decryptFileKey(Map args) { final encryptedKey = CryptoUtil.base642bin(args["encryptedKey"]); final nonce = CryptoUtil.base642bin(args["keyDecryptionNonce"]); diff --git a/mobile/lib/utils/file_uploader.dart b/mobile/lib/utils/file_uploader.dart index ec8c213ac1..ba1b23ec37 100644 --- a/mobile/lib/utils/file_uploader.dart +++ b/mobile/lib/utils/file_uploader.dart @@ -29,8 +29,8 @@ import "package:photos/models/metadata/file_magic.dart"; import 'package:photos/models/upload_url.dart'; import "package:photos/models/user_details.dart"; import "package:photos/module/upload/service/multipart.dart"; +import "package:photos/service_locator.dart"; import 'package:photos/services/collections_service.dart'; -import "package:photos/services/feature_flag_service.dart"; import "package:photos/services/file_magic_service.dart"; import 'package:photos/services/local_sync_service.dart'; import 'package:photos/services/sync_service.dart'; @@ -120,7 +120,7 @@ class FileUploader { _enteDio, _dio, UploadLocksDB.instance, - FeatureFlagService.instance, + flagService, ); Bus.instance.on().listen((event) { if (event.type == EventType.deletedFromDevice || @@ -179,7 +179,7 @@ class FileUploader { ); return CollectionsService.instance - .addToCollection(collectionID, [uploadedFile]).then((aVoid) { + .addOrCopyToCollection(collectionID, [uploadedFile]).then((aVoid) { return uploadedFile; }); }); @@ -365,10 +365,16 @@ class FileUploader { final List connections = await (Connectivity().checkConnectivity()); bool canUploadUnderCurrentNetworkConditions = true; - if (connections.any((element) => element == ConnectivityResult.mobile)) { - canUploadUnderCurrentNetworkConditions = - Configuration.instance.shouldBackupOverMobileData(); + if (!Configuration.instance.shouldBackupOverMobileData()) { + if (connections.any((element) => element == ConnectivityResult.mobile)) { + 
canUploadUnderCurrentNetworkConditions = false; + } else { + _logger.info( + "mobileBackupDisabled, backing up with connections: ${connections.map((e) => e.name).toString()}", + ); + } } + if (!canUploadUnderCurrentNetworkConditions) { throw WiFiUnavailableError(); } @@ -378,7 +384,13 @@ class FileUploader { if (Platform.isAndroid) { final bool hasPermission = await Permission.accessMediaLocation.isGranted; if (!hasPermission) { - throw NoMediaLocationAccessError(); + final permissionStatus = await Permission.accessMediaLocation.request(); + if (!permissionStatus.isGranted) { + _logger.severe( + "Media location access denied with permission status: ${permissionStatus.name}", + ); + throw NoMediaLocationAccessError(); + } } } } @@ -409,6 +421,16 @@ class FileUploader { _logger.severe('Trying to upload file with missing localID'); return file; } + if (!CollectionsService.instance.allowUpload(collectionID)) { + _logger.warning( + 'Upload not allowed for collection $collectionID', + ); + if (!file.isUploaded && file.generatedID != null) { + _logger.info("Deleting file entry for " + file.toString()); + await FilesDB.instance.deleteByGeneratedID(file.generatedID!); + } + return file; + } final String lockKey = file.localID!; diff --git a/mobile/lib/utils/multipart_upload_util.dart b/mobile/lib/utils/multipart_upload_util.dart new file mode 100644 index 0000000000..6b9ccafb97 --- /dev/null +++ b/mobile/lib/utils/multipart_upload_util.dart @@ -0,0 +1,157 @@ +// ignore_for_file: implementation_imports + +import "dart:io"; + +import "package:dio/dio.dart"; +import "package:logging/logging.dart"; +import "package:photos/core/constants.dart"; +import "package:photos/core/network/network.dart"; +import 'package:photos/module/upload/model/xml.dart'; +import "package:photos/service_locator.dart"; + +final _enteDio = NetworkClient.instance.enteDio; +final _dio = NetworkClient.instance.getDio(); + +class PartETag extends XmlParsableObject { + final int partNumber; + final String 
eTag; + + PartETag(this.partNumber, this.eTag); + + @override + String get elementName => "Part"; + + @override + Map toMap() { + return { + "PartNumber": partNumber, + "ETag": eTag, + }; + } +} + +class MultipartUploadURLs { + final String objectKey; + final List partsURLs; + final String completeURL; + + MultipartUploadURLs({ + required this.objectKey, + required this.partsURLs, + required this.completeURL, + }); + + factory MultipartUploadURLs.fromMap(Map map) { + return MultipartUploadURLs( + objectKey: map["urls"]["objectKey"], + partsURLs: (map["urls"]["partURLs"] as List).cast(), + completeURL: map["urls"]["completeURL"], + ); + } +} + +Future calculatePartCount(int fileSize) async { + final partCount = (fileSize / multipartPartSize).ceil(); + return partCount; +} + +Future getMultipartUploadURLs(int count) async { + try { + assert( + flagService.internalUser, + "Multipart upload should not be enabled for external users.", + ); + final response = await _enteDio.get( + "/files/multipart-upload-urls", + queryParameters: { + "count": count, + }, + ); + + return MultipartUploadURLs.fromMap(response.data); + } on Exception catch (e) { + Logger("MultipartUploadURL").severe(e); + rethrow; + } +} + +Future putMultipartFile( + MultipartUploadURLs urls, + File encryptedFile, +) async { + // upload individual parts and get their etags + final etags = await uploadParts(urls.partsURLs, encryptedFile); + + // complete the multipart upload + await completeMultipartUpload(etags, urls.completeURL); + + return urls.objectKey; +} + +Future> uploadParts( + List partsURLs, + File encryptedFile, +) async { + final partsLength = partsURLs.length; + final etags = {}; + + for (int i = 0; i < partsLength; i++) { + final partURL = partsURLs[i]; + final isLastPart = i == partsLength - 1; + final fileSize = isLastPart + ? 
encryptedFile.lengthSync() % multipartPartSize + : multipartPartSize; + + final response = await _dio.put( + partURL, + data: encryptedFile.openRead( + i * multipartPartSize, + isLastPart ? null : multipartPartSize, + ), + options: Options( + headers: { + Headers.contentLengthHeader: fileSize, + }, + ), + ); + + final eTag = response.headers.value("etag"); + + if (eTag?.isEmpty ?? true) { + throw Exception('ETAG_MISSING'); + } + + etags[i] = eTag!; + } + + return etags; +} + +Future completeMultipartUpload( + Map partEtags, + String completeURL, +) async { + final body = convertJs2Xml({ + 'CompleteMultipartUpload': partEtags.entries + .map( + (e) => PartETag( + e.key + 1, + e.value, + ), + ) + .toList(), + }).replaceAll('"', '').replaceAll('"', ''); + + try { + await _dio.post( + completeURL, + data: body, + options: Options( + contentType: "text/xml", + ), + ); + } catch (e) { + Logger("MultipartUpload").severe(e); + rethrow; + } +} diff --git a/mobile/plugins/ente_cast/.metadata b/mobile/plugins/ente_cast/.metadata new file mode 100644 index 0000000000..9fc7ede54d --- /dev/null +++ b/mobile/plugins/ente_cast/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: 0b8abb4724aa590dd0f429683339b1e045a1594d + channel: stable + +project_type: plugin diff --git a/mobile/plugins/ente_cast/analysis_options.yaml b/mobile/plugins/ente_cast/analysis_options.yaml new file mode 100644 index 0000000000..f04c6cf0f3 --- /dev/null +++ b/mobile/plugins/ente_cast/analysis_options.yaml @@ -0,0 +1 @@ +include: ../../analysis_options.yaml diff --git a/mobile/plugins/ente_cast/lib/ente_cast.dart b/mobile/plugins/ente_cast/lib/ente_cast.dart new file mode 100644 index 0000000000..f421a92970 --- /dev/null +++ b/mobile/plugins/ente_cast/lib/ente_cast.dart @@ -0,0 +1,2 @@ +export 'src/model.dart'; +export 'src/service.dart'; diff --git a/mobile/plugins/ente_cast/lib/src/model.dart b/mobile/plugins/ente_cast/lib/src/model.dart new file mode 100644 index 0000000000..e86582f76f --- /dev/null +++ b/mobile/plugins/ente_cast/lib/src/model.dart @@ -0,0 +1,5 @@ +// create enum for type of message for cast +enum CastMessageType { + pairCode, + alreadyCasting, +} diff --git a/mobile/plugins/ente_cast/lib/src/service.dart b/mobile/plugins/ente_cast/lib/src/service.dart new file mode 100644 index 0000000000..2ab0961dbd --- /dev/null +++ b/mobile/plugins/ente_cast/lib/src/service.dart @@ -0,0 +1,18 @@ +import "package:ente_cast/src/model.dart"; +import "package:flutter/widgets.dart"; + +abstract class CastService { + bool get isSupported; + Future> searchDevices(); + Future connectDevice( + BuildContext context, + Object device, { + int? collectionID, + // callback that take a map of string, dynamic + void Function(Map>)? 
onMessage, + }); + // returns a map of sessionID to deviceNames + Map getActiveSessions(); + + Future closeActiveCasts(); +} diff --git a/mobile/plugins/ente_cast/pubspec.yaml b/mobile/plugins/ente_cast/pubspec.yaml new file mode 100644 index 0000000000..967e147e91 --- /dev/null +++ b/mobile/plugins/ente_cast/pubspec.yaml @@ -0,0 +1,19 @@ +name: ente_cast +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=3.3.0 <4.0.0' + +dependencies: + collection: + dio: ^4.0.6 + flutter: + sdk: flutter + shared_preferences: ^2.0.5 + stack_trace: + +dev_dependencies: + flutter_lints: + +flutter: diff --git a/mobile/plugins/ente_cast_none/.metadata b/mobile/plugins/ente_cast_none/.metadata new file mode 100644 index 0000000000..9fc7ede54d --- /dev/null +++ b/mobile/plugins/ente_cast_none/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: 0b8abb4724aa590dd0f429683339b1e045a1594d + channel: stable + +project_type: plugin diff --git a/mobile/plugins/ente_cast_none/analysis_options.yaml b/mobile/plugins/ente_cast_none/analysis_options.yaml new file mode 100644 index 0000000000..f04c6cf0f3 --- /dev/null +++ b/mobile/plugins/ente_cast_none/analysis_options.yaml @@ -0,0 +1 @@ +include: ../../analysis_options.yaml diff --git a/mobile/plugins/ente_cast_none/lib/ente_cast_none.dart b/mobile/plugins/ente_cast_none/lib/ente_cast_none.dart new file mode 100644 index 0000000000..66a7132d8d --- /dev/null +++ b/mobile/plugins/ente_cast_none/lib/ente_cast_none.dart @@ -0,0 +1 @@ +export 'src/service.dart'; diff --git a/mobile/plugins/ente_cast_none/lib/src/service.dart b/mobile/plugins/ente_cast_none/lib/src/service.dart new file mode 100644 index 0000000000..c781889733 --- /dev/null +++ b/mobile/plugins/ente_cast_none/lib/src/service.dart @@ -0,0 +1,35 @@ +import "package:ente_cast/ente_cast.dart"; +import "package:flutter/widgets.dart"; + +class CastServiceImpl extends CastService { + @override + Future connectDevice( + BuildContext context, + Object device, { + int? collectionID, + void Function(Map>)? 
onMessage, + }) { + throw UnimplementedError(); + } + + @override + bool get isSupported => false; + + @override + Future> searchDevices() { + // TODO: implement searchDevices + throw UnimplementedError(); + } + + @override + Future closeActiveCasts() { + // TODO: implement closeActiveCasts + throw UnimplementedError(); + } + + @override + Map getActiveSessions() { + // TODO: implement getActiveSessions + throw UnimplementedError(); + } +} diff --git a/mobile/plugins/ente_cast_none/pubspec.yaml b/mobile/plugins/ente_cast_none/pubspec.yaml new file mode 100644 index 0000000000..a4559fac53 --- /dev/null +++ b/mobile/plugins/ente_cast_none/pubspec.yaml @@ -0,0 +1,18 @@ +name: ente_cast_none +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=3.3.0 <4.0.0' + +dependencies: + ente_cast: + path: ../ente_cast + flutter: + sdk: flutter + stack_trace: + +dev_dependencies: + flutter_lints: + +flutter: diff --git a/mobile/plugins/ente_cast_normal/.metadata b/mobile/plugins/ente_cast_normal/.metadata new file mode 100644 index 0000000000..9fc7ede54d --- /dev/null +++ b/mobile/plugins/ente_cast_normal/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: 0b8abb4724aa590dd0f429683339b1e045a1594d + channel: stable + +project_type: plugin diff --git a/mobile/plugins/ente_cast_normal/analysis_options.yaml b/mobile/plugins/ente_cast_normal/analysis_options.yaml new file mode 100644 index 0000000000..f04c6cf0f3 --- /dev/null +++ b/mobile/plugins/ente_cast_normal/analysis_options.yaml @@ -0,0 +1 @@ +include: ../../analysis_options.yaml diff --git a/mobile/plugins/ente_cast_normal/lib/ente_cast_normal.dart b/mobile/plugins/ente_cast_normal/lib/ente_cast_normal.dart new file mode 100644 index 0000000000..66a7132d8d --- /dev/null +++ b/mobile/plugins/ente_cast_normal/lib/ente_cast_normal.dart @@ -0,0 +1 @@ +export 'src/service.dart'; diff --git a/mobile/plugins/ente_cast_normal/lib/src/service.dart b/mobile/plugins/ente_cast_normal/lib/src/service.dart new file mode 100644 index 0000000000..8a1f2aaf16 --- /dev/null +++ b/mobile/plugins/ente_cast_normal/lib/src/service.dart @@ -0,0 +1,105 @@ +import "dart:developer" as dev; + +import "package:cast/cast.dart"; +import "package:ente_cast/ente_cast.dart"; +import "package:flutter/material.dart"; + +class CastServiceImpl extends CastService { + final String _appId = 'F5BCEC64'; + final String _pairRequestNamespace = 'urn:x-cast:pair-request'; + final Map collectionIDToSessions = {}; + + @override + Future connectDevice( + BuildContext context, + Object device, { + int? collectionID, + void Function(Map>)? 
onMessage, + }) async { + final CastDevice castDevice = device as CastDevice; + final session = await CastSessionManager().startSession(castDevice); + session.messageStream.listen((message) { + if (message['type'] == "RECEIVER_STATUS") { + dev.log( + "got RECEIVER_STATUS, Send request to pair", + name: "CastServiceImpl", + ); + session.sendMessage(_pairRequestNamespace, { + "collectionID": collectionID, + }); + } else { + if (onMessage != null && message.containsKey("code")) { + onMessage( + { + CastMessageType.pairCode: message, + }, + ); + } else { + print('receive message: $message'); + } + } + }); + + session.stateStream.listen((state) { + if (state == CastSessionState.connected) { + debugPrint("Send request to pair"); + session.sendMessage(_pairRequestNamespace, {}); + } else if (state == CastSessionState.closed) { + dev.log('Session closed', name: 'CastServiceImpl'); + } + }); + + debugPrint("Send request to launch"); + session.sendMessage(CastSession.kNamespaceReceiver, { + 'type': 'LAUNCH', + 'appId': _appId, // set the appId of your app here + }); + // session.sendMessage('urn:x-cast:pair-request', {}); + } + + @override + Future> searchDevices() { + return CastDiscoveryService() + .search(timeout: const Duration(seconds: 7)) + .then((devices) { + return devices.map((device) => (device.name, device)).toList(); + }); + } + + @override + bool get isSupported => true; + + @override + Future closeActiveCasts() { + final sessions = CastSessionManager().sessions; + for (final session in sessions) { + debugPrint("send close message for ${session.sessionId}"); + Future(() { + session.sendMessage(CastSession.kNamespaceConnection, { + 'type': 'CLOSE', + }); + }).timeout( + const Duration(seconds: 5), + onTimeout: () { + debugPrint('sendMessage timed out after 5 seconds'); + }, + ); + debugPrint("close session ${session.sessionId}"); + session.close(); + } + CastSessionManager().sessions.clear(); + return Future.value(); + } + + @override + Map getActiveSessions() { 
+ final sessions = CastSessionManager().sessions; + final Map result = {}; + for (final session in sessions) { + if (session.state == CastSessionState.connected) { + result[session.sessionId] = session.state.toString(); + } + } + return result; + } +} diff --git a/mobile/plugins/ente_cast_normal/pubspec.lock b/mobile/plugins/ente_cast_normal/pubspec.lock new file mode 100644 index 0000000000..86051800c6 --- /dev/null +++ b/mobile/plugins/ente_cast_normal/pubspec.lock @@ -0,0 +1,333 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + async: + dependency: transitive + description: + name: async + sha256: "947bfcf187f74dbc5e146c9eb9c0f10c9f8b30743e341481c1e2ed3ecc18c20c" + url: "https://pub.dev" + source: hosted + version: "2.11.0" + cast: + dependency: "direct main" + description: + path: "." + ref: multicast_version + resolved-ref: "1f39cd4d6efa9363e77b2439f0317bae0c92dda1" + url: "https://github.com/guyluz11/flutter_cast.git" + source: git + version: "2.0.9" + characters: + dependency: transitive + description: + name: characters + sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605" + url: "https://pub.dev" + source: hosted + version: "1.3.0" + collection: + dependency: transitive + description: + name: collection + sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a + url: "https://pub.dev" + source: hosted + version: "1.18.0" + dio: + dependency: transitive + description: + name: dio + sha256: "7d328c4d898a61efc3cd93655a0955858e29a0aa647f0f9e02d59b3bb275e2e8" + url: "https://pub.dev" + source: hosted + version: "4.0.6" + ente_cast: + dependency: "direct main" + description: + path: "../ente_cast" + relative: true + source: path + version: "0.0.1" + ffi: + dependency: transitive + description: + name: ffi + sha256: "493f37e7df1804778ff3a53bd691d8692ddf69702cf4c1c1096a2e41b4779e21" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + file: + dependency: transitive + 
description: + name: file + sha256: "5fc22d7c25582e38ad9a8515372cd9a93834027aacf1801cf01164dac0ffa08c" + url: "https://pub.dev" + source: hosted + version: "7.0.0" + fixnum: + dependency: transitive + description: + name: fixnum + sha256: "25517a4deb0c03aa0f32fd12db525856438902d9c16536311e76cdc57b31d7d1" + url: "https://pub.dev" + source: hosted + version: "1.1.0" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_lints: + dependency: "direct dev" + description: + name: flutter_lints + sha256: "9e8c3858111da373efc5aa341de011d9bd23e2c5c5e0c62bccf32438e192d7b1" + url: "https://pub.dev" + source: hosted + version: "3.0.2" + flutter_web_plugins: + dependency: transitive + description: flutter + source: sdk + version: "0.0.0" + http: + dependency: transitive + description: + name: http + sha256: "761a297c042deedc1ffbb156d6e2af13886bb305c2a343a4d972504cd67dd938" + url: "https://pub.dev" + source: hosted + version: "1.2.1" + http_parser: + dependency: transitive + description: + name: http_parser + sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b" + url: "https://pub.dev" + source: hosted + version: "4.0.2" + lints: + dependency: transitive + description: + name: lints + sha256: cbf8d4b858bb0134ef3ef87841abdf8d63bfc255c266b7bf6b39daa1085c4290 + url: "https://pub.dev" + source: hosted + version: "3.0.0" + material_color_utilities: + dependency: transitive + description: + name: material_color_utilities + sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a" + url: "https://pub.dev" + source: hosted + version: "0.8.0" + meta: + dependency: transitive + description: + name: meta + sha256: d584fa6707a52763a52446f02cc621b077888fb63b93bbcb1143a7be5a0c0c04 + url: "https://pub.dev" + source: hosted + version: "1.11.0" + multicast_dns: + dependency: transitive + description: + name: multicast_dns + sha256: "316cc47a958d4bd3c67bd238fe8b44fdfb6133bad89cb191c0c3bd3edb14e296" + url: 
"https://pub.dev" + source: hosted + version: "0.3.2+6" + path: + dependency: transitive + description: + name: path + sha256: "087ce49c3f0dc39180befefc60fdb4acd8f8620e5682fe2476afd0b3688bb4af" + url: "https://pub.dev" + source: hosted + version: "1.9.0" + path_provider_linux: + dependency: transitive + description: + name: path_provider_linux + sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279 + url: "https://pub.dev" + source: hosted + version: "2.2.1" + path_provider_platform_interface: + dependency: transitive + description: + name: path_provider_platform_interface + sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + path_provider_windows: + dependency: transitive + description: + name: path_provider_windows + sha256: "8bc9f22eee8690981c22aa7fc602f5c85b497a6fb2ceb35ee5a5e5ed85ad8170" + url: "https://pub.dev" + source: hosted + version: "2.2.1" + platform: + dependency: transitive + description: + name: platform + sha256: "12220bb4b65720483f8fa9450b4332347737cf8213dd2840d8b2c823e47243ec" + url: "https://pub.dev" + source: hosted + version: "3.1.4" + plugin_platform_interface: + dependency: transitive + description: + name: plugin_platform_interface + sha256: "4820fbfdb9478b1ebae27888254d445073732dae3d6ea81f0b7e06d5dedc3f02" + url: "https://pub.dev" + source: hosted + version: "2.1.8" + protobuf: + dependency: transitive + description: + name: protobuf + sha256: "68645b24e0716782e58948f8467fd42a880f255096a821f9e7d0ec625b00c84d" + url: "https://pub.dev" + source: hosted + version: "3.1.0" + shared_preferences: + dependency: transitive + description: + name: shared_preferences + sha256: d3bbe5553a986e83980916ded2f0b435ef2e1893dfaa29d5a7a790d0eca12180 + url: "https://pub.dev" + source: hosted + version: "2.2.3" + shared_preferences_android: + dependency: transitive + description: + name: shared_preferences_android + sha256: 
"1ee8bf911094a1b592de7ab29add6f826a7331fb854273d55918693d5364a1f2" + url: "https://pub.dev" + source: hosted + version: "2.2.2" + shared_preferences_foundation: + dependency: transitive + description: + name: shared_preferences_foundation + sha256: "7708d83064f38060c7b39db12aefe449cb8cdc031d6062280087bc4cdb988f5c" + url: "https://pub.dev" + source: hosted + version: "2.3.5" + shared_preferences_linux: + dependency: transitive + description: + name: shared_preferences_linux + sha256: "9f2cbcf46d4270ea8be39fa156d86379077c8a5228d9dfdb1164ae0bb93f1faa" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + shared_preferences_platform_interface: + dependency: transitive + description: + name: shared_preferences_platform_interface + sha256: "22e2ecac9419b4246d7c22bfbbda589e3acf5c0351137d87dd2939d984d37c3b" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + shared_preferences_web: + dependency: transitive + description: + name: shared_preferences_web + sha256: "9aee1089b36bd2aafe06582b7d7817fd317ef05fc30e6ba14bff247d0933042a" + url: "https://pub.dev" + source: hosted + version: "2.3.0" + shared_preferences_windows: + dependency: transitive + description: + name: shared_preferences_windows + sha256: "841ad54f3c8381c480d0c9b508b89a34036f512482c407e6df7a9c4aa2ef8f59" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_span: + dependency: transitive + description: + name: source_span + sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c" + url: "https://pub.dev" + source: hosted + version: "1.10.0" + stack_trace: + dependency: "direct main" + description: + name: stack_trace + sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b" + url: "https://pub.dev" + source: hosted + version: "1.11.1" + string_scanner: + dependency: transitive + description: + name: string_scanner + sha256: 
"556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde" + url: "https://pub.dev" + source: hosted + version: "1.2.0" + term_glyph: + dependency: transitive + description: + name: term_glyph + sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84 + url: "https://pub.dev" + source: hosted + version: "1.2.1" + typed_data: + dependency: transitive + description: + name: typed_data + sha256: facc8d6582f16042dd49f2463ff1bd6e2c9ef9f3d5da3d9b087e244a7b564b3c + url: "https://pub.dev" + source: hosted + version: "1.3.2" + vector_math: + dependency: transitive + description: + name: vector_math + sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803" + url: "https://pub.dev" + source: hosted + version: "2.1.4" + web: + dependency: transitive + description: + name: web + sha256: "97da13628db363c635202ad97068d47c5b8aa555808e7a9411963c533b449b27" + url: "https://pub.dev" + source: hosted + version: "0.5.1" + win32: + dependency: transitive + description: + name: win32 + sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a" + url: "https://pub.dev" + source: hosted + version: "5.4.0" + xdg_directories: + dependency: transitive + description: + name: xdg_directories + sha256: faea9dee56b520b55a566385b84f2e8de55e7496104adada9962e0bd11bcff1d + url: "https://pub.dev" + source: hosted + version: "1.0.4" +sdks: + dart: ">=3.3.0 <4.0.0" + flutter: ">=3.19.0" diff --git a/mobile/plugins/ente_cast_normal/pubspec.yaml b/mobile/plugins/ente_cast_normal/pubspec.yaml new file mode 100644 index 0000000000..c97d70a84b --- /dev/null +++ b/mobile/plugins/ente_cast_normal/pubspec.yaml @@ -0,0 +1,22 @@ +name: ente_cast_normal +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=3.3.0 <4.0.0' + +dependencies: + cast: + git: + url: https://github.com/guyluz11/flutter_cast.git + ref: multicast_version + ente_cast: + path: ../ente_cast + flutter: + sdk: flutter + stack_trace: + +dev_dependencies: + flutter_lints: + +flutter: 
diff --git a/mobile/plugins/ente_feature_flag/.metadata b/mobile/plugins/ente_feature_flag/.metadata new file mode 100644 index 0000000000..9fc7ede54d --- /dev/null +++ b/mobile/plugins/ente_feature_flag/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. + +version: + revision: 0b8abb4724aa590dd0f429683339b1e045a1594d + channel: stable + +project_type: plugin diff --git a/mobile/plugins/ente_feature_flag/analysis_options.yaml b/mobile/plugins/ente_feature_flag/analysis_options.yaml new file mode 100644 index 0000000000..fac60e247c --- /dev/null +++ b/mobile/plugins/ente_feature_flag/analysis_options.yaml @@ -0,0 +1 @@ +include: ../../analysis_options.yaml \ No newline at end of file diff --git a/mobile/plugins/ente_feature_flag/lib/ente_feature_flag.dart b/mobile/plugins/ente_feature_flag/lib/ente_feature_flag.dart new file mode 100644 index 0000000000..66a7132d8d --- /dev/null +++ b/mobile/plugins/ente_feature_flag/lib/ente_feature_flag.dart @@ -0,0 +1 @@ +export 'src/service.dart'; diff --git a/mobile/plugins/ente_feature_flag/lib/src/model.dart b/mobile/plugins/ente_feature_flag/lib/src/model.dart new file mode 100644 index 0000000000..49b2921489 --- /dev/null +++ b/mobile/plugins/ente_feature_flag/lib/src/model.dart @@ -0,0 +1,66 @@ +import "dart:convert"; +import "dart:io"; + +import "package:flutter/foundation.dart"; + +class RemoteFlags { + final bool enableStripe; + final bool disableCFWorker; + final bool mapEnabled; + final bool faceSearchEnabled; + final bool passKeyEnabled; + final bool recoveryKeyVerified; + final bool internalUser; + final bool betaUser; + + RemoteFlags({ + required this.enableStripe, + required this.disableCFWorker, + required this.mapEnabled, + required this.faceSearchEnabled, + required this.passKeyEnabled, + required this.recoveryKeyVerified, + 
required this.internalUser, + required this.betaUser, + }); + + static RemoteFlags defaultValue = RemoteFlags( + enableStripe: Platform.isAndroid, + disableCFWorker: false, + mapEnabled: false, + faceSearchEnabled: false, + passKeyEnabled: false, + recoveryKeyVerified: false, + internalUser: kDebugMode, + betaUser: kDebugMode, + ); + + String toJson() => json.encode(toMap()); + Map toMap() { + return { + 'enableStripe': enableStripe, + 'disableCFWorker': disableCFWorker, + 'mapEnabled': mapEnabled, + 'faceSearchEnabled': faceSearchEnabled, + 'passKeyEnabled': passKeyEnabled, + 'recoveryKeyVerified': recoveryKeyVerified, + 'internalUser': internalUser, + 'betaUser': betaUser, + }; + } + + factory RemoteFlags.fromMap(Map map) { + return RemoteFlags( + enableStripe: map['enableStripe'] ?? defaultValue.enableStripe, + disableCFWorker: map['disableCFWorker'] ?? defaultValue.disableCFWorker, + mapEnabled: map['mapEnabled'] ?? defaultValue.mapEnabled, + faceSearchEnabled: + map['faceSearchEnabled'] ?? defaultValue.faceSearchEnabled, + passKeyEnabled: map['passKeyEnabled'] ?? defaultValue.passKeyEnabled, + recoveryKeyVerified: + map['recoveryKeyVerified'] ?? defaultValue.recoveryKeyVerified, + internalUser: map['internalUser'] ?? defaultValue.internalUser, + betaUser: map['betaUser'] ?? 
defaultValue.betaUser, + ); + } +} diff --git a/mobile/plugins/ente_feature_flag/lib/src/service.dart b/mobile/plugins/ente_feature_flag/lib/src/service.dart new file mode 100644 index 0000000000..47539eeb5f --- /dev/null +++ b/mobile/plugins/ente_feature_flag/lib/src/service.dart @@ -0,0 +1,75 @@ +// ignore_for_file: always_use_package_imports + +import "dart:convert"; +import "dart:developer"; +import "dart:io"; + +import "package:dio/dio.dart"; +import "package:flutter/foundation.dart"; +import "package:shared_preferences/shared_preferences.dart"; + +import "model.dart"; + +class FlagService { + final SharedPreferences _prefs; + final Dio _enteDio; + late final bool _usingEnteEmail; + + FlagService(this._prefs, this._enteDio) { + _usingEnteEmail = _prefs.getString("email")?.endsWith("@ente.io") ?? false; + Future.delayed(const Duration(seconds: 5), () { + _fetch(); + }); + } + + RemoteFlags? _flags; + + RemoteFlags get flags { + try { + if (!_prefs.containsKey("remote_flags")) { + _fetch().ignore(); + } + _flags ??= RemoteFlags.fromMap( + jsonDecode(_prefs.getString("remote_flags") ?? 
"{}"), + ); + return _flags!; + } catch (e) { + debugPrint("Failed to get feature flags $e"); + return RemoteFlags.defaultValue; + } + } + + Future _fetch() async { + try { + if (!_prefs.containsKey("token")) { + log("token not found, skip", name: "FlagService"); + return; + } + log("fetching feature flags", name: "FlagService"); + final response = await _enteDio.get("/remote-store/feature-flags"); + final remoteFlags = RemoteFlags.fromMap(response.data); + await _prefs.setString("remote_flags", remoteFlags.toJson()); + _flags = remoteFlags; + } catch (e) { + debugPrint("Failed to sync feature flags $e"); + } + } + + bool get disableCFWorker => flags.disableCFWorker; + + bool get internalUser => flags.internalUser || _usingEnteEmail || kDebugMode; + + bool get betaUser => flags.betaUser; + + bool get internalOrBetaUser => internalUser || betaUser; + + bool get enableStripe => Platform.isIOS ? false : flags.enableStripe; + + bool get mapEnabled => flags.mapEnabled; + + bool get faceSearchEnabled => flags.faceSearchEnabled; + + bool get passKeyEnabled => flags.passKeyEnabled || internalOrBetaUser; + + bool get recoveryKeyVerified => flags.recoveryKeyVerified; +} diff --git a/mobile/plugins/ente_feature_flag/pubspec.lock b/mobile/plugins/ente_feature_flag/pubspec.lock new file mode 100644 index 0000000000..6760d7c6c5 --- /dev/null +++ b/mobile/plugins/ente_feature_flag/pubspec.lock @@ -0,0 +1,277 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + characters: + dependency: transitive + description: + name: characters + sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605" + url: "https://pub.dev" + source: hosted + version: "1.3.0" + collection: + dependency: "direct main" + description: + name: collection + sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a + url: "https://pub.dev" + source: hosted + version: "1.18.0" + dio: + dependency: "direct main" + description: + name: dio + sha256: 
"7d328c4d898a61efc3cd93655a0955858e29a0aa647f0f9e02d59b3bb275e2e8" + url: "https://pub.dev" + source: hosted + version: "4.0.6" + ffi: + dependency: transitive + description: + name: ffi + sha256: "493f37e7df1804778ff3a53bd691d8692ddf69702cf4c1c1096a2e41b4779e21" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + file: + dependency: transitive + description: + name: file + sha256: "5fc22d7c25582e38ad9a8515372cd9a93834027aacf1801cf01164dac0ffa08c" + url: "https://pub.dev" + source: hosted + version: "7.0.0" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_lints: + dependency: "direct dev" + description: + name: flutter_lints + sha256: "9e8c3858111da373efc5aa341de011d9bd23e2c5c5e0c62bccf32438e192d7b1" + url: "https://pub.dev" + source: hosted + version: "3.0.2" + flutter_web_plugins: + dependency: transitive + description: flutter + source: sdk + version: "0.0.0" + http_parser: + dependency: transitive + description: + name: http_parser + sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b" + url: "https://pub.dev" + source: hosted + version: "4.0.2" + lints: + dependency: transitive + description: + name: lints + sha256: cbf8d4b858bb0134ef3ef87841abdf8d63bfc255c266b7bf6b39daa1085c4290 + url: "https://pub.dev" + source: hosted + version: "3.0.0" + material_color_utilities: + dependency: transitive + description: + name: material_color_utilities + sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a" + url: "https://pub.dev" + source: hosted + version: "0.8.0" + meta: + dependency: transitive + description: + name: meta + sha256: d584fa6707a52763a52446f02cc621b077888fb63b93bbcb1143a7be5a0c0c04 + url: "https://pub.dev" + source: hosted + version: "1.11.0" + path: + dependency: transitive + description: + name: path + sha256: "087ce49c3f0dc39180befefc60fdb4acd8f8620e5682fe2476afd0b3688bb4af" + url: "https://pub.dev" + source: hosted + version: "1.9.0" + 
path_provider_linux: + dependency: transitive + description: + name: path_provider_linux + sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279 + url: "https://pub.dev" + source: hosted + version: "2.2.1" + path_provider_platform_interface: + dependency: transitive + description: + name: path_provider_platform_interface + sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + path_provider_windows: + dependency: transitive + description: + name: path_provider_windows + sha256: "8bc9f22eee8690981c22aa7fc602f5c85b497a6fb2ceb35ee5a5e5ed85ad8170" + url: "https://pub.dev" + source: hosted + version: "2.2.1" + platform: + dependency: transitive + description: + name: platform + sha256: "12220bb4b65720483f8fa9450b4332347737cf8213dd2840d8b2c823e47243ec" + url: "https://pub.dev" + source: hosted + version: "3.1.4" + plugin_platform_interface: + dependency: transitive + description: + name: plugin_platform_interface + sha256: "4820fbfdb9478b1ebae27888254d445073732dae3d6ea81f0b7e06d5dedc3f02" + url: "https://pub.dev" + source: hosted + version: "2.1.8" + shared_preferences: + dependency: "direct main" + description: + name: shared_preferences + sha256: d3bbe5553a986e83980916ded2f0b435ef2e1893dfaa29d5a7a790d0eca12180 + url: "https://pub.dev" + source: hosted + version: "2.2.3" + shared_preferences_android: + dependency: transitive + description: + name: shared_preferences_android + sha256: "1ee8bf911094a1b592de7ab29add6f826a7331fb854273d55918693d5364a1f2" + url: "https://pub.dev" + source: hosted + version: "2.2.2" + shared_preferences_foundation: + dependency: transitive + description: + name: shared_preferences_foundation + sha256: "7708d83064f38060c7b39db12aefe449cb8cdc031d6062280087bc4cdb988f5c" + url: "https://pub.dev" + source: hosted + version: "2.3.5" + shared_preferences_linux: + dependency: transitive + description: + name: shared_preferences_linux + sha256: 
"9f2cbcf46d4270ea8be39fa156d86379077c8a5228d9dfdb1164ae0bb93f1faa" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + shared_preferences_platform_interface: + dependency: transitive + description: + name: shared_preferences_platform_interface + sha256: "22e2ecac9419b4246d7c22bfbbda589e3acf5c0351137d87dd2939d984d37c3b" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + shared_preferences_web: + dependency: transitive + description: + name: shared_preferences_web + sha256: "9aee1089b36bd2aafe06582b7d7817fd317ef05fc30e6ba14bff247d0933042a" + url: "https://pub.dev" + source: hosted + version: "2.3.0" + shared_preferences_windows: + dependency: transitive + description: + name: shared_preferences_windows + sha256: "841ad54f3c8381c480d0c9b508b89a34036f512482c407e6df7a9c4aa2ef8f59" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_span: + dependency: transitive + description: + name: source_span + sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c" + url: "https://pub.dev" + source: hosted + version: "1.10.0" + stack_trace: + dependency: "direct main" + description: + name: stack_trace + sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b" + url: "https://pub.dev" + source: hosted + version: "1.11.1" + string_scanner: + dependency: transitive + description: + name: string_scanner + sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde" + url: "https://pub.dev" + source: hosted + version: "1.2.0" + term_glyph: + dependency: transitive + description: + name: term_glyph + sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84 + url: "https://pub.dev" + source: hosted + version: "1.2.1" + typed_data: + dependency: transitive + description: + name: typed_data + sha256: facc8d6582f16042dd49f2463ff1bd6e2c9ef9f3d5da3d9b087e244a7b564b3c + url: "https://pub.dev" 
+ source: hosted + version: "1.3.2" + vector_math: + dependency: transitive + description: + name: vector_math + sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803" + url: "https://pub.dev" + source: hosted + version: "2.1.4" + web: + dependency: transitive + description: + name: web + sha256: "97da13628db363c635202ad97068d47c5b8aa555808e7a9411963c533b449b27" + url: "https://pub.dev" + source: hosted + version: "0.5.1" + win32: + dependency: transitive + description: + name: win32 + sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a" + url: "https://pub.dev" + source: hosted + version: "5.4.0" + xdg_directories: + dependency: transitive + description: + name: xdg_directories + sha256: faea9dee56b520b55a566385b84f2e8de55e7496104adada9962e0bd11bcff1d + url: "https://pub.dev" + source: hosted + version: "1.0.4" +sdks: + dart: ">=3.3.0 <4.0.0" + flutter: ">=3.19.0" diff --git a/mobile/plugins/ente_feature_flag/pubspec.yaml b/mobile/plugins/ente_feature_flag/pubspec.yaml new file mode 100644 index 0000000000..7507d61f1c --- /dev/null +++ b/mobile/plugins/ente_feature_flag/pubspec.yaml @@ -0,0 +1,19 @@ +name: ente_feature_flag +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=3.3.0 <4.0.0' + +dependencies: + collection: + dio: ^4.0.6 + flutter: + sdk: flutter + shared_preferences: ^2.0.5 + stack_trace: + +dev_dependencies: + flutter_lints: + +flutter: \ No newline at end of file diff --git a/mobile/pubspec.lock b/mobile/pubspec.lock index 393dadc237..ae74068eb8 100644 --- a/mobile/pubspec.lock +++ b/mobile/pubspec.lock @@ -209,6 +209,15 @@ packages: url: "https://pub.dev" source: hosted version: "1.1.1" + cast: + dependency: transitive + description: + path: "." 
+ ref: multicast_version + resolved-ref: "1f39cd4d6efa9363e77b2439f0317bae0c92dda1" + url: "https://github.com/guyluz11/flutter_cast.git" + source: git + version: "2.0.9" characters: dependency: transitive description: @@ -342,10 +351,10 @@ packages: dependency: "direct main" description: name: cupertino_icons - sha256: d57953e10f9f8327ce64a508a355f0b1ec902193f66288e8cb5070e7c47eeb2d + sha256: ba631d1c7f7bef6b729a622b7b752645a2d076dba9976925b8f25725a30e1ee6 url: "https://pub.dev" source: hosted - version: "1.0.6" + version: "1.0.8" dart_style: dependency: transitive description: @@ -354,14 +363,6 @@ packages: url: "https://pub.dev" source: hosted version: "2.3.2" - dartx: - dependency: transitive - description: - name: dartx - sha256: "8b25435617027257d43e6508b5fe061012880ddfdaa75a71d607c3de2a13d244" - url: "https://pub.dev" - source: hosted - version: "1.2.0" dbus: dependency: transitive description: @@ -434,6 +435,27 @@ packages: url: "https://pub.dev" source: hosted version: "2.1.17" + ente_cast: + dependency: "direct main" + description: + path: "plugins/ente_cast" + relative: true + source: path + version: "0.0.1" + ente_cast_normal: + dependency: "direct main" + description: + path: "plugins/ente_cast_normal" + relative: true + source: path + version: "0.0.1" + ente_feature_flag: + dependency: "direct main" + description: + path: "plugins/ente_feature_flag" + relative: true + source: path + version: "0.0.1" equatable: dependency: "direct main" description: @@ -551,10 +573,10 @@ packages: dependency: "direct main" description: name: firebase_core - sha256: a864d1b6afd25497a3b57b016886d1763df52baaa69758a46723164de8d187fe + sha256: "6b1152a5af3b1cfe7e45309e96fc1aa14873f410f7aadb3878aa7812acfa7531" url: "https://pub.dev" source: hosted - version: "2.29.0" + version: "2.30.0" firebase_core_platform_interface: dependency: transitive description: @@ -575,10 +597,10 @@ packages: dependency: "direct main" description: name: firebase_messaging - sha256: 
e41586e0fd04fe9a40424f8b0053d0832e6d04f49e020cdaf9919209a28497e9 + sha256: "87e3eda0ecdfeadb5fd1cf0dc5153aea5307a0cfca751c4b1ac97bfdd805660e" url: "https://pub.dev" source: hosted - version: "14.7.19" + version: "14.8.1" firebase_messaging_platform_interface: dependency: transitive description: @@ -1086,30 +1108,6 @@ packages: url: "https://pub.dev" source: hosted version: "1.0.4" - isar: - dependency: "direct main" - description: - name: isar - sha256: "99165dadb2cf2329d3140198363a7e7bff9bbd441871898a87e26914d25cf1ea" - url: "https://pub.dev" - source: hosted - version: "3.1.0+1" - isar_flutter_libs: - dependency: "direct main" - description: - name: isar_flutter_libs - sha256: bc6768cc4b9c61aabff77152e7f33b4b17d2fc93134f7af1c3dd51500fe8d5e8 - url: "https://pub.dev" - source: hosted - version: "3.1.0+1" - isar_generator: - dependency: "direct dev" - description: - name: isar_generator - sha256: "76c121e1295a30423604f2f819bc255bc79f852f3bc8743a24017df6068ad133" - url: "https://pub.dev" - source: hosted - version: "3.1.0+1" js: dependency: transitive description: @@ -1416,6 +1414,14 @@ packages: url: "https://pub.dev" source: hosted version: "1.0.2" + multicast_dns: + dependency: transitive + description: + name: multicast_dns + sha256: "316cc47a958d4bd3c67bd238fe8b44fdfb6133bad89cb191c0c3bd3edb14e296" + url: "https://pub.dev" + source: hosted + version: "0.3.2+6" nested: dependency: transitive description: @@ -1729,6 +1735,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.1.0" + protobuf: + dependency: transitive + description: + name: protobuf + sha256: "68645b24e0716782e58948f8467fd42a880f255096a821f9e7d0ec625b00c84d" + url: "https://pub.dev" + source: hosted + version: "3.1.0" provider: dependency: "direct main" description: @@ -2039,7 +2053,7 @@ packages: source: hosted version: "0.3.0" sqlite3: - dependency: "direct main" + dependency: transitive description: name: sqlite3 sha256: 
"072128763f1547e3e9b4735ce846bfd226d68019ccda54db4cd427b12dfdedc9" @@ -2174,14 +2188,6 @@ packages: url: "https://pub.dev" source: hosted version: "0.5.9" - time: - dependency: transitive - description: - name: time - sha256: ad8e018a6c9db36cb917a031853a1aae49467a93e0d464683e029537d848c221 - url: "https://pub.dev" - source: hosted - version: "2.1.4" timezone: dependency: transitive description: @@ -2551,14 +2557,6 @@ packages: url: "https://pub.dev" source: hosted version: "1.1.1" - xxh3: - dependency: transitive - description: - name: xxh3 - sha256: a92b30944a9aeb4e3d4f3c3d4ddb3c7816ca73475cd603682c4f8149690f56d7 - url: "https://pub.dev" - source: hosted - version: "1.0.1" yaml: dependency: transitive description: diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 89bee933b1..5b8f7ba734 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -12,7 +12,7 @@ description: ente photos application # Read more about iOS versioning at # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html -version: 0.8.82+602 +version: 0.8.92+612 publish_to: none environment: @@ -39,7 +39,7 @@ dependencies: connectivity_plus: ^6.0.2 cross_file: ^0.3.3 crypto: ^3.0.2 - cupertino_icons: ^1.0.0 + cupertino_icons: ^1.0.8 defer_pointer: ^0.0.2 device_info_plus: ^9.0.3 dio: ^4.0.6 @@ -47,6 +47,12 @@ dependencies: dotted_border: ^2.1.0 dropdown_button2: ^2.0.0 email_validator: ^2.0.1 + ente_cast: + path: plugins/ente_cast + ente_cast_normal: + path: plugins/ente_cast_normal + ente_feature_flag: + path: plugins/ente_feature_flag equatable: ^2.0.5 event_bus: ^2.0.0 exif: ^3.0.0 @@ -59,8 +65,8 @@ dependencies: file_saver: # Use forked version till this PR is merged: https://github.com/incrediblezayed/file_saver/pull/87 git: https://github.com/jesims/file_saver.git - firebase_core: ^2.13.1 - firebase_messaging: ^14.6.2 + firebase_core: ^2.30.0 + firebase_messaging: ^14.8.0 fk_user_agent: ^2.0.1 flutter: 
sdk: flutter @@ -93,11 +99,9 @@ dependencies: image_editor: ^1.3.0 in_app_purchase: ^3.0.7 intl: ^0.18.0 - isar: ^3.1.0+1 - isar_flutter_libs: ^3.1.0+1 json_annotation: ^4.8.0 latlong2: ^0.9.0 - like_button: ^2.0.2 + like_button: ^2.0.5 loading_animations: ^2.1.0 local_auth: ^2.1.5 local_auth_android: @@ -140,7 +144,6 @@ dependencies: shared_preferences: ^2.0.5 sqflite: ^2.3.0 sqflite_migration: ^0.3.0 - sqlite3: ^2.1.0 sqlite3_flutter_libs: ^0.5.20 sqlite_async: ^0.6.1 step_progress_indicator: ^1.0.2 @@ -191,7 +194,6 @@ dev_dependencies: freezed: ^2.5.2 integration_test: sdk: flutter - isar_generator: ^3.1.0+1 json_serializable: ^6.6.1 test: ^1.22.0 diff --git a/mobile/scripts/build_isar.sh b/mobile/scripts/build_isar.sh deleted file mode 100755 index 1bb1d38f6c..0000000000 --- a/mobile/scripts/build_isar.sh +++ /dev/null @@ -1,17 +0,0 @@ -# TODO: add `rustup@1.25.2` to `srclibs` -# TODO: verify if `gcc-multilib` or `libc-dev` is needed -$$rustup$$/rustup-init.sh -y -source $HOME/.cargo/env -cd thirdparty/isar/ -bash tool/build_android.sh x86 -bash tool/build_android.sh x64 -bash tool/build_android.sh armv7 -bash tool/build_android.sh arm64 -mv libisar_android_arm64.so libisar.so -mv libisar.so $PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/ -mv libisar_android_armv7.so libisar.so -mv libisar.so $PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/ -mv libisar_android_x64.so libisar.so -mv libisar.so $PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/ -mv libisar_android_x86.so libisar.so -mv libisar.so $PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/ diff --git a/server/cmd/museum/main.go b/server/cmd/museum/main.go index c451b8b9c0..84c34189d2 100644 --- a/server/cmd/museum/main.go +++ b/server/cmd/museum/main.go @@ -5,6 +5,7 @@ import ( "database/sql" b64 "encoding/base64" "fmt" + "github.com/ente-io/museum/pkg/controller/file_copy" "net/http" 
"os" "os/signal" @@ -193,7 +194,7 @@ func main() { commonBillController := commonbilling.NewController(storagBonusRepo, userRepo, usageRepo) appStoreController := controller.NewAppStoreController(defaultPlan, billingRepo, fileRepo, userRepo, commonBillController) - + remoteStoreController := &remoteStoreCtrl.Controller{Repo: remoteStoreRepository} playStoreController := controller.NewPlayStoreController(defaultPlan, billingRepo, fileRepo, userRepo, storagBonusRepo, commonBillController) stripeController := controller.NewStripeController(plans, stripeClients, @@ -389,9 +390,17 @@ func main() { timeout.WithHandler(healthCheckHandler.PingDBStats), timeout.WithResponse(timeOutResponse), )) + fileCopyCtrl := &file_copy.FileCopyController{ + FileController: fileController, + CollectionCtrl: collectionController, + S3Config: s3Config, + ObjectRepo: objectRepo, + FileRepo: fileRepo, + } fileHandler := &api.FileHandler{ - Controller: fileController, + Controller: fileController, + FileCopyCtrl: fileCopyCtrl, } privateAPI.GET("/files/upload-urls", fileHandler.GetUploadURLs) privateAPI.GET("/files/multipart-upload-urls", fileHandler.GetMultipartUploadURLs) @@ -400,6 +409,7 @@ func main() { privateAPI.GET("/files/preview/:fileID", fileHandler.GetThumbnail) privateAPI.GET("/files/preview/v2/:fileID", fileHandler.GetThumbnail) privateAPI.POST("/files", fileHandler.CreateOrUpdate) + privateAPI.POST("/files/copy", fileHandler.CopyFiles) privateAPI.PUT("/files/update", fileHandler.Update) privateAPI.POST("/files/trash", fileHandler.Trash) privateAPI.POST("/files/size", fileHandler.GetSize) @@ -600,6 +610,7 @@ func main() { UserAuthRepo: userAuthRepo, UserController: userController, FamilyController: familyController, + RemoteStoreController: remoteStoreController, FileRepo: fileRepo, StorageBonusRepo: storagBonusRepo, BillingRepo: billingRepo, @@ -621,6 +632,7 @@ func main() { adminAPI.PUT("/user/change-email", adminHandler.ChangeEmail) adminAPI.DELETE("/user/delete", 
adminHandler.DeleteUser) adminAPI.POST("/user/recover", adminHandler.RecoverAccount) + adminAPI.POST("/user/update-flag", adminHandler.UpdateFeatureFlag) adminAPI.GET("/email-hash", adminHandler.GetEmailHash) adminAPI.POST("/emails-from-hashes", adminHandler.GetEmailsFromHashes) adminAPI.PUT("/user/subscription", adminHandler.UpdateSubscription) @@ -648,7 +660,6 @@ func main() { privateAPI.DELETE("/authenticator/entity", authenticatorHandler.DeleteEntity) privateAPI.GET("/authenticator/entity/diff", authenticatorHandler.GetDiff) - remoteStoreController := &remoteStoreCtrl.Controller{Repo: remoteStoreRepository} dataCleanupController := &dataCleanupCtrl.DeleteUserCleanupController{ Repo: dataCleanupRepository, UserRepo: userRepo, @@ -662,6 +673,7 @@ func main() { privateAPI.POST("/remote-store/update", remoteStoreHandler.InsertOrUpdate) privateAPI.GET("/remote-store", remoteStoreHandler.GetKey) + privateAPI.GET("/remote-store/feature-flags", remoteStoreHandler.GetFeatureFlags) pushHandler := &api.PushHandler{PushController: pushController} privateAPI.POST("/push/token", pushHandler.AddToken) @@ -827,7 +839,7 @@ func setupAndStartCrons(userAuthRepo *repo.UserAuthRepository, publicCollectionR schedule(c, "@every 24h", func() { _ = userAuthRepo.RemoveDeletedTokens(timeUtil.MicrosecondBeforeDays(30)) - _ = castDb.DeleteOldCodes(context.Background(), timeUtil.MicrosecondBeforeDays(1)) + _ = castDb.DeleteOldSessions(context.Background(), timeUtil.MicrosecondBeforeDays(7)) _ = publicCollectionRepo.CleanupAccessHistory(context.Background()) }) @@ -885,6 +897,8 @@ func setupAndStartCrons(userAuthRepo *repo.UserAuthRepository, publicCollectionR }) schedule(c, "@every 30m", func() { + // delete unclaimed codes older than 60 minutes + _ = castDb.DeleteUnclaimedCodes(context.Background(), timeUtil.MicrosecondsBeforeMinutes(60)) dataCleanupCtrl.DeleteDataCron() }) diff --git a/server/docs/docker.md b/server/docs/docker.md index d8f3db9137..a328d734bd 100644 --- 
a/server/docs/docker.md +++ b/server/docs/docker.md @@ -45,7 +45,7 @@ require you to clone the repository or build any images. + image: ghcr.io/ente-io/server ``` -4. Create an (empty) configuration file. Yyou can later put your custom +4. Create an (empty) configuration file. You can later put your custom configuration in this if needed. ```sh diff --git a/server/ente/cast/entity.go b/server/ente/cast/entity.go index deffa90b97..a54d109fcc 100644 --- a/server/ente/cast/entity.go +++ b/server/ente/cast/entity.go @@ -9,8 +9,7 @@ type CastRequest struct { } type RegisterDeviceRequest struct { - DeviceCode *string `json:"deviceCode"` - PublicKey string `json:"publicKey" binding:"required"` + PublicKey string `json:"publicKey" binding:"required"` } type AuthContext struct { diff --git a/server/ente/collection.go b/server/ente/collection.go index 763d07b9b6..71b4c50ac2 100644 --- a/server/ente/collection.go +++ b/server/ente/collection.go @@ -103,6 +103,17 @@ type AddFilesRequest struct { Files []CollectionFileItem `json:"files" binding:"required"` } +// CopyFileSyncRequest is request object for creating copy of CollectionFileItems, and those copy to the destination collection +type CopyFileSyncRequest struct { + SrcCollectionID int64 `json:"srcCollectionID" binding:"required"` + DstCollection int64 `json:"dstCollectionID" binding:"required"` + CollectionFileItems []CollectionFileItem `json:"files" binding:"required"` +} + +type CopyResponse struct { + OldToNewFileIDMap map[int64]int64 `json:"oldToNewFileIDMap"` +} + // RemoveFilesRequest represents a request to remove files from a collection type RemoveFilesRequest struct { CollectionID int64 `json:"collectionID" binding:"required"` diff --git a/server/ente/errors.go b/server/ente/errors.go index 49aed71518..96e7bd4a1e 100644 --- a/server/ente/errors.go +++ b/server/ente/errors.go @@ -149,6 +149,12 @@ var ErrCastPermissionDenied = ApiError{ HttpStatusCode: http.StatusForbidden, } +var ErrCastIPMismatch = ApiError{ + 
Code: "CAST_IP_MISMATCH", + Message: "IP mismatch", + HttpStatusCode: http.StatusForbidden, +} + type ErrorCode string const ( diff --git a/server/ente/remotestore.go b/server/ente/remotestore.go index 02eb932326..8f518f2a14 100644 --- a/server/ente/remotestore.go +++ b/server/ente/remotestore.go @@ -13,3 +13,66 @@ type UpdateKeyValueRequest struct { Key string `json:"key" binding:"required"` Value string `json:"value" binding:"required"` } + +type AdminUpdateKeyValueRequest struct { + UserID int64 `json:"userID" binding:"required"` + Key string `json:"key" binding:"required"` + Value string `json:"value" binding:"required"` +} + +type FeatureFlagResponse struct { + EnableStripe bool `json:"enableStripe"` + // If true, the mobile client will stop using CF worker to download files + DisableCFWorker bool `json:"disableCFWorker"` + MapEnabled bool `json:"mapEnabled"` + FaceSearchEnabled bool `json:"faceSearchEnabled"` + PassKeyEnabled bool `json:"passKeyEnabled"` + RecoveryKeyVerified bool `json:"recoveryKeyVerified"` + InternalUser bool `json:"internalUser"` + BetaUser bool `json:"betaUser"` +} + +type FlagKey string + +const ( + RecoveryKeyVerified FlagKey = "recoveryKeyVerified" + MapEnabled FlagKey = "mapEnabled" + FaceSearchEnabled FlagKey = "faceSearchEnabled" + PassKeyEnabled FlagKey = "passKeyEnabled" + IsInternalUser FlagKey = "internalUser" + IsBetaUser FlagKey = "betaUser" +) + +func (k FlagKey) String() string { + return string(k) +} + +// UserEditable returns true if the key is user editable +func (k FlagKey) UserEditable() bool { + switch k { + case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled, PassKeyEnabled: + return true + default: + return false + } +} + +func (k FlagKey) IsAdminEditable() bool { + switch k { + case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled: + return false + case IsInternalUser, IsBetaUser, PassKeyEnabled: + return true + default: + return true + } +} + +func (k FlagKey) IsBoolType() bool { + switch k { + case 
RecoveryKeyVerified, MapEnabled, FaceSearchEnabled, PassKeyEnabled, IsInternalUser, IsBetaUser: + return true + default: + return false + } +} diff --git a/server/migrations/84_add_cast_column.down.sql b/server/migrations/84_add_cast_column.down.sql new file mode 100644 index 0000000000..c08fed94e6 --- /dev/null +++ b/server/migrations/84_add_cast_column.down.sql @@ -0,0 +1 @@ +ALTER TABLE casting DROP COLUMN IF EXISTS ip; \ No newline at end of file diff --git a/server/migrations/84_add_cast_column.up.sql b/server/migrations/84_add_cast_column.up.sql new file mode 100644 index 0000000000..828c2e57c2 --- /dev/null +++ b/server/migrations/84_add_cast_column.up.sql @@ -0,0 +1,5 @@ +--- Delete all rows from casting table and add a non-nullable column called ip +BEGIN; +DELETE FROM casting; +ALTER TABLE casting ADD COLUMN ip text NOT NULL; +COMMIT; diff --git a/server/pkg/api/admin.go b/server/pkg/api/admin.go index b153e19bb1..0b6ac18ef9 100644 --- a/server/pkg/api/admin.go +++ b/server/pkg/api/admin.go @@ -3,6 +3,7 @@ package api import ( "errors" "fmt" + "github.com/ente-io/museum/pkg/controller/remotestore" "net/http" "strconv" "strings" @@ -43,6 +44,7 @@ type AdminHandler struct { BillingController *controller.BillingController UserController *user.UserController FamilyController *family.Controller + RemoteStoreController *remotestore.Controller ObjectCleanupController *controller.ObjectCleanupController MailingListsController *controller.MailingListsController DiscordController *discord.DiscordController @@ -260,6 +262,32 @@ func (h *AdminHandler) RemovePasskeys(c *gin.Context) { c.JSON(http.StatusOK, gin.H{}) } +func (h *AdminHandler) UpdateFeatureFlag(c *gin.Context) { + var request ente.AdminUpdateKeyValueRequest + if err := c.ShouldBindJSON(&request); err != nil { + handler.Error(c, stacktrace.Propagate(ente.ErrBadRequest, "Bad request")) + return + } + go h.DiscordController.NotifyAdminAction( + fmt.Sprintf("Admin (%d) updating flag:%s to val:%s for %d", 
auth.GetUserID(c.Request.Header), request.Key, request.Value, request.UserID)) + + logger := logrus.WithFields(logrus.Fields{ + "user_id": request.UserID, + "admin_id": auth.GetUserID(c.Request.Header), + "req_id": requestid.Get(c), + "req_ctx": "update_feature_flag", + }) + logger.Info("Start update") + err := h.RemoteStoreController.AdminInsertOrUpdate(c, request) + if err != nil { + logger.WithError(err).Error("Failed to update flag") + handler.Error(c, stacktrace.Propagate(err, "")) + return + } + logger.Info("successfully updated flag") + c.JSON(http.StatusOK, gin.H{}) +} + func (h *AdminHandler) CloseFamily(c *gin.Context) { var request ente.AdminOpsForUserRequest diff --git a/server/pkg/api/cast.go b/server/pkg/api/cast.go index 62d5c94784..9012624d32 100644 --- a/server/pkg/api/cast.go +++ b/server/pkg/api/cast.go @@ -1,16 +1,16 @@ package api import ( - entity "github.com/ente-io/museum/ente/cast" - "github.com/ente-io/museum/pkg/controller/cast" - "net/http" - "strconv" - "github.com/ente-io/museum/ente" + entity "github.com/ente-io/museum/ente/cast" "github.com/ente-io/museum/pkg/controller" + "github.com/ente-io/museum/pkg/controller/cast" "github.com/ente-io/museum/pkg/utils/handler" "github.com/ente-io/stacktrace" "github.com/gin-gonic/gin" + "net/http" + "strconv" + "strings" ) // CastHandler exposes request handlers for publicly accessible collections @@ -126,7 +126,7 @@ func (h *CastHandler) GetDiff(c *gin.Context) { } func getDeviceCode(c *gin.Context) string { - return c.Param("deviceCode") + return strings.ToUpper(c.Param("deviceCode")) } func (h *CastHandler) getFileForType(c *gin.Context, objectType ente.ObjectType) { diff --git a/server/pkg/api/file.go b/server/pkg/api/file.go index a65b9e3833..a253c71c2a 100644 --- a/server/pkg/api/file.go +++ b/server/pkg/api/file.go @@ -1,6 +1,8 @@ package api import ( + "fmt" + "github.com/ente-io/museum/pkg/controller/file_copy" "net/http" "os" "strconv" @@ -20,11 +22,13 @@ import ( // FileHandler 
exposes request handlers for all encrypted file related requests type FileHandler struct { - Controller *controller.FileController + Controller *controller.FileController + FileCopyCtrl *file_copy.FileCopyController } // DefaultMaxBatchSize is the default maximum API batch size unless specified otherwise const DefaultMaxBatchSize = 1000 +const DefaultCopyBatchSize = 100 // CreateOrUpdate creates an entry for a file func (h *FileHandler) CreateOrUpdate(c *gin.Context) { @@ -58,6 +62,25 @@ func (h *FileHandler) CreateOrUpdate(c *gin.Context) { c.JSON(http.StatusOK, response) } +// CopyFiles copies files that are owned by another user +func (h *FileHandler) CopyFiles(c *gin.Context) { + var req ente.CopyFileSyncRequest + if err := c.ShouldBindJSON(&req); err != nil { + handler.Error(c, stacktrace.Propagate(err, "")) + return + } + if len(req.CollectionFileItems) > DefaultCopyBatchSize { + handler.Error(c, stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("more than %d items", DefaultCopyBatchSize)), "")) + return + } + response, err := h.FileCopyCtrl.CopyFiles(c, req) + if err != nil { + handler.Error(c, stacktrace.Propagate(err, "")) + return + } + c.JSON(http.StatusOK, response) +} + // Update updates already existing file func (h *FileHandler) Update(c *gin.Context) { enteApp := auth.GetApp(c) diff --git a/server/pkg/api/remotestore.go b/server/pkg/api/remotestore.go index ea6e621a31..9f03554de8 100644 --- a/server/pkg/api/remotestore.go +++ b/server/pkg/api/remotestore.go @@ -49,3 +49,13 @@ func (h *RemoteStoreHandler) GetKey(c *gin.Context) { } c.JSON(http.StatusOK, resp) } + +// GetFeatureFlags returns all the feature flags and value for given user +func (h *RemoteStoreHandler) GetFeatureFlags(c *gin.Context) { + resp, err := h.Controller.GetFeatureFlags(c) + if err != nil { + handler.Error(c, stacktrace.Propagate(err, "failed to get feature flags")) + return + } + c.JSON(http.StatusOK, resp) +} diff --git a/server/pkg/controller/cast/controller.go 
b/server/pkg/controller/cast/controller.go index 3b76420cc9..2bb002f81d 100644 --- a/server/pkg/controller/cast/controller.go +++ b/server/pkg/controller/cast/controller.go @@ -6,8 +6,10 @@ import ( "github.com/ente-io/museum/pkg/controller/access" castRepo "github.com/ente-io/museum/pkg/repo/cast" "github.com/ente-io/museum/pkg/utils/auth" + "github.com/ente-io/museum/pkg/utils/network" "github.com/ente-io/stacktrace" "github.com/gin-gonic/gin" + "github.com/sirupsen/logrus" ) type Controller struct { @@ -24,12 +26,23 @@ func NewController(castRepo *castRepo.Repository, } } -func (c *Controller) RegisterDevice(ctx context.Context, request *cast.RegisterDeviceRequest) (string, error) { - return c.CastRepo.AddCode(ctx, request.DeviceCode, request.PublicKey) +func (c *Controller) RegisterDevice(ctx *gin.Context, request *cast.RegisterDeviceRequest) (string, error) { + return c.CastRepo.AddCode(ctx, request.PublicKey, network.GetClientIP(ctx)) } -func (c *Controller) GetPublicKey(ctx context.Context, deviceCode string) (string, error) { - return c.CastRepo.GetPubKey(ctx, deviceCode) +func (c *Controller) GetPublicKey(ctx *gin.Context, deviceCode string) (string, error) { + pubKey, ip, err := c.CastRepo.GetPubKeyAndIp(ctx, deviceCode) + if err != nil { + return "", stacktrace.Propagate(err, "") + } + if ip != network.GetClientIP(ctx) { + logrus.WithFields(logrus.Fields{ + "deviceCode": deviceCode, + "ip": ip, + "clientIP": network.GetClientIP(ctx), + }).Warn("GetPublicKey: IP mismatch") + } + return pubKey, nil } func (c *Controller) GetEncCastData(ctx context.Context, deviceCode string) (*string, error) { diff --git a/server/pkg/controller/collection.go b/server/pkg/controller/collection.go index 15c06fa331..911afc6d77 100644 --- a/server/pkg/controller/collection.go +++ b/server/pkg/controller/collection.go @@ -464,6 +464,41 @@ func (c *CollectionController) isRemoveAllowed(ctx *gin.Context, actorUserID int return nil } +func (c *CollectionController) 
IsCopyAllowed(ctx *gin.Context, actorUserID int64, req ente.CopyFileSyncRequest) error { + // verify that srcCollectionID is accessible by actorUserID + if _, err := c.AccessCtrl.GetCollection(ctx, &access.GetCollectionParams{ + CollectionID: req.SrcCollectionID, + ActorUserID: actorUserID, + }); err != nil { + return stacktrace.Propagate(err, "failed to verify srcCollection access") + } + // verify that dstCollectionID is owned by actorUserID + if _, err := c.AccessCtrl.GetCollection(ctx, &access.GetCollectionParams{ + CollectionID: req.DstCollection, + ActorUserID: actorUserID, + VerifyOwner: true, + }); err != nil { + return stacktrace.Propagate(err, "failed to ownership of the dstCollection access") + } + // verify that all FileIDs exists in the srcCollection + fileIDs := make([]int64, len(req.CollectionFileItems)) + for idx, file := range req.CollectionFileItems { + fileIDs[idx] = file.ID + } + if err := c.CollectionRepo.VerifyAllFileIDsExistsInCollection(ctx, req.SrcCollectionID, fileIDs); err != nil { + return stacktrace.Propagate(err, "failed to verify fileIDs in srcCollection") + } + dsMap, err := c.FileRepo.GetOwnerToFileIDsMap(ctx, fileIDs) + if err != nil { + return err + } + // verify that none of the file belongs to actorUserID + if _, ok := dsMap[actorUserID]; ok { + return ente.NewBadRequestWithMessage("can not copy files owned by actor") + } + return nil +} + // GetDiffV2 returns the changes in user's collections since a timestamp, along with hasMore bool flag. 
func (c *CollectionController) GetDiffV2(ctx *gin.Context, cID int64, userID int64, sinceTime int64) ([]ente.File, bool, error) { reqContextLogger := log.WithFields(log.Fields{ diff --git a/server/pkg/controller/file.go b/server/pkg/controller/file.go index 12d173e252..e91d299f15 100644 --- a/server/pkg/controller/file.go +++ b/server/pkg/controller/file.go @@ -59,25 +59,51 @@ const ( DeletedObjectQueueLock = "deleted_objects_queue_lock" ) -// Create adds an entry for a file in the respective tables -func (c *FileController) Create(ctx context.Context, userID int64, file ente.File, userAgent string, app ente.App) (ente.File, error) { +func (c *FileController) validateFileCreateOrUpdateReq(userID int64, file ente.File) error { objectPathPrefix := strconv.FormatInt(userID, 10) + "/" if !strings.HasPrefix(file.File.ObjectKey, objectPathPrefix) || !strings.HasPrefix(file.Thumbnail.ObjectKey, objectPathPrefix) { - return file, stacktrace.Propagate(ente.ErrBadRequest, "Incorrect object key reported") + return stacktrace.Propagate(ente.ErrBadRequest, "Incorrect object key reported") } - collection, err := c.CollectionRepo.Get(file.CollectionID) + isCreateFileReq := file.ID == 0 + // Check for attributes for fileCreation. We don't send key details on update + if isCreateFileReq { + if file.EncryptedKey == "" || file.KeyDecryptionNonce == "" { + return stacktrace.Propagate(ente.ErrBadRequest, "EncryptedKey and KeyDecryptionNonce are required") + } + } + if file.File.DecryptionHeader == "" || file.Thumbnail.DecryptionHeader == "" { + return stacktrace.Propagate(ente.ErrBadRequest, "DecryptionHeader for file & thumb is required") + } + if file.UpdationTime == 0 { + return stacktrace.Propagate(ente.ErrBadRequest, "UpdationTime is required") + } + if isCreateFileReq { + collection, err := c.CollectionRepo.Get(file.CollectionID) + if err != nil { + return stacktrace.Propagate(err, "") + } + // Verify that user owns the collection. 
+ // Warning: Do not remove this check + if collection.Owner.ID != userID { + return stacktrace.Propagate(ente.ErrPermissionDenied, "collection doesn't belong to user") + } + if collection.IsDeleted { + return stacktrace.Propagate(ente.ErrNotFound, "collection has been deleted") + } + if file.OwnerID != userID { + return stacktrace.Propagate(ente.ErrPermissionDenied, "file ownerID doesn't match with userID") + } + } + + return nil +} + +// Create adds an entry for a file in the respective tables +func (c *FileController) Create(ctx context.Context, userID int64, file ente.File, userAgent string, app ente.App) (ente.File, error) { + err := c.validateFileCreateOrUpdateReq(userID, file) if err != nil { return file, stacktrace.Propagate(err, "") } - // Verify that user owns the collection. - // Warning: Do not remove this check - if collection.Owner.ID != userID || file.OwnerID != userID { - return file, stacktrace.Propagate(ente.ErrPermissionDenied, "") - } - if collection.IsDeleted { - return file, stacktrace.Propagate(ente.ErrNotFound, "collection has been deleted") - } - hotDC := c.S3Config.GetHotDataCenter() // sizeOf will do also HEAD check to ensure that the object exists in the // current hot DC @@ -115,7 +141,7 @@ func (c *FileController) Create(ctx context.Context, userID int64, file ente.Fil // all iz well var usage int64 - file, usage, err = c.FileRepo.Create(file, fileSize, thumbnailSize, fileSize+thumbnailSize, collection.Owner.ID, app) + file, usage, err = c.FileRepo.Create(file, fileSize, thumbnailSize, fileSize+thumbnailSize, userID, app) if err != nil { if err == ente.ErrDuplicateFileObjectFound || err == ente.ErrDuplicateThumbnailObjectFound { var existing ente.File @@ -144,9 +170,9 @@ func (c *FileController) Create(ctx context.Context, userID int64, file ente.Fil // Update verifies permissions and updates the specified file func (c *FileController) Update(ctx context.Context, userID int64, file ente.File, app ente.App) (ente.UpdateFileResponse, 
error) { var response ente.UpdateFileResponse - objectPathPrefix := strconv.FormatInt(userID, 10) + "/" - if !strings.HasPrefix(file.File.ObjectKey, objectPathPrefix) || !strings.HasPrefix(file.Thumbnail.ObjectKey, objectPathPrefix) { - return response, stacktrace.Propagate(ente.ErrBadRequest, "Incorrect object key reported") + err := c.validateFileCreateOrUpdateReq(userID, file) + if err != nil { + return response, stacktrace.Propagate(err, "") } ownerID, err := c.FileRepo.GetOwnerID(file.ID) if err != nil { diff --git a/server/pkg/controller/file_copy/file_copy.go b/server/pkg/controller/file_copy/file_copy.go new file mode 100644 index 0000000000..afab10efee --- /dev/null +++ b/server/pkg/controller/file_copy/file_copy.go @@ -0,0 +1,206 @@ +package file_copy + +import ( + "fmt" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/ente-io/museum/ente" + "github.com/ente-io/museum/pkg/controller" + "github.com/ente-io/museum/pkg/repo" + "github.com/ente-io/museum/pkg/utils/auth" + "github.com/ente-io/museum/pkg/utils/s3config" + enteTime "github.com/ente-io/museum/pkg/utils/time" + "github.com/gin-contrib/requestid" + "github.com/gin-gonic/gin" + "github.com/sirupsen/logrus" + "golang.org/x/sync/errgroup" + "sync" + "time" +) + +const () + +type FileCopyController struct { + S3Config *s3config.S3Config + FileController *controller.FileController + FileRepo *repo.FileRepository + CollectionCtrl *controller.CollectionController + ObjectRepo *repo.ObjectRepository +} + +type copyS3ObjectReq struct { + SourceS3Object ente.S3ObjectKey + DestObjectKey string +} + +type fileCopyInternal struct { + SourceFile ente.File + DestCollectionID int64 + // The FileKey is encrypted with the destination collection's key + EncryptedFileKey string + EncryptedFileKeyNonce string + FileCopyReq *copyS3ObjectReq + ThumbCopyReq *copyS3ObjectReq +} + +func (fci fileCopyInternal) newFile(ownedID int64) ente.File { + newFileAttributes := fci.SourceFile.File + newFileAttributes.ObjectKey = 
fci.FileCopyReq.DestObjectKey + newThumbAttributes := fci.SourceFile.Thumbnail + newThumbAttributes.ObjectKey = fci.ThumbCopyReq.DestObjectKey + return ente.File{ + OwnerID: ownedID, + CollectionID: fci.DestCollectionID, + EncryptedKey: fci.EncryptedFileKey, + KeyDecryptionNonce: fci.EncryptedFileKeyNonce, + File: newFileAttributes, + Thumbnail: newThumbAttributes, + Metadata: fci.SourceFile.Metadata, + UpdationTime: enteTime.Microseconds(), + IsDeleted: false, + } +} + +func (fc *FileCopyController) CopyFiles(c *gin.Context, req ente.CopyFileSyncRequest) (*ente.CopyResponse, error) { + userID := auth.GetUserID(c.Request.Header) + app := auth.GetApp(c) + logger := logrus.WithFields(logrus.Fields{"req_id": requestid.Get(c), "user_id": userID}) + err := fc.CollectionCtrl.IsCopyAllowed(c, userID, req) + if err != nil { + return nil, err + } + fileIDs := make([]int64, 0, len(req.CollectionFileItems)) + fileToCollectionFileMap := make(map[int64]*ente.CollectionFileItem, len(req.CollectionFileItems)) + for i := range req.CollectionFileItems { + item := &req.CollectionFileItems[i] + fileToCollectionFileMap[item.ID] = item + fileIDs = append(fileIDs, item.ID) + } + s3ObjectsToCopy, err := fc.ObjectRepo.GetObjectsForFileIDs(fileIDs) + if err != nil { + return nil, err + } + // note: this assumes that preview existingFilesToCopy for videos are not tracked inside the object_keys table + if len(s3ObjectsToCopy) != 2*len(fileIDs) { + return nil, ente.NewInternalError(fmt.Sprintf("expected %d objects, got %d", 2*len(fileIDs), len(s3ObjectsToCopy))) + } + // todo:(neeraj) if the total size is greater than 1GB, do an early check if the user can upload the existingFilesToCopy + var totalSize int64 + for _, obj := range s3ObjectsToCopy { + totalSize += obj.FileSize + } + logger.WithField("totalSize", totalSize).Info("total size of existingFilesToCopy to copy") + + // request the uploadUrls using existing method. 
This is to ensure that orphan objects are automatically cleaned up + // todo:(neeraj) optimize this method by removing the need for getting a signed url for each object + uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app) + if err != nil { + return nil, err + } + existingFilesToCopy, err := fc.FileRepo.GetFileAttributesForCopy(fileIDs) + if err != nil { + return nil, err + } + if len(existingFilesToCopy) != len(fileIDs) { + return nil, ente.NewInternalError(fmt.Sprintf("expected %d existingFilesToCopy, got %d", len(fileIDs), len(existingFilesToCopy))) + } + fileOGS3Object := make(map[int64]*copyS3ObjectReq) + fileThumbS3Object := make(map[int64]*copyS3ObjectReq) + for i, s3Obj := range s3ObjectsToCopy { + if s3Obj.Type == ente.FILE { + fileOGS3Object[s3Obj.FileID] = ©S3ObjectReq{ + SourceS3Object: s3Obj, + DestObjectKey: uploadUrls[i].ObjectKey, + } + } else if s3Obj.Type == ente.THUMBNAIL { + fileThumbS3Object[s3Obj.FileID] = ©S3ObjectReq{ + SourceS3Object: s3Obj, + DestObjectKey: uploadUrls[i].ObjectKey, + } + } else { + return nil, ente.NewInternalError(fmt.Sprintf("unexpected object type %s", s3Obj.Type)) + } + } + fileCopyList := make([]fileCopyInternal, 0, len(existingFilesToCopy)) + for i := range existingFilesToCopy { + file := existingFilesToCopy[i] + collectionItem := fileToCollectionFileMap[file.ID] + if collectionItem.ID != file.ID { + return nil, ente.NewInternalError(fmt.Sprintf("expected collectionItem.ID %d, got %d", file.ID, collectionItem.ID)) + } + fileCopy := fileCopyInternal{ + SourceFile: file, + DestCollectionID: req.DstCollection, + EncryptedFileKey: fileToCollectionFileMap[file.ID].EncryptedKey, + EncryptedFileKeyNonce: fileToCollectionFileMap[file.ID].KeyDecryptionNonce, + FileCopyReq: fileOGS3Object[file.ID], + ThumbCopyReq: fileThumbS3Object[file.ID], + } + fileCopyList = append(fileCopyList, fileCopy) + } + oldToNewFileIDMap := make(map[int64]int64) + var wg sync.WaitGroup + errChan := make(chan 
error, len(fileCopyList)) + + for _, fileCopy := range fileCopyList { + wg.Add(1) + go func(fileCopy fileCopyInternal) { + defer wg.Done() + newFile, err := fc.createCopy(c, fileCopy, userID, app) + if err != nil { + errChan <- err + return + } + oldToNewFileIDMap[fileCopy.SourceFile.ID] = newFile.ID + }(fileCopy) + } + + // Wait for all goroutines to finish + wg.Wait() + + // Close the error channel and check if there were any errors + close(errChan) + if err, ok := <-errChan; ok { + return nil, err + } + return &ente.CopyResponse{OldToNewFileIDMap: oldToNewFileIDMap}, nil +} + +func (fc *FileCopyController) createCopy(c *gin.Context, fcInternal fileCopyInternal, userID int64, app ente.App) (*ente.File, error) { + // using HotS3Client copy the File and Thumbnail + s3Client := fc.S3Config.GetHotS3Client() + hotBucket := fc.S3Config.GetHotBucket() + g := new(errgroup.Group) + g.Go(func() error { + return copyS3Object(s3Client, hotBucket, fcInternal.FileCopyReq) + }) + g.Go(func() error { + return copyS3Object(s3Client, hotBucket, fcInternal.ThumbCopyReq) + }) + if err := g.Wait(); err != nil { + return nil, err + } + file := fcInternal.newFile(userID) + newFile, err := fc.FileController.Create(c, userID, file, "", app) + if err != nil { + return nil, err + } + return &newFile, nil +} + +// Helper function for S3 object copying. 
+func copyS3Object(s3Client *s3.S3, bucket *string, req *copyS3ObjectReq) error { + copySource := fmt.Sprintf("%s/%s", *bucket, req.SourceS3Object.ObjectKey) + copyInput := &s3.CopyObjectInput{ + Bucket: bucket, + CopySource: ©Source, + Key: &req.DestObjectKey, + } + start := time.Now() + _, err := s3Client.CopyObject(copyInput) + elapsed := time.Since(start) + if err != nil { + return fmt.Errorf("failed to copy (%s) from %s to %s: %w", req.SourceS3Object.Type, copySource, req.DestObjectKey, err) + } + logrus.WithField("duration", elapsed).WithField("size", req.SourceS3Object.FileSize).Infof("copied (%s) from %s to %s", req.SourceS3Object.Type, copySource, req.DestObjectKey) + return nil +} diff --git a/server/pkg/controller/remotestore/controller.go b/server/pkg/controller/remotestore/controller.go index d41bf7e5f7..bf8e4acfcc 100644 --- a/server/pkg/controller/remotestore/controller.go +++ b/server/pkg/controller/remotestore/controller.go @@ -3,6 +3,7 @@ package remotestore import ( "database/sql" "errors" + "fmt" "github.com/ente-io/museum/ente" "github.com/ente-io/museum/pkg/repo/remotestore" @@ -16,12 +17,22 @@ type Controller struct { Repo *remotestore.Repository } -// Insert of update the key's value +// InsertOrUpdate the key's value func (c *Controller) InsertOrUpdate(ctx *gin.Context, request ente.UpdateKeyValueRequest) error { + if err := _validateRequest(request.Key, request.Value, false); err != nil { + return err + } userID := auth.GetUserID(ctx.Request.Header) return c.Repo.InsertOrUpdate(ctx, userID, request.Key, request.Value) } +func (c *Controller) AdminInsertOrUpdate(ctx *gin.Context, request ente.AdminUpdateKeyValueRequest) error { + if err := _validateRequest(request.Key, request.Value, true); err != nil { + return err + } + return c.Repo.InsertOrUpdate(ctx, request.UserID, request.Key, request.Value) +} + func (c *Controller) Get(ctx *gin.Context, req ente.GetValueRequest) (*ente.GetValueResponse, error) { userID := 
auth.GetUserID(ctx.Request.Header) value, err := c.Repo.GetValue(ctx, userID, req.Key) @@ -34,3 +45,50 @@ func (c *Controller) Get(ctx *gin.Context, req ente.GetValueRequest) (*ente.GetV } return &ente.GetValueResponse{Value: value}, nil } + +func (c *Controller) GetFeatureFlags(ctx *gin.Context) (*ente.FeatureFlagResponse, error) { + userID := auth.GetUserID(ctx.Request.Header) + values, err := c.Repo.GetAllValues(ctx, userID) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + response := &ente.FeatureFlagResponse{ + EnableStripe: true, // enable stripe for all + DisableCFWorker: false, + } + for key, value := range values { + flag := ente.FlagKey(key) + if !flag.IsBoolType() { + continue + } + switch flag { + case ente.RecoveryKeyVerified: + response.RecoveryKeyVerified = value == "true" + case ente.MapEnabled: + response.MapEnabled = value == "true" + case ente.FaceSearchEnabled: + response.FaceSearchEnabled = value == "true" + case ente.PassKeyEnabled: + response.PassKeyEnabled = value == "true" + case ente.IsInternalUser: + response.InternalUser = value == "true" + case ente.IsBetaUser: + response.BetaUser = value == "true" + } + } + return response, nil +} + +func _validateRequest(key, value string, byAdmin bool) error { + flag := ente.FlagKey(key) + if !flag.UserEditable() && !byAdmin { + return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("key %s is not user editable", key)), "key not user editable") + } + if byAdmin && !flag.IsAdminEditable() { + return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("key %s is not admin editable", key)), "key not admin editable") + } + if flag.IsBoolType() && value != "true" && value != "false" { + return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("value %s is not allowed", value)), "value not allowed") + } + return nil +} diff --git a/server/pkg/controller/storagebonus/referral.go b/server/pkg/controller/storagebonus/referral.go index 
b452484f41..5bdd951f8d 100644 --- a/server/pkg/controller/storagebonus/referral.go +++ b/server/pkg/controller/storagebonus/referral.go @@ -3,7 +3,7 @@ package storagebonus import ( "database/sql" "errors" - "fmt" + "github.com/ente-io/museum/pkg/utils/random" "github.com/ente-io/museum/ente" entity "github.com/ente-io/museum/ente/storagebonus" @@ -119,7 +119,7 @@ func (c *Controller) GetOrCreateReferralCode(ctx *gin.Context, userID int64) (*s if !errors.Is(err, sql.ErrNoRows) { return nil, stacktrace.Propagate(err, "failed to get storagebonus code") } - code, err := generateAlphaNumString(codeLength) + code, err := random.GenerateAlphaNumString(codeLength) if err != nil { return nil, stacktrace.Propagate(err, "") } @@ -131,30 +131,3 @@ func (c *Controller) GetOrCreateReferralCode(ctx *gin.Context, userID int64) (*s } return referralCode, nil } - -// generateAlphaNumString returns AlphaNumeric code of given length -// which exclude number 0 and letter O. The code always starts with an -// alphabet -func generateAlphaNumString(length int) (string, error) { - // Define the alphabet and numbers to be used in the string. - alphabet := "ABCDEFGHIJKLMNPQRSTUVWXYZ" - // Define the alphabet and numbers to be used in the string. - alphaNum := fmt.Sprintf("%s123456789", alphabet) - // Allocate a byte slice with the desired length. - result := make([]byte, length) - // Generate the first letter as an alphabet. - r0, err := auth.GenerateRandomInt(int64(len(alphabet))) - if err != nil { - return "", stacktrace.Propagate(err, "") - } - result[0] = alphabet[r0] - // Generate the remaining characters as alphanumeric. 
- for i := 1; i < length; i++ { - ri, err := auth.GenerateRandomInt(int64(len(alphaNum))) - if err != nil { - return "", stacktrace.Propagate(err, "") - } - result[i] = alphaNum[ri] - } - return string(result), nil -} diff --git a/server/pkg/middleware/rate_limit.go b/server/pkg/middleware/rate_limit.go index 08e0f00b66..076c050c9f 100644 --- a/server/pkg/middleware/rate_limit.go +++ b/server/pkg/middleware/rate_limit.go @@ -150,6 +150,7 @@ func (r *RateLimitMiddleware) getLimiter(reqPath string, reqMethod string) *limi reqPath == "/public-collection/verify-password" || reqPath == "/family/accept-invite" || reqPath == "/users/srp/attributes" || + (reqPath == "/cast/device-info/" && reqMethod == "POST") || reqPath == "/users/srp/verify-session" || reqPath == "/family/invite-info/:token" || reqPath == "/family/add-member" || diff --git a/server/pkg/repo/cast/repo.go b/server/pkg/repo/cast/repo.go index 306c1d481c..2f4446c9d0 100644 --- a/server/pkg/repo/cast/repo.go +++ b/server/pkg/repo/cast/repo.go @@ -7,25 +7,19 @@ import ( "github.com/ente-io/museum/pkg/utils/random" "github.com/ente-io/stacktrace" "github.com/google/uuid" - "strings" + log "github.com/sirupsen/logrus" ) type Repository struct { DB *sql.DB } -func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string) (string, error) { - var codeValue string - var err error - if code == nil || *code == "" { - codeValue, err = random.GenerateSixDigitOtp() - if err != nil { - return "", stacktrace.Propagate(err, "") - } - } else { - codeValue = strings.TrimSpace(*code) +func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (string, error) { + codeValue, err := random.GenerateAlphaNumString(6) + if err != nil { + return "", err } - _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id) VALUES ($1, $2, $3)", codeValue, pubKey, uuid.New()) + _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id, ip) VALUES ($1, $2, $3, $4)", codeValue, 
pubKey, uuid.New(), ip) if err != nil { return "", err } @@ -38,17 +32,17 @@ func (r *Repository) InsertCastData(ctx context.Context, castUserID int64, code return err } -func (r *Repository) GetPubKey(ctx context.Context, code string) (string, error) { - var pubKey string - row := r.DB.QueryRowContext(ctx, "SELECT public_key FROM casting WHERE code = $1 and is_deleted=false", code) - err := row.Scan(&pubKey) +func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, string, error) { + var pubKey, ip string + row := r.DB.QueryRowContext(ctx, "SELECT public_key, ip FROM casting WHERE code = $1 and is_deleted=false", code) + err := row.Scan(&pubKey, &ip) if err != nil { if err == sql.ErrNoRows { - return "", ente.ErrNotFoundError.NewErr("code not found") + return "", "", ente.ErrNotFoundError.NewErr("code not found") } - return "", err + return "", "", err } - return pubKey, nil + return pubKey, ip, nil } func (r *Repository) GetEncCastData(ctx context.Context, code string) (*string, error) { @@ -89,12 +83,27 @@ func (r *Repository) UpdateLastUsedAtForToken(ctx context.Context, token string) return nil } -// DeleteOldCodes that are not associated with a collection and are older than the given time -func (r *Repository) DeleteOldCodes(ctx context.Context, expirtyTime int64) error { - _, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1 and is_deleted=false and collection_id is null", expirtyTime) +// DeleteUnclaimedCodes that are not associated with a collection and are older than the given time +func (r *Repository) DeleteUnclaimedCodes(ctx context.Context, expiryTime int64) error { + result, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1 and is_deleted=false and collection_id is null", expiryTime) if err != nil { return err } + if rows, rErr := result.RowsAffected(); rErr == nil && rows > 0 { + log.Infof("Deleted %d unclaimed codes", rows) + } + return nil +} + +// DeleteOldSessions where last 
used at is older than the given time +func (r *Repository) DeleteOldSessions(ctx context.Context, expiryTime int64) error { + result, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1", expiryTime) + if err != nil { + return err + } + if rows, rErr := result.RowsAffected(); rErr == nil && rows > 0 { + log.Infof("Deleted %d old sessions", rows) + } return nil } diff --git a/server/pkg/repo/collection.go b/server/pkg/repo/collection.go index 16ae853244..9310f33d47 100644 --- a/server/pkg/repo/collection.go +++ b/server/pkg/repo/collection.go @@ -374,6 +374,30 @@ func (repo *CollectionRepository) DoesFileExistInCollections(fileID int64, cIDs return exists, stacktrace.Propagate(err, "") } +// VerifyAllFileIDsExistsInCollection returns error if the fileIDs don't exist in the collection +func (repo *CollectionRepository) VerifyAllFileIDsExistsInCollection(ctx context.Context, cID int64, fileIDs []int64) error { + fileIdMap := make(map[int64]bool) + rows, err := repo.DB.QueryContext(ctx, `SELECT file_id FROM collection_files WHERE collection_id = $1 AND is_deleted = $2 AND file_id = ANY ($3)`, + cID, false, pq.Array(fileIDs)) + if err != nil { + return stacktrace.Propagate(err, "") + } + for rows.Next() { + var fileID int64 + if err := rows.Scan(&fileID); err != nil { + return stacktrace.Propagate(err, "") + } + fileIdMap[fileID] = true + } + // find fileIds that are not present in the collection + for _, fileID := range fileIDs { + if _, ok := fileIdMap[fileID]; !ok { + return stacktrace.Propagate(fmt.Errorf("fileID %d not found in collection %d", fileID, cID), "") + } + } + return nil +} + // GetCollectionShareeRole returns true if the collection is shared with the user func (repo *CollectionRepository) GetCollectionShareeRole(cID int64, userID int64) (*ente.CollectionParticipantRole, error) { var role *ente.CollectionParticipantRole diff --git a/server/pkg/repo/file.go b/server/pkg/repo/file.go index ffa7ea048e..eafc7b570c 100644 --- 
a/server/pkg/repo/file.go +++ b/server/pkg/repo/file.go @@ -612,6 +612,24 @@ func (repo *FileRepository) GetFileAttributesFromObjectKey(objectKey string) (en return file, nil } +func (repo *FileRepository) GetFileAttributesForCopy(fileIDs []int64) ([]ente.File, error) { + result := make([]ente.File, 0) + rows, err := repo.DB.Query(`SELECT file_id, owner_id, file_decryption_header, thumbnail_decryption_header, metadata_decryption_header, encrypted_metadata, pub_magic_metadata FROM files WHERE file_id = ANY($1)`, pq.Array(fileIDs)) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + defer rows.Close() + for rows.Next() { + var file ente.File + err := rows.Scan(&file.ID, &file.OwnerID, &file.File.DecryptionHeader, &file.Thumbnail.DecryptionHeader, &file.Metadata.DecryptionHeader, &file.Metadata.EncryptedData, &file.PubicMagicMetadata) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + result = append(result, file) + } + return result, nil +} + // GetUsage gets the Storage usage of a user // Deprecated: GetUsage is deprecated, use UsageRepository.GetUsage func (repo *FileRepository) GetUsage(userID int64) (int64, error) { diff --git a/server/pkg/repo/object.go b/server/pkg/repo/object.go index f0cc5c6cfb..fdbbbf52c0 100644 --- a/server/pkg/repo/object.go +++ b/server/pkg/repo/object.go @@ -44,6 +44,15 @@ func (repo *ObjectRepository) MarkObjectReplicated(objectKey string, datacenter return result.RowsAffected() } +func (repo *ObjectRepository) GetObjectsForFileIDs(fileIDs []int64) ([]ente.S3ObjectKey, error) { + rows, err := repo.DB.Query(`SELECT file_id, o_type, object_key, size FROM object_keys + WHERE file_id = ANY($1) AND is_deleted=false`, pq.Array(fileIDs)) + if err != nil { + return nil, stacktrace.Propagate(err, "") + } + return convertRowsToObjectKeys(rows) +} + // GetObject returns the ente.S3ObjectKey key for a file id and type func (repo *ObjectRepository) GetObject(fileID int64, objType ente.ObjectType) (ente.S3ObjectKey, 
error) { // todo: handling of deleted objects diff --git a/server/pkg/repo/remotestore/repository.go b/server/pkg/repo/remotestore/repository.go index dc54b0cfc1..2548f49018 100644 --- a/server/pkg/repo/remotestore/repository.go +++ b/server/pkg/repo/remotestore/repository.go @@ -13,7 +13,6 @@ type Repository struct { DB *sql.DB } -// func (r *Repository) InsertOrUpdate(ctx context.Context, userID int64, key string, value string) error { _, err := r.DB.ExecContext(ctx, `INSERT INTO remote_store(user_id, key_name, key_value) VALUES ($1,$2,$3) ON CONFLICT (user_id, key_name) DO UPDATE SET key_value = $3; @@ -40,3 +39,25 @@ func (r *Repository) GetValue(ctx context.Context, userID int64, key string) (st } return keyValue, nil } + +// GetAllValues fetches and return all the key value pairs for given user_id +func (r *Repository) GetAllValues(ctx context.Context, userID int64) (map[string]string, error) { + rows, err := r.DB.QueryContext(ctx, `SELECT key_name, key_value FROM remote_store + WHERE user_id = $1`, + userID, // $1 + ) + if err != nil { + return nil, stacktrace.Propagate(err, "reading value failed") + } + defer rows.Close() + values := make(map[string]string) + for rows.Next() { + var key, value string + err := rows.Scan(&key, &value) + if err != nil { + return nil, stacktrace.Propagate(err, "reading value failed") + } + values[key] = value + } + return values, nil +} diff --git a/server/pkg/utils/random/generate.go b/server/pkg/utils/random/generate.go index 47932b6603..75a811c8e1 100644 --- a/server/pkg/utils/random/generate.go +++ b/server/pkg/utils/random/generate.go @@ -13,3 +13,30 @@ func GenerateSixDigitOtp() (string, error) { } return fmt.Sprintf("%06d", n), nil } + +// GenerateAlphaNumString returns AlphaNumeric code of given length +// which exclude number 0 and letter O. The code always starts with an +// alphabet +func GenerateAlphaNumString(length int) (string, error) { + // Define the alphabet and numbers to be used in the string. 
+ alphabet := "ABCDEFGHIJKLMNPQRSTUVWXYZ" + // Define the alphabet and numbers to be used in the string. + alphaNum := fmt.Sprintf("%s123456789", alphabet) + // Allocate a byte slice with the desired length. + result := make([]byte, length) + // Generate the first letter as an alphabet. + r0, err := auth.GenerateRandomInt(int64(len(alphabet))) + if err != nil { + return "", stacktrace.Propagate(err, "") + } + result[0] = alphabet[r0] + // Generate the remaining characters as alphanumeric. + for i := 1; i < length; i++ { + ri, err := auth.GenerateRandomInt(int64(len(alphaNum))) + if err != nil { + return "", stacktrace.Propagate(err, "") + } + result[i] = alphaNum[ri] + } + return string(result), nil +} diff --git a/web/apps/auth/src/pages/_app.tsx b/web/apps/auth/src/pages/_app.tsx index bf1093c907..a5aa55f98d 100644 --- a/web/apps/auth/src/pages/_app.tsx +++ b/web/apps/auth/src/pages/_app.tsx @@ -140,7 +140,7 @@ export default function App({ Component, pageProps }: AppProps) { {showNavbar && } - {offline && t("OFFLINE_MSG")} + {isI18nReady && offline && t("OFFLINE_MSG")} diff --git a/web/apps/cast/package.json b/web/apps/cast/package.json index ee318ef619..4f774662ad 100644 --- a/web/apps/cast/package.json +++ b/web/apps/cast/package.json @@ -3,11 +3,13 @@ "version": "0.0.0", "private": true, "dependencies": { + "@/media": "*", "@/next": "*", "@ente/accounts": "*", "@ente/eslint-config": "*", - "@ente/shared": "*", - "jszip": "3.10.1", - "mime-types": "^2.1.35" + "@ente/shared": "*" + }, + "devDependencies": { + "@types/chromecast-caf-receiver": "^6.0.14" } } diff --git a/web/apps/cast/public/images/help-qrcode.webp b/web/apps/cast/public/images/help-qrcode.webp deleted file mode 100644 index 79cd22c999..0000000000 Binary files a/web/apps/cast/public/images/help-qrcode.webp and /dev/null differ diff --git a/web/apps/cast/src/components/FilledCircleCheck.tsx b/web/apps/cast/src/components/FilledCircleCheck.tsx index c0635f138a..ba2292922e 100644 --- 
a/web/apps/cast/src/components/FilledCircleCheck.tsx +++ b/web/apps/cast/src/components/FilledCircleCheck.tsx @@ -1,6 +1,6 @@ import { styled } from "@mui/material"; -const FilledCircleCheck = () => { +export const FilledCircleCheck: React.FC = () => { return ( @@ -11,8 +11,6 @@ const FilledCircleCheck = () => { ); }; -export default FilledCircleCheck; - const Container = styled("div")` width: 100px; height: 100px; diff --git a/web/apps/cast/src/components/LargeType.tsx b/web/apps/cast/src/components/LargeType.tsx index ecf7a201bb..42ccb65e9f 100644 --- a/web/apps/cast/src/components/LargeType.tsx +++ b/web/apps/cast/src/components/LargeType.tsx @@ -23,7 +23,7 @@ const colourPool = [ "#808000", // Light Olive ]; -export default function LargeType({ chars }: { chars: string[] }) { +export const LargeType = ({ chars }: { chars: string[] }) => { return ( {chars.map((char, i) => ( @@ -41,7 +41,7 @@ export default function LargeType({ chars }: { chars: string[] }) { ))} ); -} +}; const Container = styled("div")` font-size: 4rem; diff --git a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx b/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx index 845416fedc..88f4d7c1fc 100644 --- a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx +++ b/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx @@ -1,6 +1,6 @@ -import FilledCircleCheck from "./FilledCircleCheck"; +import { FilledCircleCheck } from "./FilledCircleCheck"; -export default function PairedSuccessfullyOverlay() { +export const PairedSuccessfullyOverlay: React.FC = () => { return (
); -} +}; diff --git a/web/apps/cast/src/components/PhotoAuditorium.tsx b/web/apps/cast/src/components/Slide.tsx similarity index 70% rename from web/apps/cast/src/components/PhotoAuditorium.tsx rename to web/apps/cast/src/components/Slide.tsx index 6aa2c3990b..8309f8bc2c 100644 --- a/web/apps/cast/src/components/PhotoAuditorium.tsx +++ b/web/apps/cast/src/components/Slide.tsx @@ -1,25 +1,17 @@ -import { useEffect } from "react"; - -interface PhotoAuditoriumProps { +interface SlideViewProps { + /** The URL of the image to show. */ url: string; - nextSlideUrl: string; - showNextSlide: () => void; + /** The URL of the next image that we will transition to. */ + nextURL: string; } -export const PhotoAuditorium: React.FC = ({ - url, - nextSlideUrl, - showNextSlide, -}) => { - useEffect(() => { - const timeoutId = window.setTimeout(() => { - showNextSlide(); - }, 10000); - - return () => { - if (timeoutId) clearTimeout(timeoutId); - }; - }, [showNextSlide]); +/** + * Show the image at {@link url} in a full screen view. + * + * Also show {@link nextURL} in an hidden image view to prepare the browser for + * an imminent transition to it. + */ +export const SlideView: React.FC = ({ url, nextURL }) => { return (
= ({ }} > { - const array = new Uint8Array(length); - window.crypto.getRandomValues(array); - // Modulo operation to ensure each byte is a single digit - for (let i = 0; i < length; i++) { - array[i] = array[i] % 10; - } - return array; -}; +export default function Index() { + const [publicKeyB64, setPublicKeyB64] = useState(); + const [privateKeyB64, setPrivateKeyB64] = useState(); + const [pairingCode, setPairingCode] = useState(); -const convertDataToDecimalString = (data: Uint8Array): string => { - let decimalString = ""; - for (let i = 0; i < data.length; i++) { - decimalString += data[i].toString(); // No need to pad, as each value is a single digit - } - return decimalString; -}; - -export default function PairingMode() { - const [digits, setDigits] = useState([]); - const [publicKeyB64, setPublicKeyB64] = useState(""); - const [privateKeyB64, setPrivateKeyB64] = useState(""); - const [codePending, setCodePending] = useState(true); - const [isCastReady, setIsCastReady] = useState(false); - - const { cast } = useCastReceiver(); + const router = useRouter(); useEffect(() => { init(); }, []); - useEffect(() => { - if (!cast) return; - if (isCastReady) return; - const context = cast.framework.CastReceiverContext.getInstance(); - - try { - const options = new cast.framework.CastReceiverOptions(); - options.customNamespaces = Object.assign({}); - options.customNamespaces["urn:x-cast:pair-request"] = - cast.framework.system.MessageType.JSON; - - options.disableIdleTimeout = true; - - context.addCustomMessageListener( - "urn:x-cast:pair-request", - messageReceiveHandler, - ); - context.start(options); - } catch (e) { - log.error("failed to create cast context", e); - } - setIsCastReady(true); - return () => { - context.stop(); - }; - }, [cast, isCastReady]); - - const messageReceiveHandler = (message: { - type: string; - senderId: string; - data: any; - }) => { - cast.framework.CastReceiverContext.getInstance().sendCustomMessage( - "urn:x-cast:pair-request", - 
message.senderId, - { - code: digits.join(""), - }, - ); + const init = () => { + register().then((r) => { + setPublicKeyB64(r.publicKeyB64); + setPrivateKeyB64(r.privateKeyB64); + setPairingCode(r.pairingCode); + }); }; - const init = async () => { - const data = generateSecureData(6); - setDigits(convertDataToDecimalString(data).split("")); - const keypair = await generateKeyPair(); - setPublicKeyB64(await toB64(keypair.publicKey)); - setPrivateKeyB64(await toB64(keypair.privateKey)); - }; - - const generateKeyPair = async () => { - await _sodium.ready; - - const keypair = _sodium.crypto_box_keypair(); - - return keypair; - }; - - const pollForCastData = async () => { - if (codePending) { - return; - } - // see if we were acknowledged on the client. - // the client will send us the encrypted payload using our public key that we advertised. - // then, we can decrypt this and store all the necessary info locally so we can play the collection slideshow. - let devicePayload = ""; - try { - const encDastData = await castGateway.getCastData( - `${digits.join("")}`, - ); - if (!encDastData) return; - devicePayload = encDastData; - } catch (e) { - setCodePending(true); - init(); - return; - } - - const decryptedPayload = await boxSealOpen( - devicePayload, - publicKeyB64, - privateKeyB64, - ); - - const decryptedPayloadObj = JSON.parse(atob(decryptedPayload)); - - return decryptedPayloadObj; - }; - - const advertisePublicKey = async (publicKeyB64: string) => { - // hey client, we exist! 
- try { - await castGateway.registerDevice( - `${digits.join("")}`, - publicKeyB64, - ); - setCodePending(false); - } catch (e) { - // schedule re-try after 5 seconds - setTimeout(() => { - init(); - }, 5000); - return; - } - }; - - const router = useRouter(); - useEffect(() => { - if (digits.length < 1 || !publicKeyB64 || !privateKeyB64) return; + castReceiverLoadingIfNeeded().then((cast) => + advertiseCode(cast, () => pairingCode), + ); + }, []); - const interval = setInterval(async () => { - const data = await pollForCastData(); - if (!data) return; + useEffect(() => { + if (!publicKeyB64 || !privateKeyB64 || !pairingCode) return; + + const interval = setInterval(pollTick, 2000); + return () => clearInterval(interval); + }, [publicKeyB64, privateKeyB64, pairingCode]); + + const pollTick = async () => { + const registration = { publicKeyB64, privateKeyB64, pairingCode }; + try { + const data = await getCastData(registration); + if (!data) { + // No one has connected yet. + return; + } + + log.info("Pairing complete"); storeCastData(data); await router.push("/slideshow"); - }, 1000); - - return () => { - clearInterval(interval); - }; - }, [digits, publicKeyB64, privateKeyB64, codePending]); - - useEffect(() => { - if (!publicKeyB64) return; - advertisePublicKey(publicKeyB64); - }, [publicKeyB64]); + } catch (e) { + log.error("Failed to get cast data", e); + // Start again from the beginning. + setPairingCode(undefined); + init(); + } + }; return ( <> @@ -192,7 +83,8 @@ export default function PairingMode() { fontWeight: "normal", }} > - Enter this code on ente to pair this TV + Enter this code on Ente Photos to pair this + screen
- {codePending ? ( - + {pairingCode ? ( + ) : ( - <> - - + )}

{" "} for help

-
- -
diff --git a/web/apps/cast/src/pages/slideshow.tsx b/web/apps/cast/src/pages/slideshow.tsx index 774bbd4da9..bd3339b42b 100644 --- a/web/apps/cast/src/pages/slideshow.tsx +++ b/web/apps/cast/src/pages/slideshow.tsx @@ -1,153 +1,52 @@ import log from "@/next/log"; -import PairedSuccessfullyOverlay from "components/PairedSuccessfullyOverlay"; -import { PhotoAuditorium } from "components/PhotoAuditorium"; -import { FILE_TYPE } from "constants/file"; +import { PairedSuccessfullyOverlay } from "components/PairedSuccessfullyOverlay"; +import { SlideView } from "components/Slide"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -import { - getCastCollection, - getLocalFiles, - syncPublicFiles, -} from "services/cast/castService"; -import { Collection } from "types/collection"; -import { EnteFile } from "types/file"; -import { getPreviewableImage, isRawFileFromFileName } from "utils/file"; - -const renderableFileURLCache = new Map(); +import { readCastData, renderableImageURLs } from "services/cast"; export default function Slideshow() { const [loading, setLoading] = useState(true); - const [castToken, setCastToken] = useState(""); - const [castCollection, setCastCollection] = useState< - Collection | undefined - >(); - const [collectionFiles, setCollectionFiles] = useState([]); - const [currentFileId, setCurrentFileId] = useState(); - const [currentFileURL, setCurrentFileURL] = useState(); - const [nextFileURL, setNextFileURL] = useState(); + const [imageURL, setImageURL] = useState(); + const [nextImageURL, setNextImageURL] = useState(); const router = useRouter(); - const syncCastFiles = async (token: string) => { - try { - const castToken = window.localStorage.getItem("castToken"); - const requestedCollectionKey = - window.localStorage.getItem("collectionKey"); - const collection = await getCastCollection( - castToken, - requestedCollectionKey, - ); - if ( - castCollection === undefined || - castCollection.updationTime !== 
collection.updationTime - ) { - setCastCollection(collection); - await syncPublicFiles(token, collection, () => {}); - const files = await getLocalFiles(String(collection.id)); - setCollectionFiles( - files.filter((file) => isFileEligibleForCast(file)), - ); + /** Go back to pairing page */ + const pair = () => router.push("/"); + + useEffect(() => { + let stop = false; + + const loop = async () => { + try { + const urlGenerator = renderableImageURLs(readCastData()); + while (!stop) { + const { value: urls, done } = await urlGenerator.next(); + if (done) { + log.warn("Empty collection"); + pair(); + return; + } + + setImageURL(urls[0]); + setNextImageURL(urls[1]); + setLoading(false); + } + } catch (e) { + log.error("Failed to prepare generator", e); + pair(); } - } catch (e) { - log.error("error during sync", e); - router.push("/"); - } - }; + }; - useEffect(() => { - if (castToken) { - const intervalId = setInterval(() => { - syncCastFiles(castToken); - }, 10000); - syncCastFiles(castToken); + void loop(); - return () => clearInterval(intervalId); - } - }, [castToken]); - - const isFileEligibleForCast = (file: EnteFile) => { - const fileType = file.metadata.fileType; - if (fileType !== FILE_TYPE.IMAGE && fileType !== FILE_TYPE.LIVE_PHOTO) - return false; - - if (file.info.fileSize > 100 * 1024 * 1024) return false; - - if (isRawFileFromFileName(file.metadata.title)) return false; - - return true; - }; - - useEffect(() => { - try { - const castToken = window.localStorage.getItem("castToken"); - // Wait 2 seconds to ensure the green tick and the confirmation - // message remains visible for at least 2 seconds before we start - // the slideshow. 
- const timeoutId = setTimeout(() => { - setCastToken(castToken); - }, 2000); - - return () => clearTimeout(timeoutId); - } catch (e) { - log.error("error during sync", e); - router.push("/"); - } + return () => { + stop = true; + }; }, []); - useEffect(() => { - if (collectionFiles.length < 1) return; - showNextSlide(); - }, [collectionFiles]); - - const showNextSlide = async () => { - const currentIndex = collectionFiles.findIndex( - (file) => file.id === currentFileId, - ); - - const nextIndex = (currentIndex + 1) % collectionFiles.length; - const nextNextIndex = (nextIndex + 1) % collectionFiles.length; - - const nextFile = collectionFiles[nextIndex]; - const nextNextFile = collectionFiles[nextNextIndex]; - - let nextURL = renderableFileURLCache.get(nextFile.id); - let nextNextURL = renderableFileURLCache.get(nextNextFile.id); - - if (!nextURL) { - try { - const blob = await getPreviewableImage(nextFile, castToken); - const url = URL.createObjectURL(blob); - renderableFileURLCache.set(nextFile.id, url); - nextURL = url; - } catch (e) { - return; - } - } - - if (!nextNextURL) { - try { - const blob = await getPreviewableImage(nextNextFile, castToken); - const url = URL.createObjectURL(blob); - renderableFileURLCache.set(nextNextFile.id, url); - nextNextURL = url; - } catch (e) { - return; - } - } - - setLoading(false); - setCurrentFileId(nextFile.id); - setCurrentFileURL(nextURL); - setNextFileURL(nextNextURL); - }; - if (loading) return ; - return ( - - ); + return ; } diff --git a/web/apps/cast/src/services/cast.ts b/web/apps/cast/src/services/cast.ts new file mode 100644 index 0000000000..8ead8962ab --- /dev/null +++ b/web/apps/cast/src/services/cast.ts @@ -0,0 +1,319 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { isNonWebImageFileExtension } from "@/media/formats"; +import { decodeLivePhoto } from "@/media/live-photo"; +import { nameAndExtension } from "@/next/file"; +import log from "@/next/log"; +import { shuffled } from "@/utils/array"; 
+import { ensure, ensureString } from "@/utils/ensure"; +import ComlinkCryptoWorker from "@ente/shared/crypto"; +import HTTPService from "@ente/shared/network/HTTPService"; +import { getCastFileURL, getEndpoint } from "@ente/shared/network/api"; +import { wait } from "@ente/shared/utils"; +import { detectMediaMIMEType } from "services/detect-type"; +import { + EncryptedEnteFile, + EnteFile, + FileMagicMetadata, + FilePublicMagicMetadata, +} from "types/file"; + +/** + * Save the data received after pairing with a sender into local storage. + * + * We will read in back when we start the slideshow. + */ +export const storeCastData = (payload: unknown) => { + if (!payload || typeof payload != "object") + throw new Error("Unexpected cast data"); + + // Iterate through all the keys of the payload object and save them to + // localStorage. We don't validate here, we'll validate when we read these + // values back in `readCastData`. + for (const key in payload) { + window.localStorage.setItem(key, payload[key]); + } +}; + +interface CastData { + /** A key to decrypt the collection we are casting. */ + collectionKey: string; + /** A credential to use for fetching media files for this cast session. */ + castToken: string; +} + +/** + * Read back the cast data we got after pairing. + * + * Sibling of {@link storeCastData}. It throws an error if the expected data is + * not present in localStorage. + */ +export const readCastData = (): CastData => { + const collectionKey = ensureString(localStorage.getItem("collectionKey")); + const castToken = ensureString(localStorage.getItem("castToken")); + return { collectionKey, castToken }; +}; + +type RenderableImageURLPair = [url: string, nextURL: string]; + +/** + * An async generator function that loops through all the files in the + * collection, returning renderable URLs to each that can be displayed in a + * slideshow. 
+ * + * Each time it resolves with a pair of URLs (a {@link RenderableImageURLPair}), + * one for the next slideshow image, and one for the slideshow image that will + * be displayed after that. It also pre-fetches the next to next URL each time. + * + * If there are no renderable image in the collection, the sequence ends by + * yielding `{done: true}`. + * + * Otherwise when the generator reaches the end of the collection, it starts + * from the beginning again. So the sequence will continue indefinitely for + * non-empty collections. + * + * The generator ignores errors in the fetching and decoding of individual + * images in the collection, skipping the erroneous ones and moving onward to + * the next one. It will however throw if there are errors when getting the + * collection itself. This can happen both the first time, or when we are about + * to loop around to the start of the collection. + * + * @param castData The collection to show and credentials to fetch the files + * within it. + */ +export const renderableImageURLs = async function* (castData: CastData) { + const { collectionKey, castToken } = castData; + + /** + * We have a sliding window of four URLs, with the `urls[1]` being the one + * that is the one currently being shown in the slideshow. + * + * At each step, we shift the window towards the right by shifting out the + * leftmost (oldest) `urls[0]`, and adding a new one at the end. + * + * We can revoke url[0] when we shift it out because we know it is not being + * used anymore. + * + * We need to special case the first two renders to avoid revoking the + * initial URLs that are displayed the first two times. This results in a + * memory leak of the very first objectURL that we display. + */ + const urls: string[] = [""]; + let i = 0; + + /** + * Number of milliseconds to keep the slide on the screen. + */ + const slideDuration = 10000; /* 10 s */ + /** + * Time when we last yielded. 
+ * + * We use this to keep an roughly periodic spacing between yields that + * accounts for the time we spend fetching and processing the images. + */ + let lastYieldTime = Date.now(); + + // The first time around advance the lastYieldTime into the future so that + // we don't wait around too long for the first slide (we do want to wait a + // bit, for the user to see the checkmark animation as reassurance). + lastYieldTime += 7500; /* 7.5 s */ + + while (true) { + const encryptedFiles = shuffled( + await getEncryptedCollectionFiles(castToken), + ); + + let haveEligibleFiles = false; + + for (const encryptedFile of encryptedFiles) { + const file = await decryptEnteFile(encryptedFile, collectionKey); + + if (!isFileEligibleForCast(file)) continue; + + try { + urls.push(await createRenderableURL(castToken, file)); + haveEligibleFiles = true; + } catch (e) { + log.error("Skipping unrenderable file", e); + continue; + } + + if (urls.length < 4) continue; + + const oldestURL = urls.shift(); + if (oldestURL && i !== 1) URL.revokeObjectURL(oldestURL); + i += 1; + + const urlPair: RenderableImageURLPair = [ + ensure(urls[0]), + ensure(urls[1]), + ]; + + const elapsedTime = Date.now() - lastYieldTime; + if (elapsedTime > 0 && elapsedTime < slideDuration) + await wait(slideDuration - elapsedTime); + + lastYieldTime = Date.now(); + yield urlPair; + } + + // This collection does not have any files that we can show. + if (!haveEligibleFiles) return; + } +}; + +/** + * Fetch the list of non-deleted files in the given collection. + * + * The returned files are not decrypted yet, so their metadata will not be + * readable. 
+ */ +const getEncryptedCollectionFiles = async ( + castToken: string, +): Promise => { + let files: EncryptedEnteFile[] = []; + let sinceTime = 0; + let resp; + do { + resp = await HTTPService.get( + `${getEndpoint()}/cast/diff`, + { sinceTime }, + { + "Cache-Control": "no-cache", + "X-Cast-Access-Token": castToken, + }, + ); + const diff = resp.data.diff; + files = files.concat(diff.filter((file: EnteFile) => !file.isDeleted)); + sinceTime = diff.reduce( + (max: number, file: EnteFile) => Math.max(max, file.updationTime), + sinceTime, + ); + } while (resp.data.hasMore); + return files; +}; + +/** + * Decrypt the given {@link EncryptedEnteFile}, returning a {@link EnteFile}. + */ +const decryptEnteFile = async ( + encryptedFile: EncryptedEnteFile, + collectionKey: string, +): Promise => { + const worker = await ComlinkCryptoWorker.getInstance(); + const { + encryptedKey, + keyDecryptionNonce, + metadata, + magicMetadata, + pubMagicMetadata, + ...restFileProps + } = encryptedFile; + const fileKey = await worker.decryptB64( + encryptedKey, + keyDecryptionNonce, + collectionKey, + ); + const fileMetadata = await worker.decryptMetadata( + metadata.encryptedData, + metadata.decryptionHeader, + fileKey, + ); + let fileMagicMetadata: FileMagicMetadata; + let filePubMagicMetadata: FilePublicMagicMetadata; + if (magicMetadata?.data) { + fileMagicMetadata = { + ...encryptedFile.magicMetadata, + data: await worker.decryptMetadata( + magicMetadata.data, + magicMetadata.header, + fileKey, + ), + }; + } + if (pubMagicMetadata?.data) { + filePubMagicMetadata = { + ...pubMagicMetadata, + data: await worker.decryptMetadata( + pubMagicMetadata.data, + pubMagicMetadata.header, + fileKey, + ), + }; + } + const file = { + ...restFileProps, + key: fileKey, + metadata: fileMetadata, + magicMetadata: fileMagicMetadata, + pubMagicMetadata: filePubMagicMetadata, + }; + if (file.pubMagicMetadata?.data.editedTime) { + file.metadata.creationTime = file.pubMagicMetadata.data.editedTime; + } + 
if (file.pubMagicMetadata?.data.editedName) { + file.metadata.title = file.pubMagicMetadata.data.editedName; + } + return file; +}; + +const isFileEligibleForCast = (file: EnteFile) => { + if (!isImageOrLivePhoto(file)) return false; + if (file.info.fileSize > 100 * 1024 * 1024) return false; + + const [, extension] = nameAndExtension(file.metadata.title); + if (isNonWebImageFileExtension(extension)) return false; + + return true; +}; + +const isImageOrLivePhoto = (file: EnteFile) => { + const fileType = file.metadata.fileType; + return fileType == FILE_TYPE.IMAGE || fileType == FILE_TYPE.LIVE_PHOTO; +}; + +/** + * Create and return a new data URL that can be used to show the given + * {@link file} in our slideshow image viewer. + * + * Once we're done showing the file, the URL should be revoked using + * {@link URL.revokeObjectURL} to free up browser resources. + */ +const createRenderableURL = async (castToken: string, file: EnteFile) => + URL.createObjectURL(await renderableImageBlob(castToken, file)); + +const renderableImageBlob = async (castToken: string, file: EnteFile) => { + const fileName = file.metadata.title; + let blob = await downloadFile(castToken, file); + if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { + const { imageData } = await decodeLivePhoto(fileName, blob); + blob = new Blob([imageData]); + } + const mimeType = await detectMediaMIMEType(new File([blob], fileName)); + if (!mimeType) + throw new Error(`Could not detect MIME type for file ${fileName}`); + return new Blob([blob], { type: mimeType }); +}; + +const downloadFile = async (castToken: string, file: EnteFile) => { + if (!isImageOrLivePhoto(file)) + throw new Error("Can only cast images and live photos"); + + const url = getCastFileURL(file.id); + const resp = await HTTPService.get( + url, + null, + { + "X-Cast-Access-Token": castToken, + }, + { responseType: "arraybuffer" }, + ); + if (resp.data === undefined) throw new Error(`Failed to get ${url}`); + + const cryptoWorker = 
await ComlinkCryptoWorker.getInstance(); + const decrypted = await cryptoWorker.decryptFile( + new Uint8Array(resp.data), + await cryptoWorker.fromB64(file.file.decryptionHeader), + file.key, + ); + return new Response(decrypted).blob(); +}; diff --git a/web/apps/cast/src/services/cast/castService.ts b/web/apps/cast/src/services/cast/castService.ts deleted file mode 100644 index 84636d3a15..0000000000 --- a/web/apps/cast/src/services/cast/castService.ts +++ /dev/null @@ -1,304 +0,0 @@ -import log from "@/next/log"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; -import { CustomError, parseSharingErrorCodes } from "@ente/shared/error"; -import HTTPService from "@ente/shared/network/HTTPService"; -import { getEndpoint } from "@ente/shared/network/api"; -import localForage from "@ente/shared/storage/localForage"; -import { Collection, CollectionPublicMagicMetadata } from "types/collection"; -import { EncryptedEnteFile, EnteFile } from "types/file"; -import { decryptFile, mergeMetadata, sortFiles } from "utils/file"; - -export interface SavedCollectionFiles { - collectionLocalID: string; - files: EnteFile[]; -} -const ENDPOINT = getEndpoint(); -const COLLECTION_FILES_TABLE = "collection-files"; -const COLLECTIONS_TABLE = "collections"; - -const getLastSyncKey = (collectionUID: string) => `${collectionUID}-time`; - -export const getLocalFiles = async ( - collectionUID: string, -): Promise => { - const localSavedcollectionFiles = - (await localForage.getItem( - COLLECTION_FILES_TABLE, - )) || []; - const matchedCollection = localSavedcollectionFiles.find( - (item) => item.collectionLocalID === collectionUID, - ); - return matchedCollection?.files || []; -}; - -const savecollectionFiles = async ( - collectionUID: string, - files: EnteFile[], -) => { - const collectionFiles = - (await localForage.getItem( - COLLECTION_FILES_TABLE, - )) || []; - await localForage.setItem( - COLLECTION_FILES_TABLE, - dedupeCollectionFiles([ - { collectionLocalID: collectionUID, 
files }, - ...collectionFiles, - ]), - ); -}; - -export const getLocalCollections = async (collectionKey: string) => { - const localCollections = - (await localForage.getItem(COLLECTIONS_TABLE)) || []; - const collection = - localCollections.find( - (localSavedPublicCollection) => - localSavedPublicCollection.key === collectionKey, - ) || null; - return collection; -}; - -const saveCollection = async (collection: Collection) => { - const collections = - (await localForage.getItem(COLLECTIONS_TABLE)) ?? []; - await localForage.setItem( - COLLECTIONS_TABLE, - dedupeCollections([collection, ...collections]), - ); -}; - -const dedupeCollections = (collections: Collection[]) => { - const keySet = new Set([]); - return collections.filter((collection) => { - if (!keySet.has(collection.key)) { - keySet.add(collection.key); - return true; - } else { - return false; - } - }); -}; - -const dedupeCollectionFiles = (collectionFiles: SavedCollectionFiles[]) => { - const keySet = new Set([]); - return collectionFiles.filter(({ collectionLocalID: collectionUID }) => { - if (!keySet.has(collectionUID)) { - keySet.add(collectionUID); - return true; - } else { - return false; - } - }); -}; - -async function getSyncTime(collectionUID: string): Promise { - const lastSyncKey = getLastSyncKey(collectionUID); - const lastSyncTime = await localForage.getItem(lastSyncKey); - return lastSyncTime ?? 0; -} - -const updateSyncTime = async (collectionUID: string, time: number) => - await localForage.setItem(getLastSyncKey(collectionUID), time); - -export const syncPublicFiles = async ( - token: string, - collection: Collection, - setPublicFiles: (files: EnteFile[]) => void, -) => { - try { - let files: EnteFile[] = []; - const sortAsc = collection?.pubMagicMetadata?.data.asc ?? 
false; - const collectionUID = String(collection.id); - const localFiles = await getLocalFiles(collectionUID); - files = [...files, ...localFiles]; - try { - const lastSyncTime = await getSyncTime(collectionUID); - if (collection.updationTime === lastSyncTime) { - return sortFiles(files, sortAsc); - } - const fetchedFiles = await fetchFiles( - token, - collection, - lastSyncTime, - files, - setPublicFiles, - ); - - files = [...files, ...fetchedFiles]; - const latestVersionFiles = new Map(); - files.forEach((file) => { - const uid = `${file.collectionID}-${file.id}`; - if ( - !latestVersionFiles.has(uid) || - latestVersionFiles.get(uid).updationTime < file.updationTime - ) { - latestVersionFiles.set(uid, file); - } - }); - files = []; - // eslint-disable-next-line @typescript-eslint/no-unused-vars - for (const [_, file] of latestVersionFiles) { - if (file.isDeleted) { - continue; - } - files.push(file); - } - await savecollectionFiles(collectionUID, files); - await updateSyncTime(collectionUID, collection.updationTime); - setPublicFiles([...sortFiles(mergeMetadata(files), sortAsc)]); - } catch (e) { - const parsedError = parseSharingErrorCodes(e); - log.error("failed to sync shared collection files", e); - if (parsedError.message === CustomError.TOKEN_EXPIRED) { - throw e; - } - } - return [...sortFiles(mergeMetadata(files), sortAsc)]; - } catch (e) { - log.error("failed to get local or sync shared collection files", e); - throw e; - } -}; - -const fetchFiles = async ( - castToken: string, - collection: Collection, - sinceTime: number, - files: EnteFile[], - setPublicFiles: (files: EnteFile[]) => void, -): Promise => { - try { - let decryptedFiles: EnteFile[] = []; - let time = sinceTime; - let resp; - const sortAsc = collection?.pubMagicMetadata?.data.asc ?? 
false; - do { - if (!castToken) { - break; - } - resp = await HTTPService.get( - `${ENDPOINT}/cast/diff`, - { - sinceTime: time, - }, - { - "Cache-Control": "no-cache", - "X-Cast-Access-Token": castToken, - }, - ); - decryptedFiles = [ - ...decryptedFiles, - ...(await Promise.all( - resp.data.diff.map(async (file: EncryptedEnteFile) => { - if (!file.isDeleted) { - return await decryptFile(file, collection.key); - } else { - return file; - } - }) as Promise[], - )), - ]; - - if (resp.data.diff.length) { - time = resp.data.diff.slice(-1)[0].updationTime; - } - setPublicFiles( - sortFiles( - mergeMetadata( - [...(files || []), ...decryptedFiles].filter( - (item) => !item.isDeleted, - ), - ), - sortAsc, - ), - ); - } while (resp.data.hasMore); - return decryptedFiles; - } catch (e) { - log.error("Get cast files failed", e); - throw e; - } -}; - -export const getCastCollection = async ( - castToken: string, - collectionKey: string, -): Promise => { - try { - const resp = await HTTPService.get(`${ENDPOINT}/cast/info`, null, { - "Cache-Control": "no-cache", - "X-Cast-Access-Token": castToken, - }); - const fetchedCollection = resp.data.collection; - - const cryptoWorker = await ComlinkCryptoWorker.getInstance(); - - const collectionName = (fetchedCollection.name = - fetchedCollection.name || - (await cryptoWorker.decryptToUTF8( - fetchedCollection.encryptedName, - fetchedCollection.nameDecryptionNonce, - collectionKey, - ))); - - let collectionPublicMagicMetadata: CollectionPublicMagicMetadata; - if (fetchedCollection.pubMagicMetadata?.data) { - collectionPublicMagicMetadata = { - ...fetchedCollection.pubMagicMetadata, - data: await cryptoWorker.decryptMetadata( - fetchedCollection.pubMagicMetadata.data, - fetchedCollection.pubMagicMetadata.header, - collectionKey, - ), - }; - } - - const collection = { - ...fetchedCollection, - name: collectionName, - key: collectionKey, - pubMagicMetadata: collectionPublicMagicMetadata, - }; - await saveCollection(collection); - return 
collection; - } catch (e) { - log.error("failed to get cast collection", e); - throw e; - } -}; - -export const removeCollection = async ( - collectionUID: string, - collectionKey: string, -) => { - const collections = - (await localForage.getItem(COLLECTIONS_TABLE)) || []; - await localForage.setItem( - COLLECTIONS_TABLE, - collections.filter((collection) => collection.key !== collectionKey), - ); - await removeCollectionFiles(collectionUID); -}; - -export const removeCollectionFiles = async (collectionUID: string) => { - await localForage.removeItem(getLastSyncKey(collectionUID)); - const collectionFiles = - (await localForage.getItem( - COLLECTION_FILES_TABLE, - )) ?? []; - await localForage.setItem( - COLLECTION_FILES_TABLE, - collectionFiles.filter( - (collectionFiles) => - collectionFiles.collectionLocalID !== collectionUID, - ), - ); -}; - -export const storeCastData = (payloadObj: Object) => { - // iterate through all the keys in the payload object and set them in localStorage. - for (const key in payloadObj) { - window.localStorage.setItem(key, payloadObj[key]); - } -}; diff --git a/web/apps/cast/src/services/castDownloadManager.ts b/web/apps/cast/src/services/castDownloadManager.ts deleted file mode 100644 index 76b37c082a..0000000000 --- a/web/apps/cast/src/services/castDownloadManager.ts +++ /dev/null @@ -1,103 +0,0 @@ -import ComlinkCryptoWorker from "@ente/shared/crypto"; -import { CustomError } from "@ente/shared/error"; -import HTTPService from "@ente/shared/network/HTTPService"; -import { getCastFileURL } from "@ente/shared/network/api"; -import { FILE_TYPE } from "constants/file"; -import { EnteFile } from "types/file"; -import { generateStreamFromArrayBuffer } from "utils/file"; - -class CastDownloadManager { - async downloadFile(castToken: string, file: EnteFile) { - const cryptoWorker = await ComlinkCryptoWorker.getInstance(); - - if ( - file.metadata.fileType === FILE_TYPE.IMAGE || - file.metadata.fileType === FILE_TYPE.LIVE_PHOTO - ) { - 
const resp = await HTTPService.get( - getCastFileURL(file.id), - null, - { - "X-Cast-Access-Token": castToken, - }, - { responseType: "arraybuffer" }, - ); - if (typeof resp.data === "undefined") { - throw Error(CustomError.REQUEST_FAILED); - } - const decrypted = await cryptoWorker.decryptFile( - new Uint8Array(resp.data), - await cryptoWorker.fromB64(file.file.decryptionHeader), - file.key, - ); - return generateStreamFromArrayBuffer(decrypted); - } - const resp = await fetch(getCastFileURL(file.id), { - headers: { - "X-Cast-Access-Token": castToken, - }, - }); - const reader = resp.body.getReader(); - - const stream = new ReadableStream({ - async start(controller) { - const decryptionHeader = await cryptoWorker.fromB64( - file.file.decryptionHeader, - ); - const fileKey = await cryptoWorker.fromB64(file.key); - const { pullState, decryptionChunkSize } = - await cryptoWorker.initChunkDecryption( - decryptionHeader, - fileKey, - ); - let data = new Uint8Array(); - // The following function handles each data chunk - function push() { - // "done" is a Boolean and value a "Uint8Array" - reader.read().then(async ({ done, value }) => { - // Is there more data to read? 
- if (!done) { - const buffer = new Uint8Array( - data.byteLength + value.byteLength, - ); - buffer.set(new Uint8Array(data), 0); - buffer.set(new Uint8Array(value), data.byteLength); - if (buffer.length > decryptionChunkSize) { - const fileData = buffer.slice( - 0, - decryptionChunkSize, - ); - const { decryptedData } = - await cryptoWorker.decryptFileChunk( - fileData, - pullState, - ); - controller.enqueue(decryptedData); - data = buffer.slice(decryptionChunkSize); - } else { - data = buffer; - } - push(); - } else { - if (data) { - const { decryptedData } = - await cryptoWorker.decryptFileChunk( - data, - pullState, - ); - controller.enqueue(decryptedData); - data = null; - } - controller.close(); - } - }); - } - - push(); - }, - }); - return stream; - } -} - -export default new CastDownloadManager(); diff --git a/web/apps/cast/src/services/detect-type.ts b/web/apps/cast/src/services/detect-type.ts new file mode 100644 index 0000000000..187e19df84 --- /dev/null +++ b/web/apps/cast/src/services/detect-type.ts @@ -0,0 +1,28 @@ +import { KnownFileTypeInfos } from "@/media/file-type"; +import { lowercaseExtension } from "@/next/file"; +import FileType from "file-type"; + +/** + * Try to deduce the MIME type for the given {@link file}. Return the MIME type + * string if successful _and_ if it is an image or a video, otherwise return + * `undefined`. + * + * It first peeks into the file's initial contents to detect the MIME type. If + * that doesn't give any results, it tries to deduce it from the file's name. 
+ */ +export const detectMediaMIMEType = async (file: File): Promise => { + const chunkSizeForTypeDetection = 4100; + const fileChunk = file.slice(0, chunkSizeForTypeDetection); + const chunk = new Uint8Array(await fileChunk.arrayBuffer()); + const result = await FileType.fromBuffer(chunk); + + const mime = result?.mime; + if (mime) { + if (mime.startsWith("image/") || mime.startsWith("video/")) return mime; + else throw new Error(`Detected MIME type ${mime} is not a media file`); + } + + const ext = lowercaseExtension(file.name); + if (!ext) return undefined; + return KnownFileTypeInfos.find((f) => f.extension == ext)?.mimeType; +}; diff --git a/web/apps/cast/src/services/livePhotoService.ts b/web/apps/cast/src/services/livePhotoService.ts deleted file mode 100644 index 789234bd3e..0000000000 --- a/web/apps/cast/src/services/livePhotoService.ts +++ /dev/null @@ -1,32 +0,0 @@ -import JSZip from "jszip"; -import { EnteFile } from "types/file"; -import { - getFileExtensionWithDot, - getFileNameWithoutExtension, -} from "utils/file"; - -class LivePhoto { - image: Uint8Array; - video: Uint8Array; - imageNameTitle: string; - videoNameTitle: string; -} - -export const decodeLivePhoto = async (file: EnteFile, zipBlob: Blob) => { - const originalName = getFileNameWithoutExtension(file.metadata.title); - const zip = await JSZip.loadAsync(zipBlob, { createFolders: true }); - - const livePhoto = new LivePhoto(); - for (const zipFilename in zip.files) { - if (zipFilename.startsWith("image")) { - livePhoto.imageNameTitle = - originalName + getFileExtensionWithDot(zipFilename); - livePhoto.image = await zip.files[zipFilename].async("uint8array"); - } else if (zipFilename.startsWith("video")) { - livePhoto.videoNameTitle = - originalName + getFileExtensionWithDot(zipFilename); - livePhoto.video = await zip.files[zipFilename].async("uint8array"); - } - } - return livePhoto; -}; diff --git a/web/apps/cast/src/services/pair.ts b/web/apps/cast/src/services/pair.ts new file mode 
100644 index 0000000000..66f9feddd1 --- /dev/null +++ b/web/apps/cast/src/services/pair.ts @@ -0,0 +1,193 @@ +import log from "@/next/log"; +import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium"; +import castGateway from "@ente/shared/network/cast"; +import { wait } from "@ente/shared/utils"; +import _sodium from "libsodium-wrappers"; +import { type Cast } from "../utils/cast-receiver"; + +export interface Registration { + /** A pairing code shown on the screen. A client can use this to connect. */ + pairingCode: string; + /** The public part of the keypair we registered with the server. */ + publicKeyB64: string; + /** The private part of the keypair we registered with the server. */ + privateKeyB64: string; +} + +/** + * Register a keypair with the server and return a pairing code that can be used + * to connect to us. Phase 1 of the pairing protocol. + * + * [Note: Pairing protocol] + * + * The Chromecast Framework (represented here by our handle to the Chromecast + * Web SDK, {@link cast}) itself is used for only the initial handshake, none of + * the data, even encrypted passes over it thereafter. + * + * The pairing happens in two phases: + * + * Phase 1 - {@link register} + * + * 1. We (the receiver) generate a public/private keypair. and register the + * public part of it with museum. + * + * 2. Museum gives us a pairing "code" in lieu. Show this on the screen. + * + * Phase 2 - {@link advertiseCode} + * + * There are two ways the client can connect - either by sending us a blank + * message over the Chromecast protocol (to which we'll reply with the pairing + * code), or by the user manually entering the pairing code on their screen. + * + * 3. Listen for incoming messages over the Chromecast connection. + * + * 4. The client (our Web or mobile app) will connect using the "sender" + * Chromecast SDK. This will result in a bi-directional channel between us + * ("receiver") and the Ente client app ("sender"). + * + * 5. 
Thereafter, if at any time the sender disconnects, close the Chromecast + * context. This effectively shuts us down, causing the entire page to get + * reloaded. + * + * 6. After connecting, the sender sends an (empty) message. We reply by sending + * them a message containing the pairing code. This exchange is the only data + * that traverses over the Chromecast connection. + * + * Once the client gets the pairing code (via Chromecast or manual entry), + * they'll let museum know. So in parallel with Phase 2, we perform Phase 3. + * + * Phase 3 - {@link getCastData} in a setInterval. + * + * 7. Keep polling museum to ask it if anyone has claimed that code we vended + * out and used that to send us an payload encrypted using our public key. + * + * 8. When that happens, decrypt that data with our private key, and return this + * payload. It is a JSON object that contains the data we need to initiate a + * slideshow for a particular Ente collection. + * + * Phase 1 (Steps 1 and 2) are done by the {@link register} function, which + * returns a {@link Registration}. + * + * At this time we start showing the pairing code on the UI, and start phase 2, + * {@link advertiseCode} to vend out the pairing code to Chromecast connections. + * + * In parallel, we start Phase 3, calling {@link getCastData} in a loop. Once we + * get a response, we decrypt it to get the data we need to start the slideshow. + */ +export const register = async (): Promise => { + // Generate keypair. + const keypair = await generateKeyPair(); + const publicKeyB64 = await toB64(keypair.publicKey); + const privateKeyB64 = await toB64(keypair.privateKey); + + // Register keypair with museum to get a pairing code. + let pairingCode: string; + // eslint has fixed this spurious warning, but we're not on the latest + // version yet, so add a disable. 
+ // https://github.com/eslint/eslint/pull/18286 + /* eslint-disable no-constant-condition */ + while (true) { + try { + pairingCode = await castGateway.registerDevice(publicKeyB64); + } catch (e) { + log.error("Failed to register public key with server", e); + } + if (pairingCode) break; + // Schedule retry after 10 seconds. + await wait(10000); + } + + return { pairingCode, publicKeyB64, privateKeyB64 }; +}; + +/** + * Listen for incoming messages on the given {@link cast} receiver, replying to + * each of them with a pairing code obtained using the given {@link pairingCode} + * callback. Phase 2 of the pairing protocol. + * + * See: [Note: Pairing protocol]. + */ +export const advertiseCode = ( + cast: Cast, + pairingCode: () => string | undefined, +) => { + // Prepare the Chromecast "context". + const context = cast.framework.CastReceiverContext.getInstance(); + const namespace = "urn:x-cast:pair-request"; + + const options = new cast.framework.CastReceiverOptions(); + // Do not automatically close the connection when the sender disconnects. + options.maxInactivity = 3600; /* 1 hour */ + // TODO:Is this required? The docs say "(The default type of a message bus + // is JSON; if not provided here)." + options.customNamespaces = Object.assign({}); + options.customNamespaces[namespace] = + cast.framework.system.MessageType.JSON; + // TODO: This looks like the only one needed, but a comment with the reason + // might be good. + options.disableIdleTimeout = true; + + // Reply with the code that we have if anyone asks over Chromecast. 
+ const incomingMessageListener = ({ senderId }: { senderId: string }) => { + const code = pairingCode(); + if (!code) { + log.warn( + "Ignoring incoming Chromecast message because we do not yet have a pairing code", + ); + return; + } + + context.sendCustomMessage(namespace, senderId, { code }); + }; + + context.addCustomMessageListener( + namespace, + // We need to cast, the `senderId` is present in the message we get but + // not present in the TypeScript type. + incomingMessageListener as unknown as SystemEventHandler, + ); + + // Shutdown ourselves if the sender disconnects. + // TODO(MR): I assume the page reloads on shutdown. Is that correct? + context.addEventListener( + cast.framework.system.EventType.SENDER_DISCONNECTED, + () => context.stop(), + ); + + // Start listening for Chromecast connections. + context.start(options); +}; + +/** + * Ask museum if anyone has sent a (encrypted) payload corresponding to the + * given pairing code. If so, decrypt it using our private key and return the + * JSON payload. Phase 3 of the pairing protocol. + * + * Returns `undefined` if there hasn't been any data obtained yet. + * + * See: [Note: Pairing protocol]. + */ +export const getCastData = async (registration: Registration) => { + const { pairingCode, publicKeyB64, privateKeyB64 } = registration; + + // The client will send us the encrypted payload using our public key that + // we registered with museum. + const encryptedCastData = await castGateway.getCastData(pairingCode); + if (!encryptedCastData) return; + + // Decrypt it using the private key of the pair and return the plaintext + // payload, which'll be a JSON object containing the data we need to start a + // slideshow for some collection. 
+ const decryptedCastData = await boxSealOpen( + encryptedCastData, + publicKeyB64, + privateKeyB64, + ); + + return JSON.parse(atob(decryptedCastData)); +}; + +const generateKeyPair = async () => { + await _sodium.ready; + return _sodium.crypto_box_keypair(); +}; diff --git a/web/apps/cast/src/services/readerService.ts b/web/apps/cast/src/services/readerService.ts deleted file mode 100644 index 19f9bb9311..0000000000 --- a/web/apps/cast/src/services/readerService.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { convertBytesToHumanReadable } from "@/next/file"; -import log from "@/next/log"; - -export async function getUint8ArrayView(file: Blob): Promise { - try { - return new Uint8Array(await file.arrayBuffer()); - } catch (e) { - log.error( - `Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`, - e, - ); - throw e; - } -} diff --git a/web/apps/cast/src/services/typeDetectionService.ts b/web/apps/cast/src/services/typeDetectionService.ts deleted file mode 100644 index 5acd3844dc..0000000000 --- a/web/apps/cast/src/services/typeDetectionService.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { nameAndExtension } from "@/next/file"; -import log from "@/next/log"; -import { CustomError } from "@ente/shared/error"; -import { FILE_TYPE } from "constants/file"; -import { - KNOWN_NON_MEDIA_FORMATS, - WHITELISTED_FILE_FORMATS, -} from "constants/upload"; -import FileType from "file-type"; -import { FileTypeInfo } from "types/upload"; -import { getUint8ArrayView } from "./readerService"; - -const TYPE_VIDEO = "video"; -const TYPE_IMAGE = "image"; -const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100; - -export async function getFileType(receivedFile: File): Promise { - try { - let fileType: FILE_TYPE; - - const typeResult = await extractFileType(receivedFile); - const mimTypeParts: string[] = typeResult.mime?.split("/"); - if (mimTypeParts?.length !== 2) { - throw Error(CustomError.INVALID_MIME_TYPE(typeResult.mime)); - } - - switch (mimTypeParts[0]) { - case 
TYPE_IMAGE: - fileType = FILE_TYPE.IMAGE; - break; - case TYPE_VIDEO: - fileType = FILE_TYPE.VIDEO; - break; - default: - throw Error(CustomError.NON_MEDIA_FILE); - } - return { - fileType, - exactType: typeResult.ext, - mimeType: typeResult.mime, - }; - } catch (e) { - const ne = nameAndExtension(receivedFile.name); - const fileFormat = ne[1].toLowerCase(); - const whiteListedFormat = WHITELISTED_FILE_FORMATS.find( - (a) => a.exactType === fileFormat, - ); - if (whiteListedFormat) { - return whiteListedFormat; - } - if (KNOWN_NON_MEDIA_FORMATS.includes(fileFormat)) { - throw Error(CustomError.UNSUPPORTED_FILE_FORMAT); - } - if (e.message === CustomError.NON_MEDIA_FILE) { - log.error(`unsupported file format ${fileFormat}`, e); - throw Error(CustomError.UNSUPPORTED_FILE_FORMAT); - } - log.error(`type detection failed for format ${fileFormat}`, e); - throw Error(CustomError.TYPE_DETECTION_FAILED(fileFormat)); - } -} - -async function extractFileType(file: File) { - const fileBlobChunk = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION); - const fileDataChunk = await getUint8ArrayView(fileBlobChunk); - return getFileTypeFromBuffer(fileDataChunk); -} - -async function getFileTypeFromBuffer(buffer: Uint8Array) { - const result = await FileType.fromBuffer(buffer); - if (!result?.mime) { - let logableInfo = ""; - try { - logableInfo = `result: ${JSON.stringify(result)}`; - } catch (e) { - logableInfo = "failed to stringify result"; - } - throw Error(`mimetype missing from file type result - ${logableInfo}`); - } - return result; -} diff --git a/web/apps/cast/src/types/collection/index.ts b/web/apps/cast/src/types/collection/index.ts deleted file mode 100644 index f9ea9ef04b..0000000000 --- a/web/apps/cast/src/types/collection/index.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { CollectionSummaryType, CollectionType } from "constants/collection"; -import { EnteFile } from "types/file"; -import { - EncryptedMagicMetadata, - MagicMetadataCore, - SUB_TYPE, - VISIBILITY_STATE, -} 
from "types/magicMetadata"; - -export enum COLLECTION_ROLE { - VIEWER = "VIEWER", - OWNER = "OWNER", - COLLABORATOR = "COLLABORATOR", - UNKNOWN = "UNKNOWN", -} - -export interface CollectionUser { - id: number; - email: string; - role: COLLECTION_ROLE; -} - -export interface EncryptedCollection { - id: number; - owner: CollectionUser; - // collection name was unencrypted in the past, so we need to keep it as optional - name?: string; - encryptedKey: string; - keyDecryptionNonce: string; - encryptedName: string; - nameDecryptionNonce: string; - type: CollectionType; - attributes: collectionAttributes; - sharees: CollectionUser[]; - publicURLs?: PublicURL[]; - updationTime: number; - isDeleted: boolean; - magicMetadata: EncryptedMagicMetadata; - pubMagicMetadata: EncryptedMagicMetadata; - sharedMagicMetadata: EncryptedMagicMetadata; -} - -export interface Collection - extends Omit< - EncryptedCollection, - | "encryptedKey" - | "keyDecryptionNonce" - | "encryptedName" - | "nameDecryptionNonce" - | "magicMetadata" - | "pubMagicMetadata" - | "sharedMagicMetadata" - > { - key: string; - name: string; - magicMetadata: CollectionMagicMetadata; - pubMagicMetadata: CollectionPublicMagicMetadata; - sharedMagicMetadata: CollectionShareeMagicMetadata; -} - -// define a method on Collection interface to return the sync key as collection.id-time -// this is used to store the last sync time of a collection in local storage - -export interface PublicURL { - url: string; - deviceLimit: number; - validTill: number; - enableDownload: boolean; - enableCollect: boolean; - passwordEnabled: boolean; - nonce?: string; - opsLimit?: number; - memLimit?: number; -} - -export interface UpdatePublicURL { - collectionID: number; - disablePassword?: boolean; - enableDownload?: boolean; - enableCollect?: boolean; - validTill?: number; - deviceLimit?: number; - passHash?: string; - nonce?: string; - opsLimit?: number; - memLimit?: number; -} - -export interface CreatePublicAccessTokenRequest { - 
collectionID: number; - validTill?: number; - deviceLimit?: number; -} - -export interface EncryptedFileKey { - id: number; - encryptedKey: string; - keyDecryptionNonce: string; -} - -export interface AddToCollectionRequest { - collectionID: number; - files: EncryptedFileKey[]; -} - -export interface MoveToCollectionRequest { - fromCollectionID: number; - toCollectionID: number; - files: EncryptedFileKey[]; -} - -export interface collectionAttributes { - encryptedPath?: string; - pathDecryptionNonce?: string; -} - -export type CollectionToFileMap = Map; - -export interface RemoveFromCollectionRequest { - collectionID: number; - fileIDs: number[]; -} - -export interface CollectionMagicMetadataProps { - visibility?: VISIBILITY_STATE; - subType?: SUB_TYPE; - order?: number; -} - -export type CollectionMagicMetadata = - MagicMetadataCore; - -export interface CollectionShareeMetadataProps { - visibility?: VISIBILITY_STATE; -} -export type CollectionShareeMagicMetadata = - MagicMetadataCore; - -export interface CollectionPublicMagicMetadataProps { - asc?: boolean; - coverID?: number; -} - -export type CollectionPublicMagicMetadata = - MagicMetadataCore; - -export interface CollectionSummary { - id: number; - name: string; - type: CollectionSummaryType; - coverFile: EnteFile; - latestFile: EnteFile; - fileCount: number; - updationTime: number; - order?: number; -} - -export type CollectionSummaries = Map; -export type CollectionFilesCount = Map; diff --git a/web/apps/cast/src/types/file/index.ts b/web/apps/cast/src/types/file/index.ts index 1813b5416d..c21f04a0ab 100644 --- a/web/apps/cast/src/types/file/index.ts +++ b/web/apps/cast/src/types/file/index.ts @@ -1,9 +1,9 @@ +import type { Metadata } from "@/media/types/file"; import { EncryptedMagicMetadata, MagicMetadataCore, VISIBILITY_STATE, } from "types/magicMetadata"; -import { Metadata } from "types/upload"; export interface MetadataFileAttributes { encryptedData: string; @@ -64,25 +64,6 @@ export interface EnteFile 
isConverted?: boolean; } -export interface TrashRequest { - items: TrashRequestItems[]; -} - -export interface TrashRequestItems { - fileID: number; - collectionID: number; -} - -export interface FileWithUpdatedMagicMetadata { - file: EnteFile; - updatedMagicMetadata: FileMagicMetadata; -} - -export interface FileWithUpdatedPublicMagicMetadata { - file: EnteFile; - updatedPublicMagicMetadata: FilePublicMagicMetadata; -} - export interface FileMagicMetadataProps { visibility?: VISIBILITY_STATE; filePaths?: string[]; diff --git a/web/apps/cast/src/types/upload/index.ts b/web/apps/cast/src/types/upload/index.ts deleted file mode 100644 index ef44b4a23f..0000000000 --- a/web/apps/cast/src/types/upload/index.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { - B64EncryptionResult, - LocalFileAttributes, -} from "@ente/shared/crypto/types"; -import { FILE_TYPE } from "constants/file"; -import { - FilePublicMagicMetadata, - FilePublicMagicMetadataProps, - MetadataFileAttributes, - S3FileAttributes, -} from "types/file"; -import { EncryptedMagicMetadata } from "types/magicMetadata"; - -export interface DataStream { - stream: ReadableStream; - chunkCount: number; -} - -export function isDataStream(object: any): object is DataStream { - return "stream" in object; -} - -export type Logger = (message: string) => void; - -export interface Metadata { - title: string; - creationTime: number; - modificationTime: number; - latitude: number; - longitude: number; - fileType: FILE_TYPE; - hasStaticThumbnail?: boolean; - hash?: string; - imageHash?: string; - videoHash?: string; - localID?: number; - version?: number; - deviceFolder?: string; -} - -export interface FileTypeInfo { - fileType: FILE_TYPE; - exactType: string; - mimeType?: string; - imageType?: string; - videoType?: string; -} - -export interface UploadURL { - url: string; - objectKey: string; -} - -export interface FileInMemory { - filedata: Uint8Array | DataStream; - thumbnail: Uint8Array; - hasStaticThumbnail: boolean; -} - 
-export interface FileWithMetadata - extends Omit { - metadata: Metadata; - localID: number; - pubMagicMetadata: FilePublicMagicMetadata; -} - -export interface EncryptedFile { - file: ProcessedFile; - fileKey: B64EncryptionResult; -} -export interface ProcessedFile { - file: LocalFileAttributes; - thumbnail: LocalFileAttributes; - metadata: LocalFileAttributes; - pubMagicMetadata: EncryptedMagicMetadata; - localID: number; -} -export interface BackupedFile { - file: S3FileAttributes; - thumbnail: S3FileAttributes; - metadata: MetadataFileAttributes; - pubMagicMetadata: EncryptedMagicMetadata; -} - -export interface UploadFile extends BackupedFile { - collectionID: number; - encryptedKey: string; - keyDecryptionNonce: string; -} - -export interface ParsedExtractedMetadata { - location: Location; - creationTime: number; - width: number; - height: number; -} - -// This is used to prompt the user the make upload strategy choice -export interface ImportSuggestion { - rootFolderName: string; - hasNestedFolders: boolean; - hasRootLevelFileWithFolder: boolean; -} - -export interface PublicUploadProps { - token: string; - passwordToken: string; - accessedThroughSharedURL: boolean; -} - -export interface ExtractMetadataResult { - metadata: Metadata; - publicMagicMetadata: FilePublicMagicMetadataProps; -} diff --git a/web/apps/cast/src/utils/cast-receiver.tsx b/web/apps/cast/src/utils/cast-receiver.tsx new file mode 100644 index 0000000000..666a085edc --- /dev/null +++ b/web/apps/cast/src/utils/cast-receiver.tsx @@ -0,0 +1,32 @@ +/// + +export type Cast = typeof cast; + +let _cast: Cast | undefined; +let _loader: Promise | undefined; + +/** + * Load the Chromecast Web Receiver SDK and return a reference to the `cast` + * global object that the SDK attaches to the window. + * + * Calling this function multiple times is fine, once the Chromecast SDK is + * loaded it'll thereafter return the reference to the same object always. 
+ * + * https://developers.google.com/cast/docs/web_receiver/basic + */ +export const castReceiverLoadingIfNeeded = async (): Promise => { + if (_cast) return _cast; + if (_loader) return await _loader; + + _loader = new Promise((resolve) => { + const script = document.createElement("script"); + script.src = + "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; + + script.addEventListener("load", () => resolve(cast)); + document.body.appendChild(script); + }); + const c = await _loader; + _cast = c; + return c; +}; diff --git a/web/apps/cast/src/utils/file/index.ts b/web/apps/cast/src/utils/file/index.ts deleted file mode 100644 index 4f6311cbdf..0000000000 --- a/web/apps/cast/src/utils/file/index.ts +++ /dev/null @@ -1,151 +0,0 @@ -import log from "@/next/log"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; -import { FILE_TYPE, RAW_FORMATS } from "constants/file"; -import CastDownloadManager from "services/castDownloadManager"; -import { decodeLivePhoto } from "services/livePhotoService"; -import { getFileType } from "services/typeDetectionService"; -import { - EncryptedEnteFile, - EnteFile, - FileMagicMetadata, - FilePublicMagicMetadata, -} from "types/file"; - -export function sortFiles(files: EnteFile[], sortAsc = false) { - // sort based on the time of creation time of the file, - // for files with same creation time, sort based on the time of last modification - const factor = sortAsc ? 
-1 : 1; - return files.sort((a, b) => { - if (a.metadata.creationTime === b.metadata.creationTime) { - return ( - factor * - (b.metadata.modificationTime - a.metadata.modificationTime) - ); - } - return factor * (b.metadata.creationTime - a.metadata.creationTime); - }); -} - -export async function decryptFile( - file: EncryptedEnteFile, - collectionKey: string, -): Promise { - try { - const worker = await ComlinkCryptoWorker.getInstance(); - const { - encryptedKey, - keyDecryptionNonce, - metadata, - magicMetadata, - pubMagicMetadata, - ...restFileProps - } = file; - const fileKey = await worker.decryptB64( - encryptedKey, - keyDecryptionNonce, - collectionKey, - ); - const fileMetadata = await worker.decryptMetadata( - metadata.encryptedData, - metadata.decryptionHeader, - fileKey, - ); - let fileMagicMetadata: FileMagicMetadata; - let filePubMagicMetadata: FilePublicMagicMetadata; - if (magicMetadata?.data) { - fileMagicMetadata = { - ...file.magicMetadata, - data: await worker.decryptMetadata( - magicMetadata.data, - magicMetadata.header, - fileKey, - ), - }; - } - if (pubMagicMetadata?.data) { - filePubMagicMetadata = { - ...pubMagicMetadata, - data: await worker.decryptMetadata( - pubMagicMetadata.data, - pubMagicMetadata.header, - fileKey, - ), - }; - } - return { - ...restFileProps, - key: fileKey, - metadata: fileMetadata, - magicMetadata: fileMagicMetadata, - pubMagicMetadata: filePubMagicMetadata, - }; - } catch (e) { - log.error("file decryption failed", e); - throw e; - } -} - -export function getFileNameWithoutExtension(filename: string) { - const lastDotPosition = filename.lastIndexOf("."); - if (lastDotPosition === -1) return filename; - else return filename.slice(0, lastDotPosition); -} - -export function getFileExtensionWithDot(filename: string) { - const lastDotPosition = filename.lastIndexOf("."); - if (lastDotPosition === -1) return ""; - else return filename.slice(lastDotPosition); -} - -export function generateStreamFromArrayBuffer(data: 
Uint8Array) { - return new ReadableStream({ - async start(controller: ReadableStreamDefaultController) { - controller.enqueue(data); - controller.close(); - }, - }); -} - -export function isRawFileFromFileName(fileName: string) { - for (const rawFormat of RAW_FORMATS) { - if (fileName.toLowerCase().endsWith(rawFormat)) { - return true; - } - } - return false; -} - -export function mergeMetadata(files: EnteFile[]): EnteFile[] { - return files.map((file) => { - if (file.pubMagicMetadata?.data.editedTime) { - file.metadata.creationTime = file.pubMagicMetadata.data.editedTime; - } - if (file.pubMagicMetadata?.data.editedName) { - file.metadata.title = file.pubMagicMetadata.data.editedName; - } - - return file; - }); -} - -export const getPreviewableImage = async ( - file: EnteFile, - castToken: string, -): Promise => { - try { - let fileBlob = await new Response( - await CastDownloadManager.downloadFile(castToken, file), - ).blob(); - if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - const livePhoto = await decodeLivePhoto(file, fileBlob); - fileBlob = new Blob([livePhoto.image]); - } - const fileType = await getFileType( - new File([fileBlob], file.metadata.title), - ); - fileBlob = new Blob([fileBlob], { type: fileType.mimeType }); - return fileBlob; - } catch (e) { - log.error("failed to download file", e); - } -}; diff --git a/web/apps/cast/src/utils/useCastReceiver.tsx b/web/apps/cast/src/utils/useCastReceiver.tsx deleted file mode 100644 index 176b968824..0000000000 --- a/web/apps/cast/src/utils/useCastReceiver.tsx +++ /dev/null @@ -1,44 +0,0 @@ -declare const cast: any; - -import { useEffect, useState } from "react"; - -type Receiver = { - cast: typeof cast; -}; - -const load = (() => { - let promise: Promise | null = null; - - return () => { - if (promise === null) { - promise = new Promise((resolve) => { - const script = document.createElement("script"); - script.src = - "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; 
- - script.addEventListener("load", () => { - resolve({ - cast, - }); - }); - - document.body.appendChild(script); - }); - } - return promise; - }; -})(); - -export const useCastReceiver = () => { - const [receiver, setReceiver] = useState({ - cast: null, - }); - - useEffect(() => { - load().then((receiver) => { - setReceiver(receiver); - }); - }); - - return receiver; -}; diff --git a/web/apps/photos/package.json b/web/apps/photos/package.json index 6ae109af16..1196b4ddf7 100644 --- a/web/apps/photos/package.json +++ b/web/apps/photos/package.json @@ -3,6 +3,7 @@ "version": "0.0.0", "private": true, "dependencies": { + "@/media": "*", "@/next": "*", "@date-io/date-fns": "^2.14.0", "@ente/accounts": "*", @@ -20,16 +21,15 @@ "exifr": "^7.1.3", "fast-srp-hap": "^2.0.4", "ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm", - "file-type": "^16.5.4", "formik": "^2.1.5", "hdbscan": "0.0.1-alpha.5", "heic-convert": "^2.0.0", "idb": "^7.1.1", - "jszip": "3.10.1", "leaflet": "^1.9.4", "leaflet-defaulticon-compatibility": "^0.1.1", "localforage": "^1.9.0", "memoize-one": "^6.0.0", + "mime-types": "^2.1.35", "ml-matrix": "^6.10.4", "otpauth": "^9.0.2", "p-debounce": "^4.0.0", diff --git a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx index fdabffe846..3d9d061663 100644 --- a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx +++ b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx @@ -161,9 +161,7 @@ export default function AlbumCastDialog(props: Props) { {browserCanCast && ( <> - {t( - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE", - )} + {t("AUTO_CAST_PAIR_DESC")} )} - {t("PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE")} + {t("PAIR_WITH_PIN_DESC")} { log.info( - `[${ - item.id - }] getSlideData called for thumbnail:${!!item.msrc} sourceLoaded:${ - item.isSourceLoaded - } 
fetching:${fetching[item.id]}`, + `[${item.id}] getSlideData called for thumbnail: ${!!item.msrc} sourceLoaded: ${!!item.isSourceLoaded} fetching: ${!!fetching[item.id]}`, ); if (!item.msrc) { @@ -327,9 +323,7 @@ const PhotoFrame = ({ try { updateURL(index)(item.id, url); log.info( - `[${ - item.id - }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`, + `[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -381,7 +375,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, dummyImgSrcUrl); log.info( - `[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -415,7 +409,7 @@ const PhotoFrame = ({ true, ); log.info( - `[${item.id}] calling invalidateCurrItems for live photo complete, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for live photo complete, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -433,7 +427,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, srcURLs); log.info( - `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -476,9 +470,7 @@ const PhotoFrame = ({ try { updateURL(index)(item.id, item.msrc, true); log.info( - `[${ - item.id - }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`, + `[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -495,7 +487,7 @@ const PhotoFrame = ({ 
} try { log.info( - `[${item.id}] new file getConvertedVideo request- ${item.metadata.title}}`, + `[${item.id}] new file getConvertedVideo request ${item.metadata.title}}`, ); fetching[item.id] = true; @@ -504,7 +496,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, srcURL, true); log.info( - `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { diff --git a/web/apps/photos/src/components/PhotoList/dedupe.tsx b/web/apps/photos/src/components/PhotoList/dedupe.tsx index 9c86ba24f1..61b9958ef0 100644 --- a/web/apps/photos/src/components/PhotoList/dedupe.tsx +++ b/web/apps/photos/src/components/PhotoList/dedupe.tsx @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import { Box, styled } from "@mui/material"; import { @@ -20,6 +19,7 @@ import { } from "react-window"; import { Duplicate } from "services/deduplicationService"; import { EnteFile } from "types/file"; +import { formattedByteSize } from "utils/units"; export enum ITEM_TYPE { TIME = "TIME", @@ -304,10 +304,13 @@ export function DedupePhotoList({ switch (listItem.itemType) { case ITEM_TYPE.SIZE_AND_COUNT: return ( + /*TODO: Translate the full phrase instead of piecing + together parts like this See: + https://crowdin.com/editor/ente-photos-web/9/enus-de?view=comfortable&filter=basic&value=0#8104 + */ {listItem.fileCount} {t("FILES")},{" "} - {convertBytesToHumanReadable(listItem.fileSize || 0)}{" "} - {t("EACH")} + {formattedByteSize(listItem.fileSize || 0)} {t("EACH")} ); case ITEM_TYPE.FILE: { diff --git a/web/apps/photos/src/components/PhotoList/index.tsx b/web/apps/photos/src/components/PhotoList/index.tsx index 48454fa691..5ac6b263ed 100644 --- a/web/apps/photos/src/components/PhotoList/index.tsx +++ 
b/web/apps/photos/src/components/PhotoList/index.tsx @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import { formatDate, getDate, isSameDay } from "@ente/shared/time/format"; import { Box, Checkbox, Link, Typography, styled } from "@mui/material"; @@ -25,6 +24,7 @@ import { import { EnteFile } from "types/file"; import { handleSelectCreator } from "utils/photoFrame"; import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; +import { formattedByteSize } from "utils/units"; const A_DAY = 24 * 60 * 60 * 1000; const FOOTER_HEIGHT = 90; @@ -111,14 +111,13 @@ function getShrinkRatio(width: number, columns: number) { ); } -const ListContainer = styled(Box)<{ - columns: number; - shrinkRatio: number; - groups?: number[]; +const ListContainer = styled(Box, { + shouldForwardProp: (propName) => propName != "gridTemplateColumns", +})<{ + gridTemplateColumns: string; }>` display: grid; - grid-template-columns: ${({ columns, shrinkRatio, groups }) => - getTemplateColumns(columns, shrinkRatio, groups)}; + grid-template-columns: ${(props) => props.gridTemplateColumns}; grid-column-gap: ${GAP_BTW_TILES}px; width: 100%; color: #fff; @@ -235,9 +234,11 @@ const PhotoListRow = React.memo( return ( {renderListItem(timeStampList[index], isScrolling)} @@ -828,8 +829,7 @@ export function PhotoList({ return ( {listItem.fileCount} {t("FILES")},{" "} - {convertBytesToHumanReadable(listItem.fileSize || 0)}{" "} - {t("EACH")} + {formattedByteSize(listItem.fileSize || 0)} {t("EACH")} ); case ITEM_TYPE.FILE: { diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx index 871da2b05f..3a5dbb6bc2 100644 --- a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx +++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx @@ -3,7 +3,6 @@ import { 
FlexWrapper } from "@ente/shared/components/Container"; import Close from "@mui/icons-material/Close"; import Done from "@mui/icons-material/Done"; import { Box, IconButton, TextField } from "@mui/material"; -import { MAX_CAPTION_SIZE } from "constants/file"; import { Formik } from "formik"; import { t } from "i18next"; import { useState } from "react"; @@ -12,6 +11,8 @@ import { changeCaption, updateExistingFilePubMetadata } from "utils/file"; import * as Yup from "yup"; import { SmallLoadingSpinner } from "../styledComponents/SmallLoadingSpinner"; +export const MAX_CAPTION_SIZE = 5000; + interface formValues { caption: string; } diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx index 74ae87380f..e9e27d55e8 100644 --- a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx +++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx @@ -1,17 +1,14 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; import { FlexWrapper } from "@ente/shared/components/Container"; import PhotoOutlined from "@mui/icons-material/PhotoOutlined"; import VideocamOutlined from "@mui/icons-material/VideocamOutlined"; import Box from "@mui/material/Box"; -import { FILE_TYPE } from "constants/file"; import { useEffect, useState } from "react"; import { EnteFile } from "types/file"; -import { makeHumanReadableStorage } from "utils/billing"; -import { - changeFileName, - splitFilenameAndExtension, - updateExistingFilePubMetadata, -} from "utils/file"; +import { changeFileName, updateExistingFilePubMetadata } from "utils/file"; +import { formattedByteSize } from "utils/units"; import { FileNameEditDialog } from "./FileNameEditDialog"; import InfoItem from "./InfoItem"; @@ -36,7 +33,7 @@ const getCaption = (file: EnteFile, parsedExifData) => { captionParts.push(resolution); } if 
(fileSize) { - captionParts.push(makeHumanReadableStorage(fileSize)); + captionParts.push(formattedByteSize(fileSize)); } return ( @@ -65,9 +62,7 @@ export function RenderFileName({ const [extension, setExtension] = useState(); useEffect(() => { - const [filename, extension] = splitFilenameAndExtension( - file.metadata.title, - ); + const [filename, extension] = nameAndExtension(file.metadata.title); setFilename(filename); setExtension(extension); }, [file]); diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx index 34fdb8e34a..a6d37ccf49 100644 --- a/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx @@ -17,7 +17,7 @@ import { t } from "i18next"; import { AppContext } from "pages/_app"; import { GalleryContext } from "pages/gallery"; import { useContext, useEffect, useMemo, useState } from "react"; -import { getEXIFLocation } from "services/upload/exifService"; +import { getEXIFLocation } from "services/exif"; import { EnteFile } from "types/file"; import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; import { diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx index 997ad3d273..42edddbf11 100644 --- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx @@ -42,11 +42,10 @@ import { t } from "i18next"; import mime from "mime-types"; import { AppContext } from "pages/_app"; import { getLocalCollections } from "services/collectionService"; +import { detectFileTypeInfo } from "services/detect-type"; import downloadManager from "services/download"; -import { getFileType } from "services/typeDetectionService"; import uploadManager from "services/upload/uploadManager"; import { 
EnteFile } from "types/file"; -import { FileWithCollection } from "types/upload"; import { getEditorCloseConfirmationMessage } from "utils/ui"; import ColoursMenu from "./ColoursMenu"; import CropMenu, { cropRegionOfCanvas, getCropRegionArgs } from "./CropMenu"; @@ -486,7 +485,7 @@ const ImageEditorOverlay = (props: IProps) => { if (!canvasRef.current) return; const editedFile = await getEditedFile(); - const fileType = await getFileType(editedFile); + const fileType = await detectFileTypeInfo(editedFile); const tempImgURL = URL.createObjectURL( new Blob([editedFile], { type: fileType.mimeType }), ); @@ -507,15 +506,15 @@ const ImageEditorOverlay = (props: IProps) => { ); const editedFile = await getEditedFile(); - const file: FileWithCollection = { - file: editedFile, - collectionID: props.file.collectionID, + const file = { + uploadItem: editedFile, localID: 1, + collectionID: props.file.collectionID, }; uploadManager.prepareForNewUpload(); uploadManager.showUploadProgressDialog(); - uploadManager.queueFilesForUpload([file], [collection]); + uploadManager.uploadItems([file], [collection]); setFileURL(null); props.onClose(); props.closePhotoViewer(); diff --git a/web/apps/photos/src/components/PhotoViewer/index.tsx b/web/apps/photos/src/components/PhotoViewer/index.tsx index 29da75e534..c7383efb13 100644 --- a/web/apps/photos/src/components/PhotoViewer/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/index.tsx @@ -10,12 +10,13 @@ import { EnteFile } from "types/file"; import { copyFileToClipboard, downloadSingleFile, - getFileExtension, getFileFromURL, - isRawFile, isSupportedRawFormat, } from "utils/file"; +import { FILE_TYPE } from "@/media/file-type"; +import { isNonWebImageFileExtension } from "@/media/formats"; +import { lowercaseExtension } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import EnteSpinner from "@ente/shared/components/EnteSpinner"; import AlbumOutlined from "@mui/icons-material/AlbumOutlined"; 
@@ -34,7 +35,6 @@ import InfoIcon from "@mui/icons-material/InfoOutlined"; import ReplayIcon from "@mui/icons-material/Replay"; import ZoomInOutlinedIcon from "@mui/icons-material/ZoomInOutlined"; import { Box, Button, styled } from "@mui/material"; -import { FILE_TYPE } from "constants/file"; import { defaultLivePhotoDefaultOptions, photoSwipeV4Events, @@ -43,10 +43,10 @@ import { t } from "i18next"; import isElectron from "is-electron"; import { AppContext } from "pages/_app"; import { GalleryContext } from "pages/gallery"; +import { detectFileTypeInfo } from "services/detect-type"; import downloadManager, { LoadedLivePhotoSourceURL } from "services/download"; +import { getParsedExifData } from "services/exif"; import { trashFiles } from "services/fileService"; -import { getFileType } from "services/typeDetectionService"; -import { getParsedExifData } from "services/upload/exifService"; import { SetFilesDownloadProgressAttributesCreator } from "types/gallery"; import { isClipboardItemPresent } from "utils/common"; import { pauseVideo, playVideo } from "utils/photoFrame"; @@ -348,9 +348,10 @@ function PhotoViewer(props: Iprops) { } function updateShowEditButton(file: EnteFile) { - const extension = getFileExtension(file.metadata.title); + const extension = lowercaseExtension(file.metadata.title); const isSupported = - !isRawFile(extension) || isSupportedRawFormat(extension); + !isNonWebImageFileExtension(extension) || + isSupportedRawFormat(extension); setShowEditButton( file.metadata.fileType === FILE_TYPE.IMAGE && isSupported, ); @@ -594,7 +595,7 @@ function PhotoViewer(props: Iprops) { .image; fileObject = await getFileFromURL(url, file.metadata.title); } - const fileTypeInfo = await getFileType(fileObject); + const fileTypeInfo = await detectFileTypeInfo(fileObject); const exifData = await getParsedExifData( fileObject, fileTypeInfo, @@ -611,9 +612,8 @@ function PhotoViewer(props: Iprops) { } } catch (e) { setExif({ key: file.src, value: null }); - const 
fileExtension = getFileExtension(file.metadata.title); log.error( - `checkExifAvailable failed for extension ${fileExtension}`, + `checkExifAvailable failed for file ${file.metadata.title}`, e, ); } diff --git a/web/apps/photos/src/components/PlaceholderThumbnails.tsx b/web/apps/photos/src/components/PlaceholderThumbnails.tsx index caafbdce6f..662e422877 100644 --- a/web/apps/photos/src/components/PlaceholderThumbnails.tsx +++ b/web/apps/photos/src/components/PlaceholderThumbnails.tsx @@ -1,8 +1,8 @@ +import { FILE_TYPE } from "@/media/file-type"; import { Overlay } from "@ente/shared/components/Container"; import PhotoOutlined from "@mui/icons-material/PhotoOutlined"; import PlayCircleOutlineOutlined from "@mui/icons-material/PlayCircleOutlineOutlined"; import { styled } from "@mui/material"; -import { FILE_TYPE } from "constants/file"; interface Iprops { fileType: FILE_TYPE; diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx index d7cf151e66..3f737b3e0c 100644 --- a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx @@ -1,6 +1,6 @@ +import { FILE_TYPE } from "@/media/file-type"; import CloseIcon from "@mui/icons-material/Close"; import { IconButton } from "@mui/material"; -import { FILE_TYPE } from "constants/file"; import { t } from "i18next"; import memoize from "memoize-one"; import pDebounce from "p-debounce"; diff --git a/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx b/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx index 41d4a0971e..d33c7c9490 100644 --- a/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx @@ -23,7 +23,9 @@ export const SearchMobileBox = styled(FluidContainer)` } `; -export const SearchInputWrapper = 
styled(CenteredFlex)<{ isOpen: boolean }>` +export const SearchInputWrapper = styled(CenteredFlex, { + shouldForwardProp: (propName) => propName != "isOpen", +})<{ isOpen: boolean }>` background: ${({ theme }) => theme.colors.background.base}; max-width: 484px; margin: auto; diff --git a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx index 6972cc1613..6dc9b851e9 100644 --- a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx +++ b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx @@ -1,4 +1,3 @@ -import log from "@/next/log"; import ChevronRight from "@mui/icons-material/ChevronRight"; import ScienceIcon from "@mui/icons-material/Science"; import { Box, DialogProps, Stack, Typography } from "@mui/material"; @@ -37,13 +36,10 @@ export default function AdvancedSettings({ open, onClose, onRootClose }) { } }; - const toggleCFProxy = async () => { - try { - appContext.setIsCFProxyDisabled(!appContext.isCFProxyDisabled); - } catch (e) { - log.error("toggleFasterUpload failed", e); - } + const toggleCFProxy = () => { + appContext.setIsCFProxyDisabled(!appContext.isCFProxyDisabled); }; + const [indexingStatus, setIndexingStatus] = useState({ indexed: 0, pending: 0, diff --git a/web/apps/photos/src/components/Sidebar/DebugSection.tsx b/web/apps/photos/src/components/Sidebar/DebugSection.tsx index 28c65ca8e0..e336374030 100644 --- a/web/apps/photos/src/components/Sidebar/DebugSection.tsx +++ b/web/apps/photos/src/components/Sidebar/DebugSection.tsx @@ -9,10 +9,6 @@ import { useContext, useEffect, useState } from "react"; import { Trans } from "react-i18next"; import { isInternalUser } from "utils/user"; import { testUpload } from "../../../tests/upload.test"; -import { - testZipFileReading, - testZipWithRootFileReadingTest, -} from "../../../tests/zip-file-reading.test"; export default function DebugSection() { const appContext = useContext(AppContext); @@ -62,25 +58,11 @@ export 
default function DebugSection() { )} {isInternalUser() && ( - <> - - - - - - + )} ); diff --git a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx index a9474a37d9..bdc0d5a84f 100644 --- a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx +++ b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx @@ -19,6 +19,8 @@ export const localeName = (locale: SupportedLocale) => { return "English"; case "fr-FR": return "Français"; + case "de-DE": + return "Deutsch"; case "zh-CN": return "中文"; case "nl-NL": diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx index 4b0ce31b04..8975941ad5 100644 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx +++ b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx @@ -1,7 +1,7 @@ import { SpaceBetweenFlex } from "@ente/shared/components/Container"; import { Box, Typography } from "@mui/material"; import { t } from "i18next"; -import { makeHumanReadableStorage } from "utils/billing"; +import { formattedStorageByteSize } from "utils/units"; import { Progressbar } from "../../styledComponents"; @@ -19,7 +19,7 @@ export function IndividualUsageSection({ usage, storage, fileCount }: Iprops) { marginTop: 1.5, }} > - {`${makeHumanReadableStorage( + {`${formattedStorageByteSize( storage - usage, )} ${t("FREE")}`} diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx index 6143044f0d..78a3677972 100644 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx +++ 
b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx @@ -1,6 +1,6 @@ import { Box, styled, Typography } from "@mui/material"; import { t } from "i18next"; -import { convertBytesToGBs, makeHumanReadableStorage } from "utils/billing"; +import { bytesInGB, formattedStorageByteSize } from "utils/units"; const MobileSmallBox = styled(Box)` display: none; @@ -30,9 +30,9 @@ export default function StorageSection({ usage, storage }: Iprops) { fontWeight={"bold"} sx={{ fontSize: "24px", lineHeight: "30px" }} > - {`${makeHumanReadableStorage(usage, { roundUp: true })} ${t( + {`${formattedStorageByteSize(usage, { round: true })} ${t( "OF", - )} ${makeHumanReadableStorage(storage)} ${t("USED")}`} + )} ${formattedStorageByteSize(storage)} ${t("USED")}`} @@ -40,9 +40,7 @@ export default function StorageSection({ usage, storage }: Iprops) { fontWeight={"bold"} sx={{ fontSize: "24px", lineHeight: "30px" }} > - {`${convertBytesToGBs(usage)} / ${convertBytesToGBs( - storage, - )} ${t("GB")} ${t("USED")}`} + {`${bytesInGB(usage)} / ${bytesInGB(storage)} ${t("GB")} ${t("USED")}`} diff --git a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx index c9c734cd98..6b4a6f43d5 100644 --- a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx +++ b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx @@ -206,7 +206,12 @@ export default function UtilitySection({ closeSidebar }) { closeSidebar={closeSidebar} setLoading={startLoading} /> - + {isElectron() && ( + + )} void; +interface CollectionMappingChoiceModalProps { open: boolean; onClose: () => void; - uploadToSingleCollection: () => void; + didSelect: (mapping: CollectionMapping) => void; } -function UploadStrategyChoiceModal({ - uploadToMultipleCollection, - uploadToSingleCollection, - ...props -}: Props) { - const handleClose = dialogCloseHandler({ - onClose: props.onClose, - }); + +export const 
CollectionMappingChoiceModal: React.FC< + CollectionMappingChoiceModalProps +> = ({ open, onClose, didSelect }) => { + const handleClose = dialogCloseHandler({ onClose }); return ( - + {t("MULTI_FOLDER_UPLOAD")} @@ -39,8 +36,8 @@ function UploadStrategyChoiceModal({ size="medium" color="accent" onClick={() => { - props.onClose(); - uploadToSingleCollection(); + onClose(); + didSelect("root"); }} > {t("UPLOAD_STRATEGY_SINGLE_COLLECTION")} @@ -52,8 +49,8 @@ function UploadStrategyChoiceModal({ size="medium" color="accent" onClick={() => { - props.onClose(); - uploadToMultipleCollection(); + onClose(); + didSelect("parent"); }} > {t("UPLOAD_STRATEGY_COLLECTION_PER_FOLDER")} @@ -62,5 +59,4 @@ function UploadStrategyChoiceModal({ ); -} -export default UploadStrategyChoiceModal; +}; diff --git a/web/apps/photos/src/components/Upload/UploadProgress/index.tsx b/web/apps/photos/src/components/Upload/UploadProgress/index.tsx index 8f16ef2d98..1acffd561e 100644 --- a/web/apps/photos/src/components/Upload/UploadProgress/index.tsx +++ b/web/apps/photos/src/components/Upload/UploadProgress/index.tsx @@ -1,18 +1,16 @@ -import { useContext, useEffect, useState } from "react"; -import { UploadProgressDialog } from "./dialog"; -import { MinimizedUploadProgress } from "./minimized"; - -import { t } from "i18next"; - import { UPLOAD_STAGES } from "constants/upload"; import UploadProgressContext from "contexts/uploadProgress"; +import { t } from "i18next"; import { AppContext } from "pages/_app"; -import { +import { useContext, useEffect, useState } from "react"; +import type { InProgressUpload, SegregatedFinishedUploads, UploadCounter, UploadFileNames, -} from "types/upload/ui"; +} from "services/upload/uploadManager"; +import { UploadProgressDialog } from "./dialog"; +import { MinimizedUploadProgress } from "./minimized"; interface Props { open: boolean; diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx index 
bb3d4fd9d0..7174306556 100644 --- a/web/apps/photos/src/components/Upload/Uploader.tsx +++ b/web/apps/photos/src/components/Upload/Uploader.tsx @@ -1,15 +1,11 @@ +import { basename } from "@/next/file"; import log from "@/next/log"; -import type { Electron } from "@/next/types/ipc"; +import type { CollectionMapping, Electron, ZipItem } from "@/next/types/ipc"; import { CustomError } from "@ente/shared/error"; import { isPromise } from "@ente/shared/utils"; import DiscFullIcon from "@mui/icons-material/DiscFull"; import UserNameInputDialog from "components/UserNameInputDialog"; -import { - DEFAULT_IMPORT_SUGGESTION, - PICKED_UPLOAD_TYPE, - UPLOAD_STAGES, - UPLOAD_STRATEGY, -} from "constants/upload"; +import { UPLOAD_STAGES } from "constants/upload"; import { t } from "i18next"; import isElectron from "is-electron"; import { AppContext } from "pages/_app"; @@ -17,14 +13,22 @@ import { GalleryContext } from "pages/gallery"; import { useContext, useEffect, useRef, useState } from "react"; import billingService from "services/billingService"; import { getLatestCollections } from "services/collectionService"; -import ImportService from "services/importService"; +import { exportMetadataDirectoryName } from "services/export"; import { getPublicCollectionUID, getPublicCollectionUploaderName, savePublicCollectionUploaderName, } from "services/publicCollectionService"; +import type { FileAndPath, UploadItem } from "services/upload/types"; +import type { + InProgressUpload, + SegregatedFinishedUploads, + UploadCounter, + UploadFileNames, + UploadItemWithCollection, +} from "services/upload/uploadManager"; import uploadManager from "services/upload/uploadManager"; -import watchFolderService from "services/watch"; +import watcher from "services/watch"; import { NotificationAttributes } from "types/Notification"; import { Collection } from "types/collection"; import { @@ -35,34 +39,22 @@ import { SetLoading, UploadTypeSelectorIntent, } from "types/gallery"; -import { - 
ElectronFile, - FileWithCollection, - ImportSuggestion, -} from "types/upload"; -import { - InProgressUpload, - SegregatedFinishedUploads, - UploadCounter, - UploadFileNames, -} from "types/upload/ui"; import { getOrCreateAlbum } from "utils/collection"; import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; import { getDownloadAppMessage, getRootLevelFileWithFolderNotAllowMessage, } from "utils/ui"; -import { - filterOutSystemFiles, - getImportSuggestion, - groupFilesBasedOnParentFolder, -} from "utils/upload"; import { SetCollectionNamerAttributes } from "../Collections/CollectionNamer"; +import { CollectionMappingChoiceModal } from "./CollectionMappingChoiceModal"; import UploadProgress from "./UploadProgress"; -import UploadStrategyChoiceModal from "./UploadStrategyChoiceModal"; import UploadTypeSelector from "./UploadTypeSelector"; -const FIRST_ALBUM_NAME = "My First Album"; +enum PICKED_UPLOAD_TYPE { + FILES = "files", + FOLDERS = "folders", + ZIPS = "zips", +} interface Props { syncWithRemote: (force?: boolean, silent?: boolean) => Promise; @@ -78,17 +70,29 @@ interface Props { isFirstUpload?: boolean; uploadTypeSelectorView: boolean; showSessionExpiredMessage: () => void; - showUploadFilesDialog: () => void; - showUploadDirsDialog: () => void; - webFolderSelectorFiles: File[]; - webFileSelectorFiles: File[]; dragAndDropFiles: File[]; + openFileSelector: () => void; + fileSelectorFiles: File[]; + openFolderSelector: () => void; + folderSelectorFiles: File[]; + openZipFileSelector?: () => void; + fileSelectorZipFiles?: File[]; uploadCollection?: Collection; uploadTypeSelectorIntent: UploadTypeSelectorIntent; activeCollection?: Collection; } -export default function Uploader(props: Props) { +export default function Uploader({ + isFirstUpload, + dragAndDropFiles, + openFileSelector, + fileSelectorFiles, + openFolderSelector, + folderSelectorFiles, + openZipFileSelector, + fileSelectorZipFiles, + ...props +}: Props) { const appContext = 
useContext(AppContext); const galleryContext = useContext(GalleryContext); const publicCollectionGalleryContext = useContext( @@ -118,15 +122,76 @@ export default function Uploader(props: Props) { const [importSuggestion, setImportSuggestion] = useState( DEFAULT_IMPORT_SUGGESTION, ); - const [electronFiles, setElectronFiles] = useState(null); - const [webFiles, setWebFiles] = useState([]); - const toUploadFiles = useRef(null); + /** + * {@link File}s that the user drag-dropped or selected for uploads (web). + * + * This is the only type of selection that is possible when we're running in + * the browser. + */ + const [webFiles, setWebFiles] = useState([]); + /** + * {@link File}s that the user drag-dropped or selected for uploads, + * augmented with their paths (desktop). + * + * These siblings of {@link webFiles} come into play when we are running in + * the context of our desktop app. + */ + const [desktopFiles, setDesktopFiles] = useState([]); + /** + * Paths of file to upload that we've received over the IPC bridge from the + * code running in the Node.js layer of our desktop app. + * + * Unlike {@link filesWithPaths} which are still user initiated, + * {@link desktopFilePaths} can be set via programmatic action. For example, + * if the user has setup a folder watch, and a new file is added on their + * local file system in one of the watched folders, then the relevant path + * of the new file would get added to {@link desktopFilePaths}. + */ + const [desktopFilePaths, setDesktopFilePaths] = useState([]); + /** + * (zip file path, entry within zip file) tuples for zip files that the user + * is trying to upload. + * + * These are only set when we are running in the context of our desktop app. + * They may be set either on a user action (when the user selects or + * drag-drops zip files) or programmatically (when the app is trying to + * resume pending uploads from a previous session). 
+ */ + const [desktopZipItems, setDesktopZipItems] = useState([]); + + /** + * Consolidated and cleaned list obtained from {@link webFiles}, + * {@link desktopFiles}, {@link desktopFilePaths} and + * {@link desktopZipItems}. + * + * Augment each {@link UploadItem} with its "path" (relative path or name in + * the case of {@link webFiles}, absolute path in the case of + * {@link desktopFiles}, {@link desktopFilePaths}, and the path within the + * zip file for {@link desktopZipItems}). + * + * See the documentation of {@link UploadItem} for more details. + */ + const uploadItemsAndPaths = useRef<[UploadItem, string][]>([]); + + /** + * If true, then the next upload we'll be processing was initiated by our + * desktop app. + */ const isPendingDesktopUpload = useRef(false); + + /** + * If set, this will be the name of the collection that our desktop app + * wishes for us to upload into. + */ const pendingDesktopUploadCollectionName = useRef(""); - // This is set when the user choses a type to upload from the upload type selector dialog + + /** + * This is set to thue user's choice when the user chooses one of the + * predefined type to upload from the upload type selector dialog + */ const pickedUploadType = useRef(null); - const zipPaths = useRef(null); + const currentUploadPromise = useRef>(null); const uploadRunning = useRef(false); const uploaderNameRef = useRef(null); @@ -137,18 +202,13 @@ export default function Uploader(props: Props) { const closeUploadProgress = () => setUploadProgressView(false); const showUserNameInputDialog = () => setUserNameInputDialogView(true); - const setCollectionName = (collectionName: string) => { - isPendingDesktopUpload.current = true; - pendingDesktopUploadCollectionName.current = collectionName; - }; - const handleChoiceModalClose = () => { setChoiceModalView(false); uploadRunning.current = false; }; + const handleCollectionSelectorCancel = () => { uploadRunning.current = false; - appContext.resetSharedFiles(); }; const 
handleUserNameInputDialogClose = () => { @@ -172,23 +232,42 @@ export default function Uploader(props: Props) { publicCollectionGalleryContext, appContext.isCFProxyDisabled, ); + if (uploadManager.isUploadRunning()) { setUploadProgressView(true); } - if (isElectron()) { - ImportService.getPendingUploads().then( - ({ files: electronFiles, collectionName, type }) => { - log.info(`found pending desktop upload, resuming uploads`); - resumeDesktopUpload(type, electronFiles, collectionName); - }, - ); - watchFolderService.init( - setElectronFiles, - setCollectionName, - props.syncWithRemote, - appContext.setIsFolderSyncRunning, - ); + if (electron) { + const upload = (collectionName: string, filePaths: string[]) => { + isPendingDesktopUpload.current = true; + pendingDesktopUploadCollectionName.current = collectionName; + setDesktopFilePaths(filePaths); + }; + + const requestSyncWithRemote = () => { + props.syncWithRemote().catch((e) => { + log.error( + "Ignoring error when syncing trash changes with remote", + e, + ); + }); + }; + + watcher.init(upload, requestSyncWithRemote); + + electron.pendingUploads().then((pending) => { + if (!pending) return; + + const { collectionName, filePaths, zipItems } = pending; + + log.info( + `Resuming pending of upload of ${filePaths.length + zipItems.length} items${collectionName ? 
" to collection " + collectionName : ""}`, + ); + isPendingDesktopUpload.current = true; + pendingDesktopUploadCollectionName.current = collectionName; + setDesktopFilePaths(filePaths); + setDesktopZipItems(zipItems); + }); } }, [ publicCollectionGalleryContext.accessedThroughSharedURL, @@ -197,166 +276,191 @@ export default function Uploader(props: Props) { appContext.isCFProxyDisabled, ]); - // this handles the change of selectorFiles changes on web when user selects - // files for upload through the opened file/folder selector or dragAndDrop them - // the webFiles state is update which triggers the upload of those files + // Handle selected files when user selects files for upload through the open + // file / open folder selection dialog, or drag-and-drops them. useEffect(() => { if (appContext.watchFolderView) { // if watch folder dialog is open don't catch the dropped file // as they are folder being dropped for watching return; } - if ( - pickedUploadType.current === PICKED_UPLOAD_TYPE.FOLDERS && - props.webFolderSelectorFiles?.length > 0 - ) { - log.info(`received folder upload request`); - setWebFiles(props.webFolderSelectorFiles); - } else if ( - pickedUploadType.current === PICKED_UPLOAD_TYPE.FILES && - props.webFileSelectorFiles?.length > 0 - ) { - log.info(`received file upload request`); - setWebFiles(props.webFileSelectorFiles); - } else if (props.dragAndDropFiles?.length > 0) { - isDragAndDrop.current = true; - if (electron) { - const main = async () => { - try { - log.info(`uploading dropped files from desktop app`); - // check and parse dropped files which are zip files - let electronFiles = [] as ElectronFile[]; - for (const file of props.dragAndDropFiles) { - if (file.name.endsWith(".zip")) { - const zipFiles = - await electron.getElectronFilesFromGoogleZip( - (file as any).path, - ); - log.info( - `zip file - ${file.name} contains ${zipFiles.length} files`, - ); - electronFiles = [...electronFiles, ...zipFiles]; - } else { - // type cast to 
ElectronFile as the file is dropped from desktop app - // type file and ElectronFile should be interchangeable, but currently they have some differences. - // Typescript is giving error - // Conversion of type 'File' to type 'ElectronFile' may be a mistake because neither type sufficiently - // overlaps with the other. If this was intentional, convert the expression to 'unknown' first. - // Type 'File' is missing the following properties from type 'ElectronFile': path, blob - // for now patching by type casting first to unknown and then to ElectronFile - // TODO: fix types and remove type cast - electronFiles.push( - file as unknown as ElectronFile, - ); - } - } - log.info( - `uploading dropped files from desktop app - ${electronFiles.length} files found`, - ); - setElectronFiles(electronFiles); - } catch (e) { - log.error("failed to upload desktop dropped files", e); - setWebFiles(props.dragAndDropFiles); - } - }; - main(); - } else { - log.info(`uploading dropped files from web app`); - setWebFiles(props.dragAndDropFiles); - } + + let files: File[]; + + switch (pickedUploadType.current) { + case PICKED_UPLOAD_TYPE.FILES: + files = fileSelectorFiles; + break; + + case PICKED_UPLOAD_TYPE.FOLDERS: + files = folderSelectorFiles; + break; + + case PICKED_UPLOAD_TYPE.ZIPS: + files = fileSelectorZipFiles; + break; + + default: + files = dragAndDropFiles; + break; + } + + if (electron) { + desktopFilesAndZipItems(electron, files).then( + ({ fileAndPaths, zipItems }) => { + setDesktopFiles(fileAndPaths); + setDesktopZipItems(zipItems); + }, + ); + } else { + setWebFiles(files); } }, [ - props.dragAndDropFiles, - props.webFileSelectorFiles, - props.webFolderSelectorFiles, + dragAndDropFiles, + fileSelectorFiles, + folderSelectorFiles, + fileSelectorZipFiles, ]); + // Trigger an upload when any of the dependencies change. 
useEffect(() => { - if ( - electronFiles?.length > 0 || - webFiles?.length > 0 || - appContext.sharedFiles?.length > 0 - ) { - log.info( - `upload request type:${ - electronFiles?.length > 0 - ? "electronFiles" - : webFiles?.length > 0 - ? "webFiles" - : "sharedFiles" - } count ${ - electronFiles?.length ?? - webFiles?.length ?? - appContext?.sharedFiles.length - }`, - ); - if (uploadManager.isUploadRunning()) { - if (watchFolderService.isUploadRunning()) { - log.info( - "watchFolder upload was running, pausing it to run user upload", - ); - // pause watch folder service on user upload - watchFolderService.pauseRunningSync(); - } else { - log.info( - "an upload is already running, rejecting new upload request", - ); - // no-op - // a user upload is already in progress - return; - } - } - uploadRunning.current = true; - props.closeUploadTypeSelector(); - props.setLoading(true); - if (webFiles?.length > 0) { - // File selection by drag and drop or selection of file. - toUploadFiles.current = webFiles; - setWebFiles([]); - } else if (appContext.sharedFiles?.length > 0) { - toUploadFiles.current = appContext.sharedFiles; - appContext.resetSharedFiles(); - } else if (electronFiles?.length > 0) { - // File selection from desktop app - toUploadFiles.current = electronFiles; - setElectronFiles([]); - } + // Re the paths: + // + // - These are not necessarily the full paths. In particular, when + // running on the browser they'll be the relative paths (at best) or + // just the file-name otherwise. + // + // - All the paths use POSIX separators. See inline comments. + const allItemAndPaths = [ + // See: [Note: webkitRelativePath]. In particular, they use POSIX + // separators. + webFiles.map((f) => [f, f.webkitRelativePath ?? f.name]), + // The paths we get from the desktop app all eventually come either + // from electron.selectDirectory or electron.pathForFile, both of + // which return POSIX paths. 
+ desktopFiles.map((fp) => [fp, fp.path]), + desktopFilePaths.map((p) => [p, p]), + // The first path, that of the zip file itself, is POSIX like the + // other paths we get over the IPC boundary. And the second path, + // ze[1], the entry name, uses POSIX separators because that is what + // the ZIP format uses. + desktopZipItems.map((ze) => [ze, ze[1]]), + ].flat() as [UploadItem, string][]; - toUploadFiles.current = filterOutSystemFiles(toUploadFiles.current); - if (toUploadFiles.current.length === 0) { - props.setLoading(false); + if (allItemAndPaths.length == 0) return; + + if (uploadManager.isUploadRunning()) { + if (watcher.isUploadRunning()) { + log.info("Pausing watch folder sync to prioritize user upload"); + watcher.pauseRunningSync(); + } else { + log.info( + "Ignoring new upload request when upload is already running", + ); + return; + } + } + + uploadRunning.current = true; + props.closeUploadTypeSelector(); + props.setLoading(true); + + setWebFiles([]); + setDesktopFiles([]); + setDesktopFilePaths([]); + setDesktopZipItems([]); + + // Remove hidden files (files whose names begins with a "."). 
+ const prunedItemAndPaths = allItemAndPaths.filter( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + ([_, p]) => !basename(p).startsWith("."), + ); + + uploadItemsAndPaths.current = prunedItemAndPaths; + if (uploadItemsAndPaths.current.length === 0) { + props.setLoading(false); + return; + } + + const importSuggestion = getImportSuggestion( + pickedUploadType.current, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + prunedItemAndPaths.map(([_, p]) => p), + ); + setImportSuggestion(importSuggestion); + + log.debug(() => "Uploader invoked:"); + log.debug(() => uploadItemsAndPaths.current); + log.debug(() => importSuggestion); + + const _pickedUploadType = pickedUploadType.current; + pickedUploadType.current = null; + props.setLoading(false); + + (async () => { + if (publicCollectionGalleryContext.accessedThroughSharedURL) { + const uploaderName = await getPublicCollectionUploaderName( + getPublicCollectionUID( + publicCollectionGalleryContext.token, + ), + ); + uploaderNameRef.current = uploaderName; + showUserNameInputDialog(); return; } - const importSuggestion = getImportSuggestion( - pickedUploadType.current, - toUploadFiles.current, - ); - setImportSuggestion(importSuggestion); + if (isPendingDesktopUpload.current) { + isPendingDesktopUpload.current = false; + if (pendingDesktopUploadCollectionName.current) { + uploadFilesToNewCollections( + "root", + pendingDesktopUploadCollectionName.current, + ); + pendingDesktopUploadCollectionName.current = null; + } else { + uploadFilesToNewCollections("parent"); + } + return; + } - handleCollectionCreationAndUpload( - importSuggestion, - props.isFirstUpload, - pickedUploadType.current, - publicCollectionGalleryContext.accessedThroughSharedURL, - ); - pickedUploadType.current = null; - props.setLoading(false); - } - }, [webFiles, appContext.sharedFiles, electronFiles]); + if (electron && _pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) { + uploadFilesToNewCollections("parent"); + return; + 
} - const resumeDesktopUpload = async ( - type: PICKED_UPLOAD_TYPE, - electronFiles: ElectronFile[], - collectionName: string, - ) => { - if (electronFiles && electronFiles?.length > 0) { - isPendingDesktopUpload.current = true; - pendingDesktopUploadCollectionName.current = collectionName; - pickedUploadType.current = type; - setElectronFiles(electronFiles); - } - }; + if (isFirstUpload && !importSuggestion.rootFolderName) { + importSuggestion.rootFolderName = t( + "autogenerated_first_album_name", + ); + } + + if (isDragAndDrop.current) { + isDragAndDrop.current = false; + if ( + props.activeCollection && + props.activeCollection.owner.id === galleryContext.user?.id + ) { + uploadFilesToExistingCollection(props.activeCollection); + return; + } + } + + let showNextModal = () => {}; + if (importSuggestion.hasNestedFolders) { + showNextModal = () => setChoiceModalView(true); + } else { + showNextModal = () => + showCollectionCreateModal(importSuggestion.rootFolderName); + } + + props.setCollectionSelectorAttributes({ + callback: uploadFilesToExistingCollection, + onCancel: handleCollectionSelectorCancel, + showNextModal, + intent: CollectionSelectorIntent.upload, + }); + })(); + }, [webFiles, desktopFiles, desktopFilePaths, desktopZipItems]); const preCollectionCreationAction = async () => { props.closeCollectionSelector?.(); @@ -369,103 +473,78 @@ export default function Uploader(props: Props) { collection: Collection, uploaderName?: string, ) => { - try { - log.info( - `upload file to an existing collection name:${collection.name}, collectionID:${collection.id}`, - ); - await preCollectionCreationAction(); - const filesWithCollectionToUpload: FileWithCollection[] = - toUploadFiles.current.map((file, index) => ({ - file, - localID: index, - collectionID: collection.id, - })); - await waitInQueueAndUploadFiles( - filesWithCollectionToUpload, - [collection], - uploaderName, - ); - } catch (e) { - log.error("Failed to upload files to existing collections", e); - } + 
await preCollectionCreationAction(); + const uploadItemsWithCollection = uploadItemsAndPaths.current.map( + ([uploadItem], index) => ({ + uploadItem, + localID: index, + collectionID: collection.id, + }), + ); + await waitInQueueAndUploadFiles( + uploadItemsWithCollection, + [collection], + uploaderName, + ); + uploadItemsAndPaths.current = null; }; const uploadFilesToNewCollections = async ( - strategy: UPLOAD_STRATEGY, + mapping: CollectionMapping, collectionName?: string, ) => { - try { - log.info( - `upload file to an new collections strategy:${strategy} ,collectionName:${collectionName}`, + await preCollectionCreationAction(); + let uploadItemsWithCollection: UploadItemWithCollection[] = []; + const collections: Collection[] = []; + let collectionNameToUploadItems = new Map(); + if (mapping == "root") { + collectionNameToUploadItems.set( + collectionName, + uploadItemsAndPaths.current.map(([i]) => i), ); - await preCollectionCreationAction(); - let filesWithCollectionToUpload: FileWithCollection[] = []; - const collections: Collection[] = []; - let collectionNameToFilesMap = new Map< - string, - (File | ElectronFile)[] - >(); - if (strategy === UPLOAD_STRATEGY.SINGLE_COLLECTION) { - collectionNameToFilesMap.set( - collectionName, - toUploadFiles.current, - ); - } else { - collectionNameToFilesMap = groupFilesBasedOnParentFolder( - toUploadFiles.current, - ); - } - log.info( - `upload collections - [${[...collectionNameToFilesMap.keys()]}]`, + } else { + collectionNameToUploadItems = groupFilesBasedOnParentFolder( + uploadItemsAndPaths.current, ); - try { - const existingCollection = await getLatestCollections(); - let index = 0; - for (const [ - collectionName, - files, - ] of collectionNameToFilesMap) { - const collection = await getOrCreateAlbum( - collectionName, - existingCollection, - ); - collections.push(collection); - props.setCollections([ - ...existingCollection, - ...collections, - ]); - filesWithCollectionToUpload = [ - 
...filesWithCollectionToUpload, - ...files.map((file) => ({ - localID: index++, - collectionID: collection.id, - file, - })), - ]; - } - } catch (e) { - closeUploadProgress(); - log.error("Failed to create album", e); - appContext.setDialogMessage({ - title: t("ERROR"), - - close: { variant: "critical" }, - content: t("CREATE_ALBUM_FAILED"), - }); - throw e; - } - await waitInQueueAndUploadFiles( - filesWithCollectionToUpload, - collections, - ); - toUploadFiles.current = null; - } catch (e) { - log.error("Failed to upload files to new collections", e); } + try { + const existingCollections = await getLatestCollections(); + let index = 0; + for (const [ + collectionName, + uploadItems, + ] of collectionNameToUploadItems) { + const collection = await getOrCreateAlbum( + collectionName, + existingCollections, + ); + collections.push(collection); + props.setCollections([...existingCollections, ...collections]); + uploadItemsWithCollection = [ + ...uploadItemsWithCollection, + ...uploadItems.map((uploadItem) => ({ + localID: index++, + collectionID: collection.id, + uploadItem, + })), + ]; + } + } catch (e) { + closeUploadProgress(); + log.error("Failed to create album", e); + appContext.setDialogMessage({ + title: t("ERROR"), + close: { variant: "critical" }, + content: t("CREATE_ALBUM_FAILED"), + }); + throw e; + } + await waitInQueueAndUploadFiles(uploadItemsWithCollection, collections); + uploadItemsAndPaths.current = null; }; const waitInQueueAndUploadFiles = async ( - filesWithCollectionToUploadIn: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) => { @@ -474,7 +553,7 @@ export default function Uploader(props: Props) { currentPromise, async () => await uploadFiles( - filesWithCollectionToUploadIn, + uploadItemsWithCollection, collections, uploaderName, ), @@ -495,55 +574,45 @@ export default function Uploader(props: Props) { } const uploadFiles = async ( - 
filesWithCollectionToUploadIn: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) => { try { - log.info("uploadFiles called"); preUploadAction(); if ( electron && !isPendingDesktopUpload.current && - !watchFolderService.isUploadRunning() + !watcher.isUploadRunning() ) { - await ImportService.setToUploadCollection(collections); - if (zipPaths.current) { - await electron.setToUploadFiles( - PICKED_UPLOAD_TYPE.ZIPS, - zipPaths.current, - ); - zipPaths.current = null; - } - await electron.setToUploadFiles( - PICKED_UPLOAD_TYPE.FILES, - filesWithCollectionToUploadIn.map( - ({ file }) => (file as ElectronFile).path, - ), - ); - } - const shouldCloseUploadProgress = - await uploadManager.queueFilesForUpload( - filesWithCollectionToUploadIn, + setPendingUploads( + electron, collections, - uploaderName, + uploadItemsWithCollection + .map(({ uploadItem }) => uploadItem) + .filter((x) => x), ); - if (shouldCloseUploadProgress) { - closeUploadProgress(); } + const wereFilesProcessed = await uploadManager.uploadItems( + uploadItemsWithCollection, + collections, + uploaderName, + ); + if (!wereFilesProcessed) closeUploadProgress(); if (isElectron()) { - if (watchFolderService.isUploadRunning()) { - await watchFolderService.allFileUploadsDone( - filesWithCollectionToUploadIn, + if (watcher.isUploadRunning()) { + await watcher.allFileUploadsDone( + uploadItemsWithCollection, collections, ); - } else if (watchFolderService.isSyncPaused()) { - // resume the service after user upload is done - watchFolderService.resumePausedSync(); + } else if (watcher.isSyncPaused()) { + // Resume folder watch after the user upload that + // interrupted it is done. 
+ watcher.resumePausedSync(); } } } catch (e) { - log.error("failed to upload files", e); + log.error("Failed to upload files", e); showUserFacingError(e.message); closeUploadProgress(); } finally { @@ -553,18 +622,14 @@ export default function Uploader(props: Props) { const retryFailed = async () => { try { - log.info("user retrying failed upload"); - const filesWithCollections = - uploadManager.getFailedFilesWithCollections(); + log.info("Retrying failed uploads"); + const { items, collections } = + uploadManager.getFailedItemsWithCollections(); const uploaderName = uploadManager.getUploaderName(); await preUploadAction(); - await uploadManager.queueFilesForUpload( - filesWithCollections.files, - filesWithCollections.collections, - uploaderName, - ); + await uploadManager.uploadItems(items, collections, uploaderName); } catch (e) { - log.error("retry failed files failed", e); + log.error("Retrying failed uploads failed", e); showUserFacingError(e.message); closeUploadProgress(); } finally { @@ -605,10 +670,7 @@ export default function Uploader(props: Props) { } const uploadToSingleNewCollection = (collectionName: string) => { - uploadFilesToNewCollections( - UPLOAD_STRATEGY.SINGLE_COLLECTION, - collectionName, - ); + uploadFilesToNewCollections("root", collectionName); }; const showCollectionCreateModal = (suggestedName: string) => { @@ -620,136 +682,28 @@ export default function Uploader(props: Props) { }); }; - const handleCollectionCreationAndUpload = async ( - importSuggestion: ImportSuggestion, - isFirstUpload: boolean, - pickedUploadType: PICKED_UPLOAD_TYPE, - accessedThroughSharedURL?: boolean, - ) => { - try { - if (accessedThroughSharedURL) { - log.info( - `uploading files to pulbic collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`, - ); - const uploaderName = await getPublicCollectionUploaderName( - getPublicCollectionUID( - publicCollectionGalleryContext.token, - ), - ); - uploaderNameRef.current = uploaderName; - 
showUserNameInputDialog(); - return; - } - if (isPendingDesktopUpload.current) { - isPendingDesktopUpload.current = false; - if (pendingDesktopUploadCollectionName.current) { - log.info( - `upload pending files to collection - ${pendingDesktopUploadCollectionName.current}`, - ); - uploadFilesToNewCollections( - UPLOAD_STRATEGY.SINGLE_COLLECTION, - pendingDesktopUploadCollectionName.current, - ); - pendingDesktopUploadCollectionName.current = null; - } else { - log.info( - `pending upload - strategy - "multiple collections" `, - ); - uploadFilesToNewCollections( - UPLOAD_STRATEGY.COLLECTION_PER_FOLDER, - ); - } - return; - } - if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) { - log.info("uploading zip files"); - uploadFilesToNewCollections( - UPLOAD_STRATEGY.COLLECTION_PER_FOLDER, - ); - return; - } - if (isFirstUpload && !importSuggestion.rootFolderName) { - importSuggestion.rootFolderName = FIRST_ALBUM_NAME; - } - if (isDragAndDrop.current) { - isDragAndDrop.current = false; - if ( - props.activeCollection && - props.activeCollection.owner.id === galleryContext.user?.id - ) { - uploadFilesToExistingCollection(props.activeCollection); - return; - } - } - let showNextModal = () => {}; - if (importSuggestion.hasNestedFolders) { - log.info(`nested folders detected`); - showNextModal = () => setChoiceModalView(true); - } else { - showNextModal = () => - showCollectionCreateModal(importSuggestion.rootFolderName); - } - props.setCollectionSelectorAttributes({ - callback: uploadFilesToExistingCollection, - onCancel: handleCollectionSelectorCancel, - showNextModal, - intent: CollectionSelectorIntent.upload, - }); - } catch (e) { - log.error("handleCollectionCreationAndUpload failed", e); - } - }; - - const handleDesktopUpload = async ( - type: PICKED_UPLOAD_TYPE, - electron: Electron, - ) => { - let files: ElectronFile[]; - pickedUploadType.current = type; - if (type === PICKED_UPLOAD_TYPE.FILES) { - files = await electron.showUploadFilesDialog(); - } 
else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { - files = await electron.showUploadDirsDialog(); - } else { - const response = await electron.showUploadZipDialog(); - files = response.files; - zipPaths.current = response.zipPaths; - } - if (files?.length > 0) { - log.info( - ` desktop upload for type:${type} and fileCount: ${files?.length} requested`, - ); - setElectronFiles(files); - props.closeUploadTypeSelector(); - } - }; - - const handleWebUpload = async (type: PICKED_UPLOAD_TYPE) => { - pickedUploadType.current = type; - if (type === PICKED_UPLOAD_TYPE.FILES) { - props.showUploadFilesDialog(); - } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { - props.showUploadDirsDialog(); - } else { - appContext.setDialogMessage(getDownloadAppMessage()); - } - }; - const cancelUploads = () => { uploadManager.cancelRunningUpload(); }; - const handleUpload = (type) => () => { - if (electron) { - handleDesktopUpload(type, electron); + const handleUpload = (type: PICKED_UPLOAD_TYPE) => { + pickedUploadType.current = type; + if (type === PICKED_UPLOAD_TYPE.FILES) { + openFileSelector(); + } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { + openFolderSelector(); } else { - handleWebUpload(type); + if (openZipFileSelector && electron) { + openZipFileSelector(); + } else { + appContext.setDialogMessage(getDownloadAppMessage()); + } } }; - const handleFileUpload = handleUpload(PICKED_UPLOAD_TYPE.FILES); - const handleFolderUpload = handleUpload(PICKED_UPLOAD_TYPE.FOLDERS); - const handleZipUpload = handleUpload(PICKED_UPLOAD_TYPE.ZIPS); + const handleFileUpload = () => handleUpload(PICKED_UPLOAD_TYPE.FILES); + const handleFolderUpload = () => handleUpload(PICKED_UPLOAD_TYPE.FOLDERS); + const handleZipUpload = () => handleUpload(PICKED_UPLOAD_TYPE.ZIPS); const handlePublicUpload = async ( uploaderName: string, @@ -773,27 +727,42 @@ export default function Uploader(props: Props) { } }; - const handleUploadToSingleCollection = () => { - 
uploadToSingleNewCollection(importSuggestion.rootFolderName); - }; - - const handleUploadToMultipleCollections = () => { - if (importSuggestion.hasRootLevelFileWithFolder) { - appContext.setDialogMessage( - getRootLevelFileWithFolderNotAllowMessage(), - ); - return; + const didSelectCollectionMapping = (mapping: CollectionMapping) => { + switch (mapping) { + case "root": + uploadToSingleNewCollection( + // rootFolderName would be empty here if one edge case: + // - User drags and drops a mixture of files and folders + // - They select the "upload to multiple albums" option + // - The see the error, close the error + // - Then they select the "upload to single album" option + // + // In such a flow, we'll reach here with an empty + // rootFolderName. The proper fix for this would be + // rearrange the flow and ask them to name the album here, + // but we currently don't have support for chaining modals. + // So in the meanwhile, keep a fallback album name at hand. + importSuggestion.rootFolderName ?? 
+ t("autogenerated_default_album_name"), + ); + break; + case "parent": + if (importSuggestion.hasRootLevelFileWithFolder) { + appContext.setDialogMessage( + getRootLevelFileWithFolderNotAllowMessage(), + ); + } else { + uploadFilesToNewCollections("parent"); + } } - uploadFilesToNewCollections(UPLOAD_STRATEGY.COLLECTION_PER_FOLDER); }; return ( <> - @@ -836,3 +805,143 @@ async function waitAndRun( } await task(); } + +const desktopFilesAndZipItems = async (electron: Electron, files: File[]) => { + const fileAndPaths: FileAndPath[] = []; + let zipItems: ZipItem[] = []; + + for (const file of files) { + const path = electron.pathForFile(file); + if (file.name.endsWith(".zip")) { + zipItems = zipItems.concat(await electron.listZipItems(path)); + } else { + fileAndPaths.push({ file, path }); + } + } + + return { fileAndPaths, zipItems }; +}; + +// This is used to prompt the user the make upload strategy choice +interface ImportSuggestion { + rootFolderName: string; + hasNestedFolders: boolean; + hasRootLevelFileWithFolder: boolean; +} + +const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = { + rootFolderName: "", + hasNestedFolders: false, + hasRootLevelFileWithFolder: false, +}; + +function getImportSuggestion( + uploadType: PICKED_UPLOAD_TYPE, + paths: string[], +): ImportSuggestion { + if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) { + return DEFAULT_IMPORT_SUGGESTION; + } + + const getCharCount = (str: string) => (str.match(/\//g) ?? 
[]).length; + paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2)); + const firstPath = paths[0]; + const lastPath = paths[paths.length - 1]; + + const L = firstPath.length; + let i = 0; + const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/")); + const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/")); + + while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++; + let commonPathPrefix = firstPath.substring(0, i); + + if (commonPathPrefix) { + commonPathPrefix = commonPathPrefix.substring( + 0, + commonPathPrefix.lastIndexOf("/"), + ); + if (commonPathPrefix) { + commonPathPrefix = commonPathPrefix.substring( + commonPathPrefix.lastIndexOf("/") + 1, + ); + } + } + return { + rootFolderName: commonPathPrefix || null, + hasNestedFolders: firstFileFolder !== lastFileFolder, + hasRootLevelFileWithFolder: firstFileFolder === "", + }; +} + +// This function groups files that are that have the same parent folder into collections +// For Example, for user files have a directory structure like this +// a +// / | \ +// b j c +// /|\ / \ +// e f g h i +// +// The files will grouped into 3 collections. 
+// [a => [j], +// b => [e,f,g], +// c => [h, i]] +const groupFilesBasedOnParentFolder = ( + uploadItemsAndPaths: [UploadItem, string][], +) => { + const result = new Map(); + for (const [uploadItem, pathOrName] of uploadItemsAndPaths) { + let folderPath = pathOrName.substring(0, pathOrName.lastIndexOf("/")); + // If the parent folder of a file is "metadata" + // we consider it to be part of the parent folder + // For Eg,For FileList -> [a/x.png, a/metadata/x.png.json] + // they will both we grouped into the collection "a" + // This is cluster the metadata json files in the same collection as the file it is for + if (folderPath.endsWith(exportMetadataDirectoryName)) { + folderPath = folderPath.substring(0, folderPath.lastIndexOf("/")); + } + const folderName = folderPath.substring( + folderPath.lastIndexOf("/") + 1, + ); + if (!folderName) throw Error("Unexpected empty folder name"); + if (!result.has(folderName)) result.set(folderName, []); + result.get(folderName).push(uploadItem); + } + return result; +}; + +export const setPendingUploads = async ( + electron: Electron, + collections: Collection[], + uploadItems: UploadItem[], +) => { + let collectionName: string | undefined; + /* collection being one suggest one of two things + 1. Either the user has upload to a single existing collection + 2. 
Created a new single collection to upload to + may have had multiple folder, but chose to upload + to one album + hence saving the collection name when upload collection count is 1 + helps the info of user choosing this options + and on next upload we can directly start uploading to this collection + */ + if (collections.length == 1) { + collectionName = collections[0].name; + } + + const filePaths: string[] = []; + const zipItems: ZipItem[] = []; + for (const item of uploadItems) { + if (item instanceof File) { + throw new Error("Unexpected web file for a desktop pending upload"); + } else if (typeof item == "string") { + filePaths.push(item); + } else if (Array.isArray(item)) { + zipItems.push(item); + } else { + filePaths.push(item.path); + } + } + + await electron.setPendingUploads({ collectionName, filePaths, zipItems }); +}; diff --git a/web/apps/photos/src/components/UploadSelectorInputs.tsx b/web/apps/photos/src/components/UploadSelectorInputs.tsx index 1b110d532b..13e33fc6d3 100644 --- a/web/apps/photos/src/components/UploadSelectorInputs.tsx +++ b/web/apps/photos/src/components/UploadSelectorInputs.tsx @@ -2,12 +2,16 @@ export default function UploadSelectorInputs({ getDragAndDropInputProps, getFileSelectorInputProps, getFolderSelectorInputProps, + getZipFileSelectorInputProps, }) { return ( <> + {getZipFileSelectorInputProps && ( + + )} ); } diff --git a/web/apps/photos/src/components/WatchFolder.tsx b/web/apps/photos/src/components/WatchFolder.tsx index b5ff00b291..710a541683 100644 --- a/web/apps/photos/src/components/WatchFolder.tsx +++ b/web/apps/photos/src/components/WatchFolder.tsx @@ -1,3 +1,7 @@ +import { ensureElectron } from "@/next/electron"; +import { basename, dirname } from "@/next/file"; +import type { CollectionMapping, FolderWatch } from "@/next/types/ipc"; +import { ensure } from "@/utils/ensure"; import { FlexWrapper, HorizontalFlex, @@ -23,31 +27,38 @@ import { Typography, } from "@mui/material"; import { styled } from 
"@mui/material/styles"; -import UploadStrategyChoiceModal from "components/Upload/UploadStrategyChoiceModal"; -import { PICKED_UPLOAD_TYPE, UPLOAD_STRATEGY } from "constants/upload"; +import { CollectionMappingChoiceModal } from "components/Upload/CollectionMappingChoiceModal"; import { t } from "i18next"; import { AppContext } from "pages/_app"; import React, { useContext, useEffect, useState } from "react"; -import watchFolderService from "services/watch"; -import { WatchMapping } from "types/watchFolder"; -import { getImportSuggestion } from "utils/upload"; +import watcher from "services/watch"; interface WatchFolderProps { open: boolean; onClose: () => void; } +/** + * View the state of and manage folder watches. + * + * This is the screen that controls that "watch folder" feature in the app. + */ export const WatchFolder: React.FC = ({ open, onClose }) => { - const [mappings, setMappings] = useState([]); - const [inputFolderPath, setInputFolderPath] = useState(""); + // The folders we are watching + const [watches, setWatches] = useState(); + // Temporarily stash the folder path while we show a choice dialog to the + // user to select the collection mapping. + const [savedFolderPath, setSavedFolderPath] = useState< + string | undefined + >(); + // True when we're showing the choice dialog to ask the user to set the + // collection mapping. 
const [choiceModalOpen, setChoiceModalOpen] = useState(false); + const appContext = useContext(AppContext); - const electron = globalThis.electron; - useEffect(() => { - if (!electron) return; - watchFolderService.getWatchMappings().then((m) => setMappings(m)); + watcher.getWatches().then((ws) => setWatches(ws)); }, []); useEffect(() => { @@ -64,69 +75,41 @@ export const WatchFolder: React.FC = ({ open, onClose }) => { for (let i = 0; i < folders.length; i++) { const folder: any = folders[i]; const path = (folder.path as string).replace(/\\/g, "/"); - if (await watchFolderService.isFolder(path)) { - await addFolderForWatching(path); + if (await ensureElectron().fs.isDir(path)) { + await selectCollectionMappingAndAddWatch(path); } } }; - const addFolderForWatching = async (path: string) => { - if (!electron) return; - - setInputFolderPath(path); - const files = await electron.getDirFiles(path); - const analysisResult = getImportSuggestion( - PICKED_UPLOAD_TYPE.FOLDERS, - files, - ); - if (analysisResult.hasNestedFolders) { - setChoiceModalOpen(true); + const selectCollectionMappingAndAddWatch = async (path: string) => { + const filePaths = await ensureElectron().watch.findFiles(path); + if (areAllInSameDirectory(filePaths)) { + addWatch(path, "root"); } else { - handleAddWatchMapping(UPLOAD_STRATEGY.SINGLE_COLLECTION, path); + setSavedFolderPath(path); + setChoiceModalOpen(true); } }; - const handleAddFolderClick = async () => { - await handleFolderSelection(); - }; + const addWatch = (folderPath: string, mapping: CollectionMapping) => + watcher.addWatch(folderPath, mapping).then((ws) => setWatches(ws)); - const handleFolderSelection = async () => { - const folderPath = await watchFolderService.selectFolder(); - if (folderPath) { - await addFolderForWatching(folderPath); + const addNewWatch = async () => { + const dirPath = await ensureElectron().selectDirectory(); + if (dirPath) { + await selectCollectionMappingAndAddWatch(dirPath); } }; - const 
handleAddWatchMapping = async ( - uploadStrategy: UPLOAD_STRATEGY, - folderPath?: string, - ) => { - folderPath = folderPath || inputFolderPath; - await watchFolderService.addWatchMapping( - folderPath.substring(folderPath.lastIndexOf("/") + 1), - folderPath, - uploadStrategy, - ); - setInputFolderPath(""); - setMappings(await watchFolderService.getWatchMappings()); - }; - - const handleRemoveWatchMapping = (mapping: WatchMapping) => { - watchFolderService - .mappingsAfterRemovingFolder(mapping.folderPath) - .then((ms) => setMappings(ms)); - }; + const removeWatch = async (watch: FolderWatch) => + watcher.removeWatch(watch.folderPath).then((ws) => setWatches(ws)); const closeChoiceModal = () => setChoiceModalOpen(false); - const uploadToSingleCollection = () => { + const addWatchWithMapping = (mapping: CollectionMapping) => { closeChoiceModal(); - handleAddWatchMapping(UPLOAD_STRATEGY.SINGLE_COLLECTION); - }; - - const uploadToMultipleCollection = () => { - closeChoiceModal(); - handleAddWatchMapping(UPLOAD_STRATEGY.COLLECTION_PER_FOLDER); + setSavedFolderPath(undefined); + addWatch(ensure(savedFolderPath), mapping); }; return ( @@ -144,15 +127,8 @@ export const WatchFolder: React.FC = ({ open, onClose }) => { - - - ); }; -const MappingsContainer = styled(Box)(() => ({ +interface WatchList { + watches: FolderWatch[]; + removeWatch: (watch: FolderWatch) => void; +} + +const WatchList: React.FC = ({ watches, removeWatch }) => { + return watches.length === 0 ? 
( + + ) : ( + + {watches.map((watch) => { + return ( + + ); + })} + + ); +}; + +const WatchesContainer = styled(Box)(() => ({ height: "278px", overflow: "auto", "&::-webkit-scrollbar": { @@ -182,47 +180,9 @@ const MappingsContainer = styled(Box)(() => ({ }, })); -const NoMappingsContainer = styled(VerticallyCentered)({ - textAlign: "left", - alignItems: "flex-start", - marginBottom: "32px", -}); - -const EntryContainer = styled(Box)({ - marginLeft: "12px", - marginRight: "6px", - marginBottom: "12px", -}); - -interface MappingListProps { - mappings: WatchMapping[]; - handleRemoveWatchMapping: (value: WatchMapping) => void; -} - -const MappingList: React.FC = ({ - mappings, - handleRemoveWatchMapping, -}) => { - return mappings.length === 0 ? ( - - ) : ( - - {mappings.map((mapping) => { - return ( - - ); - })} - - ); -}; - -const NoMappingsContent: React.FC = () => { +const NoWatches: React.FC = () => { return ( - + {t("NO_FOLDERS_ADDED")} @@ -243,10 +203,16 @@ const NoMappingsContent: React.FC = () => { - + ); }; +const NoWatchesContainer = styled(VerticallyCentered)({ + textAlign: "left", + alignItems: "flex-start", + marginBottom: "32px", +}); + const CheckmarkIcon: React.FC = () => { return ( { sx={{ display: "inline", fontSize: "15px", - color: (theme) => theme.palette.secondary.main, }} /> ); }; -interface MappingEntryProps { - mapping: WatchMapping; - handleRemoveMapping: (mapping: WatchMapping) => void; +interface WatchEntryProps { + watch: FolderWatch; + removeWatch: (watch: FolderWatch) => void; } -const MappingEntry: React.FC = ({ - mapping, - handleRemoveMapping, -}) => { +const WatchEntry: React.FC = ({ watch, removeWatch }) => { const appContext = React.useContext(AppContext); - const stopWatching = () => { - handleRemoveMapping(mapping); - }; - const confirmStopWatching = () => { appContext.setDialogMessage({ title: t("STOP_WATCHING_FOLDER"), @@ -285,7 +243,7 @@ const MappingEntry: React.FC = ({ variant: "secondary", }, proceed: { - action: 
stopWatching, + action: () => removeWatch(watch), text: t("YES_STOP"), variant: "critical", }, @@ -295,8 +253,7 @@ const MappingEntry: React.FC = ({ return ( - {mapping && - mapping.uploadStrategy === UPLOAD_STRATEGY.SINGLE_COLLECTION ? ( + {watch.collectionMapping === "root" ? ( @@ -306,41 +263,45 @@ const MappingEntry: React.FC = ({ )} - + - {mapping.folderPath} + {watch.folderPath} - + ); }; +const EntryContainer = styled(Box)({ + marginLeft: "12px", + marginRight: "6px", + marginBottom: "12px", +}); + interface EntryHeadingProps { - mapping: WatchMapping; + watch: FolderWatch; } -const EntryHeading: React.FC = ({ mapping }) => { - const appContext = useContext(AppContext); +const EntryHeading: React.FC = ({ watch }) => { + const folderPath = watch.folderPath; + return ( - {mapping.rootFolderName} - {appContext.isFolderSyncRunning && - watchFolderService.isMappingSyncInProgress(mapping) && ( - - )} + {basename(folderPath)} + {watcher.isSyncingFolder(folderPath) && ( + + )} ); }; -interface MappingEntryOptionsProps { +interface EntryOptionsProps { confirmStopWatching: () => void; } -const MappingEntryOptions: React.FC = ({ - confirmStopWatching, -}) => { +const EntryOptions: React.FC = ({ confirmStopWatching }) => { return ( = ({ ); }; + +/** + * Return true if all the paths in the given list are items that belong to the + * same (arbitrary) directory. + * + * Empty list of paths is considered to be in the same directory. 
+ */ +const areAllInSameDirectory = (paths: string[]) => + new Set(paths.map(dirname)).size == 1; diff --git a/web/apps/photos/src/components/ml/MLSearchSettings.tsx b/web/apps/photos/src/components/ml/MLSearchSettings.tsx index 583b79529c..409df4fc6f 100644 --- a/web/apps/photos/src/components/ml/MLSearchSettings.tsx +++ b/web/apps/photos/src/components/ml/MLSearchSettings.tsx @@ -22,7 +22,7 @@ import { getFaceSearchEnabledStatus, updateFaceSearchEnabledStatus, } from "services/userService"; -import { openLink } from "utils/common"; +import { isInternalUserForML } from "utils/user"; export const MLSearchSettings = ({ open, onClose, onRootClose }) => { const { @@ -255,8 +255,8 @@ function EnableFaceSearch({ open, onClose, enableFaceSearch, onRootClose }) { } function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) { - const showDetails = () => - openLink("https://ente.io/blog/desktop-ml-beta", true); + // const showDetails = () => + // openLink("https://ente.io/blog/desktop-ml-beta", true); return ( @@ -269,25 +269,37 @@ function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) { {" "} - + {/* */} +

+ We're putting finishing touches, coming back soon! +

+

+ + Existing indexed faces will continue to show. + +

- - - + {/* + - + > + {t("ML_MORE_DETAILS")} + + */} +
+ )} ); diff --git a/web/apps/photos/src/components/ml/PeopleList.tsx b/web/apps/photos/src/components/ml/PeopleList.tsx index 8e6bc968f7..4691d4b650 100644 --- a/web/apps/photos/src/components/ml/PeopleList.tsx +++ b/web/apps/photos/src/components/ml/PeopleList.tsx @@ -1,11 +1,8 @@ -import { cachedOrNew } from "@/next/blob-cache"; -import { ensureLocalUser } from "@/next/local-user"; import log from "@/next/log"; import { Skeleton, styled } from "@mui/material"; import { Legend } from "components/PhotoViewer/styledComponents/Legend"; import { t } from "i18next"; import React, { useEffect, useState } from "react"; -import machineLearningService from "services/machineLearning/machineLearningService"; import { EnteFile } from "types/file"; import { Face, Person } from "types/machineLearning"; import { getPeopleList, getUnidentifiedFaces } from "utils/machineLearning"; @@ -61,7 +58,7 @@ export const PeopleList = React.memo((props: PeopleListProps) => { } > @@ -140,7 +137,7 @@ export function UnidentifiedFaces(props: { faces.map((face, index) => ( @@ -151,20 +148,24 @@ export function UnidentifiedFaces(props: { } interface FaceCropImageViewProps { - faceId: string; + faceID: string; cacheKey?: string; } const FaceCropImageView: React.FC = ({ - faceId, + faceID, cacheKey, }) => { const [objectURL, setObjectURL] = useState(); useEffect(() => { let didCancel = false; + const electron = globalThis.electron; - if (cacheKey) { + if (faceID && electron) { + electron + .legacyFaceCrop(faceID) + /* cachedOrNew("face-crops", cacheKey, async () => { const user = await ensureLocalUser(); return machineLearningService.regenerateFaceCrop( @@ -172,16 +173,20 @@ const FaceCropImageView: React.FC = ({ user.id, faceId, ); - }).then((blob) => { - if (!didCancel) setObjectURL(URL.createObjectURL(blob)); - }); + })*/ + .then((data) => { + if (data) { + const blob = new Blob([data]); + if (!didCancel) setObjectURL(URL.createObjectURL(blob)); + } + }); } else setObjectURL(undefined); return 
() => { didCancel = true; if (objectURL) URL.revokeObjectURL(objectURL); }; - }, [faceId, cacheKey]); + }, [faceID, cacheKey]); return objectURL ? ( diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx index 4ef76a491f..0ef4b15947 100644 --- a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx +++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx @@ -5,11 +5,8 @@ import Box from "@mui/material/Box"; import Typography from "@mui/material/Typography"; import { t } from "i18next"; import { Trans } from "react-i18next"; -import { - convertBytesToGBs, - hasAddOnBonus, - isSubscriptionCancelled, -} from "utils/billing"; +import { hasAddOnBonus, isSubscriptionCancelled } from "utils/billing"; +import { bytesInGB } from "utils/units"; import { ManageSubscription } from "../manageSubscription"; import { PeriodToggler } from "../periodToggler"; import Plans from "../plans"; @@ -35,8 +32,7 @@ export default function PaidSubscriptionPlanSelectorCard({ {t("SUBSCRIPTION")} - {convertBytesToGBs(subscription.storage, 2)}{" "} - {t("GB")} + {bytesInGB(subscription.storage, 2)} {t("GB")} @@ -50,7 +46,7 @@ export default function PaidSubscriptionPlanSelectorCard({ diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx index 8b0ce7bd5f..5f7e13deb8 100644 --- a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx +++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx @@ -2,7 +2,7 @@ import { SpaceBetweenFlex } from "@ente/shared/components/Container"; import { Box, styled, Typography } from "@mui/material"; import { Trans } from "react-i18next"; -import { makeHumanReadableStorage } from "utils/billing"; +import { formattedStorageByteSize } from "utils/units"; const 
RowContainer = styled(SpaceBetweenFlex)(({ theme }) => ({ // gap: theme.spacing(1.5), @@ -24,7 +24,7 @@ export function BFAddOnRow({ bonusData, closeModal }) { - {convertBytesToGBs(plan.storage)} + {bytesInGB(plan.storage)} diff --git a/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx b/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx index 8704258f89..8091618a1f 100644 --- a/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx +++ b/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx @@ -1,3 +1,4 @@ +import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; import { Overlay } from "@ente/shared/components/Container"; import { CustomError } from "@ente/shared/error"; @@ -11,7 +12,6 @@ import { StaticThumbnail, } from "components/PlaceholderThumbnails"; import { TRASH_SECTION } from "constants/collection"; -import { FILE_TYPE } from "constants/file"; import { GAP_BTW_TILES, IMAGE_CONTAINER_MAX_WIDTH } from "constants/gallery"; import { DeduplicateContext } from "pages/deduplicate"; import { GalleryContext } from "pages/gallery"; diff --git a/web/apps/photos/src/constants/ffmpeg.ts b/web/apps/photos/src/constants/ffmpeg.ts index 9ecc41eb5e..fb0d762e5b 100644 --- a/web/apps/photos/src/constants/ffmpeg.ts +++ b/web/apps/photos/src/constants/ffmpeg.ts @@ -1,3 +1,3 @@ -export const INPUT_PATH_PLACEHOLDER = "INPUT"; -export const FFMPEG_PLACEHOLDER = "FFMPEG"; -export const OUTPUT_PATH_PLACEHOLDER = "OUTPUT"; +export const ffmpegPathPlaceholder = "FFMPEG"; +export const inputPathPlaceholder = "INPUT"; +export const outputPathPlaceholder = "OUTPUT"; diff --git a/web/apps/photos/src/constants/file.ts b/web/apps/photos/src/constants/file.ts deleted file mode 100644 index 46065136c9..0000000000 --- a/web/apps/photos/src/constants/file.ts +++ /dev/null @@ -1,43 +0,0 @@ -export const MIN_EDITED_CREATION_TIME = new Date(1800, 0, 1); -export const MAX_EDITED_CREATION_TIME = new Date(); - -export const 
MAX_EDITED_FILE_NAME_LENGTH = 100; -export const MAX_CAPTION_SIZE = 5000; - -export const TYPE_HEIC = "heic"; -export const TYPE_HEIF = "heif"; -export const TYPE_JPEG = "jpeg"; -export const TYPE_JPG = "jpg"; - -export enum FILE_TYPE { - IMAGE, - VIDEO, - LIVE_PHOTO, - OTHERS, -} - -export const RAW_FORMATS = [ - "heic", - "rw2", - "tiff", - "arw", - "cr3", - "cr2", - "raf", - "nef", - "psd", - "dng", - "tif", -]; -export const SUPPORTED_RAW_FORMATS = [ - "heic", - "rw2", - "tiff", - "arw", - "cr3", - "cr2", - "nef", - "psd", - "dng", - "tif", -]; diff --git a/web/apps/photos/src/constants/upload.ts b/web/apps/photos/src/constants/upload.ts index 6d9f63d788..a0103cb6e6 100644 --- a/web/apps/photos/src/constants/upload.ts +++ b/web/apps/photos/src/constants/upload.ts @@ -1,66 +1,4 @@ -import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants"; -import { FILE_TYPE } from "constants/file"; -import { - FileTypeInfo, - ImportSuggestion, - Location, - ParsedExtractedMetadata, -} from "types/upload"; - -// list of format that were missed by type-detection for some files. 
-export const WHITELISTED_FILE_FORMATS: FileTypeInfo[] = [ - { fileType: FILE_TYPE.IMAGE, exactType: "jpeg", mimeType: "image/jpeg" }, - { fileType: FILE_TYPE.IMAGE, exactType: "jpg", mimeType: "image/jpeg" }, - { fileType: FILE_TYPE.VIDEO, exactType: "webm", mimeType: "video/webm" }, - { fileType: FILE_TYPE.VIDEO, exactType: "mod", mimeType: "video/mpeg" }, - { fileType: FILE_TYPE.VIDEO, exactType: "mp4", mimeType: "video/mp4" }, - { fileType: FILE_TYPE.IMAGE, exactType: "gif", mimeType: "image/gif" }, - { fileType: FILE_TYPE.VIDEO, exactType: "dv", mimeType: "video/x-dv" }, - { - fileType: FILE_TYPE.VIDEO, - exactType: "wmv", - mimeType: "video/x-ms-asf", - }, - { - fileType: FILE_TYPE.VIDEO, - exactType: "hevc", - mimeType: "video/hevc", - }, - { - fileType: FILE_TYPE.IMAGE, - exactType: "raf", - mimeType: "image/x-fuji-raf", - }, - { - fileType: FILE_TYPE.IMAGE, - exactType: "orf", - mimeType: "image/x-olympus-orf", - }, - - { - fileType: FILE_TYPE.IMAGE, - exactType: "crw", - mimeType: "image/x-canon-crw", - }, - { - fileType: FILE_TYPE.VIDEO, - exactType: "mov", - mimeType: "video/quicktime", - }, -]; - -export const KNOWN_NON_MEDIA_FORMATS = ["xmp", "html", "txt"]; - -export const EXIFLESS_FORMATS = ["gif", "bmp"]; - -// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part. 
-export const MULTIPART_PART_SIZE = 20 * 1024 * 1024; - -export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE; - -export const FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor( - MULTIPART_PART_SIZE / FILE_READER_CHUNK_SIZE, -); +import { Location } from "types/metadata"; export const RANDOM_PERCENTAGE_PROGRESS_FOR_PUT = () => 90 + 10 * Math.random(); @@ -75,11 +13,6 @@ export enum UPLOAD_STAGES { FINISH, } -export enum UPLOAD_STRATEGY { - SINGLE_COLLECTION, - COLLECTION_PER_FOLDER, -} - export enum UPLOAD_RESULT { FAILED, ALREADY_UPLOADED, @@ -91,57 +24,3 @@ export enum UPLOAD_RESULT { UPLOADED_WITH_STATIC_THUMBNAIL, ADDED_SYMLINK, } - -export enum PICKED_UPLOAD_TYPE { - FILES = "files", - FOLDERS = "folders", - ZIPS = "zips", -} - -export const MAX_FILE_SIZE_SUPPORTED = 4 * 1024 * 1024 * 1024; // 4 GB - -export const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20MB - -export const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = { - location: NULL_LOCATION, - creationTime: null, - width: null, - height: null, -}; - -export const A_SEC_IN_MICROSECONDS = 1e6; - -export const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = { - rootFolderName: "", - hasNestedFolders: false, - hasRootLevelFileWithFolder: false, -}; - -export const BLACK_THUMBNAIL_BASE64 = - "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" + - "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" + - "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" + - "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" + - "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" + - "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" + - "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" + - "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" + - 
"nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" + - "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" + - "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" + - "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + - "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" + - "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + - "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - 
"AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k="; diff --git a/web/apps/photos/src/contexts/uploadProgress.tsx b/web/apps/photos/src/contexts/uploadProgress.tsx index fe5f733b86..b25df7d65b 100644 --- a/web/apps/photos/src/contexts/uploadProgress.tsx +++ b/web/apps/photos/src/contexts/uploadProgress.tsx @@ -1,11 +1,11 @@ import { UPLOAD_STAGES } from "constants/upload"; import { createContext } from "react"; -import { +import type { InProgressUpload, SegregatedFinishedUploads, UploadCounter, UploadFileNames, -} from "types/upload/ui"; +} from "services/upload/uploadManager"; interface UploadProgressContextType { open: boolean; diff --git a/web/apps/photos/src/pages/_app.tsx b/web/apps/photos/src/pages/_app.tsx index c31256f138..0e80d0df9f 100644 --- a/web/apps/photos/src/pages/_app.tsx +++ b/web/apps/photos/src/pages/_app.tsx @@ -5,7 +5,7 @@ import { logStartupBanner, logUnhandledErrorsAndRejections, } from "@/next/log-web"; -import { AppUpdateInfo } from "@/next/types/ipc"; +import { AppUpdate } from "@/next/types/ipc"; import { APPS, APP_TITLES, @@ -80,8 +80,6 @@ const redirectMap = new Map([ type AppContextType = { showNavBar: (show: boolean) => void; - sharedFiles: File[]; - resetSharedFiles: () => void; mlSearchEnabled: boolean; mapEnabled: boolean; updateMlSearchEnabled: (enabled: boolean) => Promise; @@ -91,8 +89,6 @@ type AppContextType = { closeMessageDialog: () => void; setDialogMessage: SetDialogBoxAttributes; setNotificationAttributes: SetNotificationAttributes; - isFolderSyncRunning: boolean; - setIsFolderSyncRunning: (isRunning: boolean) => void; watchFolderView: boolean; setWatchFolderView: (isOpen: boolean) => void; watchFolderFiles: FileList; @@ -116,7 +112,6 @@ export default function App({ Component, pageProps }: AppProps) { typeof window !== "undefined" && !window.navigator.onLine, ); const [showNavbar, setShowNavBar] = useState(false); - const [sharedFiles, 
setSharedFiles] = useState(null); const [redirectName, setRedirectName] = useState(null); const [mlSearchEnabled, setMlSearchEnabled] = useState(false); const [mapEnabled, setMapEnabled] = useState(false); @@ -128,7 +123,6 @@ export default function App({ Component, pageProps }: AppProps) { useState(null); const [messageDialogView, setMessageDialogView] = useState(false); const [dialogBoxV2View, setDialogBoxV2View] = useState(false); - const [isFolderSyncRunning, setIsFolderSyncRunning] = useState(false); const [watchFolderView, setWatchFolderView] = useState(false); const [watchFolderFiles, setWatchFolderFiles] = useState(null); const isMobile = useMediaQuery("(max-width:428px)"); @@ -160,9 +154,9 @@ export default function App({ Component, pageProps }: AppProps) { const electron = globalThis.electron; if (!electron) return; - const showUpdateDialog = (updateInfo: AppUpdateInfo) => { - if (updateInfo.autoUpdatable) { - setDialogMessage(getUpdateReadyToInstallMessage(updateInfo)); + const showUpdateDialog = (update: AppUpdate) => { + if (update.autoUpdatable) { + setDialogMessage(getUpdateReadyToInstallMessage(update)); } else { setNotificationAttributes({ endIcon: , @@ -170,7 +164,7 @@ export default function App({ Component, pageProps }: AppProps) { message: t("UPDATE_AVAILABLE"), onClick: () => setDialogMessage( - getUpdateAvailableForDownloadMessage(updateInfo), + getUpdateAvailableForDownloadMessage(update), ), }); } @@ -230,7 +224,6 @@ export default function App({ Component, pageProps }: AppProps) { const setUserOnline = () => setOffline(false); const setUserOffline = () => setOffline(true); - const resetSharedFiles = () => setSharedFiles(null); useEffect(() => { const redirectTo = async (redirect) => { @@ -355,22 +348,8 @@ export default function App({ Component, pageProps }: AppProps) { {showNavbar && } - {offline && t("OFFLINE_MSG")} + {isI18nReady && offline && t("OFFLINE_MSG")} - {sharedFiles && - (router.pathname === "/gallery" ? 
( - - {t("files_to_be_uploaded", { - count: sharedFiles.length, - })} - - ) : ( - - {t("login_to_upload_files", { - count: sharedFiles.length, - })} - - ))} syncWithRemote(false, true)); } }; @@ -438,18 +445,8 @@ export default function Gallery() { } let collectionURL = ""; if (activeCollectionID !== ALL_SECTION) { - collectionURL += "?collection="; - if (activeCollectionID === ARCHIVE_SECTION) { - collectionURL += t("ARCHIVE_SECTION_NAME"); - } else if (activeCollectionID === TRASH_SECTION) { - collectionURL += t("TRASH"); - } else if (activeCollectionID === DUMMY_UNCATEGORIZED_COLLECTION) { - collectionURL += t("UNCATEGORIZED"); - } else if (activeCollectionID === HIDDEN_ITEMS_SECTION) { - collectionURL += t("HIDDEN_ITEMS_SECTION_NAME"); - } else { - collectionURL += activeCollectionID; - } + // TODO: Is this URL param even used? + collectionURL = `?collection=${activeCollectionID}`; } const href = `/gallery${collectionURL}`; router.push(href, undefined, { shallow: true }); @@ -1023,6 +1020,7 @@ export default function Gallery() { getDragAndDropInputProps={getDragAndDropInputProps} getFileSelectorInputProps={getFileSelectorInputProps} getFolderSelectorInputProps={getFolderSelectorInputProps} + getZipFileSelectorInputProps={getZipFileSelectorInputProps} /> {blockingLoad && ( @@ -1112,7 +1110,6 @@ export default function Gallery() { null, false, )} - uploadTypeSelectorIntent={uploadTypeSelectorIntent} setLoading={setBlockingLoad} setCollectionNamerAttributes={setCollectionNamerAttributes} setShouldDisableDropzone={setShouldDisableDropzone} @@ -1121,13 +1118,18 @@ export default function Gallery() { isFirstUpload={ !hasNonSystemCollections(collectionSummaries) } - webFileSelectorFiles={webFileSelectorFiles} - webFolderSelectorFiles={webFolderSelectorFiles} - dragAndDropFiles={dragAndDropFiles} - uploadTypeSelectorView={uploadTypeSelectorView} - showUploadFilesDialog={openFileSelector} - showUploadDirsDialog={openFolderSelector} - 
showSessionExpiredMessage={showSessionExpiredMessage} + {...{ + dragAndDropFiles, + openFileSelector, + fileSelectorFiles, + openFolderSelector, + folderSelectorFiles, + openZipFileSelector, + fileSelectorZipFiles, + uploadTypeSelectorIntent, + uploadTypeSelectorView, + showSessionExpiredMessage, + }} /> void) | null = null; - private unsupportedPlatform = false; constructor() { this.liveEmbeddingExtractionQueue = new PQueue({ @@ -85,7 +84,7 @@ class CLIPService { } isPlatformSupported = () => { - return isElectron() && !this.unsupportedPlatform; + return isElectron(); }; private logoutHandler = async () => { @@ -99,9 +98,6 @@ class CLIPService { setupOnFileUploadListener = async () => { try { - if (this.unsupportedPlatform) { - return; - } if (this.onFileUploadedHandler) { log.info("file upload listener already setup"); return; @@ -188,26 +184,12 @@ class CLIPService { } }; - getTextEmbedding = async (text: string): Promise => { - try { - return ensureElectron().clipTextEmbedding(text); - } catch (e) { - if (e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)) { - this.unsupportedPlatform = true; - } - log.error("Failed to compute CLIP text embedding", e); - throw e; - } + getTextEmbeddingIfAvailable = async (text: string) => { + return ensureElectron().clipTextEmbeddingIfAvailable(text); }; private runClipEmbeddingExtraction = async (canceller: AbortController) => { try { - if (this.unsupportedPlatform) { - log.info( - `skipping clip embedding extraction, platform unsupported`, - ); - return; - } const user = getData(LS_KEYS.USER); if (!user) { return; @@ -254,11 +236,6 @@ class CLIPService { e, ); } - if ( - e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM) - ) { - this.unsupportedPlatform = true; - } if ( e?.message === CustomError.REQUEST_CANCELLED || e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM) diff --git a/web/apps/photos/src/services/deduplicationService.ts b/web/apps/photos/src/services/deduplicationService.ts index 9d8ab399ff..1683e554c4 
100644 --- a/web/apps/photos/src/services/deduplicationService.ts +++ b/web/apps/photos/src/services/deduplicationService.ts @@ -1,11 +1,11 @@ +import { hasFileHash } from "@/media/file"; +import { FILE_TYPE } from "@/media/file-type"; +import type { Metadata } from "@/media/types/file"; import log from "@/next/log"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint } from "@ente/shared/network/api"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; -import { FILE_TYPE } from "constants/file"; import { EnteFile } from "types/file"; -import { Metadata } from "types/upload"; -import { hasFileHash } from "utils/upload"; const ENDPOINT = getEndpoint(); diff --git a/web/apps/photos/src/services/detect-type.ts b/web/apps/photos/src/services/detect-type.ts new file mode 100644 index 0000000000..e92e10bf82 --- /dev/null +++ b/web/apps/photos/src/services/detect-type.ts @@ -0,0 +1,99 @@ +import { + FILE_TYPE, + KnownFileTypeInfos, + KnownNonMediaFileExtensions, + type FileTypeInfo, +} from "@/media/file-type"; +import { lowercaseExtension } from "@/next/file"; +import { CustomError } from "@ente/shared/error"; +import FileType from "file-type"; + +/** + * Read the file's initial contents or use the file's name to detect its type. + * + * This function first reads an initial chunk of the file and tries to detect + * the file's {@link FileTypeInfo} from it. If that doesn't work, it then falls + * back to using the file's name to detect it. + * + * If neither of these two approaches work, it throws an exception. + * + * If we were able to detect the file type, but it is explicitly not a media + * (image or video) format that we support, this function throws an error with + * the message `CustomError.UNSUPPORTED_FILE_FORMAT`. + * + * @param file A {@link File} object + * + * @returns The detected {@link FileTypeInfo}. 
+ */ +export const detectFileTypeInfo = async (file: File): Promise => + detectFileTypeInfoFromChunk(() => readInitialChunkOfFile(file), file.name); + +/** + * The lower layer implementation of the type detector. + * + * Usually, when the code already has a {@link File} object at hand, it is + * easier to use the higher level {@link detectFileTypeInfo} function. + * + * However, this lower level function is also exposed for use in cases like + * during upload where we might not have a File object and would like to provide + * the initial chunk of the file's contents in a different way. + * + * @param readInitialChunk A function to call to read the initial chunk of the + * file's data. There is no strict requirement for the size of the chunk this + * function should return, generally the first few KBs should be good. + * + * @param fileNameOrPath The full path or just the file name of the file whose + * type we're trying to determine. This is used by the fallback layer that tries + * to detect the type info from the file's extension. + */ +export const detectFileTypeInfoFromChunk = async ( + readInitialChunk: () => Promise, + fileNameOrPath: string, +): Promise => { + try { + const typeResult = await detectFileTypeFromBuffer( + await readInitialChunk(), + ); + + const mimeType = typeResult.mime; + + let fileType: FILE_TYPE; + if (mimeType.startsWith("image/")) { + fileType = FILE_TYPE.IMAGE; + } else if (mimeType.startsWith("video/")) { + fileType = FILE_TYPE.VIDEO; + } else { + throw new Error(CustomError.UNSUPPORTED_FILE_FORMAT); + } + + return { + fileType, + // See https://github.com/sindresorhus/file-type/blob/main/core.d.ts + // for the full list of ext values. 
+ extension: typeResult.ext, + mimeType, + }; + } catch (e) { + const extension = lowercaseExtension(fileNameOrPath); + const known = KnownFileTypeInfos.find((f) => f.extension == extension); + if (known) return known; + + if (KnownNonMediaFileExtensions.includes(extension)) + throw Error(CustomError.UNSUPPORTED_FILE_FORMAT); + + throw e; + } +}; + +const readInitialChunkOfFile = async (file: File) => { + const chunkSizeForTypeDetection = 4100; + const chunk = file.slice(0, chunkSizeForTypeDetection); + return new Uint8Array(await chunk.arrayBuffer()); +}; + +const detectFileTypeFromBuffer = async (buffer: Uint8Array) => { + const result = await FileType.fromBuffer(buffer); + if (!result) + throw Error("Could not deduce file type from the file's contents"); + return result; +}; diff --git a/web/apps/photos/src/services/download/index.ts b/web/apps/photos/src/services/download/index.ts index 41af5c0557..7b0171da11 100644 --- a/web/apps/photos/src/services/download/index.ts +++ b/web/apps/photos/src/services/download/index.ts @@ -1,3 +1,5 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { decodeLivePhoto } from "@/media/live-photo"; import { openCache, type BlobCache } from "@/next/blob-cache"; import log from "@/next/log"; import { APPS } from "@ente/shared/apps/constants"; @@ -5,13 +7,12 @@ import ComlinkCryptoWorker from "@ente/shared/crypto"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { CustomError } from "@ente/shared/error"; import { Events, eventBus } from "@ente/shared/events"; +import { isPlaybackPossible } from "@ente/shared/media/video-playback"; import { Remote } from "comlink"; -import { FILE_TYPE } from "constants/file"; +import isElectron from "is-electron"; +import * as ffmpeg from "services/ffmpeg"; import { EnteFile } from "types/file"; -import { - generateStreamFromArrayBuffer, - getRenderableFileURL, -} from "utils/file"; +import { generateStreamFromArrayBuffer, getRenderableImage } from 
"utils/file"; import { PhotosDownloadClient } from "./clients/photos"; import { PublicAlbumsDownloadClient } from "./clients/publicAlbums"; @@ -149,7 +150,7 @@ class DownloadManagerImpl { this.ensureInitialized(); const key = file.id.toString(); - const cached = await this.thumbnailCache.get(key); + const cached = await this.thumbnailCache?.get(key); if (cached) return new Uint8Array(await cached.arrayBuffer()); if (localOnly) return null; @@ -303,7 +304,7 @@ class DownloadManagerImpl { if (cachedBlob) res = new Response(cachedBlob); else { res = await this.downloadClient.downloadFileStream(file); - this?.fileCache.put(cacheKey, await res.blob()); + this.fileCache?.put(cacheKey, await res.blob()); } const reader = res.body.getReader(); @@ -467,3 +468,155 @@ function createDownloadClient( return new PhotosDownloadClient(token, timeout); } } + +async function getRenderableFileURL( + file: EnteFile, + fileBlob: Blob, + originalFileURL: string, + forceConvert: boolean, +): Promise { + let srcURLs: SourceURLs["url"]; + switch (file.metadata.fileType) { + case FILE_TYPE.IMAGE: { + const convertedBlob = await getRenderableImage( + file.metadata.title, + fileBlob, + ); + const convertedURL = getFileObjectURL( + originalFileURL, + fileBlob, + convertedBlob, + ); + srcURLs = convertedURL; + break; + } + case FILE_TYPE.LIVE_PHOTO: { + srcURLs = await getRenderableLivePhotoURL( + file, + fileBlob, + forceConvert, + ); + break; + } + case FILE_TYPE.VIDEO: { + const convertedBlob = await getPlayableVideo( + file.metadata.title, + fileBlob, + forceConvert, + ); + const convertedURL = getFileObjectURL( + originalFileURL, + fileBlob, + convertedBlob, + ); + srcURLs = convertedURL; + break; + } + default: { + srcURLs = originalFileURL; + break; + } + } + + let isOriginal: boolean; + if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { + isOriginal = false; + } else { + isOriginal = (srcURLs as string) === (originalFileURL as string); + } + + return { + url: srcURLs, + isOriginal, 
+ isRenderable: + file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs, + type: + file.metadata.fileType === FILE_TYPE.LIVE_PHOTO + ? "livePhoto" + : "normal", + }; +} + +const getFileObjectURL = ( + originalFileURL: string, + originalBlob: Blob, + convertedBlob: Blob, +) => { + const convertedURL = convertedBlob + ? convertedBlob === originalBlob + ? originalFileURL + : URL.createObjectURL(convertedBlob) + : null; + return convertedURL; +}; + +async function getRenderableLivePhotoURL( + file: EnteFile, + fileBlob: Blob, + forceConvert: boolean, +): Promise { + const livePhoto = await decodeLivePhoto(file.metadata.title, fileBlob); + + const getRenderableLivePhotoImageURL = async () => { + try { + const imageBlob = new Blob([livePhoto.imageData]); + const convertedImageBlob = await getRenderableImage( + livePhoto.imageFileName, + imageBlob, + ); + + return URL.createObjectURL(convertedImageBlob); + } catch (e) { + //ignore and return null + return null; + } + }; + + const getRenderableLivePhotoVideoURL = async () => { + try { + const videoBlob = new Blob([livePhoto.videoData]); + const convertedVideoBlob = await getPlayableVideo( + livePhoto.videoFileName, + videoBlob, + forceConvert, + true, + ); + return URL.createObjectURL(convertedVideoBlob); + } catch (e) { + //ignore and return null + return null; + } + }; + + return { + image: getRenderableLivePhotoImageURL, + video: getRenderableLivePhotoVideoURL, + }; +} + +async function getPlayableVideo( + videoNameTitle: string, + videoBlob: Blob, + forceConvert = false, + runOnWeb = false, +) { + try { + const isPlayable = await isPlaybackPossible( + URL.createObjectURL(videoBlob), + ); + if (isPlayable && !forceConvert) { + return videoBlob; + } else { + if (!forceConvert && !runOnWeb && !isElectron()) { + return null; + } + // TODO(MR): This might not work for very large (~ GB) videos. Test. 
+ log.info(`Converting video ${videoNameTitle} to mp4`); + const convertedVideoData = await ffmpeg.convertToMP4(videoBlob); + return new Blob([convertedVideoData]); + } + } catch (e) { + log.error("Video conversion failed", e); + return null; + } +} diff --git a/web/apps/photos/src/services/embeddingService.ts b/web/apps/photos/src/services/embeddingService.ts index a4309e314c..36af848424 100644 --- a/web/apps/photos/src/services/embeddingService.ts +++ b/web/apps/photos/src/services/embeddingService.ts @@ -86,7 +86,11 @@ export const syncEmbeddings = async () => { allLocalFiles.forEach((file) => { fileIdToKeyMap.set(file.id, file.key); }); - await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings); + await cleanupDeletedEmbeddings( + allLocalFiles, + allEmbeddings, + EMBEDDINGS_TABLE, + ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); for (const model of models) { let modelLastSinceTime = await getModelEmbeddingSyncTime(model); @@ -168,7 +172,11 @@ export const syncFileEmbeddings = async () => { allLocalFiles.forEach((file) => { fileIdToKeyMap.set(file.id, file.key); }); - await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings); + await cleanupDeletedEmbeddings( + allLocalFiles, + allEmbeddings, + FILE_EMBEDING_TABLE, + ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); for (const model of models) { let modelLastSinceTime = await getModelEmbeddingSyncTime(model); @@ -289,6 +297,7 @@ export const putEmbedding = async ( export const cleanupDeletedEmbeddings = async ( allLocalFiles: EnteFile[], allLocalEmbeddings: Embedding[] | FileML[], + tableName: string, ) => { const activeFileIds = new Set(); allLocalFiles.forEach((file) => { @@ -302,6 +311,6 @@ export const cleanupDeletedEmbeddings = async ( log.info( `cleanupDeletedEmbeddings embeddingsCount: ${allLocalEmbeddings.length} remainingEmbeddingsCount: ${remainingEmbeddings.length}`, ); - await localForage.setItem(EMBEDDINGS_TABLE, remainingEmbeddings); + await 
localForage.setItem(tableName, remainingEmbeddings); } }; diff --git a/web/apps/photos/src/services/upload/exifService.ts b/web/apps/photos/src/services/exif.ts similarity index 79% rename from web/apps/photos/src/services/upload/exifService.ts rename to web/apps/photos/src/services/exif.ts index a26075b3af..073a695f75 100644 --- a/web/apps/photos/src/services/upload/exifService.ts +++ b/web/apps/photos/src/services/exif.ts @@ -1,12 +1,10 @@ +import { type FileTypeInfo } from "@/media/file-type"; import log from "@/next/log"; -import { CustomError } from "@ente/shared/error"; import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time"; -import { EXIFLESS_FORMATS, NULL_LOCATION } from "constants/upload"; +import { NULL_LOCATION } from "constants/upload"; import exifr from "exifr"; import piexif from "piexifjs"; -import { FileTypeInfo, Location } from "types/upload"; - -const EXIFR_UNSUPPORTED_FILE_FORMAT_MESSAGE = "Unknown file format"; +import type { Location, ParsedExtractedMetadata } from "types/metadata"; type ParsedEXIFData = Record & Partial<{ @@ -36,15 +34,59 @@ type RawEXIFData = Record & ImageHeight: number; }>; +const exifTagsNeededForParsingImageMetadata = [ + "DateTimeOriginal", + "CreateDate", + "ModifyDate", + "GPSLatitude", + "GPSLongitude", + "GPSLatitudeRef", + "GPSLongitudeRef", + "DateCreated", + "ExifImageWidth", + "ExifImageHeight", + "ImageWidth", + "ImageHeight", + "PixelXDimension", + "PixelYDimension", + "MetadataDate", +]; + +/** + * Read EXIF data from an image {@link file} and use that to construct and + * return an {@link ParsedExtractedMetadata}. + * + * This function is tailored for use when we upload files. 
+ */ +export const parseImageMetadata = async ( + file: File, + fileTypeInfo: FileTypeInfo, +): Promise => { + const exifData = await getParsedExifData( + file, + fileTypeInfo, + exifTagsNeededForParsingImageMetadata, + ); + + return { + location: getEXIFLocation(exifData), + creationTime: getEXIFTime(exifData), + width: exifData?.imageWidth ?? null, + height: exifData?.imageHeight ?? null, + }; +}; + export async function getParsedExifData( receivedFile: File, - fileTypeInfo: FileTypeInfo, + { extension }: FileTypeInfo, tags?: string[], ): Promise { + const exifLessFormats = ["gif", "bmp"]; + const exifrUnsupportedFileFormatMessage = "Unknown file format"; + try { - if (EXIFLESS_FORMATS.includes(fileTypeInfo.exactType)) { - return null; - } + if (exifLessFormats.includes(extension)) return null; + const exifData: RawEXIFData = await exifr.parse(receivedFile, { reviveValues: false, tiff: true, @@ -66,16 +108,11 @@ export async function getParsedExifData( : exifData; return parseExifData(filteredExifData); } catch (e) { - if (e.message === EXIFR_UNSUPPORTED_FILE_FORMAT_MESSAGE) { - log.error( - `exif library unsupported format ${fileTypeInfo.exactType}`, - e, - ); + if (e.message == exifrUnsupportedFileFormatMessage) { + log.error(`EXIFR does not support ${extension} files`, e); + return undefined; } else { - log.error( - `get parsed exif data failed for file type ${fileTypeInfo.exactType}`, - e, - ); + log.error(`Failed to parse EXIF data for a ${extension} file`, e); throw e; } } @@ -130,14 +167,7 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData { parsedExif.imageWidth = ImageWidth; parsedExif.imageHeight = ImageHeight; } else { - log.error( - `Image dimension parsing failed - ImageWidth or ImageHeight is not a number ${JSON.stringify( - { - ImageWidth, - ImageHeight, - }, - )}`, - ); + log.warn("EXIF: Ignoring non-numeric ImageWidth or ImageHeight"); } } else if (ExifImageWidth && ExifImageHeight) { if ( @@ -147,13 +177,8 @@ function 
parseExifData(exifData: RawEXIFData): ParsedEXIFData { parsedExif.imageWidth = ExifImageWidth; parsedExif.imageHeight = ExifImageHeight; } else { - log.error( - `Image dimension parsing failed - ExifImageWidth or ExifImageHeight is not a number ${JSON.stringify( - { - ExifImageWidth, - ExifImageHeight, - }, - )}`, + log.warn( + "EXIF: Ignoring non-numeric ExifImageWidth or ExifImageHeight", ); } } else if (PixelXDimension && PixelYDimension) { @@ -164,13 +189,8 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData { parsedExif.imageWidth = PixelXDimension; parsedExif.imageHeight = PixelYDimension; } else { - log.error( - `Image dimension parsing failed - PixelXDimension or PixelYDimension is not a number ${JSON.stringify( - { - PixelXDimension, - PixelYDimension, - }, - )}`, + log.warn( + "EXIF: Ignoring non-numeric PixelXDimension or PixelYDimension", ); } } @@ -180,7 +200,7 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData { function parseEXIFDate(dateTimeString: string) { try { if (typeof dateTimeString !== "string" || dateTimeString === "") { - throw Error(CustomError.NOT_A_DATE); + throw new Error("Invalid date string"); } // Check and parse date in the format YYYYMMDD @@ -211,7 +231,7 @@ function parseEXIFDate(dateTimeString: string) { typeof day === "undefined" || Number.isNaN(day) ) { - throw Error(CustomError.NOT_A_DATE); + throw new Error("Invalid date"); } let date: Date; if ( @@ -227,7 +247,7 @@ function parseEXIFDate(dateTimeString: string) { date = new Date(year, month - 1, day, hour, minute, second); } if (Number.isNaN(+date)) { - throw Error(CustomError.NOT_A_DATE); + throw new Error("Invalid date"); } return date; } catch (e) { @@ -249,7 +269,7 @@ export function parseEXIFLocation( gpsLatitude.length !== 3 || gpsLongitude.length !== 3 ) { - throw Error(CustomError.NOT_A_LOCATION); + throw new Error("Invalid EXIF location"); } const latitude = convertDMSToDD( gpsLatitude[0], @@ -265,16 +285,14 @@ export function 
parseEXIFLocation( ); return { latitude, longitude }; } catch (e) { - log.error( - `Failed to parseEXIFLocation ${JSON.stringify({ - gpsLatitude, - gpsLatitudeRef, - gpsLongitude, - gpsLongitudeRef, - })}`, - e, - ); - return NULL_LOCATION; + const p = { + gpsLatitude, + gpsLatitudeRef, + gpsLongitude, + gpsLongitudeRef, + }; + log.error(`Failed to parse EXIF location ${JSON.stringify(p)}`, e); + return { ...NULL_LOCATION }; } } @@ -291,7 +309,7 @@ function convertDMSToDD( export function getEXIFLocation(exifData: ParsedEXIFData): Location { if (!exifData || (!exifData.latitude && exifData.latitude !== 0)) { - return NULL_LOCATION; + return { ...NULL_LOCATION }; } return { latitude: exifData.latitude, longitude: exifData.longitude }; } diff --git a/web/apps/photos/src/services/export/index.ts b/web/apps/photos/src/services/export/index.ts index 7d62798823..b02e05a428 100644 --- a/web/apps/photos/src/services/export/index.ts +++ b/web/apps/photos/src/services/export/index.ts @@ -1,3 +1,6 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { decodeLivePhoto } from "@/media/live-photo"; +import type { Metadata } from "@/media/types/file"; import { ensureElectron } from "@/next/electron"; import log from "@/next/log"; import { CustomError } from "@ente/shared/error"; @@ -5,12 +8,11 @@ import { Events, eventBus } from "@ente/shared/events"; import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; import { formatDateTimeShort } from "@ente/shared/time/format"; import { User } from "@ente/shared/user/types"; -import { sleep } from "@ente/shared/utils"; +import { wait } from "@ente/shared/utils"; import QueueProcessor, { CancellationStatus, RequestCanceller, } from "@ente/shared/utils/queueProcessor"; -import { FILE_TYPE } from "constants/file"; import { Collection } from "types/collection"; import { CollectionExportNames, @@ -21,7 +23,6 @@ import { FileExportNames, } from "types/export"; import { EnteFile } from "types/file"; -import { 
Metadata } from "types/upload"; import { constructCollectionNameMap, getCollectionUserFacingName, @@ -38,7 +39,6 @@ import { writeStream } from "utils/native-stream"; import { getAllLocalCollections } from "../collectionService"; import downloadManager from "../download"; import { getAllLocalFiles } from "../fileService"; -import { decodeLivePhoto } from "../livePhotoService"; import { migrateExport } from "./migration"; /** Name of the JSON file in which we keep the state of the export. */ @@ -46,13 +46,13 @@ const exportRecordFileName = "export_status.json"; /** * Name of the top level directory which we create underneath the selected - * directory when the user starts an export to the filesystem. + * directory when the user starts an export to the file system. */ const exportDirectoryName = "Ente Photos"; /** - * Name of the directory in which we put our metadata when exporting to the - * filesystem. + * Name of the directory in which we put our metadata when exporting to the file + * system. 
*/ export const exportMetadataDirectoryName = "metadata"; @@ -547,6 +547,9 @@ class ExportService { isCanceled: CancellationStatus, ) { const fs = ensureElectron().fs; + const rmdirIfExists = async (dirPath: string) => { + if (await fs.exists(dirPath)) await fs.rmdir(dirPath); + }; try { const exportRecord = await this.getExportRecord(exportFolder); const collectionIDPathMap = @@ -581,11 +584,11 @@ class ExportService { ); try { // delete the collection metadata folder - await fs.rmdir( + await rmdirIfExists( getMetadataFolderExportPath(collectionExportPath), ); // delete the collection folder - await fs.rmdir(collectionExportPath); + await rmdirIfExists(collectionExportPath); } catch (e) { await this.addCollectionExportedRecord( exportFolder, @@ -919,7 +922,7 @@ class ExportService { e.message === CustomError.EXPORT_RECORD_JSON_PARSING_FAILED && retry ) { - await sleep(1000); + await wait(1000); return await this.getExportRecord(folder, false); } if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) { @@ -994,6 +997,7 @@ class ExportService { file, ); await writeStream( + electron, `${collectionExportPath}/${fileExportName}`, updatedFileStream, ); @@ -1015,18 +1019,18 @@ class ExportService { fileStream: ReadableStream, file: EnteFile, ) { - const electron = ensureElectron(); + const fs = ensureElectron().fs; const fileBlob = await new Response(fileStream).blob(); - const livePhoto = await decodeLivePhoto(file, fileBlob); + const livePhoto = await decodeLivePhoto(file.metadata.title, fileBlob); const imageExportName = await safeFileName( collectionExportPath, - livePhoto.imageNameTitle, - electron.fs.exists, + livePhoto.imageFileName, + fs.exists, ); const videoExportName = await safeFileName( collectionExportPath, - livePhoto.videoNameTitle, - electron.fs.exists, + livePhoto.videoFileName, + fs.exists, ); const livePhotoExportName = getLivePhotoExportName( imageExportName, @@ -1038,18 +1042,23 @@ class ExportService { livePhotoExportName, ); try { - const 
imageStream = generateStreamFromArrayBuffer(livePhoto.image); + const imageStream = generateStreamFromArrayBuffer( + livePhoto.imageData, + ); await this.saveMetadataFile( collectionExportPath, imageExportName, file, ); await writeStream( + electron, `${collectionExportPath}/${imageExportName}`, imageStream, ); - const videoStream = generateStreamFromArrayBuffer(livePhoto.video); + const videoStream = generateStreamFromArrayBuffer( + livePhoto.videoData, + ); await this.saveMetadataFile( collectionExportPath, videoExportName, @@ -1057,13 +1066,12 @@ class ExportService { ); try { await writeStream( + electron, `${collectionExportPath}/${videoExportName}`, videoStream, ); } catch (e) { - await electron.fs.rm( - `${collectionExportPath}/${imageExportName}`, - ); + await fs.rm(`${collectionExportPath}/${imageExportName}`); throw e; } } catch (e) { @@ -1373,7 +1381,7 @@ const isExportInProgress = (exportStage: ExportStage) => * * Also move its associated metadata JSON to Trash. * - * @param exportDir The root directory on the user's filesystem where we are + * @param exportDir The root directory on the user's file system where we are * exporting to. 
* */ const moveToTrash = async ( @@ -1393,17 +1401,19 @@ const moveToTrash = async ( if (await fs.exists(filePath)) { await fs.mkdirIfNeeded(trashDir); - const trashFilePath = await safeFileName(trashDir, fileName, fs.exists); + const trashFileName = await safeFileName(trashDir, fileName, fs.exists); + const trashFilePath = `${trashDir}/${trashFileName}`; await fs.rename(filePath, trashFilePath); } if (await fs.exists(metadataFilePath)) { await fs.mkdirIfNeeded(metadataTrashDir); - const metadataTrashFilePath = await safeFileName( + const metadataTrashFileName = await safeFileName( metadataTrashDir, metadataFileName, fs.exists, ); - await fs.rename(filePath, metadataTrashFilePath); + const metadataTrashFilePath = `${metadataTrashDir}/${metadataTrashFileName}`; + await fs.rename(metadataFilePath, metadataTrashFilePath); } }; diff --git a/web/apps/photos/src/services/export/migration.ts b/web/apps/photos/src/services/export/migration.ts index b90c12e1c2..9404ddde5b 100644 --- a/web/apps/photos/src/services/export/migration.ts +++ b/web/apps/photos/src/services/export/migration.ts @@ -1,13 +1,14 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { decodeLivePhoto } from "@/media/live-photo"; import { ensureElectron } from "@/next/electron"; +import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; import { LS_KEYS, getData } from "@ente/shared/storage/localStorage"; import { User } from "@ente/shared/user/types"; -import { sleep } from "@ente/shared/utils"; -import { FILE_TYPE } from "constants/file"; +import { wait } from "@ente/shared/utils"; import { getLocalCollections } from "services/collectionService"; import downloadManager from "services/download"; import { getAllLocalFiles } from "services/fileService"; -import { decodeLivePhoto } from "services/livePhotoService"; import { Collection } from "types/collection"; import { CollectionExportNames, @@ -21,7 +22,6 @@ import { } from "types/export"; import { EnteFile } from "types/file"; 
import { getNonEmptyPersonalCollections } from "utils/collection"; -import { splitFilenameAndExtension } from "utils/ffmpeg"; import { getIDBasedSortedFiles, getPersonalFiles, @@ -305,7 +305,7 @@ async function getFileExportNamesFromExportedFiles( ); let success = 0; for (const file of exportedFiles) { - await sleep(0); + await wait(0); const collectionPath = exportedCollectionPaths.get(file.collectionID); log.debug( () => @@ -318,15 +318,18 @@ async function getFileExportNamesFromExportedFiles( if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { const fileStream = await downloadManager.getFile(file); const fileBlob = await new Response(fileStream).blob(); - const livePhoto = await decodeLivePhoto(file, fileBlob); + const { imageFileName, videoFileName } = await decodeLivePhoto( + file.metadata.title, + fileBlob, + ); const imageExportName = getUniqueFileExportNameForMigration( collectionPath, - livePhoto.imageNameTitle, + imageFileName, usedFilePaths, ); const videoExportName = getUniqueFileExportNameForMigration( collectionPath, - livePhoto.videoNameTitle, + videoFileName, usedFilePaths, ); fileExportName = getLivePhotoExportName( @@ -498,9 +501,7 @@ const getUniqueFileExportNameForMigration = ( .get(collectionPath) ?.has(getFileSavePath(collectionPath, fileExportName)) ) { - const filenameParts = splitFilenameAndExtension( - sanitizeFilename(filename), - ); + const filenameParts = nameAndExtension(sanitizeFilename(filename)); if (filenameParts[1]) { fileExportName = `${filenameParts[0]}(${count}).${filenameParts[1]}`; } else { diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts new file mode 100644 index 0000000000..4dfdb3f641 --- /dev/null +++ b/web/apps/photos/src/services/ffmpeg.ts @@ -0,0 +1,294 @@ +import type { Electron } from "@/next/types/ipc"; +import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time"; +import { Remote } from 
"comlink"; +import { + ffmpegPathPlaceholder, + inputPathPlaceholder, + outputPathPlaceholder, +} from "constants/ffmpeg"; +import { NULL_LOCATION } from "constants/upload"; +import type { ParsedExtractedMetadata } from "types/metadata"; +import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; +import { + toDataOrPathOrZipEntry, + type DesktopUploadItem, + type UploadItem, +} from "./upload/types"; + +/** + * Generate a thumbnail for the given video using a wasm FFmpeg running in a web + * worker. + * + * This function is called during upload, when we need to generate thumbnails + * for the new files that the user is adding. + * + * @param blob The input video blob. + * + * @returns JPEG data of the generated thumbnail. + * + * See also {@link generateVideoThumbnailNative}. + */ +export const generateVideoThumbnailWeb = async (blob: Blob) => + _generateVideoThumbnail((seekTime: number) => + ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg", 0), + ); + +const _generateVideoThumbnail = async ( + thumbnailAtTime: (seekTime: number) => Promise, +) => { + try { + // Try generating thumbnail at seekTime 1 second. + return await thumbnailAtTime(1); + } catch (e) { + // If that fails, try again at the beginning. If even this throws, let + // it fail. + return await thumbnailAtTime(0); + } +}; + +/** + * Generate a thumbnail for the given video using a native FFmpeg binary bundled + * with our desktop app. + * + * This function is called during upload, when we need to generate thumbnails + * for the new files that the user is adding. + * + * @param dataOrPath The input video's data or the path to the video on the + * user's local file system. See: [Note: Reading a UploadItem]. + * + * @returns JPEG data of the generated thumbnail. + * + * See also {@link generateVideoThumbnailNative}. 
+ */ +export const generateVideoThumbnailNative = async ( + electron: Electron, + desktopUploadItem: DesktopUploadItem, +) => + _generateVideoThumbnail((seekTime: number) => + electron.ffmpegExec( + makeGenThumbnailCommand(seekTime), + toDataOrPathOrZipEntry(desktopUploadItem), + "jpeg", + 0, + ), + ); + +const makeGenThumbnailCommand = (seekTime: number) => [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-ss", + `00:00:0${seekTime}`, + "-vframes", + "1", + "-vf", + "scale=-1:720", + outputPathPlaceholder, +]; + +/** + * Extract metadata from the given video + * + * When we're running in the context of our desktop app _and_ we're passed a + * file path , this uses the native FFmpeg bundled with our desktop app. + * Otherwise it uses a wasm FFmpeg running in a web worker. + * + * This function is called during upload, when we need to extract the metadata + * of videos that the user is uploading. + * + * @param uploadItem A {@link File}, or the absolute path to a file on the + * user's local filesytem. A path can only be provided when we're running in the + * context of our desktop app. + */ +export const extractVideoMetadata = async ( + uploadItem: UploadItem, +): Promise => { + const command = extractVideoMetadataCommand; + const outputData = + uploadItem instanceof File + ? 
await ffmpegExecWeb(command, uploadItem, "txt", 0) + : await electron.ffmpegExec( + command, + toDataOrPathOrZipEntry(uploadItem), + "txt", + 0, + ); + + return parseFFmpegExtractedMetadata(outputData); +}; + +// Options: +// +// - `-c [short for codex] copy` +// - copy is the [stream_specifier](ffmpeg.org/ffmpeg.html#Stream-specifiers) +// - copies all the stream without re-encoding +// +// - `-map_metadata` +// - http://ffmpeg.org/ffmpeg.html#Advanced-options (search for map_metadata) +// - copies all stream metadata to the output +// +// - `-f ffmetadata` +// - https://ffmpeg.org/ffmpeg-formats.html#Metadata-1 +// - dump metadata from media files into a simple INI-like utf-8 text file +// +const extractVideoMetadataCommand = [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-c", + "copy", + "-map_metadata", + "0", + "-f", + "ffmetadata", + outputPathPlaceholder, +]; + +enum MetadataTags { + CREATION_TIME = "creation_time", + APPLE_CONTENT_IDENTIFIER = "com.apple.quicktime.content.identifier", + APPLE_LIVE_PHOTO_IDENTIFIER = "com.apple.quicktime.live-photo.auto", + APPLE_CREATION_DATE = "com.apple.quicktime.creationdate", + APPLE_LOCATION_ISO = "com.apple.quicktime.location.ISO6709", + LOCATION = "location", +} + +function parseFFmpegExtractedMetadata(encodedMetadata: Uint8Array) { + const metadataString = new TextDecoder().decode(encodedMetadata); + const metadataPropertyArray = metadataString.split("\n"); + const metadataKeyValueArray = metadataPropertyArray.map((property) => + property.split("="), + ); + const validKeyValuePairs = metadataKeyValueArray.filter( + (keyValueArray) => keyValueArray.length === 2, + ) as Array<[string, string]>; + + const metadataMap = Object.fromEntries(validKeyValuePairs); + + const location = parseAppleISOLocation( + metadataMap[MetadataTags.APPLE_LOCATION_ISO] ?? + metadataMap[MetadataTags.LOCATION], + ); + + const creationTime = parseCreationTime( + metadataMap[MetadataTags.APPLE_CREATION_DATE] ?? 
+ metadataMap[MetadataTags.CREATION_TIME], + ); + const parsedMetadata: ParsedExtractedMetadata = { + creationTime, + location: { + latitude: location.latitude, + longitude: location.longitude, + }, + width: null, + height: null, + }; + return parsedMetadata; +} + +function parseAppleISOLocation(isoLocation: string) { + let location = { ...NULL_LOCATION }; + if (isoLocation) { + const [latitude, longitude] = isoLocation + .match(/(\+|-)\d+\.*\d+/g) + .map((x) => parseFloat(x)); + + location = { latitude, longitude }; + } + return location; +} + +function parseCreationTime(creationTime: string) { + let dateTime = null; + if (creationTime) { + dateTime = validateAndGetCreationUnixTimeInMicroSeconds( + new Date(creationTime), + ); + } + return dateTime; +} + +/** + * Run the given FFmpeg command using a wasm FFmpeg running in a web worker. + * + * As a rough ballpark, currently the native FFmpeg integration in the desktop + * app is 10-20x faster than the wasm one. See: [Note: FFmpeg in Electron]. + */ +const ffmpegExecWeb = async ( + command: string[], + blob: Blob, + outputFileExtension: string, + timeoutMs: number, +) => { + const worker = await workerFactory.lazy(); + return await worker.exec(command, blob, outputFileExtension, timeoutMs); +}; + +/** + * Convert a video from a format that is not supported in the browser to MP4. + * + * This function is called when the user views a video or a live photo, and we + * want to play it back. The idea is to convert it to MP4 which has much more + * universal support in browsers. + * + * @param blob The video blob. + * + * @returns The mp4 video data. 
+ */ +export const convertToMP4 = async (blob: Blob) => + ffmpegExecNativeOrWeb( + [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-preset", + "ultrafast", + outputPathPlaceholder, + ], + blob, + "mp4", + 30 * 1000, + ); + +/** + * Run the given FFmpeg command using a native FFmpeg binary when we're running + * in the context of our desktop app, otherwise using the browser based wasm + * FFmpeg implemenation. + * + * See also: {@link ffmpegExecWeb}. + */ +const ffmpegExecNativeOrWeb = async ( + command: string[], + blob: Blob, + outputFileExtension: string, + timeoutMs: number, +) => { + const electron = globalThis.electron; + if (electron) + return electron.ffmpegExec( + command, + new Uint8Array(await blob.arrayBuffer()), + outputFileExtension, + timeoutMs, + ); + else return ffmpegExecWeb(command, blob, outputFileExtension, timeoutMs); +}; + +/** Lazily create a singleton instance of our worker */ +class WorkerFactory { + private instance: Promise>; + + async lazy() { + if (!this.instance) this.instance = createComlinkWorker().remote; + return this.instance; + } +} + +const workerFactory = new WorkerFactory(); + +const createComlinkWorker = () => + new ComlinkWorker( + "ffmpeg-worker", + new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)), + ); diff --git a/web/apps/photos/src/services/ffmpeg/ffmpegFactory.ts b/web/apps/photos/src/services/ffmpeg/ffmpegFactory.ts deleted file mode 100644 index 49aee9868e..0000000000 --- a/web/apps/photos/src/services/ffmpeg/ffmpegFactory.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { ElectronFile } from "types/upload"; -import ComlinkFFmpegWorker from "utils/comlink/ComlinkFFmpegWorker"; - -export interface IFFmpeg { - run: ( - cmd: string[], - inputFile: File | ElectronFile, - outputFilename: string, - dontTimeout?: boolean, - ) => Promise; -} - -class FFmpegFactory { - private client: IFFmpeg; - async getFFmpegClient() { - if (!this.client) { - const electron = globalThis.electron; - if (electron) { - 
this.client = { - run(cmd, inputFile, outputFilename, dontTimeout) { - return electron.runFFmpegCmd( - cmd, - inputFile, - outputFilename, - dontTimeout, - ); - }, - }; - } else { - this.client = await ComlinkFFmpegWorker.getInstance(); - } - } - return this.client; - } -} - -export default new FFmpegFactory(); diff --git a/web/apps/photos/src/services/ffmpeg/ffmpegService.ts b/web/apps/photos/src/services/ffmpeg/ffmpegService.ts deleted file mode 100644 index 0a6a66cb05..0000000000 --- a/web/apps/photos/src/services/ffmpeg/ffmpegService.ts +++ /dev/null @@ -1,100 +0,0 @@ -import log from "@/next/log"; -import { - FFMPEG_PLACEHOLDER, - INPUT_PATH_PLACEHOLDER, - OUTPUT_PATH_PLACEHOLDER, -} from "constants/ffmpeg"; -import { ElectronFile } from "types/upload"; -import { parseFFmpegExtractedMetadata } from "utils/ffmpeg"; -import ffmpegFactory from "./ffmpegFactory"; - -export async function generateVideoThumbnail( - file: File | ElectronFile, -): Promise { - try { - let seekTime = 1; - const ffmpegClient = await ffmpegFactory.getFFmpegClient(); - while (seekTime >= 0) { - try { - return await ffmpegClient.run( - [ - FFMPEG_PLACEHOLDER, - "-i", - INPUT_PATH_PLACEHOLDER, - "-ss", - `00:00:0${seekTime}`, - "-vframes", - "1", - "-vf", - "scale=-1:720", - OUTPUT_PATH_PLACEHOLDER, - ], - file, - "thumb.jpeg", - ); - } catch (e) { - if (seekTime === 0) { - throw e; - } - } - seekTime--; - } - } catch (e) { - log.error("ffmpeg generateVideoThumbnail failed", e); - throw e; - } -} - -export async function extractVideoMetadata(file: File | ElectronFile) { - try { - const ffmpegClient = await ffmpegFactory.getFFmpegClient(); - // https://stackoverflow.com/questions/9464617/retrieving-and-saving-media-metadata-using-ffmpeg - // -c [short for codex] copy[(stream_specifier)[ffmpeg.org/ffmpeg.html#Stream-specifiers]] => copies all the stream without re-encoding - // -map_metadata [http://ffmpeg.org/ffmpeg.html#Advanced-options search for map_metadata] => copies all stream metadata 
to the out - // -f ffmetadata [https://ffmpeg.org/ffmpeg-formats.html#Metadata-1] => dump metadata from media files into a simple UTF-8-encoded INI-like text file - const metadata = await ffmpegClient.run( - [ - FFMPEG_PLACEHOLDER, - "-i", - INPUT_PATH_PLACEHOLDER, - "-c", - "copy", - "-map_metadata", - "0", - "-f", - "ffmetadata", - OUTPUT_PATH_PLACEHOLDER, - ], - file, - `metadata.txt`, - ); - return parseFFmpegExtractedMetadata( - new Uint8Array(await metadata.arrayBuffer()), - ); - } catch (e) { - log.error("ffmpeg extractVideoMetadata failed", e); - throw e; - } -} - -export async function convertToMP4(file: File | ElectronFile) { - try { - const ffmpegClient = await ffmpegFactory.getFFmpegClient(); - return await ffmpegClient.run( - [ - FFMPEG_PLACEHOLDER, - "-i", - INPUT_PATH_PLACEHOLDER, - "-preset", - "ultrafast", - OUTPUT_PATH_PLACEHOLDER, - ], - file, - "output.mp4", - true, - ); - } catch (e) { - log.error("ffmpeg convertToMP4 failed", e); - throw e; - } -} diff --git a/web/apps/photos/src/services/updateCreationTimeWithExif.ts b/web/apps/photos/src/services/fix-exif.ts similarity index 94% rename from web/apps/photos/src/services/updateCreationTimeWithExif.ts rename to web/apps/photos/src/services/fix-exif.ts index 667ae44f4e..f47e4c5ed0 100644 --- a/web/apps/photos/src/services/updateCreationTimeWithExif.ts +++ b/web/apps/photos/src/services/fix-exif.ts @@ -1,15 +1,15 @@ +import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time"; import type { FixOption } from "components/FixCreationTime"; -import { FILE_TYPE } from "constants/file"; -import { getFileType } from "services/typeDetectionService"; +import { detectFileTypeInfo } from "services/detect-type"; import { EnteFile } from "types/file"; import { changeFileCreationTime, updateExistingFilePubMetadata, } from "utils/file"; import downloadManager from "./download"; -import { getParsedExifData } from 
"./upload/exifService"; +import { getParsedExifData } from "./exif"; const EXIF_TIME_TAGS = [ "DateTimeOriginal", @@ -53,7 +53,7 @@ export async function updateCreationTimeWithExif( [fileBlob], file.metadata.title, ); - const fileTypeInfo = await getFileType(fileObject); + const fileTypeInfo = await detectFileTypeInfo(fileObject); const exifData = await getParsedExifData( fileObject, fileTypeInfo, diff --git a/web/apps/photos/src/services/heic-convert/service.ts b/web/apps/photos/src/services/heic-convert.ts similarity index 57% rename from web/apps/photos/src/services/heic-convert/service.ts rename to web/apps/photos/src/services/heic-convert.ts index 0dc6506128..c2ea198391 100644 --- a/web/apps/photos/src/services/heic-convert/service.ts +++ b/web/apps/photos/src/services/heic-convert.ts @@ -1,11 +1,20 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import log from "@/next/log"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { CustomError } from "@ente/shared/error"; import { retryAsyncFunction } from "@ente/shared/utils"; import QueueProcessor from "@ente/shared/utils/queueProcessor"; -import { getDedicatedConvertWorker } from "utils/comlink/ComlinkConvertWorker"; -import { DedicatedConvertWorker } from "worker/convert.worker"; +import { type DedicatedHEICConvertWorker } from "worker/heic-convert.worker"; + +/** + * Convert a HEIC image to a JPEG. + * + * Behind the scenes, it uses a web worker pool to do the conversion using a + * WASM HEIC conversion package. + * + * @param heicBlob The HEIC blob to convert. + * @returns The JPEG blob. 
+ */ +export const heicToJPEG = (heicBlob: Blob) => converter.convert(heicBlob); const WORKER_POOL_SIZE = 2; const WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS = [100, 100]; @@ -14,20 +23,18 @@ const BREATH_TIME_IN_MICROSECONDS = 1000; class HEICConverter { private convertProcessor = new QueueProcessor(); - private workerPool: ComlinkWorker[] = []; - private ready: Promise; + private workerPool: ComlinkWorker[] = []; - constructor() { - this.ready = this.init(); - } - private async init() { + private initIfNeeded() { + if (this.workerPool.length > 0) return; this.workerPool = []; - for (let i = 0; i < WORKER_POOL_SIZE; i++) { - this.workerPool.push(getDedicatedConvertWorker()); - } + for (let i = 0; i < WORKER_POOL_SIZE; i++) + this.workerPool.push(createComlinkWorker()); } + async convert(fileBlob: Blob): Promise { - await this.ready; + this.initIfNeeded(); + const response = this.convertProcessor.queueUpRequest(() => retryAsyncFunction(async () => { const convertWorker = this.workerPool.shift(); @@ -42,18 +49,11 @@ class HEICConverter { }, WAIT_TIME_IN_MICROSECONDS); const startTime = Date.now(); const convertedHEIC = - await worker.convertHEICToJPEG( - fileBlob, - ); - log.info( - `originalFileSize:${convertBytesToHumanReadable( - fileBlob?.size, - )},convertedFileSize:${convertBytesToHumanReadable( - convertedHEIC?.size, - )}, heic conversion time: ${ - Date.now() - startTime - }ms `, + await worker.heicToJPEG(fileBlob); + const ms = Math.round( + Date.now() - startTime, ); + log.debug(() => `heic => jpeg (${ms} ms)`); clearTimeout(timeout); resolve(convertedHEIC); } catch (e) { @@ -65,18 +65,7 @@ class HEICConverter { ); if (!convertedHEIC || convertedHEIC?.size === 0) { log.error( - `converted heic fileSize is Zero - ${JSON.stringify( - { - originalFileSize: - convertBytesToHumanReadable( - fileBlob?.size ?? 0, - ), - convertedFileSize: - convertBytesToHumanReadable( - convertedHEIC?.size ?? 
0, - ), - }, - )}`, + `Converted HEIC file is empty (original was ${fileBlob?.size} bytes)`, ); } await new Promise((resolve) => { @@ -88,13 +77,14 @@ class HEICConverter { this.workerPool.push(convertWorker); return convertedHEIC; } catch (e) { - log.error("heic conversion failed", e); + log.error("HEIC conversion failed", e); convertWorker.terminate(); - this.workerPool.push(getDedicatedConvertWorker()); + this.workerPool.push(createComlinkWorker()); throw e; } }, WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS), ); + try { return await response.promise; } catch (e) { @@ -107,4 +97,11 @@ class HEICConverter { } } -export default new HEICConverter(); +/** The singleton instance of {@link HEICConverter}. */ +const converter = new HEICConverter(); + +const createComlinkWorker = () => + new ComlinkWorker( + "heic-convert-worker", + new Worker(new URL("worker/heic-convert.worker.ts", import.meta.url)), + ); diff --git a/web/apps/photos/src/services/heicConversionService.ts b/web/apps/photos/src/services/heicConversionService.ts deleted file mode 100644 index 1897814946..0000000000 --- a/web/apps/photos/src/services/heicConversionService.ts +++ /dev/null @@ -1,14 +0,0 @@ -import log from "@/next/log"; -import WasmHEICConverterService from "./heic-convert/service"; - -class HeicConversionService { - async convert(heicFileData: Blob): Promise { - try { - return await WasmHEICConverterService.convert(heicFileData); - } catch (e) { - log.error("failed to convert heic file", e); - throw e; - } - } -} -export default new HeicConversionService(); diff --git a/web/apps/photos/src/services/importService.ts b/web/apps/photos/src/services/importService.ts deleted file mode 100644 index 6d2c46a85b..0000000000 --- a/web/apps/photos/src/services/importService.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { ensureElectron } from "@/next/electron"; -import log from "@/next/log"; -import { PICKED_UPLOAD_TYPE } from "constants/upload"; -import { Collection } from "types/collection"; -import { 
ElectronFile, FileWithCollection } from "types/upload"; - -interface PendingUploads { - files: ElectronFile[]; - collectionName: string; - type: PICKED_UPLOAD_TYPE; -} - -class ImportService { - async getPendingUploads(): Promise { - try { - const pendingUploads = - (await ensureElectron().getPendingUploads()) as PendingUploads; - return pendingUploads; - } catch (e) { - if (e?.message?.includes("ENOENT: no such file or directory")) { - // ignore - } else { - log.error("failed to getPendingUploads ", e); - } - return { files: [], collectionName: null, type: null }; - } - } - - async setToUploadCollection(collections: Collection[]) { - let collectionName: string = null; - /* collection being one suggest one of two things - 1. Either the user has upload to a single existing collection - 2. Created a new single collection to upload to - may have had multiple folder, but chose to upload - to one album - hence saving the collection name when upload collection count is 1 - helps the info of user choosing this options - and on next upload we can directly start uploading to this collection - */ - if (collections.length === 1) { - collectionName = collections[0].name; - } - await ensureElectron().setToUploadCollection(collectionName); - } - - async updatePendingUploads(files: FileWithCollection[]) { - const filePaths = []; - for (const fileWithCollection of files) { - if (fileWithCollection.isLivePhoto) { - filePaths.push( - (fileWithCollection.livePhotoAssets.image as ElectronFile) - .path, - (fileWithCollection.livePhotoAssets.video as ElectronFile) - .path, - ); - } else { - filePaths.push((fileWithCollection.file as ElectronFile).path); - } - } - await ensureElectron().setToUploadFiles( - PICKED_UPLOAD_TYPE.FILES, - filePaths, - ); - } - - async cancelRemainingUploads() { - const electron = ensureElectron(); - await electron.setToUploadCollection(null); - await electron.setToUploadFiles(PICKED_UPLOAD_TYPE.ZIPS, []); - await 
electron.setToUploadFiles(PICKED_UPLOAD_TYPE.FILES, []); - } -} - -export default new ImportService(); diff --git a/web/apps/photos/src/services/livePhotoService.ts b/web/apps/photos/src/services/livePhotoService.ts deleted file mode 100644 index 4d96e812cc..0000000000 --- a/web/apps/photos/src/services/livePhotoService.ts +++ /dev/null @@ -1,45 +0,0 @@ -import JSZip from "jszip"; -import { EnteFile } from "types/file"; -import { - getFileExtensionWithDot, - getFileNameWithoutExtension, -} from "utils/file"; - -class LivePhoto { - image: Uint8Array; - video: Uint8Array; - imageNameTitle: string; - videoNameTitle: string; -} - -export const decodeLivePhoto = async (file: EnteFile, zipBlob: Blob) => { - const originalName = getFileNameWithoutExtension(file.metadata.title); - const zip = await JSZip.loadAsync(zipBlob, { createFolders: true }); - - const livePhoto = new LivePhoto(); - for (const zipFilename in zip.files) { - if (zipFilename.startsWith("image")) { - livePhoto.imageNameTitle = - originalName + getFileExtensionWithDot(zipFilename); - livePhoto.image = await zip.files[zipFilename].async("uint8array"); - } else if (zipFilename.startsWith("video")) { - livePhoto.videoNameTitle = - originalName + getFileExtensionWithDot(zipFilename); - livePhoto.video = await zip.files[zipFilename].async("uint8array"); - } - } - return livePhoto; -}; - -export const encodeLivePhoto = async (livePhoto: LivePhoto) => { - const zip = new JSZip(); - zip.file( - "image" + getFileExtensionWithDot(livePhoto.imageNameTitle), - livePhoto.image, - ); - zip.file( - "video" + getFileExtensionWithDot(livePhoto.videoNameTitle), - livePhoto.video, - ); - return await zip.generateAsync({ type: "uint8array" }); -}; diff --git a/web/apps/photos/src/services/locationSearchService.ts b/web/apps/photos/src/services/locationSearchService.ts index 2aa2b6bacd..354c87a712 100644 --- a/web/apps/photos/src/services/locationSearchService.ts +++ b/web/apps/photos/src/services/locationSearchService.ts @@ 
-1,6 +1,6 @@ import log from "@/next/log"; import { LocationTagData } from "types/entity"; -import { Location } from "types/upload"; +import { Location } from "types/metadata"; export interface City { city: string; diff --git a/web/apps/photos/src/services/machineLearning/faceService.ts b/web/apps/photos/src/services/machineLearning/faceService.ts index 052ed020d5..1dedadf151 100644 --- a/web/apps/photos/src/services/machineLearning/faceService.ts +++ b/web/apps/photos/src/services/machineLearning/faceService.ts @@ -144,8 +144,10 @@ class FaceService { syncContext.faceEmbeddingService.faceSize, imageBitmap, ); - const blurValues = - syncContext.blurDetectionService.detectBlur(faceImages); + const blurValues = syncContext.blurDetectionService.detectBlur( + faceImages, + newMlFile.faces, + ); newMlFile.faces.forEach((f, i) => (f.blurValue = blurValues[i])); imageBitmap.close(); diff --git a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts b/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts index 14178a5351..3357e21ccd 100644 --- a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts +++ b/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts @@ -1,6 +1,7 @@ import { BlurDetectionMethod, BlurDetectionService, + Face, Versioned, } from "types/machineLearning"; import { createGrayscaleIntMatrixFromNormalized2List } from "utils/image"; @@ -16,18 +17,20 @@ class LaplacianBlurDetectionService implements BlurDetectionService { }; } - public detectBlur(alignedFaces: Float32Array): number[] { + public detectBlur(alignedFaces: Float32Array, faces: Face[]): number[] { const numFaces = Math.round( alignedFaces.length / (mobileFaceNetFaceSize * mobileFaceNetFaceSize * 3), ); const blurValues: number[] = []; for (let i = 0; i < numFaces; i++) { + const face = faces[i]; + const direction = getFaceDirection(face); const faceImage = createGrayscaleIntMatrixFromNormalized2List( 
alignedFaces, i, ); - const laplacian = this.applyLaplacian(faceImage); + const laplacian = this.applyLaplacian(faceImage, direction); const variance = this.calculateVariance(laplacian); blurValues.push(variance); } @@ -61,42 +64,77 @@ class LaplacianBlurDetectionService implements BlurDetectionService { return variance; } - private padImage(image: number[][]): number[][] { + private padImage( + image: number[][], + removeSideColumns: number = 56, + direction: FaceDirection = "straight", + ): number[][] { + // Exception is removeSideColumns is not even + if (removeSideColumns % 2 != 0) { + throw new Error("removeSideColumns must be even"); + } const numRows = image.length; const numCols = image[0].length; + const paddedNumCols = numCols + 2 - removeSideColumns; + const paddedNumRows = numRows + 2; // Create a new matrix with extra padding const paddedImage: number[][] = Array.from( - { length: numRows + 2 }, - () => new Array(numCols + 2).fill(0), + { length: paddedNumRows }, + () => new Array(paddedNumCols).fill(0), ); // Copy original image into the center of the padded image - for (let i = 0; i < numRows; i++) { - for (let j = 0; j < numCols; j++) { - paddedImage[i + 1][j + 1] = image[i][j]; + if (direction === "straight") { + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = + image[i][j + Math.round(removeSideColumns / 2)]; + } + } + } // If the face is facing left, we only take the right side of the face image + else if (direction === "left") { + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = image[i][j + removeSideColumns]; + } + } + } // If the face is facing right, we only take the left side of the face image + else if (direction === "right") { + for (let i = 0; i < numRows; i++) { + for (let j = 0; j < paddedNumCols - 2; j++) { + paddedImage[i + 1][j + 1] = image[i][j]; + } } } // Reflect padding // Top and bottom rows - for 
(let j = 1; j <= numCols; j++) { + for (let j = 1; j <= paddedNumCols - 2; j++) { paddedImage[0][j] = paddedImage[2][j]; // Top row paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; // Bottom row } // Left and right columns for (let i = 0; i < numRows + 2; i++) { paddedImage[i][0] = paddedImage[i][2]; // Left column - paddedImage[i][numCols + 1] = paddedImage[i][numCols - 1]; // Right column + paddedImage[i][paddedNumCols - 1] = + paddedImage[i][paddedNumCols - 3]; // Right column } return paddedImage; } - private applyLaplacian(image: number[][]): number[][] { - const paddedImage: number[][] = this.padImage(image); - const numRows = image.length; - const numCols = image[0].length; + private applyLaplacian( + image: number[][], + direction: FaceDirection = "straight", + ): number[][] { + const paddedImage: number[][] = this.padImage( + image, + undefined, + direction, + ); + const numRows = paddedImage.length - 2; + const numCols = paddedImage[0].length - 2; // Create an output image initialized to 0 const outputImage: number[][] = Array.from({ length: numRows }, () => @@ -129,3 +167,45 @@ class LaplacianBlurDetectionService implements BlurDetectionService { } export default new LaplacianBlurDetectionService(); + +type FaceDirection = "left" | "right" | "straight"; + +const getFaceDirection = (face: Face): FaceDirection => { + const landmarks = face.detection.landmarks; + const leftEye = landmarks[0]; + const rightEye = landmarks[1]; + const nose = landmarks[2]; + const leftMouth = landmarks[3]; + const rightMouth = landmarks[4]; + + const eyeDistanceX = Math.abs(rightEye.x - leftEye.x); + const eyeDistanceY = Math.abs(rightEye.y - leftEye.y); + const mouthDistanceY = Math.abs(rightMouth.y - leftMouth.y); + + const faceIsUpright = + Math.max(leftEye.y, rightEye.y) + 0.5 * eyeDistanceY < nose.y && + nose.y + 0.5 * mouthDistanceY < Math.min(leftMouth.y, rightMouth.y); + + const noseStickingOutLeft = + nose.x < Math.min(leftEye.x, rightEye.x) && + nose.x < 
Math.min(leftMouth.x, rightMouth.x); + + const noseStickingOutRight = + nose.x > Math.max(leftEye.x, rightEye.x) && + nose.x > Math.max(leftMouth.x, rightMouth.x); + + const noseCloseToLeftEye = + Math.abs(nose.x - leftEye.x) < 0.2 * eyeDistanceX; + const noseCloseToRightEye = + Math.abs(nose.x - rightEye.x) < 0.2 * eyeDistanceX; + + // if (faceIsUpright && (noseStickingOutLeft || noseCloseToLeftEye)) { + if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) { + return "left"; + // } else if (faceIsUpright && (noseStickingOutRight || noseCloseToRightEye)) { + } else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) { + return "right"; + } + + return "straight"; +}; diff --git a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts index c5df14b224..d1c5e9db5e 100644 --- a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts +++ b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts @@ -1,8 +1,8 @@ +import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { eventBus, Events } from "@ente/shared/events"; import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers"; -import { FILE_TYPE } from "constants/file"; import debounce from "debounce"; import PQueue from "p-queue"; import { JobResult } from "types/common/job"; diff --git a/web/apps/photos/src/services/machineLearning/readerService.ts b/web/apps/photos/src/services/machineLearning/readerService.ts index a18b3c9082..62aebdbd1f 100644 --- a/web/apps/photos/src/services/machineLearning/readerService.ts +++ b/web/apps/photos/src/services/machineLearning/readerService.ts @@ -1,5 +1,5 @@ +import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; -import { FILE_TYPE } from "constants/file"; import { MLSyncContext, MLSyncFileContext } from "types/machineLearning"; import { 
getLocalFileImageBitmap, diff --git a/web/apps/photos/src/services/readerService.ts b/web/apps/photos/src/services/readerService.ts deleted file mode 100644 index e410144cfe..0000000000 --- a/web/apps/photos/src/services/readerService.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { convertBytesToHumanReadable } from "@/next/file"; -import log from "@/next/log"; -import { ElectronFile } from "types/upload"; - -export async function getUint8ArrayView( - file: Blob | ElectronFile, -): Promise { - try { - return new Uint8Array(await file.arrayBuffer()); - } catch (e) { - log.error( - `Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`, - e, - ); - throw e; - } -} - -export function getFileStream(file: File, chunkSize: number) { - const fileChunkReader = fileChunkReaderMaker(file, chunkSize); - - const stream = new ReadableStream({ - async pull(controller: ReadableStreamDefaultController) { - const chunk = await fileChunkReader.next(); - if (chunk.done) { - controller.close(); - } else { - controller.enqueue(chunk.value); - } - }, - }); - const chunkCount = Math.ceil(file.size / chunkSize); - return { - stream, - chunkCount, - }; -} - -export async function getElectronFileStream( - file: ElectronFile, - chunkSize: number, -) { - const chunkCount = Math.ceil(file.size / chunkSize); - return { - stream: await file.stream(), - chunkCount, - }; -} - -async function* fileChunkReaderMaker(file: File, chunkSize: number) { - let offset = 0; - while (offset < file.size) { - const blob = file.slice(offset, chunkSize + offset); - const fileChunk = await getUint8ArrayView(blob); - yield fileChunk; - offset += chunkSize; - } - return null; -} diff --git a/web/apps/photos/src/services/searchService.ts b/web/apps/photos/src/services/searchService.ts index 408c3daa5f..96c574b9dd 100644 --- a/web/apps/photos/src/services/searchService.ts +++ b/web/apps/photos/src/services/searchService.ts @@ -1,7 +1,6 @@ +import { FILE_TYPE } from "@/media/file-type"; import log 
from "@/next/log"; -import { CustomError } from "@ente/shared/error"; import * as chrono from "chrono-node"; -import { FILE_TYPE } from "constants/file"; import { t } from "i18next"; import { Collection } from "types/collection"; import { EntityType, LocationTag, LocationTagData } from "types/entity"; @@ -287,24 +286,20 @@ async function getLocationSuggestions(searchPhrase: string) { return [...locationTagSuggestions, ...citySearchSuggestions]; } -async function getClipSuggestion(searchPhrase: string): Promise { - try { - if (!clipService.isPlatformSupported()) { - return null; - } - - const clipResults = await searchClip(searchPhrase); - return { - type: SuggestionType.CLIP, - value: clipResults, - label: searchPhrase, - }; - } catch (e) { - if (!e.message?.includes(CustomError.MODEL_DOWNLOAD_PENDING)) { - log.error("getClipSuggestion failed", e); - } +async function getClipSuggestion( + searchPhrase: string, +): Promise { + if (!clipService.isPlatformSupported()) { return null; } + + const clipResults = await searchClip(searchPhrase); + if (!clipResults) return undefined; + return { + type: SuggestionType.CLIP, + value: clipResults, + label: searchPhrase, + }; } function searchCollection( @@ -374,9 +369,14 @@ async function searchLocationTag(searchPhrase: string): Promise { return matchedLocationTags; } -async function searchClip(searchPhrase: string): Promise { +const searchClip = async ( + searchPhrase: string, +): Promise => { + const textEmbedding = + await clipService.getTextEmbeddingIfAvailable(searchPhrase); + if (!textEmbedding) return undefined; + const imageEmbeddings = await getLocalEmbeddings(); - const textEmbedding = await clipService.getTextEmbedding(searchPhrase); const clipSearchResult = new Map( ( await Promise.all( @@ -394,7 +394,7 @@ async function searchClip(searchPhrase: string): Promise { ); return clipSearchResult; -} +}; function convertSuggestionToSearchQuery(option: Suggestion): Search { switch (option.type) { diff --git 
a/web/apps/photos/src/services/typeDetectionService.ts b/web/apps/photos/src/services/typeDetectionService.ts deleted file mode 100644 index 5ff8f01692..0000000000 --- a/web/apps/photos/src/services/typeDetectionService.ts +++ /dev/null @@ -1,96 +0,0 @@ -import log from "@/next/log"; -import { CustomError } from "@ente/shared/error"; -import { FILE_TYPE } from "constants/file"; -import { - KNOWN_NON_MEDIA_FORMATS, - WHITELISTED_FILE_FORMATS, -} from "constants/upload"; -import FileType, { FileTypeResult } from "file-type"; -import { ElectronFile, FileTypeInfo } from "types/upload"; -import { getFileExtension } from "utils/file"; -import { getUint8ArrayView } from "./readerService"; - -const TYPE_VIDEO = "video"; -const TYPE_IMAGE = "image"; -const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100; - -export async function getFileType( - receivedFile: File | ElectronFile, -): Promise { - try { - let fileType: FILE_TYPE; - let typeResult: FileTypeResult; - - if (receivedFile instanceof File) { - typeResult = await extractFileType(receivedFile); - } else { - typeResult = await extractElectronFileType(receivedFile); - } - - const mimTypeParts: string[] = typeResult.mime?.split("/"); - - if (mimTypeParts?.length !== 2) { - throw Error(CustomError.INVALID_MIME_TYPE(typeResult.mime)); - } - switch (mimTypeParts[0]) { - case TYPE_IMAGE: - fileType = FILE_TYPE.IMAGE; - break; - case TYPE_VIDEO: - fileType = FILE_TYPE.VIDEO; - break; - default: - throw Error(CustomError.NON_MEDIA_FILE); - } - return { - fileType, - exactType: typeResult.ext, - mimeType: typeResult.mime, - }; - } catch (e) { - const fileFormat = getFileExtension(receivedFile.name); - const whiteListedFormat = WHITELISTED_FILE_FORMATS.find( - (a) => a.exactType === fileFormat, - ); - if (whiteListedFormat) { - return whiteListedFormat; - } - if (KNOWN_NON_MEDIA_FORMATS.includes(fileFormat)) { - throw Error(CustomError.UNSUPPORTED_FILE_FORMAT); - } - if (e.message === CustomError.NON_MEDIA_FILE) { - log.error(`unsupported 
file format ${fileFormat}`, e); - throw Error(CustomError.UNSUPPORTED_FILE_FORMAT); - } - log.error(`type detection failed for format ${fileFormat}`, e); - throw Error(CustomError.TYPE_DETECTION_FAILED(fileFormat)); - } -} - -async function extractFileType(file: File) { - const fileBlobChunk = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION); - const fileDataChunk = await getUint8ArrayView(fileBlobChunk); - return getFileTypeFromBuffer(fileDataChunk); -} - -async function extractElectronFileType(file: ElectronFile) { - const stream = await file.stream(); - const reader = stream.getReader(); - const { value: fileDataChunk } = await reader.read(); - await reader.cancel(); - return getFileTypeFromBuffer(fileDataChunk); -} - -async function getFileTypeFromBuffer(buffer: Uint8Array) { - const result = await FileType.fromBuffer(buffer); - if (!result?.mime) { - let logableInfo = ""; - try { - logableInfo = `result: ${JSON.stringify(result)}`; - } catch (e) { - logableInfo = "failed to stringify result"; - } - throw Error(`mimetype missing from file type result - ${logableInfo}`); - } - return result; -} diff --git a/web/apps/photos/src/services/upload/date.ts b/web/apps/photos/src/services/upload/date.ts new file mode 100644 index 0000000000..89934e37ce --- /dev/null +++ b/web/apps/photos/src/services/upload/date.ts @@ -0,0 +1,166 @@ +import log from "@/next/log"; +import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time"; + +/** + * Try to extract a date (as epoch microseconds) from a file name by matching it + * against certain known patterns for media files. + * + * If it doesn't match a known pattern, or if there is some error during the + * parsing, return `undefined`. 
+ */ +export const tryParseEpochMicrosecondsFromFileName = ( + fileName: string, +): number | undefined => { + try { + fileName = fileName.trim(); + let parsedDate: Date; + if (fileName.startsWith("IMG-") || fileName.startsWith("VID-")) { + // WhatsApp media files + // Sample name: IMG-20171218-WA0028.jpg + parsedDate = parseDateFromFusedDateString(fileName.split("-")[1]); + } else if (fileName.startsWith("Screenshot_")) { + // Screenshots on Android + // Sample name: Screenshot_20181227-152914.jpg + parsedDate = parseDateFromFusedDateString( + fileName.replaceAll("Screenshot_", ""), + ); + } else if (fileName.startsWith("signal-")) { + // Signal images + // Sample name: signal-2018-08-21-100217.jpg + const p = fileName.split("-"); + const dateString = `${p[1]}${p[2]}${p[3]}-${p[4]}`; + parsedDate = parseDateFromFusedDateString(dateString); + } + if (!parsedDate) { + parsedDate = tryToParseDateTime(fileName); + } + return validateAndGetCreationUnixTimeInMicroSeconds(parsedDate); + } catch (e) { + log.error(`Could not extract date from file name ${fileName}`, e); + return undefined; + } +}; + +interface DateComponent { + year: T; + month: T; + day: T; + hour: T; + minute: T; + second: T; +} + +const currentYear = new Date().getFullYear(); + +/* +generates data component for date in format YYYYMMDD-HHMMSS + */ +function parseDateFromFusedDateString(dateTime: string) { + const dateComponent: DateComponent = convertDateComponentToNumber({ + year: dateTime.slice(0, 4), + month: dateTime.slice(4, 6), + day: dateTime.slice(6, 8), + hour: dateTime.slice(9, 11), + minute: dateTime.slice(11, 13), + second: dateTime.slice(13, 15), + }); + return validateAndGetDateFromComponents(dateComponent); +} + +/* sample date format = 2018-08-19 12:34:45 + the date has six symbol separated number values + which we would extract and use to form the date + */ +export function tryToParseDateTime(dateTime: string): Date { + const dateComponent = 
getDateComponentsFromSymbolJoinedString(dateTime); + if (dateComponent.year?.length === 8 && dateComponent.month?.length === 6) { + // the filename has size 8 consecutive and then 6 consecutive digits + // high possibility that the it is a date in format YYYYMMDD-HHMMSS + const possibleDateTime = dateComponent.year + "-" + dateComponent.month; + return parseDateFromFusedDateString(possibleDateTime); + } + return validateAndGetDateFromComponents( + convertDateComponentToNumber(dateComponent), + ); +} + +function getDateComponentsFromSymbolJoinedString( + dateTime: string, +): DateComponent { + const [year, month, day, hour, minute, second] = + dateTime.match(/\d+/g) ?? []; + + return { year, month, day, hour, minute, second }; +} + +function validateAndGetDateFromComponents( + dateComponent: DateComponent, + options = { minYear: 1990, maxYear: currentYear + 1 }, +) { + let date = getDateFromComponents(dateComponent); + if (hasTimeValues(dateComponent) && !isTimePartValid(date, dateComponent)) { + // if the date has time values but they are not valid + // then we remove the time values and try to validate the date + date = getDateFromComponents(removeTimeValues(dateComponent)); + } + if (!isDatePartValid(date, dateComponent)) { + return null; + } + if ( + date.getFullYear() < options.minYear || + date.getFullYear() > options.maxYear + ) { + return null; + } + return date; +} + +function isTimePartValid(date: Date, dateComponent: DateComponent) { + return ( + date.getHours() === dateComponent.hour && + date.getMinutes() === dateComponent.minute && + date.getSeconds() === dateComponent.second + ); +} + +function isDatePartValid(date: Date, dateComponent: DateComponent) { + return ( + date.getFullYear() === dateComponent.year && + date.getMonth() === dateComponent.month && + date.getDate() === dateComponent.day + ); +} + +function convertDateComponentToNumber( + dateComponent: DateComponent, +): DateComponent { + return { + year: Number(dateComponent.year), + // 
https://stackoverflow.com/questions/2552483/why-does-the-month-argument-range-from-0-to-11-in-javascripts-date-constructor + month: Number(dateComponent.month) - 1, + day: Number(dateComponent.day), + hour: Number(dateComponent.hour), + minute: Number(dateComponent.minute), + second: Number(dateComponent.second), + }; +} + +function getDateFromComponents(dateComponent: DateComponent) { + const { year, month, day, hour, minute, second } = dateComponent; + if (hasTimeValues(dateComponent)) { + return new Date(year, month, day, hour, minute, second); + } else { + return new Date(year, month, day); + } +} + +function hasTimeValues(dateComponent: DateComponent) { + const { hour, minute, second } = dateComponent; + return !isNaN(hour) && !isNaN(minute) && !isNaN(second); +} + +function removeTimeValues( + dateComponent: DateComponent, +): DateComponent { + return { ...dateComponent, hour: 0, minute: 0, second: 0 }; +} diff --git a/web/apps/photos/src/services/upload/encryptionService.ts b/web/apps/photos/src/services/upload/encryptionService.ts deleted file mode 100644 index 90f100c9fb..0000000000 --- a/web/apps/photos/src/services/upload/encryptionService.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import { EncryptionResult } from "@ente/shared/crypto/types"; -import { Remote } from "comlink"; -import { DataStream, isDataStream } from "types/upload"; - -async function encryptFileStream( - worker: Remote, - fileData: DataStream, -) { - const { stream, chunkCount } = fileData; - const fileStreamReader = stream.getReader(); - const { key, decryptionHeader, pushState } = - await worker.initChunkEncryption(); - const ref = { pullCount: 1 }; - const encryptedFileStream = new ReadableStream({ - async pull(controller) { - const { value } = await fileStreamReader.read(); - const encryptedFileChunk = await worker.encryptFileChunk( - value, - pushState, - ref.pullCount === chunkCount, - ); - 
controller.enqueue(encryptedFileChunk); - if (ref.pullCount === chunkCount) { - controller.close(); - } - ref.pullCount++; - }, - }); - return { - key, - file: { - decryptionHeader, - encryptedData: { stream: encryptedFileStream, chunkCount }, - }, - }; -} - -export async function encryptFiledata( - worker: Remote, - filedata: Uint8Array | DataStream, -): Promise> { - return isDataStream(filedata) - ? await encryptFileStream(worker, filedata) - : await worker.encryptFile(filedata); -} diff --git a/web/apps/photos/src/services/upload/fileService.ts b/web/apps/photos/src/services/upload/fileService.ts deleted file mode 100644 index dacccdccbb..0000000000 --- a/web/apps/photos/src/services/upload/fileService.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { getFileNameSize } from "@/next/file"; -import log from "@/next/log"; -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import { Remote } from "comlink"; -import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from "constants/upload"; -import { EncryptedMagicMetadata } from "types/magicMetadata"; -import { - DataStream, - ElectronFile, - EncryptedFile, - ExtractMetadataResult, - FileInMemory, - FileTypeInfo, - FileWithMetadata, - ParsedMetadataJSON, - ParsedMetadataJSONMap, -} from "types/upload"; -import { - getElectronFileStream, - getFileStream, - getUint8ArrayView, -} from "../readerService"; -import { encryptFiledata } from "./encryptionService"; -import { - MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT, - extractMetadata, - getClippedMetadataJSONMapKeyForFile, - getMetadataJSONMapKeyForFile, -} from "./metadataService"; -import { generateThumbnail } from "./thumbnailService"; - -export function getFileSize(file: File | ElectronFile) { - return file.size; -} - -export function getFilename(file: File | ElectronFile) { - return file.name; -} - -export async function readFile( - fileTypeInfo: FileTypeInfo, - rawFile: File | ElectronFile, -): Promise { - const { thumbnail, hasStaticThumbnail } = 
await generateThumbnail( - rawFile, - fileTypeInfo, - ); - log.info(`reading file data ${getFileNameSize(rawFile)} `); - let filedata: Uint8Array | DataStream; - if (!(rawFile instanceof File)) { - if (rawFile.size > MULTIPART_PART_SIZE) { - filedata = await getElectronFileStream( - rawFile, - FILE_READER_CHUNK_SIZE, - ); - } else { - filedata = await getUint8ArrayView(rawFile); - } - } else if (rawFile.size > MULTIPART_PART_SIZE) { - filedata = getFileStream(rawFile, FILE_READER_CHUNK_SIZE); - } else { - filedata = await getUint8ArrayView(rawFile); - } - - log.info(`read file data successfully ${getFileNameSize(rawFile)} `); - - return { - filedata, - thumbnail, - hasStaticThumbnail, - }; -} - -export async function extractFileMetadata( - worker: Remote, - parsedMetadataJSONMap: ParsedMetadataJSONMap, - collectionID: number, - fileTypeInfo: FileTypeInfo, - rawFile: File | ElectronFile, -): Promise { - let key = getMetadataJSONMapKeyForFile(collectionID, rawFile.name); - let googleMetadata: ParsedMetadataJSON = parsedMetadataJSONMap.get(key); - - if (!googleMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) { - key = getClippedMetadataJSONMapKeyForFile(collectionID, rawFile.name); - googleMetadata = parsedMetadataJSONMap.get(key); - } - - const { metadata, publicMagicMetadata } = await extractMetadata( - worker, - rawFile, - fileTypeInfo, - ); - - for (const [key, value] of Object.entries(googleMetadata ?? 
{})) { - if (!value) { - continue; - } - metadata[key] = value; - } - return { metadata, publicMagicMetadata }; -} - -export async function encryptFile( - worker: Remote, - file: FileWithMetadata, - encryptionKey: string, -): Promise { - try { - const { key: fileKey, file: encryptedFiledata } = await encryptFiledata( - worker, - file.filedata, - ); - - const { file: encryptedThumbnail } = await worker.encryptThumbnail( - file.thumbnail, - fileKey, - ); - const { file: encryptedMetadata } = await worker.encryptMetadata( - file.metadata, - fileKey, - ); - - let encryptedPubMagicMetadata: EncryptedMagicMetadata; - if (file.pubMagicMetadata) { - const { file: encryptedPubMagicMetadataData } = - await worker.encryptMetadata( - file.pubMagicMetadata.data, - fileKey, - ); - encryptedPubMagicMetadata = { - version: file.pubMagicMetadata.version, - count: file.pubMagicMetadata.count, - data: encryptedPubMagicMetadataData.encryptedData, - header: encryptedPubMagicMetadataData.decryptionHeader, - }; - } - - const encryptedKey = await worker.encryptToB64(fileKey, encryptionKey); - - const result: EncryptedFile = { - file: { - file: encryptedFiledata, - thumbnail: encryptedThumbnail, - metadata: encryptedMetadata, - pubMagicMetadata: encryptedPubMagicMetadata, - localID: file.localID, - }, - fileKey: encryptedKey, - }; - return result; - } catch (e) { - log.error("Error encrypting files", e); - throw e; - } -} diff --git a/web/apps/photos/src/services/upload/hashService.tsx b/web/apps/photos/src/services/upload/hashService.tsx deleted file mode 100644 index aa275fb34e..0000000000 --- a/web/apps/photos/src/services/upload/hashService.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import { getFileNameSize } from "@/next/file"; -import log from "@/next/log"; -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import { CustomError } from "@ente/shared/error"; -import { Remote } from "comlink"; -import { FILE_READER_CHUNK_SIZE } from "constants/upload"; 
-import { getElectronFileStream, getFileStream } from "services/readerService"; -import { DataStream, ElectronFile } from "types/upload"; - -export async function getFileHash( - worker: Remote, - file: File | ElectronFile, -) { - try { - log.info(`getFileHash called for ${getFileNameSize(file)}`); - let filedata: DataStream; - if (file instanceof File) { - filedata = getFileStream(file, FILE_READER_CHUNK_SIZE); - } else { - filedata = await getElectronFileStream( - file, - FILE_READER_CHUNK_SIZE, - ); - } - const hashState = await worker.initChunkHashing(); - - const streamReader = filedata.stream.getReader(); - for (let i = 0; i < filedata.chunkCount; i++) { - const { done, value: chunk } = await streamReader.read(); - if (done) { - throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED); - } - await worker.hashFileChunk(hashState, Uint8Array.from(chunk)); - } - const { done } = await streamReader.read(); - if (!done) { - throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED); - } - const hash = await worker.completeChunkHashing(hashState); - log.info( - `file hashing completed successfully ${getFileNameSize(file)}`, - ); - return hash; - } catch (e) { - log.error("getFileHash failed", e); - log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `); - } -} diff --git a/web/apps/photos/src/services/upload/livePhotoService.ts b/web/apps/photos/src/services/upload/livePhotoService.ts deleted file mode 100644 index 392b5b9c87..0000000000 --- a/web/apps/photos/src/services/upload/livePhotoService.ts +++ /dev/null @@ -1,306 +0,0 @@ -import log from "@/next/log"; -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import { CustomError } from "@ente/shared/error"; -import { Remote } from "comlink"; -import { FILE_TYPE } from "constants/file"; -import { LIVE_PHOTO_ASSET_SIZE_LIMIT } from "constants/upload"; -import { encodeLivePhoto } from "services/livePhotoService"; -import { getFileType } from "services/typeDetectionService"; -import 
{ - ElectronFile, - ExtractMetadataResult, - FileTypeInfo, - FileWithCollection, - LivePhotoAssets, - ParsedMetadataJSONMap, -} from "types/upload"; -import { - getFileExtensionWithDot, - getFileNameWithoutExtension, - isImageOrVideo, - splitFilenameAndExtension, -} from "utils/file"; -import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto"; -import { getUint8ArrayView } from "../readerService"; -import { extractFileMetadata } from "./fileService"; -import { getFileHash } from "./hashService"; -import { generateThumbnail } from "./thumbnailService"; -import uploadCancelService from "./uploadCancelService"; - -interface LivePhotoIdentifier { - collectionID: number; - fileType: FILE_TYPE; - name: string; - size: number; -} - -const UNDERSCORE_THREE = "_3"; -// Note: The icloud-photos-downloader library appends _HVEC to the end of the filename in case of live photos -// https://github.com/icloud-photos-downloader/icloud_photos_downloader -const UNDERSCORE_HEVC = "_HVEC"; - -export async function getLivePhotoFileType( - livePhotoAssets: LivePhotoAssets, -): Promise { - const imageFileTypeInfo = await getFileType(livePhotoAssets.image); - const videoFileTypeInfo = await getFileType(livePhotoAssets.video); - return { - fileType: FILE_TYPE.LIVE_PHOTO, - exactType: `${imageFileTypeInfo.exactType}+${videoFileTypeInfo.exactType}`, - imageType: imageFileTypeInfo.exactType, - videoType: videoFileTypeInfo.exactType, - }; -} - -export async function extractLivePhotoMetadata( - worker: Remote, - parsedMetadataJSONMap: ParsedMetadataJSONMap, - collectionID: number, - fileTypeInfo: FileTypeInfo, - livePhotoAssets: LivePhotoAssets, -): Promise { - const imageFileTypeInfo: FileTypeInfo = { - fileType: FILE_TYPE.IMAGE, - exactType: fileTypeInfo.imageType, - }; - const { - metadata: imageMetadata, - publicMagicMetadata: imagePublicMagicMetadata, - } = await extractFileMetadata( - worker, - parsedMetadataJSONMap, - collectionID, - imageFileTypeInfo, - 
livePhotoAssets.image, - ); - const videoHash = await getFileHash(worker, livePhotoAssets.video); - return { - metadata: { - ...imageMetadata, - title: getLivePhotoName(livePhotoAssets), - fileType: FILE_TYPE.LIVE_PHOTO, - imageHash: imageMetadata.hash, - videoHash: videoHash, - hash: undefined, - }, - publicMagicMetadata: imagePublicMagicMetadata, - }; -} - -export function getLivePhotoSize(livePhotoAssets: LivePhotoAssets) { - return livePhotoAssets.image.size + livePhotoAssets.video.size; -} - -export function getLivePhotoName(livePhotoAssets: LivePhotoAssets) { - return livePhotoAssets.image.name; -} - -export async function readLivePhoto( - fileTypeInfo: FileTypeInfo, - livePhotoAssets: LivePhotoAssets, -) { - const { thumbnail, hasStaticThumbnail } = await generateThumbnail( - livePhotoAssets.image, - { - exactType: fileTypeInfo.imageType, - fileType: FILE_TYPE.IMAGE, - }, - ); - - const image = await getUint8ArrayView(livePhotoAssets.image); - - const video = await getUint8ArrayView(livePhotoAssets.video); - - return { - filedata: await encodeLivePhoto({ - image, - video, - imageNameTitle: livePhotoAssets.image.name, - videoNameTitle: livePhotoAssets.video.name, - }), - thumbnail, - hasStaticThumbnail, - }; -} - -export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) { - try { - const analysedMediaFiles: FileWithCollection[] = []; - mediaFiles - .sort((firstMediaFile, secondMediaFile) => - splitFilenameAndExtension( - firstMediaFile.file.name, - )[0].localeCompare( - splitFilenameAndExtension(secondMediaFile.file.name)[0], - ), - ) - .sort( - (firstMediaFile, secondMediaFile) => - firstMediaFile.collectionID - secondMediaFile.collectionID, - ); - let index = 0; - while (index < mediaFiles.length - 1) { - if (uploadCancelService.isUploadCancelationRequested()) { - throw Error(CustomError.UPLOAD_CANCELLED); - } - const firstMediaFile = mediaFiles[index]; - const secondMediaFile = mediaFiles[index + 1]; - const firstFileType = - 
getFileTypeFromExtensionForLivePhotoClustering( - firstMediaFile.file.name, - ); - const secondFileType = - getFileTypeFromExtensionForLivePhotoClustering( - secondMediaFile.file.name, - ); - const firstFileIdentifier: LivePhotoIdentifier = { - collectionID: firstMediaFile.collectionID, - fileType: firstFileType, - name: firstMediaFile.file.name, - size: firstMediaFile.file.size, - }; - const secondFileIdentifier: LivePhotoIdentifier = { - collectionID: secondMediaFile.collectionID, - fileType: secondFileType, - name: secondMediaFile.file.name, - size: secondMediaFile.file.size, - }; - if ( - areFilesLivePhotoAssets( - firstFileIdentifier, - secondFileIdentifier, - ) - ) { - let imageFile: File | ElectronFile; - let videoFile: File | ElectronFile; - if ( - firstFileType === FILE_TYPE.IMAGE && - secondFileType === FILE_TYPE.VIDEO - ) { - imageFile = firstMediaFile.file; - videoFile = secondMediaFile.file; - } else { - videoFile = firstMediaFile.file; - imageFile = secondMediaFile.file; - } - const livePhotoLocalID = firstMediaFile.localID; - analysedMediaFiles.push({ - localID: livePhotoLocalID, - collectionID: firstMediaFile.collectionID, - isLivePhoto: true, - livePhotoAssets: { - image: imageFile, - video: videoFile, - }, - }); - index += 2; - } else { - analysedMediaFiles.push({ - ...firstMediaFile, - isLivePhoto: false, - }); - index += 1; - } - } - if (index === mediaFiles.length - 1) { - analysedMediaFiles.push({ - ...mediaFiles[index], - isLivePhoto: false, - }); - } - return analysedMediaFiles; - } catch (e) { - if (e.message === CustomError.UPLOAD_CANCELLED) { - throw e; - } else { - log.error("failed to cluster live photo", e); - throw e; - } - } -} - -function areFilesLivePhotoAssets( - firstFileIdentifier: LivePhotoIdentifier, - secondFileIdentifier: LivePhotoIdentifier, -) { - const haveSameCollectionID = - firstFileIdentifier.collectionID === secondFileIdentifier.collectionID; - const areNotSameFileType = - firstFileIdentifier.fileType !== 
secondFileIdentifier.fileType; - - let firstFileNameWithoutSuffix: string; - let secondFileNameWithoutSuffix: string; - if (firstFileIdentifier.fileType === FILE_TYPE.IMAGE) { - firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix( - getFileNameWithoutExtension(firstFileIdentifier.name), - // Note: The Google Live Photo image file can have video extension appended as suffix, passing that to removePotentialLivePhotoSuffix to remove it - // Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file) - getFileExtensionWithDot(secondFileIdentifier.name), - ); - secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix( - getFileNameWithoutExtension(secondFileIdentifier.name), - ); - } else { - firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix( - getFileNameWithoutExtension(firstFileIdentifier.name), - ); - secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix( - getFileNameWithoutExtension(secondFileIdentifier.name), - getFileExtensionWithDot(firstFileIdentifier.name), - ); - } - if ( - haveSameCollectionID && - isImageOrVideo(firstFileIdentifier.fileType) && - isImageOrVideo(secondFileIdentifier.fileType) && - areNotSameFileType && - firstFileNameWithoutSuffix === secondFileNameWithoutSuffix - ) { - // checks size of live Photo assets are less than allowed limit - // I did that based on the assumption that live photo assets ideally would not be larger than LIVE_PHOTO_ASSET_SIZE_LIMIT - // also zipping library doesn't support stream as a input - if ( - firstFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT && - secondFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT - ) { - return true; - } else { - log.error( - `${CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS} - ${JSON.stringify({ - fileSizes: [ - firstFileIdentifier.size, - secondFileIdentifier.size, - ], - })}`, - ); - } - } - return false; -} - -function removePotentialLivePhotoSuffix( - filenameWithoutExtension: string, - suffix?: string, -) { - let presentSuffix: string; - if 
(filenameWithoutExtension.endsWith(UNDERSCORE_THREE)) { - presentSuffix = UNDERSCORE_THREE; - } else if (filenameWithoutExtension.endsWith(UNDERSCORE_HEVC)) { - presentSuffix = UNDERSCORE_HEVC; - } else if ( - filenameWithoutExtension.endsWith(UNDERSCORE_HEVC.toLowerCase()) - ) { - presentSuffix = UNDERSCORE_HEVC.toLowerCase(); - } else if (suffix) { - if (filenameWithoutExtension.endsWith(suffix)) { - presentSuffix = suffix; - } else if (filenameWithoutExtension.endsWith(suffix.toLowerCase())) { - presentSuffix = suffix.toLowerCase(); - } - } - if (presentSuffix) { - return filenameWithoutExtension.slice(0, presentSuffix.length * -1); - } else { - return filenameWithoutExtension; - } -} diff --git a/web/apps/photos/src/services/upload/magicMetadataService.ts b/web/apps/photos/src/services/upload/magicMetadataService.ts deleted file mode 100644 index f56b31c439..0000000000 --- a/web/apps/photos/src/services/upload/magicMetadataService.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { - FilePublicMagicMetadata, - FilePublicMagicMetadataProps, -} from "types/file"; -import { - getNonEmptyMagicMetadataProps, - updateMagicMetadata, -} from "utils/magicMetadata"; - -export async function constructPublicMagicMetadata( - publicMagicMetadataProps: FilePublicMagicMetadataProps, -): Promise { - const nonEmptyPublicMagicMetadataProps = getNonEmptyMagicMetadataProps( - publicMagicMetadataProps, - ); - - if (Object.values(nonEmptyPublicMagicMetadataProps)?.length === 0) { - return null; - } - return await updateMagicMetadata(publicMagicMetadataProps); -} diff --git a/web/apps/photos/src/services/upload/metadataService.ts b/web/apps/photos/src/services/upload/metadataService.ts deleted file mode 100644 index 9bd2a63c01..0000000000 --- a/web/apps/photos/src/services/upload/metadataService.ts +++ /dev/null @@ -1,274 +0,0 @@ -import log from "@/next/log"; -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import { - parseDateFromFusedDateString, - 
tryToParseDateTime, - validateAndGetCreationUnixTimeInMicroSeconds, -} from "@ente/shared/time"; -import { Remote } from "comlink"; -import { FILE_TYPE } from "constants/file"; -import { NULL_EXTRACTED_METADATA, NULL_LOCATION } from "constants/upload"; -import { FilePublicMagicMetadataProps } from "types/file"; -import { - ElectronFile, - ExtractMetadataResult, - FileTypeInfo, - Location, - Metadata, - ParsedExtractedMetadata, - ParsedMetadataJSON, -} from "types/upload"; -import { splitFilenameAndExtension } from "utils/file"; -import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService"; -import { getFileHash } from "./hashService"; -import { getVideoMetadata } from "./videoMetadataService"; - -const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = { - creationTime: null, - modificationTime: null, - ...NULL_LOCATION, -}; - -const EXIF_TAGS_NEEDED = [ - "DateTimeOriginal", - "CreateDate", - "ModifyDate", - "GPSLatitude", - "GPSLongitude", - "GPSLatitudeRef", - "GPSLongitudeRef", - "DateCreated", - "ExifImageWidth", - "ExifImageHeight", - "ImageWidth", - "ImageHeight", - "PixelXDimension", - "PixelYDimension", - "MetadataDate", -]; - -export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46; - -export async function extractMetadata( - worker: Remote, - receivedFile: File | ElectronFile, - fileTypeInfo: FileTypeInfo, -): Promise { - let extractedMetadata: ParsedExtractedMetadata = NULL_EXTRACTED_METADATA; - if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) { - extractedMetadata = await getImageMetadata(receivedFile, fileTypeInfo); - } else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) { - extractedMetadata = await getVideoMetadata(receivedFile); - } - const fileHash = await getFileHash(worker, receivedFile); - - const metadata: Metadata = { - title: receivedFile.name, - creationTime: - extractedMetadata.creationTime ?? - extractDateFromFileName(receivedFile.name) ?? 
- receivedFile.lastModified * 1000, - modificationTime: receivedFile.lastModified * 1000, - latitude: extractedMetadata.location.latitude, - longitude: extractedMetadata.location.longitude, - fileType: fileTypeInfo.fileType, - hash: fileHash, - }; - const publicMagicMetadata: FilePublicMagicMetadataProps = { - w: extractedMetadata.width, - h: extractedMetadata.height, - }; - return { metadata, publicMagicMetadata }; -} - -export async function getImageMetadata( - receivedFile: File | ElectronFile, - fileTypeInfo: FileTypeInfo, -): Promise { - let imageMetadata = NULL_EXTRACTED_METADATA; - try { - if (!(receivedFile instanceof File)) { - receivedFile = new File( - [await receivedFile.blob()], - receivedFile.name, - { - lastModified: receivedFile.lastModified, - }, - ); - } - const exifData = await getParsedExifData( - receivedFile, - fileTypeInfo, - EXIF_TAGS_NEEDED, - ); - - imageMetadata = { - location: getEXIFLocation(exifData), - creationTime: getEXIFTime(exifData), - width: exifData?.imageWidth ?? null, - height: exifData?.imageHeight ?? 
null, - }; - } catch (e) { - log.error("getExifData failed", e); - } - return imageMetadata; -} - -export const getMetadataJSONMapKeyForJSON = ( - collectionID: number, - jsonFileName: string, -) => { - let title = jsonFileName.slice(0, -1 * ".json".length); - const endsWithNumberedSuffixWithBrackets = title.match(/\(\d+\)$/); - if (endsWithNumberedSuffixWithBrackets) { - title = title.slice( - 0, - -1 * endsWithNumberedSuffixWithBrackets[0].length, - ); - const [name, extension] = splitFilenameAndExtension(title); - return `${collectionID}-${name}${endsWithNumberedSuffixWithBrackets[0]}.${extension}`; - } - return `${collectionID}-${title}`; -}; - -// if the file name is greater than MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT(46) , then google photos clips the file name -// so we need to use the clipped file name to get the metadataJSON file -export const getClippedMetadataJSONMapKeyForFile = ( - collectionID: number, - fileName: string, -) => { - return `${collectionID}-${fileName.slice( - 0, - MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT, - )}`; -}; - -export const getMetadataJSONMapKeyForFile = ( - collectionID: number, - fileName: string, -) => { - return `${collectionID}-${getFileOriginalName(fileName)}`; -}; - -export async function parseMetadataJSON(receivedFile: File | ElectronFile) { - try { - if (!(receivedFile instanceof File)) { - receivedFile = new File( - [await receivedFile.blob()], - receivedFile.name, - ); - } - const metadataJSON: object = JSON.parse(await receivedFile.text()); - - const parsedMetadataJSON: ParsedMetadataJSON = - NULL_PARSED_METADATA_JSON; - if (!metadataJSON) { - return; - } - - if ( - metadataJSON["photoTakenTime"] && - metadataJSON["photoTakenTime"]["timestamp"] - ) { - parsedMetadataJSON.creationTime = - metadataJSON["photoTakenTime"]["timestamp"] * 1000000; - } else if ( - metadataJSON["creationTime"] && - metadataJSON["creationTime"]["timestamp"] - ) { - parsedMetadataJSON.creationTime = - metadataJSON["creationTime"]["timestamp"] * 1000000; 
- } - if ( - metadataJSON["modificationTime"] && - metadataJSON["modificationTime"]["timestamp"] - ) { - parsedMetadataJSON.modificationTime = - metadataJSON["modificationTime"]["timestamp"] * 1000000; - } - let locationData: Location = NULL_LOCATION; - if ( - metadataJSON["geoData"] && - (metadataJSON["geoData"]["latitude"] !== 0.0 || - metadataJSON["geoData"]["longitude"] !== 0.0) - ) { - locationData = metadataJSON["geoData"]; - } else if ( - metadataJSON["geoDataExif"] && - (metadataJSON["geoDataExif"]["latitude"] !== 0.0 || - metadataJSON["geoDataExif"]["longitude"] !== 0.0) - ) { - locationData = metadataJSON["geoDataExif"]; - } - if (locationData !== null) { - parsedMetadataJSON.latitude = locationData.latitude; - parsedMetadataJSON.longitude = locationData.longitude; - } - return parsedMetadataJSON; - } catch (e) { - log.error("parseMetadataJSON failed", e); - // ignore - } -} - -// tries to extract date from file name if available else returns null -export function extractDateFromFileName(filename: string): number { - try { - filename = filename.trim(); - let parsedDate: Date; - if (filename.startsWith("IMG-") || filename.startsWith("VID-")) { - // Whatsapp media files - // sample name IMG-20171218-WA0028.jpg - parsedDate = parseDateFromFusedDateString(filename.split("-")[1]); - } else if (filename.startsWith("Screenshot_")) { - // Screenshots on droid - // sample name Screenshot_20181227-152914.jpg - parsedDate = parseDateFromFusedDateString( - filename.replaceAll("Screenshot_", ""), - ); - } else if (filename.startsWith("signal-")) { - // signal images - // sample name :signal-2018-08-21-100217.jpg - const dateString = convertSignalNameToFusedDateString(filename); - parsedDate = parseDateFromFusedDateString(dateString); - } - if (!parsedDate) { - parsedDate = tryToParseDateTime(filename); - } - return validateAndGetCreationUnixTimeInMicroSeconds(parsedDate); - } catch (e) { - log.error("failed to extract date From FileName ", e); - return null; - } -} - 
-function convertSignalNameToFusedDateString(filename: string) { - const dateStringParts = filename.split("-"); - return `${dateStringParts[1]}${dateStringParts[2]}${dateStringParts[3]}-${dateStringParts[4]}`; -} - -const EDITED_FILE_SUFFIX = "-edited"; - -/* - Get the original file name for edited file to associate it to original file's metadataJSON file - as edited file doesn't have their own metadata file -*/ -function getFileOriginalName(fileName: string) { - let originalName: string = null; - const [nameWithoutExtension, extension] = - splitFilenameAndExtension(fileName); - - const isEditedFile = nameWithoutExtension.endsWith(EDITED_FILE_SUFFIX); - if (isEditedFile) { - originalName = nameWithoutExtension.slice( - 0, - -1 * EDITED_FILE_SUFFIX.length, - ); - } else { - originalName = nameWithoutExtension; - } - if (extension) { - originalName += "." + extension; - } - return originalName; -} diff --git a/web/apps/photos/src/services/upload/multiPartUploadService.ts b/web/apps/photos/src/services/upload/multiPartUploadService.ts deleted file mode 100644 index 1b4442710f..0000000000 --- a/web/apps/photos/src/services/upload/multiPartUploadService.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { CustomError } from "@ente/shared/error"; -import { - FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART, - RANDOM_PERCENTAGE_PROGRESS_FOR_PUT, -} from "constants/upload"; -import { DataStream, Logger, MultipartUploadURLs } from "types/upload"; -import * as convert from "xml-js"; -import UIService from "./uiService"; -import uploadCancelService from "./uploadCancelService"; -import UploadHttpClient from "./uploadHttpClient"; -import uploadService from "./uploadService"; - -interface PartEtag { - PartNumber: number; - ETag: string; -} - -function calculatePartCount(chunkCount: number) { - const partCount = Math.ceil( - chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART, - ); - return partCount; -} -export async function uploadStreamUsingMultipart( - logger: Logger, - fileLocalID: number, - 
dataStream: DataStream, -) { - const uploadPartCount = calculatePartCount(dataStream.chunkCount); - logger(`fetching ${uploadPartCount} urls for multipart upload`); - const multipartUploadURLs = - await uploadService.fetchMultipartUploadURLs(uploadPartCount); - logger(`fetched ${uploadPartCount} urls for multipart upload`); - - const fileObjectKey = await uploadStreamInParts( - logger, - multipartUploadURLs, - dataStream.stream, - fileLocalID, - uploadPartCount, - ); - return fileObjectKey; -} - -export async function uploadStreamInParts( - logger: Logger, - multipartUploadURLs: MultipartUploadURLs, - dataStream: ReadableStream, - fileLocalID: number, - uploadPartCount: number, -) { - const streamReader = dataStream.getReader(); - const percentPerPart = getRandomProgressPerPartUpload(uploadPartCount); - const partEtags: PartEtag[] = []; - logger(`uploading file in chunks`); - for (const [ - index, - fileUploadURL, - ] of multipartUploadURLs.partURLs.entries()) { - if (uploadCancelService.isUploadCancelationRequested()) { - throw Error(CustomError.UPLOAD_CANCELLED); - } - const uploadChunk = await combineChunksToFormUploadPart(streamReader); - const progressTracker = UIService.trackUploadProgress( - fileLocalID, - percentPerPart, - index, - ); - let eTag = null; - if (!uploadService.getIsCFUploadProxyDisabled()) { - eTag = await UploadHttpClient.putFilePartV2( - fileUploadURL, - uploadChunk, - progressTracker, - ); - } else { - eTag = await UploadHttpClient.putFilePart( - fileUploadURL, - uploadChunk, - progressTracker, - ); - } - partEtags.push({ PartNumber: index + 1, ETag: eTag }); - } - const { done } = await streamReader.read(); - if (!done) { - throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED); - } - logger(`uploading file in chunks done`); - logger(`completing multipart upload`); - await completeMultipartUpload(partEtags, multipartUploadURLs.completeURL); - logger(`completing multipart upload done`); - return multipartUploadURLs.objectKey; -} - -function 
getRandomProgressPerPartUpload(uploadPartCount: number) { - const percentPerPart = - RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount; - return percentPerPart; -} - -async function combineChunksToFormUploadPart( - streamReader: ReadableStreamDefaultReader, -) { - const combinedChunks = []; - for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) { - const { done, value: chunk } = await streamReader.read(); - if (done) { - break; - } - for (let index = 0; index < chunk.length; index++) { - combinedChunks.push(chunk[index]); - } - } - return Uint8Array.from(combinedChunks); -} - -async function completeMultipartUpload( - partEtags: PartEtag[], - completeURL: string, -) { - const options = { compact: true, ignoreComment: true, spaces: 4 }; - const body = convert.js2xml( - { CompleteMultipartUpload: { Part: partEtags } }, - options, - ); - if (!uploadService.getIsCFUploadProxyDisabled()) { - await UploadHttpClient.completeMultipartUploadV2(completeURL, body); - } else { - await UploadHttpClient.completeMultipartUpload(completeURL, body); - } -} diff --git a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts index f7d87c51c9..8f18a1638b 100644 --- a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts @@ -3,8 +3,8 @@ import { CustomError, handleUploadError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint } from "@ente/shared/network/api"; import { EnteFile } from "types/file"; -import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload"; -import { retryHTTPCall } from "utils/upload/uploadRetrier"; +import { retryHTTPCall } from "./uploadHttpClient"; +import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; const ENDPOINT = getEndpoint(); diff --git a/web/apps/photos/src/services/upload/takeout.ts 
b/web/apps/photos/src/services/upload/takeout.ts new file mode 100644 index 0000000000..24c0a9d267 --- /dev/null +++ b/web/apps/photos/src/services/upload/takeout.ts @@ -0,0 +1,176 @@ +/** @file Dealing with the JSON metadata in Google Takeouts */ + +import { ensureElectron } from "@/next/electron"; +import { nameAndExtension } from "@/next/file"; +import log from "@/next/log"; +import { NULL_LOCATION } from "constants/upload"; +import type { Location } from "types/metadata"; +import { readStream } from "utils/native-stream"; +import type { UploadItem } from "./types"; + +export interface ParsedMetadataJSON { + creationTime: number; + modificationTime: number; + latitude: number; + longitude: number; +} + +export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46; + +export const getMetadataJSONMapKeyForJSON = ( + collectionID: number, + jsonFileName: string, +) => { + let title = jsonFileName.slice(0, -1 * ".json".length); + const endsWithNumberedSuffixWithBrackets = title.match(/\(\d+\)$/); + if (endsWithNumberedSuffixWithBrackets) { + title = title.slice( + 0, + -1 * endsWithNumberedSuffixWithBrackets[0].length, + ); + const [name, extension] = nameAndExtension(title); + return `${collectionID}-${name}${endsWithNumberedSuffixWithBrackets[0]}.${extension}`; + } + return `${collectionID}-${title}`; +}; + +// if the file name is greater than MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT(46) , then google photos clips the file name +// so we need to use the clipped file name to get the metadataJSON file +export const getClippedMetadataJSONMapKeyForFile = ( + collectionID: number, + fileName: string, +) => { + return `${collectionID}-${fileName.slice( + 0, + MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT, + )}`; +}; + +export const getMetadataJSONMapKeyForFile = ( + collectionID: number, + fileName: string, +) => { + return `${collectionID}-${getFileOriginalName(fileName)}`; +}; + +const EDITED_FILE_SUFFIX = "-edited"; + +/* + Get the original file name for edited file to associate it to 
original file's metadataJSON file + as edited file doesn't have their own metadata file +*/ +function getFileOriginalName(fileName: string) { + let originalName: string = null; + const [name, extension] = nameAndExtension(fileName); + + const isEditedFile = name.endsWith(EDITED_FILE_SUFFIX); + if (isEditedFile) { + originalName = name.slice(0, -1 * EDITED_FILE_SUFFIX.length); + } else { + originalName = name; + } + if (extension) { + originalName += "." + extension; + } + return originalName; +} + +/** Try to parse the contents of a metadata JSON file from a Google Takeout. */ +export const tryParseTakeoutMetadataJSON = async ( + uploadItem: UploadItem, +): Promise => { + try { + return parseMetadataJSONText(await uploadItemText(uploadItem)); + } catch (e) { + log.error("Failed to parse takeout metadata JSON", e); + return undefined; + } +}; + +const uploadItemText = async (uploadItem: UploadItem) => { + if (uploadItem instanceof File) { + return await uploadItem.text(); + } else if (typeof uploadItem == "string") { + return await ensureElectron().fs.readTextFile(uploadItem); + } else if (Array.isArray(uploadItem)) { + const { response } = await readStream(ensureElectron(), uploadItem); + return await response.text(); + } else { + return await uploadItem.file.text(); + } +}; + +const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = { + creationTime: null, + modificationTime: null, + ...NULL_LOCATION, +}; + +const parseMetadataJSONText = (text: string) => { + const metadataJSON: object = JSON.parse(text); + if (!metadataJSON) { + return undefined; + } + + const parsedMetadataJSON = { ...NULL_PARSED_METADATA_JSON }; + + if ( + metadataJSON["photoTakenTime"] && + metadataJSON["photoTakenTime"]["timestamp"] + ) { + parsedMetadataJSON.creationTime = + metadataJSON["photoTakenTime"]["timestamp"] * 1000000; + } else if ( + metadataJSON["creationTime"] && + metadataJSON["creationTime"]["timestamp"] + ) { + parsedMetadataJSON.creationTime = + 
metadataJSON["creationTime"]["timestamp"] * 1000000; + } + if ( + metadataJSON["modificationTime"] && + metadataJSON["modificationTime"]["timestamp"] + ) { + parsedMetadataJSON.modificationTime = + metadataJSON["modificationTime"]["timestamp"] * 1000000; + } + let locationData: Location = { ...NULL_LOCATION }; + if ( + metadataJSON["geoData"] && + (metadataJSON["geoData"]["latitude"] !== 0.0 || + metadataJSON["geoData"]["longitude"] !== 0.0) + ) { + locationData = metadataJSON["geoData"]; + } else if ( + metadataJSON["geoDataExif"] && + (metadataJSON["geoDataExif"]["latitude"] !== 0.0 || + metadataJSON["geoDataExif"]["longitude"] !== 0.0) + ) { + locationData = metadataJSON["geoDataExif"]; + } + if (locationData !== null) { + parsedMetadataJSON.latitude = locationData.latitude; + parsedMetadataJSON.longitude = locationData.longitude; + } + return parsedMetadataJSON; +}; + +/** + * Return the matching entry (if any) from {@link parsedMetadataJSONMap} for the + * {@link fileName} and {@link collectionID} combination. 
+ */ +export const matchTakeoutMetadata = ( + fileName: string, + collectionID: number, + parsedMetadataJSONMap: Map, +) => { + let key = getMetadataJSONMapKeyForFile(collectionID, fileName); + let takeoutMetadata = parsedMetadataJSONMap.get(key); + + if (!takeoutMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) { + key = getClippedMetadataJSONMapKeyForFile(collectionID, fileName); + takeoutMetadata = parsedMetadataJSONMap.get(key); + } + + return takeoutMetadata; +}; diff --git a/web/apps/photos/src/services/upload/thumbnail.ts b/web/apps/photos/src/services/upload/thumbnail.ts new file mode 100644 index 0000000000..1dd448376e --- /dev/null +++ b/web/apps/photos/src/services/upload/thumbnail.ts @@ -0,0 +1,238 @@ +import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type"; +import log from "@/next/log"; +import { type Electron } from "@/next/types/ipc"; +import { withTimeout } from "@ente/shared/utils"; +import * as ffmpeg from "services/ffmpeg"; +import { heicToJPEG } from "services/heic-convert"; +import { toDataOrPathOrZipEntry, type DesktopUploadItem } from "./types"; + +/** Maximum width or height of the generated thumbnail */ +const maxThumbnailDimension = 720; +/** Maximum size (in bytes) of the generated thumbnail */ +const maxThumbnailSize = 100 * 1024; // 100 KB + +/** + * Generate a JPEG thumbnail for the given image or video blob. + * + * The thumbnail has a smaller file size so that is quick to load. But more + * importantly, it uses a universal file format (JPEG in our case) so that the + * thumbnail itself can be opened in all clients, even those like the web client + * itself that might not yet have support for more exotic formats. + * + * @param blob The image or video blob whose thumbnail we want to generate. + * + * @param fileTypeInfo The type information for the file this blob came from. + * + * @return The JPEG data of the generated thumbnail. 
+ */ +export const generateThumbnailWeb = async ( + blob: Blob, + fileTypeInfo: FileTypeInfo, +): Promise => + fileTypeInfo.fileType === FILE_TYPE.IMAGE + ? await generateImageThumbnailUsingCanvas(blob, fileTypeInfo) + : await generateVideoThumbnailWeb(blob); + +const generateImageThumbnailUsingCanvas = async ( + blob: Blob, + { extension }: FileTypeInfo, +) => { + if (extension == "heic" || extension == "heif") { + log.debug(() => `Pre-converting HEIC to JPEG for thumbnail generation`); + blob = await heicToJPEG(blob); + } + + const canvas = document.createElement("canvas"); + const canvasCtx = canvas.getContext("2d"); + + const imageURL = URL.createObjectURL(blob); + await withTimeout( + new Promise((resolve, reject) => { + const image = new Image(); + image.setAttribute("src", imageURL); + image.onload = () => { + try { + URL.revokeObjectURL(imageURL); + const { width, height } = scaledThumbnailDimensions( + image.width, + image.height, + maxThumbnailDimension, + ); + canvas.width = width; + canvas.height = height; + canvasCtx.drawImage(image, 0, 0, width, height); + resolve(undefined); + } catch (e) { + reject(e); + } + }; + }), + 30 * 1000, + ); + + return await compressedJPEGData(canvas); +}; + +const generateVideoThumbnailWeb = async (blob: Blob) => { + try { + return await ffmpeg.generateVideoThumbnailWeb(blob); + } catch (e) { + log.error( + `Failed to generate video thumbnail using the wasm FFmpeg web worker, will fallback to canvas`, + e, + ); + return generateVideoThumbnailUsingCanvas(blob); + } +}; + +const generateVideoThumbnailUsingCanvas = async (blob: Blob) => { + const canvas = document.createElement("canvas"); + const canvasCtx = canvas.getContext("2d"); + + const videoURL = URL.createObjectURL(blob); + await withTimeout( + new Promise((resolve, reject) => { + const video = document.createElement("video"); + video.preload = "metadata"; + video.src = videoURL; + video.addEventListener("loadeddata", () => { + try { + URL.revokeObjectURL(videoURL); 
+ const { width, height } = scaledThumbnailDimensions( + video.videoWidth, + video.videoHeight, + maxThumbnailDimension, + ); + canvas.width = width; + canvas.height = height; + canvasCtx.drawImage(video, 0, 0, width, height); + resolve(undefined); + } catch (e) { + reject(e); + } + }); + }), + 30 * 1000, + ); + + return await compressedJPEGData(canvas); +}; + +/** + * Compute the size of the thumbnail to create for an image with the given + * {@link width} and {@link height}. + * + * This function calculates a new size of an image for limiting it to maximum + * width and height (both specified by {@link maxDimension}), while maintaining + * aspect ratio. + * + * It returns `{0, 0}` for invalid inputs. + */ +const scaledThumbnailDimensions = ( + width: number, + height: number, + maxDimension: number, +): { width: number; height: number } => { + if (width === 0 || height === 0) return { width: 0, height: 0 }; + const widthScaleFactor = maxDimension / width; + const heightScaleFactor = maxDimension / height; + const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor); + const thumbnailDimensions = { + width: Math.round(width * scaleFactor), + height: Math.round(height * scaleFactor), + }; + if (thumbnailDimensions.width === 0 || thumbnailDimensions.height === 0) + return { width: 0, height: 0 }; + return thumbnailDimensions; +}; + +const compressedJPEGData = async (canvas: HTMLCanvasElement) => { + let blob: Blob; + let prevSize = Number.MAX_SAFE_INTEGER; + let quality = 0.7; + + do { + if (blob) prevSize = blob.size; + blob = await new Promise((resolve) => { + canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality); + }); + quality -= 0.1; + } while ( + quality >= 0.5 && + blob.size > maxThumbnailSize && + percentageSizeDiff(blob.size, prevSize) >= 10 + ); + + return new Uint8Array(await blob.arrayBuffer()); +}; + +const percentageSizeDiff = ( + newThumbnailSize: number, + oldThumbnailSize: number, +) => ((oldThumbnailSize - newThumbnailSize) * 100) / 
oldThumbnailSize; + +/** + * Generate a JPEG thumbnail for the given file or path using native tools. + * + * This function only works when we're running in the context of our desktop + * app, and this dependency is enforced by the need to pass the {@link electron} + * object which we use to perform IPC with the Node.js side of our desktop app. + * + * @param dataOrPath Contents of an image or video file, or the path to the + * image or video file on the user's local file system, whose thumbnail we want + * to generate. + * + * @param fileTypeInfo The type information for {@link dataOrPath}. + * + * @return The JPEG data of the generated thumbnail. + * + * See also {@link generateThumbnailWeb}. + */ +export const generateThumbnailNative = async ( + electron: Electron, + desktopUploadItem: DesktopUploadItem, + fileTypeInfo: FileTypeInfo, +): Promise => + fileTypeInfo.fileType === FILE_TYPE.IMAGE + ? await electron.generateImageThumbnail( + toDataOrPathOrZipEntry(desktopUploadItem), + maxThumbnailDimension, + maxThumbnailSize, + ) + : ffmpeg.generateVideoThumbnailNative(electron, desktopUploadItem); + +/** + * A fallback, black, thumbnail for use in cases where thumbnail generation + * fails. 
+ */ +export const fallbackThumbnail = () => + Uint8Array.from(atob(blackThumbnailB64), (c) => c.charCodeAt(0)); + +const blackThumbnailB64 = + "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" + + "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" + + "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" + + "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" + + "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" + + "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" + + "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" + + "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" + + "nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" + + "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" + + "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" + + "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + + 
"KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" + + "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + + "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k="; diff --git a/web/apps/photos/src/services/upload/thumbnailService.ts b/web/apps/photos/src/services/upload/thumbnailService.ts deleted file mode 100644 index 071ef30785..0000000000 --- a/web/apps/photos/src/services/upload/thumbnailService.ts +++ /dev/null @@ -1,332 +0,0 @@ -import { ensureElectron } from "@/next/electron"; -import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file"; -import log from "@/next/log"; -import { CustomError } from "@ente/shared/error"; -import { FILE_TYPE } from "constants/file"; -import { BLACK_THUMBNAIL_BASE64 } from "constants/upload"; -import isElectron from "is-electron"; -import * as FFmpegService from "services/ffmpeg/ffmpegService"; -import HeicConversionService from "services/heicConversionService"; -import { ElectronFile, FileTypeInfo } from "types/upload"; -import { isFileHEIC } from "utils/file"; -import { getUint8ArrayView } from "../readerService"; - -const MAX_THUMBNAIL_DIMENSION = 720; -const MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF = 10; -const MAX_THUMBNAIL_SIZE = 100 * 1024; -const MIN_QUALITY = 0.5; -const MAX_QUALITY = 0.7; - 
-const WAIT_TIME_THUMBNAIL_GENERATION = 30 * 1000; - -interface Dimension { - width: number; - height: number; -} - -export async function generateThumbnail( - file: File | ElectronFile, - fileTypeInfo: FileTypeInfo, -): Promise<{ thumbnail: Uint8Array; hasStaticThumbnail: boolean }> { - try { - log.info(`generating thumbnail for ${getFileNameSize(file)}`); - let hasStaticThumbnail = false; - let thumbnail: Uint8Array; - try { - if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) { - thumbnail = await generateImageThumbnail(file, fileTypeInfo); - } else { - thumbnail = await generateVideoThumbnail(file, fileTypeInfo); - } - if (thumbnail.length > 1.5 * MAX_THUMBNAIL_SIZE) { - log.error( - `thumbnail greater than max limit - ${JSON.stringify({ - thumbnailSize: convertBytesToHumanReadable( - thumbnail.length, - ), - fileSize: convertBytesToHumanReadable(file.size), - fileType: fileTypeInfo.exactType, - })}`, - ); - } - if (thumbnail.length === 0) { - throw Error("EMPTY THUMBNAIL"); - } - log.info( - `thumbnail successfully generated ${getFileNameSize(file)}`, - ); - } catch (e) { - log.error( - `thumbnail generation failed ${getFileNameSize(file)} with format ${fileTypeInfo.exactType}`, - e, - ); - thumbnail = Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) => - c.charCodeAt(0), - ); - hasStaticThumbnail = true; - } - return { thumbnail, hasStaticThumbnail }; - } catch (e) { - log.error("Error generating static thumbnail", e); - throw e; - } -} - -async function generateImageThumbnail( - file: File | ElectronFile, - fileTypeInfo: FileTypeInfo, -) { - if (isElectron()) { - try { - return await generateImageThumbnailInElectron( - file, - MAX_THUMBNAIL_DIMENSION, - MAX_THUMBNAIL_SIZE, - ); - } catch (e) { - return await generateImageThumbnailUsingCanvas(file, fileTypeInfo); - } - } else { - return await generateImageThumbnailUsingCanvas(file, fileTypeInfo); - } -} - -const generateImageThumbnailInElectron = async ( - inputFile: File | ElectronFile, - maxDimension: number, 
- maxSize: number, -): Promise => { - try { - const startTime = Date.now(); - const thumb = await ensureElectron().generateImageThumbnail( - inputFile, - maxDimension, - maxSize, - ); - log.info( - `originalFileSize:${convertBytesToHumanReadable( - inputFile?.size, - )},thumbFileSize:${convertBytesToHumanReadable( - thumb?.length, - )}, native thumbnail generation time: ${ - Date.now() - startTime - }ms `, - ); - return thumb; - } catch (e) { - if ( - e.message !== - CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED - ) { - log.error("failed to generate image thumbnail natively", e); - } - throw e; - } -}; - -export async function generateImageThumbnailUsingCanvas( - file: File | ElectronFile, - fileTypeInfo: FileTypeInfo, -) { - const canvas = document.createElement("canvas"); - const canvasCTX = canvas.getContext("2d"); - - let imageURL = null; - let timeout = null; - const isHEIC = isFileHEIC(fileTypeInfo.exactType); - if (isHEIC) { - log.info(`HEICConverter called for ${getFileNameSize(file)}`); - const convertedBlob = await HeicConversionService.convert( - new Blob([await file.arrayBuffer()]), - ); - file = new File([convertedBlob], file.name); - log.info(`${getFileNameSize(file)} successfully converted`); - } - let image = new Image(); - imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()])); - await new Promise((resolve, reject) => { - image.setAttribute("src", imageURL); - image.onload = () => { - try { - URL.revokeObjectURL(imageURL); - const imageDimension = { - width: image.width, - height: image.height, - }; - const thumbnailDimension = calculateThumbnailDimension( - imageDimension, - MAX_THUMBNAIL_DIMENSION, - ); - canvas.width = thumbnailDimension.width; - canvas.height = thumbnailDimension.height; - canvasCTX.drawImage( - image, - 0, - 0, - thumbnailDimension.width, - thumbnailDimension.height, - ); - image = null; - clearTimeout(timeout); - resolve(null); - } catch (e) { - const err = new 
Error(CustomError.THUMBNAIL_GENERATION_FAILED, { - cause: e, - }); - reject(err); - } - }; - timeout = setTimeout( - () => reject(new Error("Operation timed out")), - WAIT_TIME_THUMBNAIL_GENERATION, - ); - }); - const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas); - return await getUint8ArrayView(thumbnailBlob); -} - -async function generateVideoThumbnail( - file: File | ElectronFile, - fileTypeInfo: FileTypeInfo, -) { - let thumbnail: Uint8Array; - try { - log.info( - `ffmpeg generateThumbnail called for ${getFileNameSize(file)}`, - ); - - const thumbnail = await FFmpegService.generateVideoThumbnail(file); - log.info( - `ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`, - ); - return await getUint8ArrayView(thumbnail); - } catch (e) { - log.info( - `ffmpeg thumbnail generated failed ${getFileNameSize( - file, - )} error: ${e.message}`, - ); - log.error( - `failed to generate thumbnail using ffmpeg for format ${fileTypeInfo.exactType}`, - e, - ); - thumbnail = await generateVideoThumbnailUsingCanvas(file); - } - return thumbnail; -} - -export async function generateVideoThumbnailUsingCanvas( - file: File | ElectronFile, -) { - const canvas = document.createElement("canvas"); - const canvasCTX = canvas.getContext("2d"); - - let timeout = null; - let videoURL = null; - - let video = document.createElement("video"); - videoURL = URL.createObjectURL(new Blob([await file.arrayBuffer()])); - await new Promise((resolve, reject) => { - video.preload = "metadata"; - video.src = videoURL; - video.addEventListener("loadeddata", function () { - try { - URL.revokeObjectURL(videoURL); - if (!video) { - throw Error("video load failed"); - } - const videoDimension = { - width: video.videoWidth, - height: video.videoHeight, - }; - const thumbnailDimension = calculateThumbnailDimension( - videoDimension, - MAX_THUMBNAIL_DIMENSION, - ); - canvas.width = thumbnailDimension.width; - canvas.height = thumbnailDimension.height; - canvasCTX.drawImage( 
- video, - 0, - 0, - thumbnailDimension.width, - thumbnailDimension.height, - ); - video = null; - clearTimeout(timeout); - resolve(null); - } catch (e) { - const err = Error( - `${CustomError.THUMBNAIL_GENERATION_FAILED} err: ${e}`, - ); - log.error(CustomError.THUMBNAIL_GENERATION_FAILED, e); - reject(err); - } - }); - timeout = setTimeout( - () => reject(new Error("Operation timed out")), - WAIT_TIME_THUMBNAIL_GENERATION, - ); - }); - const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas); - return await getUint8ArrayView(thumbnailBlob); -} - -async function getCompressedThumbnailBlobFromCanvas(canvas: HTMLCanvasElement) { - let thumbnailBlob: Blob = null; - let prevSize = Number.MAX_SAFE_INTEGER; - let quality = MAX_QUALITY; - - do { - if (thumbnailBlob) { - prevSize = thumbnailBlob.size; - } - thumbnailBlob = await new Promise((resolve) => { - canvas.toBlob( - function (blob) { - resolve(blob); - }, - "image/jpeg", - quality, - ); - }); - thumbnailBlob = thumbnailBlob ?? 
new Blob([]); - quality -= 0.1; - } while ( - quality >= MIN_QUALITY && - thumbnailBlob.size > MAX_THUMBNAIL_SIZE && - percentageSizeDiff(thumbnailBlob.size, prevSize) >= - MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF - ); - - return thumbnailBlob; -} - -function percentageSizeDiff( - newThumbnailSize: number, - oldThumbnailSize: number, -) { - return ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize; -} - -// method to calculate new size of image for limiting it to maximum width and height, maintaining aspect ratio -// returns {0,0} for invalid inputs -function calculateThumbnailDimension( - originalDimension: Dimension, - maxDimension: number, -): Dimension { - if (originalDimension.height === 0 || originalDimension.width === 0) { - return { width: 0, height: 0 }; - } - const widthScaleFactor = maxDimension / originalDimension.width; - const heightScaleFactor = maxDimension / originalDimension.height; - const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor); - const thumbnailDimension = { - width: Math.round(originalDimension.width * scaleFactor), - height: Math.round(originalDimension.height * scaleFactor), - }; - if (thumbnailDimension.width === 0 || thumbnailDimension.height === 0) { - return { width: 0, height: 0 }; - } - return thumbnailDimension; -} diff --git a/web/apps/photos/src/services/upload/types.ts b/web/apps/photos/src/services/upload/types.ts new file mode 100644 index 0000000000..25e2ab408a --- /dev/null +++ b/web/apps/photos/src/services/upload/types.ts @@ -0,0 +1,57 @@ +import type { ZipItem } from "@/next/types/ipc"; + +/** + * An item to upload is one of the following: + * + * 1. A file drag-and-dropped or selected by the user when we are running in the + * web browser. These is the {@link File} case. + * + * 2. A file drag-and-dropped or selected by the user when we are running in the + * context of our desktop app. In such cases, we also have the absolute path + * of the file in the user's local file system. 
This is the + * {@link FileAndPath} case. + * + * 3. A file path programmatically requested by the desktop app. For example, we + * might be resuming a previously interrupted upload after an app restart + * (thus we no longer have access to the {@link File} from case 2). Or we + * could be uploading a file this is in one of the folders the user has asked + * us to watch for changes. This is the `string` case. + * + * 4. A file within a zip file on the user's local file system. This too is only + * possible when we are running in the context of our desktop app. The user + * might have drag-and-dropped or selected a zip file, or it might be a zip + * file that they'd previously selected but we now are resuming an + * interrupted upload of. Either ways, what we have is a tuple containing the + * (path to zip file, and the name of an entry within that zip file). This is + * the {@link ZipItem} case. + * + * Also see: [Note: Reading a UploadItem]. + */ +export type UploadItem = File | FileAndPath | string | ZipItem; + +/** + * When we are running in the context of our desktop app, we have access to the + * absolute path of {@link File} objects. This convenience type clubs these two + * bits of information, saving us the need to query the path again and again + * using the {@link getPathForFile} method of {@link Electron}. + */ +export interface FileAndPath { + file: File; + path: string; +} + +/** + * The of cases of {@link UploadItem} that apply when we're running in the + * context of our desktop app. + */ +export type DesktopUploadItem = Exclude; + +/** + * For each of cases of {@link UploadItem} that apply when we're running in the + * context of our desktop app, return a value that can be passed to + * {@link Electron} functions over IPC. + */ +export const toDataOrPathOrZipEntry = (desktopUploadItem: DesktopUploadItem) => + typeof desktopUploadItem == "string" || Array.isArray(desktopUploadItem) + ? 
desktopUploadItem + : desktopUploadItem.path; diff --git a/web/apps/photos/src/services/upload/uiService.ts b/web/apps/photos/src/services/upload/uiService.ts deleted file mode 100644 index 13dd780019..0000000000 --- a/web/apps/photos/src/services/upload/uiService.ts +++ /dev/null @@ -1,218 +0,0 @@ -import { CustomError } from "@ente/shared/error"; -import { Canceler } from "axios"; -import { - RANDOM_PERCENTAGE_PROGRESS_FOR_PUT, - UPLOAD_RESULT, - UPLOAD_STAGES, -} from "constants/upload"; -import { - FinishedUploads, - InProgressUpload, - InProgressUploads, - ProgressUpdater, - SegregatedFinishedUploads, -} from "types/upload/ui"; -import uploadCancelService from "./uploadCancelService"; - -const REQUEST_TIMEOUT_TIME = 30 * 1000; // 30 sec; -class UIService { - private progressUpdater: ProgressUpdater; - - // UPLOAD LEVEL STATES - private uploadStage: UPLOAD_STAGES = UPLOAD_STAGES.START; - private filenames: Map = new Map(); - private hasLivePhoto: boolean = false; - private uploadProgressView: boolean = false; - - // STAGE LEVEL STATES - private perFileProgress: number; - private filesUploadedCount: number; - private totalFilesCount: number; - private inProgressUploads: InProgressUploads = new Map(); - private finishedUploads: FinishedUploads = new Map(); - - init(progressUpdater: ProgressUpdater) { - this.progressUpdater = progressUpdater; - this.progressUpdater.setUploadStage(this.uploadStage); - this.progressUpdater.setUploadFilenames(this.filenames); - this.progressUpdater.setHasLivePhotos(this.hasLivePhoto); - this.progressUpdater.setUploadProgressView(this.uploadProgressView); - this.progressUpdater.setUploadCounter({ - finished: this.filesUploadedCount, - total: this.totalFilesCount, - }); - this.progressUpdater.setInProgressUploads( - convertInProgressUploadsToList(this.inProgressUploads), - ); - this.progressUpdater.setFinishedUploads( - segregatedFinishedUploadsToList(this.finishedUploads), - ); - } - - reset(count = 0) { - 
this.setTotalFileCount(count); - this.filesUploadedCount = 0; - this.inProgressUploads = new Map(); - this.finishedUploads = new Map(); - this.updateProgressBarUI(); - } - - setTotalFileCount(count: number) { - this.totalFilesCount = count; - if (count > 0) { - this.perFileProgress = 100 / this.totalFilesCount; - } else { - this.perFileProgress = 0; - } - } - - setFileProgress(key: number, progress: number) { - this.inProgressUploads.set(key, progress); - this.updateProgressBarUI(); - } - - setUploadStage(stage: UPLOAD_STAGES) { - this.uploadStage = stage; - this.progressUpdater.setUploadStage(stage); - } - - setFilenames(filenames: Map) { - this.filenames = filenames; - this.progressUpdater.setUploadFilenames(filenames); - } - - setHasLivePhoto(hasLivePhoto: boolean) { - this.hasLivePhoto = hasLivePhoto; - this.progressUpdater.setHasLivePhotos(hasLivePhoto); - } - - setUploadProgressView(uploadProgressView: boolean) { - this.uploadProgressView = uploadProgressView; - this.progressUpdater.setUploadProgressView(uploadProgressView); - } - - increaseFileUploaded() { - this.filesUploadedCount++; - this.updateProgressBarUI(); - } - - moveFileToResultList(key: number, uploadResult: UPLOAD_RESULT) { - this.finishedUploads.set(key, uploadResult); - this.inProgressUploads.delete(key); - this.updateProgressBarUI(); - } - - hasFilesInResultList() { - const finishedUploadsList = segregatedFinishedUploadsToList( - this.finishedUploads, - ); - for (const x of finishedUploadsList.values()) { - if (x.length > 0) { - return true; - } - } - return false; - } - - private updateProgressBarUI() { - const { - setPercentComplete, - setUploadCounter, - setInProgressUploads, - setFinishedUploads, - } = this.progressUpdater; - setUploadCounter({ - finished: this.filesUploadedCount, - total: this.totalFilesCount, - }); - let percentComplete = - this.perFileProgress * - (this.finishedUploads.size || this.filesUploadedCount); - if (this.inProgressUploads) { - // eslint-disable-next-line 
@typescript-eslint/no-unused-vars - for (const [_, progress] of this.inProgressUploads) { - // filter negative indicator values during percentComplete calculation - if (progress < 0) { - continue; - } - percentComplete += (this.perFileProgress * progress) / 100; - } - } - - setPercentComplete(percentComplete); - setInProgressUploads( - convertInProgressUploadsToList(this.inProgressUploads), - ); - setFinishedUploads( - segregatedFinishedUploadsToList(this.finishedUploads), - ); - } - - trackUploadProgress( - fileLocalID: number, - percentPerPart = RANDOM_PERCENTAGE_PROGRESS_FOR_PUT(), - index = 0, - ) { - const cancel: { exec: Canceler } = { exec: () => {} }; - const cancelTimedOutRequest = () => - cancel.exec(CustomError.REQUEST_TIMEOUT); - - const cancelCancelledUploadRequest = () => - cancel.exec(CustomError.UPLOAD_CANCELLED); - - let timeout = null; - const resetTimeout = () => { - if (timeout) { - clearTimeout(timeout); - } - timeout = setTimeout(cancelTimedOutRequest, REQUEST_TIMEOUT_TIME); - }; - return { - cancel, - onUploadProgress: (event) => { - this.inProgressUploads.set( - fileLocalID, - Math.min( - Math.round( - percentPerPart * index + - (percentPerPart * event.loaded) / event.total, - ), - 98, - ), - ); - this.updateProgressBarUI(); - if (event.loaded === event.total) { - clearTimeout(timeout); - } else { - resetTimeout(); - } - if (uploadCancelService.isUploadCancelationRequested()) { - cancelCancelledUploadRequest(); - } - }, - }; - } -} - -export default new UIService(); - -function convertInProgressUploadsToList(inProgressUploads) { - return [...inProgressUploads.entries()].map( - ([localFileID, progress]) => - ({ - localFileID, - progress, - }) as InProgressUpload, - ); -} - -function segregatedFinishedUploadsToList(finishedUploads: FinishedUploads) { - const segregatedFinishedUploads = new Map() as SegregatedFinishedUploads; - for (const [localID, result] of finishedUploads) { - if (!segregatedFinishedUploads.has(result)) { - 
segregatedFinishedUploads.set(result, []); - } - segregatedFinishedUploads.get(result).push(localID); - } - return segregatedFinishedUploads; -} diff --git a/web/apps/photos/src/services/upload/uploadCancelService.ts b/web/apps/photos/src/services/upload/uploadCancelService.ts deleted file mode 100644 index 790245784b..0000000000 --- a/web/apps/photos/src/services/upload/uploadCancelService.ts +++ /dev/null @@ -1,23 +0,0 @@ -interface UploadCancelStatus { - value: boolean; -} - -class UploadCancelService { - private shouldUploadBeCancelled: UploadCancelStatus = { - value: false, - }; - - reset() { - this.shouldUploadBeCancelled.value = false; - } - - requestUploadCancelation() { - this.shouldUploadBeCancelled.value = true; - } - - isUploadCancelationRequested(): boolean { - return this.shouldUploadBeCancelled.value; - } -} - -export default new UploadCancelService(); diff --git a/web/apps/photos/src/services/upload/uploadHttpClient.ts b/web/apps/photos/src/services/upload/uploadHttpClient.ts index 7ba35dc0dd..e8ae6de977 100644 --- a/web/apps/photos/src/services/upload/uploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/uploadHttpClient.ts @@ -3,9 +3,9 @@ import { CustomError, handleUploadError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; +import { wait } from "@ente/shared/utils"; import { EnteFile } from "types/file"; -import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload"; -import { retryHTTPCall } from "utils/upload/uploadRetrier"; +import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; const ENDPOINT = getEndpoint(); const UPLOAD_ENDPOINT = getUploadEndpoint(); @@ -236,3 +236,31 @@ class UploadHttpClient { } export default new UploadHttpClient(); + +const retrySleepTimeInMilliSeconds = [2000, 5000, 10000]; + +export async 
function retryHTTPCall( + func: () => Promise, + checkForBreakingError?: (error) => void, +): Promise { + const retrier = async ( + func: () => Promise, + attemptNumber: number = 0, + ) => { + try { + const resp = await func(); + return resp; + } catch (e) { + if (checkForBreakingError) { + checkForBreakingError(e); + } + if (attemptNumber < retrySleepTimeInMilliSeconds.length) { + await wait(retrySleepTimeInMilliSeconds[attemptNumber]); + return await retrier(func, attemptNumber + 1); + } else { + throw e; + } + } + }; + return await retrier(func); +} diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts index d222999d8a..38fd7037be 100644 --- a/web/apps/photos/src/services/upload/uploadManager.ts +++ b/web/apps/photos/src/services/upload/uploadManager.ts @@ -1,61 +1,339 @@ -import { getFileNameSize } from "@/next/file"; +import { FILE_TYPE } from "@/media/file-type"; +import { potentialFileTypeFromExtension } from "@/media/live-photo"; +import { ensureElectron } from "@/next/electron"; +import { lowercaseExtension, nameAndExtension } from "@/next/file"; import log from "@/next/log"; +import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; +import { ensure } from "@/utils/ensure"; import { getDedicatedCryptoWorker } from "@ente/shared/crypto"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { CustomError } from "@ente/shared/error"; import { Events, eventBus } from "@ente/shared/events"; +import { wait } from "@ente/shared/utils"; +import { Canceler } from "axios"; import { Remote } from "comlink"; -import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload"; +import { + RANDOM_PERCENTAGE_PROGRESS_FOR_PUT, + UPLOAD_RESULT, + UPLOAD_STAGES, +} from "constants/upload"; import isElectron from "is-electron"; -import ImportService from "services/importService"; import { getLocalPublicFiles, 
getPublicCollectionUID, } from "services/publicCollectionService"; import { getDisableCFUploadProxyFlag } from "services/userService"; -import watchFolderService from "services/watch"; +import watcher from "services/watch"; import { Collection } from "types/collection"; import { EncryptedEnteFile, EnteFile } from "types/file"; import { SetFiles } from "types/gallery"; -import { - FileWithCollection, - ParsedMetadataJSON, - ParsedMetadataJSONMap, - PublicUploadProps, -} from "types/upload"; -import { ProgressUpdater } from "types/upload/ui"; import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file"; -import { - areFileWithCollectionsSame, - segregateMetadataAndMediaFiles, -} from "utils/upload"; import { getLocalFiles } from "../fileService"; import { getMetadataJSONMapKeyForJSON, - parseMetadataJSON, -} from "./metadataService"; -import { default as UIService, default as uiService } from "./uiService"; -import uploadCancelService from "./uploadCancelService"; -import UploadService from "./uploadService"; -import uploader from "./uploader"; + tryParseTakeoutMetadataJSON, + type ParsedMetadataJSON, +} from "./takeout"; +import type { UploadItem } from "./types"; +import UploadService, { uploadItemFileName, uploader } from "./uploadService"; -const MAX_CONCURRENT_UPLOADS = 4; +export type FileID = number; + +export type PercentageUploaded = number; +/* localID => fileName */ +export type UploadFileNames = Map; + +export interface UploadCounter { + finished: number; + total: number; +} + +export interface InProgressUpload { + localFileID: FileID; + progress: PercentageUploaded; +} + +export interface FinishedUpload { + localFileID: FileID; + result: UPLOAD_RESULT; +} + +export type InProgressUploads = Map; + +export type FinishedUploads = Map; + +export type SegregatedFinishedUploads = Map; + +export interface ProgressUpdater { + setPercentComplete: React.Dispatch>; + setUploadCounter: React.Dispatch>; + setUploadStage: React.Dispatch>; + 
setInProgressUploads: React.Dispatch< + React.SetStateAction + >; + setFinishedUploads: React.Dispatch< + React.SetStateAction + >; + setUploadFilenames: React.Dispatch>; + setHasLivePhotos: React.Dispatch>; + setUploadProgressView: React.Dispatch>; +} + +/** The number of uploads to process in parallel. */ +const maxConcurrentUploads = 4; + +export interface UploadItemWithCollection { + localID: number; + collectionID: number; + isLivePhoto?: boolean; + uploadItem?: UploadItem; + livePhotoAssets?: LivePhotoAssets; +} + +export interface LivePhotoAssets { + image: UploadItem; + video: UploadItem; +} + +export interface PublicUploadProps { + token: string; + passwordToken: string; + accessedThroughSharedURL: boolean; +} + +interface UploadCancelStatus { + value: boolean; +} + +class UploadCancelService { + private shouldUploadBeCancelled: UploadCancelStatus = { + value: false, + }; + + reset() { + this.shouldUploadBeCancelled.value = false; + } + + requestUploadCancelation() { + this.shouldUploadBeCancelled.value = true; + } + + isUploadCancelationRequested(): boolean { + return this.shouldUploadBeCancelled.value; + } +} + +const uploadCancelService = new UploadCancelService(); + +class UIService { + private progressUpdater: ProgressUpdater; + + // UPLOAD LEVEL STATES + private uploadStage: UPLOAD_STAGES = UPLOAD_STAGES.START; + private filenames: Map = new Map(); + private hasLivePhoto: boolean = false; + private uploadProgressView: boolean = false; + + // STAGE LEVEL STATES + private perFileProgress: number; + private filesUploadedCount: number; + private totalFilesCount: number; + private inProgressUploads: InProgressUploads = new Map(); + private finishedUploads: FinishedUploads = new Map(); + + init(progressUpdater: ProgressUpdater) { + this.progressUpdater = progressUpdater; + this.progressUpdater.setUploadStage(this.uploadStage); + this.progressUpdater.setUploadFilenames(this.filenames); + this.progressUpdater.setHasLivePhotos(this.hasLivePhoto); + 
this.progressUpdater.setUploadProgressView(this.uploadProgressView); + this.progressUpdater.setUploadCounter({ + finished: this.filesUploadedCount, + total: this.totalFilesCount, + }); + this.progressUpdater.setInProgressUploads( + convertInProgressUploadsToList(this.inProgressUploads), + ); + this.progressUpdater.setFinishedUploads( + groupByResult(this.finishedUploads), + ); + } + + reset(count = 0) { + this.setTotalFileCount(count); + this.filesUploadedCount = 0; + this.inProgressUploads = new Map(); + this.finishedUploads = new Map(); + this.updateProgressBarUI(); + } + + setTotalFileCount(count: number) { + this.totalFilesCount = count; + if (count > 0) { + this.perFileProgress = 100 / this.totalFilesCount; + } else { + this.perFileProgress = 0; + } + } + + setFileProgress(key: number, progress: number) { + this.inProgressUploads.set(key, progress); + this.updateProgressBarUI(); + } + + setUploadStage(stage: UPLOAD_STAGES) { + this.uploadStage = stage; + this.progressUpdater.setUploadStage(stage); + } + + setFiles(files: { localID: number; fileName: string }[]) { + const filenames = new Map(files.map((f) => [f.localID, f.fileName])); + this.filenames = filenames; + this.progressUpdater.setUploadFilenames(filenames); + } + + setHasLivePhoto(hasLivePhoto: boolean) { + this.hasLivePhoto = hasLivePhoto; + this.progressUpdater.setHasLivePhotos(hasLivePhoto); + } + + setUploadProgressView(uploadProgressView: boolean) { + this.uploadProgressView = uploadProgressView; + this.progressUpdater.setUploadProgressView(uploadProgressView); + } + + increaseFileUploaded() { + this.filesUploadedCount++; + this.updateProgressBarUI(); + } + + moveFileToResultList(key: number, uploadResult: UPLOAD_RESULT) { + this.finishedUploads.set(key, uploadResult); + this.inProgressUploads.delete(key); + this.updateProgressBarUI(); + } + + hasFilesInResultList() { + return this.finishedUploads.size > 0; + } + + private updateProgressBarUI() { + const { + setPercentComplete, + 
setUploadCounter, + setInProgressUploads, + setFinishedUploads, + } = this.progressUpdater; + setUploadCounter({ + finished: this.filesUploadedCount, + total: this.totalFilesCount, + }); + let percentComplete = + this.perFileProgress * + (this.finishedUploads.size || this.filesUploadedCount); + if (this.inProgressUploads) { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + for (const [_, progress] of this.inProgressUploads) { + // filter negative indicator values during percentComplete calculation + if (progress < 0) { + continue; + } + percentComplete += (this.perFileProgress * progress) / 100; + } + } + + setPercentComplete(percentComplete); + setInProgressUploads( + convertInProgressUploadsToList(this.inProgressUploads), + ); + setFinishedUploads(groupByResult(this.finishedUploads)); + } + + trackUploadProgress( + fileLocalID: number, + percentPerPart = RANDOM_PERCENTAGE_PROGRESS_FOR_PUT(), + index = 0, + ) { + const cancel: { exec: Canceler } = { exec: () => {} }; + const cancelTimedOutRequest = () => + cancel.exec(CustomError.REQUEST_TIMEOUT); + + const cancelCancelledUploadRequest = () => + cancel.exec(CustomError.UPLOAD_CANCELLED); + + let timeout = null; + const resetTimeout = () => { + if (timeout) { + clearTimeout(timeout); + } + timeout = setTimeout(cancelTimedOutRequest, 30 * 1000 /* 30 sec */); + }; + return { + cancel, + onUploadProgress: (event) => { + this.inProgressUploads.set( + fileLocalID, + Math.min( + Math.round( + percentPerPart * index + + (percentPerPart * event.loaded) / event.total, + ), + 98, + ), + ); + this.updateProgressBarUI(); + if (event.loaded === event.total) { + clearTimeout(timeout); + } else { + resetTimeout(); + } + if (uploadCancelService.isUploadCancelationRequested()) { + cancelCancelledUploadRequest(); + } + }, + }; + } +} + +function convertInProgressUploadsToList(inProgressUploads) { + return [...inProgressUploads.entries()].map( + ([localFileID, progress]) => + ({ + localFileID, + progress, + }) as 
InProgressUpload, + ); +} + +const groupByResult = (finishedUploads: FinishedUploads) => { + const groups: SegregatedFinishedUploads = new Map(); + for (const [localID, result] of finishedUploads) { + if (!groups.has(result)) groups.set(result, []); + groups.get(result).push(localID); + } + return groups; +}; class UploadManager { private cryptoWorkers = new Array< ComlinkWorker - >(MAX_CONCURRENT_UPLOADS); - private parsedMetadataJSONMap: ParsedMetadataJSONMap; - private filesToBeUploaded: FileWithCollection[]; - private remainingFiles: FileWithCollection[] = []; - private failedFiles: FileWithCollection[]; + >(maxConcurrentUploads); + private parsedMetadataJSONMap: Map; + private itemsToBeUploaded: ClusteredUploadItem[]; + private failedItems: ClusteredUploadItem[]; private existingFiles: EnteFile[]; private setFiles: SetFiles; private collections: Map; private uploadInProgress: boolean; private publicUploadProps: PublicUploadProps; private uploaderName: string; + private uiService: UIService; + private isCFUploadProxyDisabled: boolean = false; + + constructor() { + this.uiService = new UIService(); + } public async init( progressUpdater: ProgressUpdater, @@ -63,13 +341,14 @@ class UploadManager { publicCollectProps: PublicUploadProps, isCFUploadProxyDisabled: boolean, ) { - UIService.init(progressUpdater); + this.uiService.init(progressUpdater); const remoteIsCFUploadProxyDisabled = await getDisableCFUploadProxyFlag(); if (remoteIsCFUploadProxyDisabled) { isCFUploadProxyDisabled = remoteIsCFUploadProxyDisabled; } - UploadService.init(publicCollectProps, isCFUploadProxyDisabled); + this.isCFUploadProxyDisabled = isCFUploadProxyDisabled; + UploadService.init(publicCollectProps); this.setFiles = setFiles; this.publicUploadProps = publicCollectProps; } @@ -79,26 +358,109 @@ class UploadManager { } private resetState() { - this.filesToBeUploaded = []; - this.remainingFiles = []; - this.failedFiles = []; + this.itemsToBeUploaded = []; + this.failedItems = []; 
this.parsedMetadataJSONMap = new Map(); this.uploaderName = null; } - prepareForNewUpload() { + public prepareForNewUpload() { this.resetState(); - UIService.reset(); + this.uiService.reset(); uploadCancelService.reset(); - UIService.setUploadStage(UPLOAD_STAGES.START); + this.uiService.setUploadStage(UPLOAD_STAGES.START); } showUploadProgressDialog() { - UIService.setUploadProgressView(true); + this.uiService.setUploadProgressView(true); } - async updateExistingFilesAndCollections(collections: Collection[]) { + /** + * Upload files + * + * This method waits for all the files to get uploaded (successfully or + * unsucessfully) before returning. + * + * It is an error to call this method when there is already an in-progress + * upload. + * + * @param itemsWithCollection The items to upload, each paired with the id + * of the collection that they should be uploaded into. + * + * @returns `true` if at least one file was processed + */ + public async uploadItems( + itemsWithCollection: UploadItemWithCollection[], + collections: Collection[], + uploaderName?: string, + ) { + if (this.uploadInProgress) + throw new Error("Cannot run multiple uploads at once"); + + log.info(`Uploading ${itemsWithCollection.length} files`); + this.uploadInProgress = true; + this.uploaderName = uploaderName; + + try { + await this.updateExistingFilesAndCollections(collections); + + const namedItems = itemsWithCollection.map( + makeUploadItemWithCollectionIDAndName, + ); + + this.uiService.setFiles(namedItems); + + const [metadataItems, mediaItems] = + splitMetadataAndMediaItems(namedItems); + + if (metadataItems.length) { + this.uiService.setUploadStage( + UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES, + ); + + await this.parseMetadataJSONFiles(metadataItems); + } + + if (mediaItems.length) { + const clusteredMediaItems = await clusterLivePhotos(mediaItems); + + this.abortIfCancelled(); + + // Live photos might've been clustered together, reset the list + // of files to reflect that. 
+ this.uiService.setFiles(clusteredMediaItems); + + this.uiService.setHasLivePhoto( + mediaItems.length != clusteredMediaItems.length, + ); + + await this.uploadMediaItems(clusteredMediaItems); + } + } catch (e) { + if (e.message != CustomError.UPLOAD_CANCELLED) { + log.error("Upload failed", e); + throw e; + } + } finally { + this.uiService.setUploadStage(UPLOAD_STAGES.FINISH); + void globalThis.electron?.clearPendingUploads(); + for (let i = 0; i < maxConcurrentUploads; i++) { + this.cryptoWorkers[i]?.terminate(); + } + this.uploadInProgress = false; + } + + return this.uiService.hasFilesInResultList(); + } + + private abortIfCancelled = () => { + if (uploadCancelService.isUploadCancelationRequested()) { + throw Error(CustomError.UPLOAD_CANCELLED); + } + }; + + private async updateExistingFilesAndCollections(collections: Collection[]) { if (this.publicUploadProps.accessedThroughSharedURL) { this.existingFiles = await getLocalPublicFiles( getPublicCollectionUID(this.publicUploadProps.token), @@ -111,236 +473,125 @@ class UploadManager { ); } - public async queueFilesForUpload( - filesWithCollectionToUploadIn: FileWithCollection[], - collections: Collection[], - uploaderName?: string, + private async parseMetadataJSONFiles( + items: UploadItemWithCollectionIDAndName[], ) { - try { - if (this.uploadInProgress) { - throw Error("can't run multiple uploads at once"); - } - this.uploadInProgress = true; - await this.updateExistingFilesAndCollections(collections); - this.uploaderName = uploaderName; - log.info( - `received ${filesWithCollectionToUploadIn.length} files to upload`, + this.uiService.reset(items.length); + + for (const { uploadItem, fileName, collectionID } of items) { + this.abortIfCancelled(); + + log.info(`Parsing metadata JSON ${fileName}`); + const metadataJSON = await tryParseTakeoutMetadataJSON( + ensure(uploadItem), ); - uiService.setFilenames( - new Map( - filesWithCollectionToUploadIn.map((mediaFile) => [ - mediaFile.localID, - 
UploadService.getAssetName(mediaFile), - ]), - ), - ); - const { metadataJSONFiles, mediaFiles } = - segregateMetadataAndMediaFiles(filesWithCollectionToUploadIn); - log.info(`has ${metadataJSONFiles.length} metadata json files`); - log.info(`has ${mediaFiles.length} media files`); - if (metadataJSONFiles.length) { - UIService.setUploadStage( - UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES, - ); - await this.parseMetadataJSONFiles(metadataJSONFiles); - - UploadService.setParsedMetadataJSONMap( - this.parsedMetadataJSONMap, + if (metadataJSON) { + this.parsedMetadataJSONMap.set( + getMetadataJSONMapKeyForJSON(collectionID, fileName), + metadataJSON, ); + this.uiService.increaseFileUploaded(); } - if (mediaFiles.length) { - log.info(`clusterLivePhotoFiles started`); - const analysedMediaFiles = - await UploadService.clusterLivePhotoFiles(mediaFiles); - log.info(`clusterLivePhotoFiles ended`); - log.info( - `got live photos: ${ - mediaFiles.length !== analysedMediaFiles.length - }`, - ); - uiService.setFilenames( - new Map( - analysedMediaFiles.map((mediaFile) => [ - mediaFile.localID, - UploadService.getAssetName(mediaFile), - ]), - ), - ); - - UIService.setHasLivePhoto( - mediaFiles.length !== analysedMediaFiles.length, - ); - - await this.uploadMediaFiles(analysedMediaFiles); - } - } catch (e) { - if (e.message === CustomError.UPLOAD_CANCELLED) { - if (isElectron()) { - this.remainingFiles = []; - await ImportService.cancelRemainingUploads(); - } - } else { - log.error("uploading failed with error", e); - throw e; - } - } finally { - UIService.setUploadStage(UPLOAD_STAGES.FINISH); - for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) { - this.cryptoWorkers[i]?.terminate(); - } - this.uploadInProgress = false; - } - try { - if (!UIService.hasFilesInResultList()) { - return true; - } else { - return false; - } - } catch (e) { - log.error(" failed to return shouldCloseProgressBar", e); - return false; } } - private async parseMetadataJSONFiles(metadataFiles: 
FileWithCollection[]) { - try { - log.info(`parseMetadataJSONFiles function executed `); - - UIService.reset(metadataFiles.length); - - for (const { file, collectionID } of metadataFiles) { - try { - if (uploadCancelService.isUploadCancelationRequested()) { - throw Error(CustomError.UPLOAD_CANCELLED); - } - log.info( - `parsing metadata json file ${getFileNameSize(file)}`, - ); - - const parsedMetadataJSON = await parseMetadataJSON(file); - if (parsedMetadataJSON) { - this.parsedMetadataJSONMap.set( - getMetadataJSONMapKeyForJSON( - collectionID, - file.name, - ), - parsedMetadataJSON && { ...parsedMetadataJSON }, - ); - UIService.increaseFileUploaded(); - } - log.info( - `successfully parsed metadata json file ${getFileNameSize( - file, - )}`, - ); - } catch (e) { - if (e.message === CustomError.UPLOAD_CANCELLED) { - throw e; - } else { - // and don't break for subsequent files just log and move on - log.error("parsing failed for a file", e); - log.info( - `failed to parse metadata json file ${getFileNameSize( - file, - )} error: ${e.message}`, - ); - } - } - } - } catch (e) { - if (e.message !== CustomError.UPLOAD_CANCELLED) { - log.error("error seeding MetadataMap", e); - } - throw e; - } - } - - private async uploadMediaFiles(mediaFiles: FileWithCollection[]) { - log.info(`uploadMediaFiles called`); - this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles]; - - if (isElectron()) { - this.remainingFiles = [...this.remainingFiles, ...mediaFiles]; - } - - UIService.reset(mediaFiles.length); - - await UploadService.setFileCount(mediaFiles.length); - - UIService.setUploadStage(UPLOAD_STAGES.UPLOADING); + private async uploadMediaItems(mediaItems: ClusteredUploadItem[]) { + this.itemsToBeUploaded = [...this.itemsToBeUploaded, ...mediaItems]; + this.uiService.reset(mediaItems.length); + await UploadService.setFileCount(mediaItems.length); + this.uiService.setUploadStage(UPLOAD_STAGES.UPLOADING); const uploadProcesses = []; for ( let i = 0; - i < 
MAX_CONCURRENT_UPLOADS && this.filesToBeUploaded.length > 0; + i < maxConcurrentUploads && this.itemsToBeUploaded.length > 0; i++ ) { this.cryptoWorkers[i] = getDedicatedCryptoWorker(); const worker = await this.cryptoWorkers[i].remote; - uploadProcesses.push(this.uploadNextFileInQueue(worker)); + uploadProcesses.push(this.uploadNextItemInQueue(worker)); } await Promise.all(uploadProcesses); } - private async uploadNextFileInQueue(worker: Remote) { - while (this.filesToBeUploaded.length > 0) { - if (uploadCancelService.isUploadCancelationRequested()) { - throw Error(CustomError.UPLOAD_CANCELLED); - } - let fileWithCollection = this.filesToBeUploaded.pop(); - const { collectionID } = fileWithCollection; + private async uploadNextItemInQueue(worker: Remote) { + const uiService = this.uiService; + + while (this.itemsToBeUploaded.length > 0) { + this.abortIfCancelled(); + + const clusteredItem = this.itemsToBeUploaded.pop(); + const { localID, collectionID } = clusteredItem; const collection = this.collections.get(collectionID); - fileWithCollection = { ...fileWithCollection, collection }; - const { fileUploadResult, uploadedFile } = await uploader( - worker, - this.existingFiles, - fileWithCollection, + const uploadableItem = { ...clusteredItem, collection }; + + uiService.setFileProgress(localID, 0); + await wait(0); + + const { uploadResult, uploadedFile } = await uploader( + uploadableItem, this.uploaderName, + this.existingFiles, + this.parsedMetadataJSONMap, + worker, + this.isCFUploadProxyDisabled, + () => { + this.abortIfCancelled(); + }, + ( + fileLocalID: number, + percentPerPart?: number, + index?: number, + ) => + uiService.trackUploadProgress( + fileLocalID, + percentPerPart, + index, + ), ); const finalUploadResult = await this.postUploadTask( - fileUploadResult, + uploadableItem, + uploadResult, uploadedFile, - fileWithCollection, ); - UIService.moveFileToResultList( - fileWithCollection.localID, - finalUploadResult, - ); - 
UIService.increaseFileUploaded(); + this.uiService.moveFileToResultList(localID, finalUploadResult); + this.uiService.increaseFileUploaded(); UploadService.reducePendingUploadCount(); } } - async postUploadTask( - fileUploadResult: UPLOAD_RESULT, - uploadedFile: EncryptedEnteFile | EnteFile | null, - fileWithCollection: FileWithCollection, + private async postUploadTask( + uploadableItem: UploadableUploadItem, + uploadResult: UPLOAD_RESULT, + uploadedFile: EncryptedEnteFile | EnteFile | undefined, ) { + log.info( + `Uploaded ${uploadableItem.fileName} with result ${uploadResult}`, + ); try { + const electron = globalThis.electron; + if (electron) await markUploaded(electron, uploadableItem); + let decryptedFile: EnteFile; - log.info( - `post upload action -> fileUploadResult: ${fileUploadResult} uploadedFile present ${!!uploadedFile}`, - ); - await this.updateElectronRemainingFiles(fileWithCollection); - switch (fileUploadResult) { + switch (uploadResult) { case UPLOAD_RESULT.FAILED: case UPLOAD_RESULT.BLOCKED: - this.failedFiles.push(fileWithCollection); + this.failedItems.push(uploadableItem); break; case UPLOAD_RESULT.ALREADY_UPLOADED: decryptedFile = uploadedFile as EnteFile; break; case UPLOAD_RESULT.ADDED_SYMLINK: decryptedFile = uploadedFile as EnteFile; - fileUploadResult = UPLOAD_RESULT.UPLOADED; + uploadResult = UPLOAD_RESULT.UPLOADED; break; case UPLOAD_RESULT.UPLOADED: case UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL: decryptedFile = await decryptFile( uploadedFile as EncryptedEnteFile, - fileWithCollection.collection.key, + uploadableItem.collection.key, ); break; case UPLOAD_RESULT.UNSUPPORTED: @@ -348,33 +599,47 @@ class UploadManager { // no-op break; default: - throw Error("Invalid Upload Result" + fileUploadResult); + throw new Error(`Invalid Upload Result ${uploadResult}`); } if ( [ UPLOAD_RESULT.ADDED_SYMLINK, UPLOAD_RESULT.UPLOADED, UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL, - ].includes(fileUploadResult) + ].includes(uploadResult) ) { try 
{ + let file: File | undefined; + const uploadItem = + uploadableItem.uploadItem ?? + uploadableItem.livePhotoAssets.image; + if (uploadItem) { + if (uploadItem instanceof File) { + file = uploadItem; + } else if ( + typeof uploadItem == "string" || + Array.isArray(uploadItem) + ) { + // path from desktop, no file object + } else { + file = uploadItem.file; + } + } eventBus.emit(Events.FILE_UPLOADED, { enteFile: decryptedFile, - localFile: - fileWithCollection.file ?? - fileWithCollection.livePhotoAssets.image, + localFile: file, }); } catch (e) { - log.error("Error in fileUploaded handlers", e); + log.warn("Ignoring error in fileUploaded handlers", e); } this.updateExistingFiles(decryptedFile); } await this.watchFolderCallback( - fileUploadResult, - fileWithCollection, + uploadResult, + uploadableItem, uploadedFile as EncryptedEnteFile, ); - return fileUploadResult; + return uploadResult; } catch (e) { log.error("failed to do post file upload action", e); return UPLOAD_RESULT.FAILED; @@ -383,32 +648,34 @@ class UploadManager { private async watchFolderCallback( fileUploadResult: UPLOAD_RESULT, - fileWithCollection: FileWithCollection, + fileWithCollection: ClusteredUploadItem, uploadedFile: EncryptedEnteFile, ) { if (isElectron()) { - await watchFolderService.onFileUpload( - fileUploadResult, - fileWithCollection, - uploadedFile, - ); + if (watcher.isUploadRunning()) { + await watcher.onFileUpload( + fileUploadResult, + fileWithCollection, + uploadedFile, + ); + } } } public cancelRunningUpload() { - log.info("user cancelled running upload"); - UIService.setUploadStage(UPLOAD_STAGES.CANCELLING); + log.info("User cancelled running upload"); + this.uiService.setUploadStage(UPLOAD_STAGES.CANCELLING); uploadCancelService.requestUploadCancelation(); } - getFailedFilesWithCollections() { + public getFailedItemsWithCollections() { return { - files: this.failedFiles, + items: this.failedItems, collections: [...this.collections.values()], }; } - getUploaderName() { + 
public getUploaderName() { return this.uploaderName; } @@ -424,20 +691,310 @@ class UploadManager { this.setFiles((files) => sortFiles([...files, decryptedFile])); } - private async updateElectronRemainingFiles( - fileWithCollection: FileWithCollection, - ) { - if (isElectron()) { - this.remainingFiles = this.remainingFiles.filter( - (file) => !areFileWithCollectionsSame(file, fileWithCollection), - ); - await ImportService.updatePendingUploads(this.remainingFiles); - } - } - public shouldAllowNewUpload = () => { - return !this.uploadInProgress || watchFolderService.isUploadRunning(); + return !this.uploadInProgress || watcher.isUploadRunning(); }; } export default new UploadManager(); + +/** + * The data operated on by the intermediate stages of the upload. + * + * [Note: Intermediate file types during upload] + * + * As files progress through stages, they get more and more bits tacked on to + * them. These types document the journey. + * + * - The input is {@link UploadItemWithCollection}. This can either be a new + * {@link UploadItemWithCollection}, in which case it'll only have a + * {@link localID}, {@link collectionID} and a {@link uploadItem}. Or it could + * be a retry, in which case it'll not have a {@link uploadItem} but instead + * will have data from a previous stage (concretely, it'll just be a + * relabelled {@link ClusteredUploadItem}), like a snake eating its tail. + * + * - Immediately we convert it to {@link UploadItemWithCollectionIDAndName}. + * This is to mostly systematize what we have, and also attach a + * {@link fileName}. + * + * - These then get converted to "assets", whereby both parts of a live photo + * are combined. This is a {@link ClusteredUploadItem}. + * + * - On to the {@link ClusteredUploadItem} we attach the corresponding + * {@link collection}, giving us {@link UploadableUploadItem}. This is what + * gets queued and then passed to the {@link uploader}. 
+ */ +type UploadItemWithCollectionIDAndName = { + /** A unique ID for the duration of the upload */ + localID: number; + /** The ID of the collection to which this file should be uploaded. */ + collectionID: number; + /** + * The name of the file. + * + * In case of live photos, this'll be the name of the image part. + */ + fileName: string; + /** `true` if this is a live photo. */ + isLivePhoto?: boolean; + /* Valid for non-live photos */ + uploadItem?: UploadItem; + /* Valid for live photos */ + livePhotoAssets?: LivePhotoAssets; +}; + +const makeUploadItemWithCollectionIDAndName = ( + f: UploadItemWithCollection, +): UploadItemWithCollectionIDAndName => ({ + localID: ensure(f.localID), + collectionID: ensure(f.collectionID), + fileName: ensure( + f.isLivePhoto + ? uploadItemFileName(f.livePhotoAssets.image) + : uploadItemFileName(f.uploadItem), + ), + isLivePhoto: f.isLivePhoto, + uploadItem: f.uploadItem, + livePhotoAssets: f.livePhotoAssets, +}); + +/** + * An upload item with both parts of a live photo clubbed together. + * + * See: [Note: Intermediate file types during upload]. + */ +type ClusteredUploadItem = { + localID: number; + collectionID: number; + fileName: string; + isLivePhoto: boolean; + uploadItem?: UploadItem; + livePhotoAssets?: LivePhotoAssets; +}; + +/** + * The file that we hand off to the uploader. Essentially + * {@link ClusteredUploadItem} with the {@link collection} attached to it. + * + * See: [Note: Intermediate file types during upload]. 
+ */ +export type UploadableUploadItem = ClusteredUploadItem & { + collection: Collection; +}; + +const splitMetadataAndMediaItems = ( + items: UploadItemWithCollectionIDAndName[], +): [ + metadata: UploadItemWithCollectionIDAndName[], + media: UploadItemWithCollectionIDAndName[], +] => + items.reduce( + ([metadata, media], f) => { + if (lowercaseExtension(f.fileName) == "json") metadata.push(f); + else media.push(f); + return [metadata, media]; + }, + [[], []], + ); + +const markUploaded = async (electron: Electron, item: ClusteredUploadItem) => { + // TODO: This can be done better + if (item.isLivePhoto) { + const [p0, p1] = [ + item.livePhotoAssets.image, + item.livePhotoAssets.video, + ]; + if (Array.isArray(p0) && Array.isArray(p1)) { + electron.markUploadedZipItems([p0, p1]); + } else if (typeof p0 == "string" && typeof p1 == "string") { + electron.markUploadedFiles([p0, p1]); + } else if ( + p0 && + typeof p0 == "object" && + "path" in p0 && + p1 && + typeof p1 == "object" && + "path" in p1 + ) { + electron.markUploadedFiles([p0.path, p1.path]); + } else { + throw new Error( + "Attempting to mark upload completion of unexpected desktop upload items", + ); + } + } else { + const p = ensure(item.uploadItem); + if (Array.isArray(p)) { + electron.markUploadedZipItems([p]); + } else if (typeof p == "string") { + electron.markUploadedFiles([p]); + } else if (p && typeof p == "object" && "path" in p) { + electron.markUploadedFiles([p.path]); + } else { + throw new Error( + "Attempting to mark upload completion of unexpected desktop upload items", + ); + } + } +}; + +/** + * Go through the given files, combining any sibling image + video assets into a + * single live photo when appropriate. 
+ */ +const clusterLivePhotos = async ( + items: UploadItemWithCollectionIDAndName[], +) => { + const result: ClusteredUploadItem[] = []; + items + .sort((f, g) => + nameAndExtension(f.fileName)[0].localeCompare( + nameAndExtension(g.fileName)[0], + ), + ) + .sort((f, g) => f.collectionID - g.collectionID); + let index = 0; + while (index < items.length - 1) { + const f = items[index]; + const g = items[index + 1]; + const fFileType = potentialFileTypeFromExtension(f.fileName); + const gFileType = potentialFileTypeFromExtension(g.fileName); + const fa: PotentialLivePhotoAsset = { + fileName: f.fileName, + fileType: fFileType, + collectionID: f.collectionID, + uploadItem: f.uploadItem, + }; + const ga: PotentialLivePhotoAsset = { + fileName: g.fileName, + fileType: gFileType, + collectionID: g.collectionID, + uploadItem: g.uploadItem, + }; + if (await areLivePhotoAssets(fa, ga)) { + const [image, video] = + fFileType == FILE_TYPE.IMAGE ? [f, g] : [g, f]; + result.push({ + localID: f.localID, + collectionID: f.collectionID, + fileName: image.fileName, + isLivePhoto: true, + livePhotoAssets: { + image: image.uploadItem, + video: video.uploadItem, + }, + }); + index += 2; + } else { + result.push({ + ...f, + isLivePhoto: false, + }); + index += 1; + } + } + if (index === items.length - 1) { + result.push({ + ...items[index], + isLivePhoto: false, + }); + } + return result; +}; + +interface PotentialLivePhotoAsset { + fileName: string; + fileType: FILE_TYPE; + collectionID: number; + uploadItem: UploadItem; +} + +const areLivePhotoAssets = async ( + f: PotentialLivePhotoAsset, + g: PotentialLivePhotoAsset, +) => { + if (f.collectionID != g.collectionID) return false; + + const [fName, fExt] = nameAndExtension(f.fileName); + const [gName, gExt] = nameAndExtension(g.fileName); + + let fPrunedName: string; + let gPrunedName: string; + if (f.fileType == FILE_TYPE.IMAGE && g.fileType == FILE_TYPE.VIDEO) { + fPrunedName = removePotentialLivePhotoSuffix( + fName, + // A Google 
Live Photo image file can have video extension appended + // as suffix, so we pass that to removePotentialLivePhotoSuffix to + // remove it. + // + // Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file) + gExt ? `.${gExt}` : undefined, + ); + gPrunedName = removePotentialLivePhotoSuffix(gName); + } else if (f.fileType == FILE_TYPE.VIDEO && g.fileType == FILE_TYPE.IMAGE) { + fPrunedName = removePotentialLivePhotoSuffix(fName); + gPrunedName = removePotentialLivePhotoSuffix( + gName, + fExt ? `.${fExt}` : undefined, + ); + } else { + return false; + } + + if (fPrunedName != gPrunedName) return false; + + // Also check that the size of an individual Live Photo asset is less than + // an (arbitrary) limit. This should be true in practice as the videos for a + // live photo are a few seconds long. Further on, the zipping library that + // we use doesn't support stream as an input. + + const maxAssetSize = 20 * 1024 * 1024; /* 20MB */ + const fSize = await uploadItemSize(f.uploadItem); + const gSize = await uploadItemSize(g.uploadItem); + if (fSize > maxAssetSize || gSize > maxAssetSize) { + log.info( + `Not classifying files with too large sizes (${fSize} and ${gSize} bytes) as a live photo`, + ); + return false; + } + + return true; +}; + +const removePotentialLivePhotoSuffix = (name: string, suffix?: string) => { + const suffix_3 = "_3"; + + // The icloud-photos-downloader library appends _HVEC to the end of the + // filename in case of live photos. + // + // https://github.com/icloud-photos-downloader/icloud_photos_downloader + const suffix_hvec = "_HVEC"; + + let foundSuffix: string | undefined; + if (name.endsWith(suffix_3)) { + foundSuffix = suffix_3; + } else if ( + name.endsWith(suffix_hvec) || + name.endsWith(suffix_hvec.toLowerCase()) + ) { + foundSuffix = suffix_hvec; + } else if (suffix) { + if (name.endsWith(suffix) || name.endsWith(suffix.toLowerCase())) { + foundSuffix = suffix; + } + } + + return foundSuffix ? 
name.slice(0, foundSuffix.length * -1) : name; +}; + +/** + * Return the size of the given {@link uploadItem}. + */ +const uploadItemSize = async (uploadItem: UploadItem): Promise => { + if (uploadItem instanceof File) return uploadItem.size; + if (typeof uploadItem == "string") + return ensureElectron().pathOrZipItemSize(uploadItem); + if (Array.isArray(uploadItem)) + return ensureElectron().pathOrZipItemSize(uploadItem); + return uploadItem.file.size; +}; diff --git a/web/apps/photos/src/services/upload/uploadService.ts b/web/apps/photos/src/services/upload/uploadService.ts index 95e4752a70..52f495785a 100644 --- a/web/apps/photos/src/services/upload/uploadService.ts +++ b/web/apps/photos/src/services/upload/uploadService.ts @@ -1,70 +1,112 @@ +import { hasFileHash } from "@/media/file"; +import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type"; +import { encodeLivePhoto } from "@/media/live-photo"; +import type { Metadata } from "@/media/types/file"; +import { ensureElectron } from "@/next/electron"; +import { basename } from "@/next/file"; import log from "@/next/log"; +import { CustomErrorMessage } from "@/next/types/ipc"; +import { ensure } from "@/utils/ensure"; +import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; import { B64EncryptionResult } from "@ente/shared/crypto/types"; import { CustomError, handleUploadError } from "@ente/shared/error"; import { Remote } from "comlink"; -import { Collection } from "types/collection"; -import { FilePublicMagicMetadataProps } from "types/file"; import { - BackupedFile, - EncryptedFile, - ExtractMetadataResult, - FileTypeInfo, - FileWithCollection, - FileWithMetadata, - Logger, - ParsedMetadataJSON, - ParsedMetadataJSONMap, - ProcessedFile, + NULL_LOCATION, + RANDOM_PERCENTAGE_PROGRESS_FOR_PUT, + UPLOAD_RESULT, +} from "constants/upload"; +import { addToCollection } from "services/collectionService"; +import { 
parseImageMetadata } from "services/exif"; +import * as ffmpeg from "services/ffmpeg"; +import { PublicUploadProps, - UploadAsset, - UploadFile, - UploadURL, - isDataStream, -} from "types/upload"; -import { getFileType } from "../typeDetectionService"; + type LivePhotoAssets, +} from "services/upload/uploadManager"; import { - encryptFile, - extractFileMetadata, - getFileSize, - getFilename, - readFile, -} from "./fileService"; + EnteFile, + MetadataFileAttributes, + S3FileAttributes, + type EncryptedEnteFile, + type FilePublicMagicMetadata, + type FilePublicMagicMetadataProps, +} from "types/file"; +import { EncryptedMagicMetadata } from "types/magicMetadata"; +import type { ParsedExtractedMetadata } from "types/metadata"; import { - clusterLivePhotoFiles, - extractLivePhotoMetadata, - getLivePhotoFileType, - getLivePhotoName, - getLivePhotoSize, - readLivePhoto, -} from "./livePhotoService"; -import { constructPublicMagicMetadata } from "./magicMetadataService"; -import { uploadStreamUsingMultipart } from "./multiPartUploadService"; + getNonEmptyMagicMetadataProps, + updateMagicMetadata, +} from "utils/magicMetadata"; +import { readStream } from "utils/native-stream"; +import * as convert from "xml-js"; +import { detectFileTypeInfoFromChunk } from "../detect-type"; +import { tryParseEpochMicrosecondsFromFileName } from "./date"; import publicUploadHttpClient from "./publicUploadHttpClient"; -import UIService from "./uiService"; +import type { ParsedMetadataJSON } from "./takeout"; +import { matchTakeoutMetadata } from "./takeout"; +import { + fallbackThumbnail, + generateThumbnailNative, + generateThumbnailWeb, +} from "./thumbnail"; +import type { UploadItem } from "./types"; import UploadHttpClient from "./uploadHttpClient"; +import type { UploadableUploadItem } from "./uploadManager"; +/** + * A readable stream for a file, and its associated size and last modified time. 
+ * + * This is the in-memory representation of the {@link UploadItem} type that we + * usually pass around. See: [Note: Reading a UploadItem] + */ +interface FileStream { + /** + * A stream of the file's contents + * + * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks + * (except the last chunk which can be smaller since a file would rarely + * align exactly to a ENCRYPTION_CHUNK_SIZE multiple). + * + * Note: A stream can only be read once! + */ + stream: ReadableStream; + /** + * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE + * sized (except the last one). + */ + chunkCount: number; + /** + * The size in bytes of the underlying file. + */ + fileSize: number; + /** + * The modification time of the file, in epoch milliseconds. + */ + lastModifiedMs: number; + /** + * Set to the underlying {@link File} when we also have access to it. + */ + file?: File; +} + +/** + * If the stream we have is more than 5 ENCRYPTION_CHUNK_SIZE chunks, then use + * multipart uploads for it, with each multipart-part containing 5 chunks. + * + * ENCRYPTION_CHUNK_SIZE is 4 MB, and the number of chunks in a single upload + * part is 5, so each part is (up to) 20 MB. 
+ */ +const multipartChunksPerPart = 5; + +/** Upload files to cloud storage */ class UploadService { private uploadURLs: UploadURL[] = []; - private parsedMetadataJSONMap: ParsedMetadataJSONMap = new Map< - string, - ParsedMetadataJSON - >(); - - private uploaderName: string; - private pendingUploadCount: number = 0; - private publicUploadProps: PublicUploadProps = undefined; - private isCFUploadProxyDisabled: boolean = false; - - init( - publicUploadProps: PublicUploadProps, - isCFUploadProxyDisabled: boolean, - ) { + init(publicUploadProps: PublicUploadProps) { this.publicUploadProps = publicUploadProps; - this.isCFUploadProxyDisabled = isCFUploadProxyDisabled; } async setFileCount(fileCount: number) { @@ -72,194 +114,18 @@ class UploadService { await this.preFetchUploadURLs(); } - setParsedMetadataJSONMap(parsedMetadataJSONMap: ParsedMetadataJSONMap) { - this.parsedMetadataJSONMap = parsedMetadataJSONMap; - } - - setUploaderName(uploaderName: string) { - this.uploaderName = uploaderName; - } - - getUploaderName() { - return this.uploaderName; - } - - getIsCFUploadProxyDisabled() { - return this.isCFUploadProxyDisabled; - } - reducePendingUploadCount() { this.pendingUploadCount--; } - getAssetSize({ isLivePhoto, file, livePhotoAssets }: UploadAsset) { - return isLivePhoto - ? getLivePhotoSize(livePhotoAssets) - : getFileSize(file); - } - - getAssetName({ isLivePhoto, file, livePhotoAssets }: UploadAsset) { - return isLivePhoto - ? getLivePhotoName(livePhotoAssets) - : getFilename(file); - } - - getAssetFileType({ isLivePhoto, file, livePhotoAssets }: UploadAsset) { - return isLivePhoto - ? getLivePhotoFileType(livePhotoAssets) - : getFileType(file); - } - - async readAsset( - fileTypeInfo: FileTypeInfo, - { isLivePhoto, file, livePhotoAssets }: UploadAsset, - ) { - return isLivePhoto - ? 
await readLivePhoto(fileTypeInfo, livePhotoAssets) - : await readFile(fileTypeInfo, file); - } - - async extractAssetMetadata( - worker: Remote, - { isLivePhoto, file, livePhotoAssets }: UploadAsset, - collectionID: number, - fileTypeInfo: FileTypeInfo, - ): Promise { - return isLivePhoto - ? extractLivePhotoMetadata( - worker, - this.parsedMetadataJSONMap, - collectionID, - fileTypeInfo, - livePhotoAssets, - ) - : await extractFileMetadata( - worker, - this.parsedMetadataJSONMap, - collectionID, - fileTypeInfo, - file, - ); - } - - clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) { - return clusterLivePhotoFiles(mediaFiles); - } - - constructPublicMagicMetadata( - publicMagicMetadataProps: FilePublicMagicMetadataProps, - ) { - return constructPublicMagicMetadata(publicMagicMetadataProps); - } - - async encryptAsset( - worker: Remote, - file: FileWithMetadata, - encryptionKey: string, - ): Promise { - return encryptFile(worker, file, encryptionKey); - } - - async uploadToBucket( - logger: Logger, - file: ProcessedFile, - ): Promise { - try { - let fileObjectKey: string = null; - logger("uploading file to bucket"); - if (isDataStream(file.file.encryptedData)) { - logger("uploading using multipart"); - fileObjectKey = await uploadStreamUsingMultipart( - logger, - file.localID, - file.file.encryptedData, - ); - logger("uploading using multipart done"); - } else { - logger("uploading using single part"); - const progressTracker = UIService.trackUploadProgress( - file.localID, - ); - const fileUploadURL = await this.getUploadURL(); - if (!this.isCFUploadProxyDisabled) { - logger("uploading using cf proxy"); - fileObjectKey = await UploadHttpClient.putFileV2( - fileUploadURL, - file.file.encryptedData as Uint8Array, - progressTracker, - ); - } else { - logger("uploading directly to s3"); - fileObjectKey = await UploadHttpClient.putFile( - fileUploadURL, - file.file.encryptedData as Uint8Array, - progressTracker, - ); - } - logger("uploading using single part 
done"); - } - logger("uploading thumbnail to bucket"); - const thumbnailUploadURL = await this.getUploadURL(); - let thumbnailObjectKey: string = null; - if (!this.isCFUploadProxyDisabled) { - thumbnailObjectKey = await UploadHttpClient.putFileV2( - thumbnailUploadURL, - file.thumbnail.encryptedData, - null, - ); - } else { - thumbnailObjectKey = await UploadHttpClient.putFile( - thumbnailUploadURL, - file.thumbnail.encryptedData, - null, - ); - } - logger("uploading thumbnail to bucket done"); - - const backupedFile: BackupedFile = { - file: { - decryptionHeader: file.file.decryptionHeader, - objectKey: fileObjectKey, - }, - thumbnail: { - decryptionHeader: file.thumbnail.decryptionHeader, - objectKey: thumbnailObjectKey, - }, - metadata: file.metadata, - pubMagicMetadata: file.pubMagicMetadata, - }; - return backupedFile; - } catch (e) { - if (e.message !== CustomError.UPLOAD_CANCELLED) { - log.error("error uploading to bucket", e); - } - throw e; - } - } - - getUploadFile( - collection: Collection, - backupedFile: BackupedFile, - fileKey: B64EncryptionResult, - ): UploadFile { - const uploadFile: UploadFile = { - collectionID: collection.id, - encryptedKey: fileKey.encryptedData, - keyDecryptionNonce: fileKey.nonce, - ...backupedFile, - }; - uploadFile; - return uploadFile; - } - - private async getUploadURL() { + async getUploadURL() { if (this.uploadURLs.length === 0 && this.pendingUploadCount) { await this.fetchUploadURLs(); } return this.uploadURLs.pop(); } - public async preFetchUploadURLs() { + private async preFetchUploadURLs() { try { await this.fetchUploadURLs(); // checking for any subscription related errors @@ -310,4 +176,1186 @@ class UploadService { } } -export default new UploadService(); +/** The singleton instance of {@link UploadService}. */ +const uploadService = new UploadService(); + +export default uploadService; + +/** + * Return the file name for the given {@link uploadItem}. 
+ */ +export const uploadItemFileName = (uploadItem: UploadItem) => { + if (uploadItem instanceof File) return uploadItem.name; + if (typeof uploadItem == "string") return basename(uploadItem); + if (Array.isArray(uploadItem)) return basename(uploadItem[1]); + return uploadItem.file.name; +}; + +/* -- Various intermediate type used during upload -- */ + +interface UploadAsset { + isLivePhoto?: boolean; + uploadItem?: UploadItem; + livePhotoAssets?: LivePhotoAssets; +} + +interface ThumbnailedFile { + fileStreamOrData: FileStream | Uint8Array; + /** The JPEG data of the generated thumbnail */ + thumbnail: Uint8Array; + /** + * `true` if this is a fallback (all black) thumbnail we're returning since + * thumbnail generation failed for some reason. + */ + hasStaticThumbnail: boolean; +} + +interface FileWithMetadata extends Omit { + metadata: Metadata; + localID: number; + pubMagicMetadata: FilePublicMagicMetadata; +} + +interface EncryptedFile { + file: ProcessedFile; + fileKey: B64EncryptionResult; +} + +interface EncryptedFileStream { + /** + * A stream of the file's encrypted contents + * + * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks + * (except the last chunk which can be smaller since a file would rarely + * align exactly to a ENCRYPTION_CHUNK_SIZE multiple). + */ + stream: ReadableStream; + /** + * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE + * sized (except the last one). 
+ */ + chunkCount: number; +} + +interface LocalFileAttributes< + T extends string | Uint8Array | EncryptedFileStream, +> { + encryptedData: T; + decryptionHeader: string; +} + +interface EncryptionResult< + T extends string | Uint8Array | EncryptedFileStream, +> { + file: LocalFileAttributes; + key: string; +} + +interface ProcessedFile { + file: LocalFileAttributes; + thumbnail: LocalFileAttributes; + metadata: LocalFileAttributes; + pubMagicMetadata: EncryptedMagicMetadata; + localID: number; +} + +export interface BackupedFile { + file: S3FileAttributes; + thumbnail: S3FileAttributes; + metadata: MetadataFileAttributes; + pubMagicMetadata: EncryptedMagicMetadata; +} + +export interface UploadFile extends BackupedFile { + collectionID: number; + encryptedKey: string; + keyDecryptionNonce: string; +} + +export interface MultipartUploadURLs { + objectKey: string; + partURLs: string[]; + completeURL: string; +} + +export interface UploadURL { + url: string; + objectKey: string; +} + +/** + * A function that can be called to obtain a "progressTracker" that then is + * directly fed to axios to both cancel the upload if needed, and update the + * progress status. + * + * Enhancement: The return value needs to be typed. + */ +type MakeProgressTracker = ( + fileLocalID: number, + percentPerPart?: number, + index?: number, +) => unknown; + +interface UploadResponse { + uploadResult: UPLOAD_RESULT; + uploadedFile?: EncryptedEnteFile | EnteFile; +} + +/** + * Upload the given {@link UploadableUploadItem} + * + * This is lower layer implementation of the upload. It is invoked by + * {@link UploadManager} after it has assembled all the relevant bits we need to + * go forth and upload. 
+ */ +export const uploader = async ( + { collection, localID, fileName, ...uploadAsset }: UploadableUploadItem, + uploaderName: string, + existingFiles: EnteFile[], + parsedMetadataJSONMap: Map, + worker: Remote, + isCFUploadProxyDisabled: boolean, + abortIfCancelled: () => void, + makeProgessTracker: MakeProgressTracker, +): Promise => { + log.info(`Uploading ${fileName}`); + try { + /* + * We read the file four times: + * 1. To determine its MIME type (only needs first few KBs). + * 2. To extract its metadata. + * 3. To calculate its hash. + * 4. To encrypt it. + * + * When we already have a File object the multiple reads are fine. + * + * When we're in the context of our desktop app and have a path, it + * might be possible to optimize further by using `ReadableStream.tee` + * to perform these steps simultaneously. However, that'll require + * restructuring the code so that these steps run in a parallel manner + * (tee will not work for strictly sequential reads of large streams). + */ + + const { fileTypeInfo, fileSize, lastModifiedMs } = + await readAssetDetails(uploadAsset); + + const maxFileSize = 4 * 1024 * 1024 * 1024; /* 4 GB */ + if (fileSize >= maxFileSize) + return { uploadResult: UPLOAD_RESULT.TOO_LARGE }; + + abortIfCancelled(); + + const { metadata, publicMagicMetadata } = await extractAssetMetadata( + uploadAsset, + fileTypeInfo, + lastModifiedMs, + collection.id, + parsedMetadataJSONMap, + worker, + ); + + const matches = existingFiles.filter((file) => + areFilesSame(file.metadata, metadata), + ); + + const anyMatch = matches?.length > 0 ? matches[0] : undefined; + + if (anyMatch) { + const matchInSameCollection = matches.find( + (f) => f.collectionID == collection.id, + ); + if (matchInSameCollection) { + return { + uploadResult: UPLOAD_RESULT.ALREADY_UPLOADED, + uploadedFile: matchInSameCollection, + }; + } else { + // Any of the matching files can be used to add a symlink. 
+ const symlink = Object.assign({}, anyMatch); + symlink.collectionID = collection.id; + await addToCollection(collection, [symlink]); + return { + uploadResult: UPLOAD_RESULT.ADDED_SYMLINK, + uploadedFile: symlink, + }; + } + } + + abortIfCancelled(); + + const { fileStreamOrData, thumbnail, hasStaticThumbnail } = + await readAsset(fileTypeInfo, uploadAsset); + + if (hasStaticThumbnail) metadata.hasStaticThumbnail = true; + + const pubMagicMetadata = await constructPublicMagicMetadata({ + ...publicMagicMetadata, + uploaderName, + }); + + abortIfCancelled(); + + const fileWithMetadata: FileWithMetadata = { + localID, + fileStreamOrData, + thumbnail, + metadata, + pubMagicMetadata, + }; + + const encryptedFile = await encryptFile( + fileWithMetadata, + collection.key, + worker, + ); + + abortIfCancelled(); + + const backupedFile = await uploadToBucket( + encryptedFile.file, + makeProgessTracker, + isCFUploadProxyDisabled, + abortIfCancelled, + ); + + const uploadedFile = await uploadService.uploadFile({ + collectionID: collection.id, + encryptedKey: encryptedFile.fileKey.encryptedData, + keyDecryptionNonce: encryptedFile.fileKey.nonce, + ...backupedFile, + }); + + return { + uploadResult: metadata.hasStaticThumbnail + ? 
UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL + : UPLOAD_RESULT.UPLOADED, + uploadedFile: uploadedFile, + }; + } catch (e) { + if (e.message == CustomError.UPLOAD_CANCELLED) { + log.info(`Upload for ${fileName} cancelled`); + } else if (e.message == CustomError.UNSUPPORTED_FILE_FORMAT) { + log.info(`Not uploading ${fileName}: unsupported file format`); + } else { + log.error(`Upload failed for ${fileName}`, e); + } + + const error = handleUploadError(e); + switch (error.message) { + case CustomError.ETAG_MISSING: + return { uploadResult: UPLOAD_RESULT.BLOCKED }; + case CustomError.UNSUPPORTED_FILE_FORMAT: + return { uploadResult: UPLOAD_RESULT.UNSUPPORTED }; + case CustomError.FILE_TOO_LARGE: + return { + uploadResult: UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE, + }; + default: + return { uploadResult: UPLOAD_RESULT.FAILED }; + } + } +}; + +/** + * Read the given file or path or zip item into an in-memory representation. + * + * [Note: Reading a UploadItem] + * + * The file can be either a web + * [File](https://developer.mozilla.org/en-US/docs/Web/API/File), the absolute + * path to a file on disk, a combination of these two, or an entry in a zip file + * on the user's local file system. + * + * tl;dr; There are four cases: + * + * 1. web / File + * 2. desktop / File (+ path) + * 3. desktop / path + * 4. desktop / ZipItem + * + * For the when and why, read on. + * + * The code that accesses files (e.g. uploads) gets invoked in two contexts: + * + * 1. web: the normal mode, when we're running as a web app in the browser. + * + * 2. desktop: when we're running inside our desktop app. + * + * In the web context, we'll always get a File, since within the browser we + * cannot programmatically construct paths to or arbitrarily access files on the + * user's file system. 
+ * + * > Note that even if we were to somehow have an absolute path at hand, we + * cannot programmatically create such File objects to arbitrary absolute + * paths on the user's local file system for security reasons. + * + * So in the web context, this will always be a File we get as a result of an + * explicit user interaction (e.g. drag and drop or using a file selector). + * + * In the desktop context, this can be either a File (+ path), or a path, or an + * entry within a zip file. + * + * 2. If the user provided us this file via some user interaction (say a drag + * and a drop), this'll still be a File. But unlike in the web context, we + * also have access to the full path of this file. + * + * 3. In addition, when running in the desktop app we have the ability to + * initiate programmatic access to absolute paths on the user's file system. For + * example, if the user asks us to watch certain folders on their disk for + * changes, we'll be able to pick up new images being added, and in such + * cases, the parameter here will be a path. Another example is when resuming + * a previously interrupted upload - we'll only have the path at hand in + * such cases, not the original File object since the app subsequently + * restarted. + * + * 4. The user might've also initiated an upload of a zip file (or we might be + * resuming one). In such cases we will get a tuple (path to the zip file on + * the local file system, and the name of the entry within that zip file). + * + * Cases 3 and 4, when we're provided a path, are simple. We don't have a choice, + * since we still cannot programmatically construct a File object (we can + * construct it on the Node.js layer, but it can't then be transferred over the + * IPC boundary). So all our operations use the path itself. + * + * Case 2 involves a choice on a use-case basis. Neither File nor the path is a + * better choice for all use cases. 
+ * + * > The advantage of the File object is that the browser has already read it + * into memory for us. The disadvantage comes in the case where we need to + * communicate with the native Node.js layer of our desktop app. Since this + * communication happens over IPC, the File's contents need to be serialized + * and copied, which is a bummer for large videos etc. + */ +const readUploadItem = async (uploadItem: UploadItem): Promise => { + let underlyingStream: ReadableStream; + let file: File | undefined; + let fileSize: number; + let lastModifiedMs: number; + + if (typeof uploadItem == "string" || Array.isArray(uploadItem)) { + const { + response, + size, + lastModifiedMs: lm, + } = await readStream(ensureElectron(), uploadItem); + underlyingStream = response.body; + fileSize = size; + lastModifiedMs = lm; + } else { + if (uploadItem instanceof File) { + file = uploadItem; + } else { + file = uploadItem.file; + } + underlyingStream = file.stream(); + fileSize = file.size; + lastModifiedMs = file.lastModified; + } + + const N = ENCRYPTION_CHUNK_SIZE; + const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE); + + // Pipe the underlying stream through a transformer that emits + // ENCRYPTION_CHUNK_SIZE-ed chunks (except the last one, which can be + // smaller). 
+ let pending: Uint8Array | undefined; + const transformer = new TransformStream({ + async transform( + chunk: Uint8Array, + controller: TransformStreamDefaultController, + ) { + let next: Uint8Array; + if (pending) { + next = new Uint8Array(pending.length + chunk.length); + next.set(pending); + next.set(chunk, pending.length); + pending = undefined; + } else { + next = chunk; + } + while (next.length >= N) { + controller.enqueue(next.slice(0, N)); + next = next.slice(N); + } + if (next.length) pending = next; + }, + flush(controller: TransformStreamDefaultController) { + if (pending) controller.enqueue(pending); + }, + }); + + const stream = underlyingStream.pipeThrough(transformer); + + return { stream, chunkCount, fileSize, lastModifiedMs, file }; +}; + +interface ReadAssetDetailsResult { + fileTypeInfo: FileTypeInfo; + fileSize: number; + lastModifiedMs: number; +} + +/** + * Read the associated file(s) to determine the type, size and last modified + * time of the given {@link asset}. + */ +const readAssetDetails = async ({ + isLivePhoto, + livePhotoAssets, + uploadItem, +}: UploadAsset): Promise => + isLivePhoto + ? readLivePhotoDetails(livePhotoAssets) + : readImageOrVideoDetails(uploadItem); + +const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => { + const img = await readImageOrVideoDetails(image); + const vid = await readImageOrVideoDetails(video); + + return { + fileTypeInfo: { + fileType: FILE_TYPE.LIVE_PHOTO, + extension: `${img.fileTypeInfo.extension}+${vid.fileTypeInfo.extension}`, + imageType: img.fileTypeInfo.extension, + videoType: vid.fileTypeInfo.extension, + }, + fileSize: img.fileSize + vid.fileSize, + lastModifiedMs: img.lastModifiedMs, + }; +}; + +/** + * Read the beginning of the given file (or its path), or use its filename as a + * fallback, to determine its MIME type. From that, construct and return a + * {@link FileTypeInfo}. 
+ * + * While we're at it, also return the size of the file, and its last modified + * time (expressed as epoch milliseconds). + * + * @param uploadItem See: [Note: Reading a UploadItem] + */ +const readImageOrVideoDetails = async (uploadItem: UploadItem) => { + const { stream, fileSize, lastModifiedMs } = + await readUploadItem(uploadItem); + + const fileTypeInfo = await detectFileTypeInfoFromChunk(async () => { + const reader = stream.getReader(); + const chunk = ensure((await reader.read()).value); + await reader.cancel(); + return chunk; + }, uploadItemFileName(uploadItem)); + + return { fileTypeInfo, fileSize, lastModifiedMs }; +}; + +/** + * Read the entirety of a readable stream. + * + * It is not recommended to use this for large (say, multi-hundred MB) files. It + * is provided as a syntactic shortcut for cases where we already know that the + * size of the stream will be reasonable enough to be read in its entirety + * without us running out of memory. + */ +const readEntireStream = async (stream: ReadableStream) => + new Uint8Array(await new Response(stream).arrayBuffer()); + +interface ExtractAssetMetadataResult { + metadata: Metadata; + publicMagicMetadata: FilePublicMagicMetadataProps; +} + +/** + * Compute the hash, extract EXIF or other metadata, and merge in data from the + * {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum. + */ +const extractAssetMetadata = async ( + { isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset, + fileTypeInfo: FileTypeInfo, + lastModifiedMs: number, + collectionID: number, + parsedMetadataJSONMap: Map, + worker: Remote, +): Promise => + isLivePhoto + ? 
await extractLivePhotoMetadata( + livePhotoAssets, + fileTypeInfo, + lastModifiedMs, + collectionID, + parsedMetadataJSONMap, + worker, + ) + : await extractImageOrVideoMetadata( + uploadItem, + fileTypeInfo, + lastModifiedMs, + collectionID, + parsedMetadataJSONMap, + worker, + ); + +const extractLivePhotoMetadata = async ( + livePhotoAssets: LivePhotoAssets, + fileTypeInfo: FileTypeInfo, + lastModifiedMs: number, + collectionID: number, + parsedMetadataJSONMap: Map, + worker: Remote, +) => { + const imageFileTypeInfo: FileTypeInfo = { + fileType: FILE_TYPE.IMAGE, + extension: fileTypeInfo.imageType, + }; + const { metadata: imageMetadata, publicMagicMetadata } = + await extractImageOrVideoMetadata( + livePhotoAssets.image, + imageFileTypeInfo, + lastModifiedMs, + collectionID, + parsedMetadataJSONMap, + worker, + ); + + const videoHash = await computeHash(livePhotoAssets.video, worker); + + return { + metadata: { + ...imageMetadata, + title: uploadItemFileName(livePhotoAssets.image), + fileType: FILE_TYPE.LIVE_PHOTO, + imageHash: imageMetadata.hash, + videoHash: videoHash, + hash: undefined, + }, + publicMagicMetadata, + }; +}; + +const extractImageOrVideoMetadata = async ( + uploadItem: UploadItem, + fileTypeInfo: FileTypeInfo, + lastModifiedMs: number, + collectionID: number, + parsedMetadataJSONMap: Map, + worker: Remote, +) => { + const fileName = uploadItemFileName(uploadItem); + const { fileType } = fileTypeInfo; + + let extractedMetadata: ParsedExtractedMetadata; + if (fileType === FILE_TYPE.IMAGE) { + extractedMetadata = + (await tryExtractImageMetadata( + uploadItem, + fileTypeInfo, + lastModifiedMs, + )) ?? NULL_EXTRACTED_METADATA; + } else if (fileType === FILE_TYPE.VIDEO) { + extractedMetadata = + (await tryExtractVideoMetadata(uploadItem)) ?? 
+ NULL_EXTRACTED_METADATA; + } else { + throw new Error(`Unexpected file type ${fileType} for ${uploadItem}`); + } + + const hash = await computeHash(uploadItem, worker); + + const modificationTime = lastModifiedMs * 1000; + const creationTime = + extractedMetadata.creationTime ?? + tryParseEpochMicrosecondsFromFileName(fileName) ?? + modificationTime; + + const metadata: Metadata = { + title: fileName, + creationTime, + modificationTime, + latitude: extractedMetadata.location.latitude, + longitude: extractedMetadata.location.longitude, + fileType, + hash, + }; + + const publicMagicMetadata: FilePublicMagicMetadataProps = { + w: extractedMetadata.width, + h: extractedMetadata.height, + }; + + const takeoutMetadata = matchTakeoutMetadata( + fileName, + collectionID, + parsedMetadataJSONMap, + ); + + if (takeoutMetadata) + for (const [key, value] of Object.entries(takeoutMetadata)) + if (value) metadata[key] = value; + + return { metadata, publicMagicMetadata }; +}; + +const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = { + location: { ...NULL_LOCATION }, + creationTime: null, + width: null, + height: null, +}; + +async function tryExtractImageMetadata( + uploadItem: UploadItem, + fileTypeInfo: FileTypeInfo, + lastModifiedMs: number, +): Promise { + let file: File; + if (typeof uploadItem == "string" || Array.isArray(uploadItem)) { + // The library we use for extracting EXIF from images, exifr, doesn't + // support streams. But unlike videos, for images it is reasonable to + // read the entire stream into memory here. + const { response } = await readStream(ensureElectron(), uploadItem); + const path = typeof uploadItem == "string" ? 
uploadItem : uploadItem[1]; + file = new File([await response.arrayBuffer()], basename(path), { + lastModified: lastModifiedMs, + }); + } else if (uploadItem instanceof File) { + file = uploadItem; + } else { + file = uploadItem.file; + } + + try { + return await parseImageMetadata(file, fileTypeInfo); + } catch (e) { + log.error(`Failed to extract image metadata for ${uploadItem}`, e); + return undefined; + } +} + +const tryExtractVideoMetadata = async (uploadItem: UploadItem) => { + try { + return await ffmpeg.extractVideoMetadata(uploadItem); + } catch (e) { + log.error(`Failed to extract video metadata for ${uploadItem}`, e); + return undefined; + } +}; + +const computeHash = async ( + uploadItem: UploadItem, + worker: Remote, +) => { + const { stream, chunkCount } = await readUploadItem(uploadItem); + const hashState = await worker.initChunkHashing(); + + const streamReader = stream.getReader(); + for (let i = 0; i < chunkCount; i++) { + const { done, value: chunk } = await streamReader.read(); + if (done) throw new Error("Less chunks than expected"); + await worker.hashFileChunk(hashState, Uint8Array.from(chunk)); + } + + const { done } = await streamReader.read(); + if (!done) throw new Error("More chunks than expected"); + return await worker.completeChunkHashing(hashState); +}; + +/** + * Return true if the two files, as represented by their metadata, are same. + * + * Note that the metadata includes the hash of the file's contents (when + * available), so this also in effect compares the contents of the files, not + * just the "meta" information about them. + */ +const areFilesSame = (f: Metadata, g: Metadata) => + hasFileHash(f) && hasFileHash(g) + ? 
areFilesSameHash(f, g) + : areFilesSameNoHash(f, g); + +const areFilesSameHash = (f: Metadata, g: Metadata) => { + if (f.fileType !== g.fileType || f.title !== g.title) { + return false; + } + if (f.fileType === FILE_TYPE.LIVE_PHOTO) { + return f.imageHash === g.imageHash && f.videoHash === g.videoHash; + } else { + return f.hash === g.hash; + } +}; + +/** + * Older files that were uploaded before we introduced hashing will not have + * hashes, so retain and use the logic we used back then for such files. + * + * Deprecation notice April 2024: Note that hashing was introduced very early + * (years ago), so the chance of us finding files without hashes is rare. And + * even in these cases, the worst that'll happen is that a duplicate file would + * get uploaded which can later be deduped. So we can get rid of this case at + * some point (e.g. the mobile app doesn't do this extra check, just uploads). + */ +const areFilesSameNoHash = (f: Metadata, g: Metadata) => { + /* + * The maximum difference in the creation/modification times of two similar + * files is set to 1 second. This is because while uploading files in the + * web - browsers and users could have set reduced precision of file times + * to prevent timing attacks and fingerprinting. + * + * See: + * https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision + */ + const oneSecond = 1e6; + return ( + f.fileType == g.fileType && + f.title == g.title && + Math.abs(f.creationTime - g.creationTime) < oneSecond && + Math.abs(f.modificationTime - g.modificationTime) < oneSecond + ); +}; + +const readAsset = async ( + fileTypeInfo: FileTypeInfo, + { isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset, +): Promise => + isLivePhoto + ? 
await readLivePhoto(livePhotoAssets, fileTypeInfo) + : await readImageOrVideo(uploadItem, fileTypeInfo); + +const readLivePhoto = async ( + livePhotoAssets: LivePhotoAssets, + fileTypeInfo: FileTypeInfo, +) => { + const { + fileStreamOrData: imageFileStreamOrData, + thumbnail, + hasStaticThumbnail, + } = await withThumbnail( + livePhotoAssets.image, + { + extension: fileTypeInfo.imageType, + fileType: FILE_TYPE.IMAGE, + }, + await readUploadItem(livePhotoAssets.image), + ); + const videoFileStreamOrData = await readUploadItem(livePhotoAssets.video); + + // The JS zip library that encodeLivePhoto uses does not support + // ReadableStreams, so pass the file (blob) if we have one, otherwise read + // the entire stream into memory and pass the resultant data. + // + // This is a reasonable behaviour since the videos corresponding to live + // photos are only a couple of seconds long (we've already done a pre-flight + // check during areLivePhotoAssets to ensure their size is small). + const fileOrData = async (sd: FileStream | Uint8Array) => { + const fos = async ({ file, stream }: FileStream) => + file ? file : await readEntireStream(stream); + return sd instanceof Uint8Array ? 
sd : fos(sd); + }; + + return { + fileStreamOrData: await encodeLivePhoto({ + imageFileName: uploadItemFileName(livePhotoAssets.image), + imageFileOrData: await fileOrData(imageFileStreamOrData), + videoFileName: uploadItemFileName(livePhotoAssets.video), + videoFileOrData: await fileOrData(videoFileStreamOrData), + }), + thumbnail, + hasStaticThumbnail, + }; +}; + +const readImageOrVideo = async ( + uploadItem: UploadItem, + fileTypeInfo: FileTypeInfo, +) => { + const fileStream = await readUploadItem(uploadItem); + return withThumbnail(uploadItem, fileTypeInfo, fileStream); +}; + +// TODO(MR): Merge with the uploader +class ModuleState { + /** + * This will be set to true if we get an error from the Node.js side of our + * desktop app telling us that native image thumbnail generation is not + * available for the current OS/arch combination. + * + * That way, we can stop pestering it again and again (saving an IPC + * round-trip). + * + * Note the double negative when it is used. + */ + isNativeImageThumbnailGenerationNotAvailable = false; +} + +const moduleState = new ModuleState(); + +/** + * Augment the given {@link dataOrStream} with thumbnail information. + * + * This is a companion method for {@link readUploadItem}, and can be used to + * convert the result of {@link readUploadItem} into an {@link ThumbnailedFile}. + * + * @param uploadItem The {@link UploadItem} where the given {@link fileStream} + * came from. + * + * Note: The `fileStream` in the returned {@link ThumbnailedFile} may be + * different from the one passed to the function. 
+ */ +const withThumbnail = async ( + uploadItem: UploadItem, + fileTypeInfo: FileTypeInfo, + fileStream: FileStream, +): Promise => { + let fileData: Uint8Array | undefined; + let thumbnail: Uint8Array | undefined; + let hasStaticThumbnail = false; + + const electron = globalThis.electron; + const notAvailable = + fileTypeInfo.fileType == FILE_TYPE.IMAGE && + moduleState.isNativeImageThumbnailGenerationNotAvailable; + + // 1. Native thumbnail generation using items's (effective) path. + if (electron && !notAvailable && !(uploadItem instanceof File)) { + try { + thumbnail = await generateThumbnailNative( + electron, + uploadItem, + fileTypeInfo, + ); + } catch (e) { + if (e.message.endsWith(CustomErrorMessage.NotAvailable)) { + moduleState.isNativeImageThumbnailGenerationNotAvailable = true; + } else { + log.error("Native thumbnail generation failed", e); + } + } + } + + if (!thumbnail) { + let blob: Blob | undefined; + if (uploadItem instanceof File) { + // 2. Browser based thumbnail generation for File (blobs). + blob = uploadItem; + } else { + // 3. Browser based thumbnail generation for paths. + // + // There are two reasons why we could get here: + // + // - We're running under Electron, but thumbnail generation is not + // available. This is currently only a specific scenario for image + // files on Windows. + // + // - We're running under the Electron, but the thumbnail generation + // otherwise failed for some exception. + // + // The fallback in this case involves reading the entire stream into + // memory, and passing that data across the IPC boundary in a single + // go (i.e. not in a streaming manner). This is risky for videos of + // unbounded sizes, plus we shouldn't even be getting here unless + // something went wrong. + // + // So instead of trying to cater for arbitrary exceptions, we only + // run this fallback to cover for the case where thumbnail + // generation was not available for an image file on Windows. 
+ // If/when we add support of native thumbnailing on Windows too, + // this entire branch can be removed. + + if (fileTypeInfo.fileType == FILE_TYPE.IMAGE) { + const data = await readEntireStream(fileStream.stream); + blob = new Blob([data]); + + // The Readable stream cannot be read twice, so use the data + // directly for subsequent steps. + fileData = data; + } else { + log.warn( + `Not using browser based thumbnail generation fallback for video at path ${uploadItem}`, + ); + } + } + + try { + if (blob) + thumbnail = await generateThumbnailWeb(blob, fileTypeInfo); + } catch (e) { + log.error("Web thumbnail creation failed", e); + } + } + + if (!thumbnail) { + thumbnail = fallbackThumbnail(); + hasStaticThumbnail = true; + } + + return { + fileStreamOrData: fileData ?? fileStream, + thumbnail, + hasStaticThumbnail, + }; +}; + +const constructPublicMagicMetadata = async ( + publicMagicMetadataProps: FilePublicMagicMetadataProps, +): Promise => { + const nonEmptyPublicMagicMetadataProps = getNonEmptyMagicMetadataProps( + publicMagicMetadataProps, + ); + + if (Object.values(nonEmptyPublicMagicMetadataProps)?.length === 0) { + return null; + } + return await updateMagicMetadata(publicMagicMetadataProps); +}; + +const encryptFile = async ( + file: FileWithMetadata, + encryptionKey: string, + worker: Remote, +): Promise => { + const { key: fileKey, file: encryptedFiledata } = await encryptFiledata( + file.fileStreamOrData, + worker, + ); + + const { file: encryptedThumbnail } = await worker.encryptThumbnail( + file.thumbnail, + fileKey, + ); + + const { file: encryptedMetadata } = await worker.encryptMetadata( + file.metadata, + fileKey, + ); + + let encryptedPubMagicMetadata: EncryptedMagicMetadata; + if (file.pubMagicMetadata) { + const { file: encryptedPubMagicMetadataData } = + await worker.encryptMetadata(file.pubMagicMetadata.data, fileKey); + encryptedPubMagicMetadata = { + version: file.pubMagicMetadata.version, + count: file.pubMagicMetadata.count, + data: 
encryptedPubMagicMetadataData.encryptedData, + header: encryptedPubMagicMetadataData.decryptionHeader, + }; + } + + const encryptedKey = await worker.encryptToB64(fileKey, encryptionKey); + + const result: EncryptedFile = { + file: { + file: encryptedFiledata, + thumbnail: encryptedThumbnail, + metadata: encryptedMetadata, + pubMagicMetadata: encryptedPubMagicMetadata, + localID: file.localID, + }, + fileKey: encryptedKey, + }; + return result; +}; + +const encryptFiledata = async ( + fileStreamOrData: FileStream | Uint8Array, + worker: Remote, +): Promise> => + fileStreamOrData instanceof Uint8Array + ? await worker.encryptFile(fileStreamOrData) + : await encryptFileStream(fileStreamOrData, worker); + +const encryptFileStream = async ( + fileData: FileStream, + worker: Remote, +) => { + const { stream, chunkCount } = fileData; + const fileStreamReader = stream.getReader(); + const { key, decryptionHeader, pushState } = + await worker.initChunkEncryption(); + const ref = { pullCount: 1 }; + const encryptedFileStream = new ReadableStream({ + async pull(controller) { + const { value } = await fileStreamReader.read(); + const encryptedFileChunk = await worker.encryptFileChunk( + value, + pushState, + ref.pullCount === chunkCount, + ); + controller.enqueue(encryptedFileChunk); + if (ref.pullCount === chunkCount) { + controller.close(); + } + ref.pullCount++; + }, + }); + return { + key, + file: { + decryptionHeader, + encryptedData: { stream: encryptedFileStream, chunkCount }, + }, + }; +}; + +const uploadToBucket = async ( + file: ProcessedFile, + makeProgessTracker: MakeProgressTracker, + isCFUploadProxyDisabled: boolean, + abortIfCancelled: () => void, +): Promise => { + try { + let fileObjectKey: string = null; + + const encryptedData = file.file.encryptedData; + if ( + !(encryptedData instanceof Uint8Array) && + encryptedData.chunkCount >= multipartChunksPerPart + ) { + // We have a stream, and it is more than multipartChunksPerPart + // chunks long, so use a 
multipart upload to upload it. + fileObjectKey = await uploadStreamUsingMultipart( + file.localID, + encryptedData, + makeProgessTracker, + isCFUploadProxyDisabled, + abortIfCancelled, + ); + } else { + const data = + encryptedData instanceof Uint8Array + ? encryptedData + : await readEntireStream(encryptedData.stream); + + const progressTracker = makeProgessTracker(file.localID); + const fileUploadURL = await uploadService.getUploadURL(); + if (!isCFUploadProxyDisabled) { + fileObjectKey = await UploadHttpClient.putFileV2( + fileUploadURL, + data, + progressTracker, + ); + } else { + fileObjectKey = await UploadHttpClient.putFile( + fileUploadURL, + data, + progressTracker, + ); + } + } + const thumbnailUploadURL = await uploadService.getUploadURL(); + let thumbnailObjectKey: string = null; + if (!isCFUploadProxyDisabled) { + thumbnailObjectKey = await UploadHttpClient.putFileV2( + thumbnailUploadURL, + file.thumbnail.encryptedData, + null, + ); + } else { + thumbnailObjectKey = await UploadHttpClient.putFile( + thumbnailUploadURL, + file.thumbnail.encryptedData, + null, + ); + } + + const backupedFile: BackupedFile = { + file: { + decryptionHeader: file.file.decryptionHeader, + objectKey: fileObjectKey, + }, + thumbnail: { + decryptionHeader: file.thumbnail.decryptionHeader, + objectKey: thumbnailObjectKey, + }, + metadata: file.metadata, + pubMagicMetadata: file.pubMagicMetadata, + }; + return backupedFile; + } catch (e) { + if (e.message !== CustomError.UPLOAD_CANCELLED) { + log.error("Error when uploading to bucket", e); + } + throw e; + } +}; + +interface PartEtag { + PartNumber: number; + ETag: string; +} + +async function uploadStreamUsingMultipart( + fileLocalID: number, + dataStream: EncryptedFileStream, + makeProgessTracker: MakeProgressTracker, + isCFUploadProxyDisabled: boolean, + abortIfCancelled: () => void, +) { + const uploadPartCount = Math.ceil( + dataStream.chunkCount / multipartChunksPerPart, + ); + const multipartUploadURLs = + await 
uploadService.fetchMultipartUploadURLs(uploadPartCount); + + const { stream } = dataStream; + + const streamReader = stream.getReader(); + const percentPerPart = + RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount; + const partEtags: PartEtag[] = []; + for (const [ + index, + fileUploadURL, + ] of multipartUploadURLs.partURLs.entries()) { + abortIfCancelled(); + + const uploadChunk = await combineChunksToFormUploadPart(streamReader); + const progressTracker = makeProgessTracker( + fileLocalID, + percentPerPart, + index, + ); + let eTag = null; + if (!isCFUploadProxyDisabled) { + eTag = await UploadHttpClient.putFilePartV2( + fileUploadURL, + uploadChunk, + progressTracker, + ); + } else { + eTag = await UploadHttpClient.putFilePart( + fileUploadURL, + uploadChunk, + progressTracker, + ); + } + partEtags.push({ PartNumber: index + 1, ETag: eTag }); + } + const { done } = await streamReader.read(); + if (!done) throw new Error("More chunks than expected"); + + const completeURL = multipartUploadURLs.completeURL; + const cBody = convert.js2xml( + { CompleteMultipartUpload: { Part: partEtags } }, + { compact: true, ignoreComment: true, spaces: 4 }, + ); + if (!isCFUploadProxyDisabled) { + await UploadHttpClient.completeMultipartUploadV2(completeURL, cBody); + } else { + await UploadHttpClient.completeMultipartUpload(completeURL, cBody); + } + + return multipartUploadURLs.objectKey; +} + +async function combineChunksToFormUploadPart( + streamReader: ReadableStreamDefaultReader, +) { + const combinedChunks = []; + for (let i = 0; i < multipartChunksPerPart; i++) { + const { done, value: chunk } = await streamReader.read(); + if (done) { + break; + } + for (let index = 0; index < chunk.length; index++) { + combinedChunks.push(chunk[index]); + } + } + return Uint8Array.from(combinedChunks); +} diff --git a/web/apps/photos/src/services/upload/uploader.ts b/web/apps/photos/src/services/upload/uploader.ts deleted file mode 100644 index 5fb164c625..0000000000 --- 
a/web/apps/photos/src/services/upload/uploader.ts +++ /dev/null @@ -1,204 +0,0 @@ -import { convertBytesToHumanReadable } from "@/next/file"; -import log from "@/next/log"; -import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import { CustomError, handleUploadError } from "@ente/shared/error"; -import { sleep } from "@ente/shared/utils"; -import { Remote } from "comlink"; -import { MAX_FILE_SIZE_SUPPORTED, UPLOAD_RESULT } from "constants/upload"; -import { addToCollection } from "services/collectionService"; -import { EnteFile } from "types/file"; -import { - BackupedFile, - FileTypeInfo, - FileWithCollection, - FileWithMetadata, - Logger, - UploadFile, -} from "types/upload"; -import { findMatchingExistingFiles } from "utils/upload"; -import UIService from "./uiService"; -import uploadCancelService from "./uploadCancelService"; -import { - default as UploadService, - default as uploadService, -} from "./uploadService"; - -interface UploadResponse { - fileUploadResult: UPLOAD_RESULT; - uploadedFile?: EnteFile; -} - -export default async function uploader( - worker: Remote, - existingFiles: EnteFile[], - fileWithCollection: FileWithCollection, - uploaderName: string, -): Promise { - const { collection, localID, ...uploadAsset } = fileWithCollection; - const fileNameSize = `${UploadService.getAssetName( - fileWithCollection, - )}_${convertBytesToHumanReadable(UploadService.getAssetSize(uploadAsset))}`; - - log.info(`uploader called for ${fileNameSize}`); - UIService.setFileProgress(localID, 0); - await sleep(0); - let fileTypeInfo: FileTypeInfo; - let fileSize: number; - try { - fileSize = UploadService.getAssetSize(uploadAsset); - if (fileSize >= MAX_FILE_SIZE_SUPPORTED) { - return { fileUploadResult: UPLOAD_RESULT.TOO_LARGE }; - } - log.info(`getting filetype for ${fileNameSize}`); - fileTypeInfo = await UploadService.getAssetFileType(uploadAsset); - log.info( - `got filetype for ${fileNameSize} - ${JSON.stringify(fileTypeInfo)}`, - 
); - - log.info(`extracting metadata ${fileNameSize}`); - const { metadata, publicMagicMetadata } = - await UploadService.extractAssetMetadata( - worker, - uploadAsset, - collection.id, - fileTypeInfo, - ); - - const matchingExistingFiles = findMatchingExistingFiles( - existingFiles, - metadata, - ); - log.debug( - () => - `matchedFileList: ${matchingExistingFiles - .map((f) => `${f.id}-${f.metadata.title}`) - .join(",")}`, - ); - if (matchingExistingFiles?.length) { - const matchingExistingFilesCollectionIDs = - matchingExistingFiles.map((e) => e.collectionID); - log.debug( - () => - `matched file collectionIDs:${matchingExistingFilesCollectionIDs} - and collectionID:${collection.id}`, - ); - if (matchingExistingFilesCollectionIDs.includes(collection.id)) { - log.info( - `file already present in the collection , skipped upload for ${fileNameSize}`, - ); - const sameCollectionMatchingExistingFile = - matchingExistingFiles.find( - (f) => f.collectionID === collection.id, - ); - return { - fileUploadResult: UPLOAD_RESULT.ALREADY_UPLOADED, - uploadedFile: sameCollectionMatchingExistingFile, - }; - } else { - log.info( - `same file in ${matchingExistingFilesCollectionIDs.length} collection found for ${fileNameSize} ,adding symlink`, - ); - // any of the matching file can used to add a symlink - const resultFile = Object.assign({}, matchingExistingFiles[0]); - resultFile.collectionID = collection.id; - await addToCollection(collection, [resultFile]); - return { - fileUploadResult: UPLOAD_RESULT.ADDED_SYMLINK, - uploadedFile: resultFile, - }; - } - } - if (uploadCancelService.isUploadCancelationRequested()) { - throw Error(CustomError.UPLOAD_CANCELLED); - } - log.info(`reading asset ${fileNameSize}`); - - const file = await UploadService.readAsset(fileTypeInfo, uploadAsset); - - if (file.hasStaticThumbnail) { - metadata.hasStaticThumbnail = true; - } - - const pubMagicMetadata = - await uploadService.constructPublicMagicMetadata({ - ...publicMagicMetadata, - 
uploaderName, - }); - - const fileWithMetadata: FileWithMetadata = { - localID, - filedata: file.filedata, - thumbnail: file.thumbnail, - metadata, - pubMagicMetadata, - }; - - if (uploadCancelService.isUploadCancelationRequested()) { - throw Error(CustomError.UPLOAD_CANCELLED); - } - log.info(`encryptAsset ${fileNameSize}`); - const encryptedFile = await UploadService.encryptAsset( - worker, - fileWithMetadata, - collection.key, - ); - - if (uploadCancelService.isUploadCancelationRequested()) { - throw Error(CustomError.UPLOAD_CANCELLED); - } - log.info(`uploadToBucket ${fileNameSize}`); - const logger: Logger = (message: string) => { - log.info(message, `fileNameSize: ${fileNameSize}`); - }; - const backupedFile: BackupedFile = await UploadService.uploadToBucket( - logger, - encryptedFile.file, - ); - - const uploadFile: UploadFile = UploadService.getUploadFile( - collection, - backupedFile, - encryptedFile.fileKey, - ); - log.info(`uploading file to server ${fileNameSize}`); - - const uploadedFile = await UploadService.uploadFile(uploadFile); - - log.info(`${fileNameSize} successfully uploaded`); - - return { - fileUploadResult: metadata.hasStaticThumbnail - ? 
UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL - : UPLOAD_RESULT.UPLOADED, - uploadedFile: uploadedFile, - }; - } catch (e) { - log.info(`upload failed for ${fileNameSize} ,error: ${e.message}`); - if ( - e.message !== CustomError.UPLOAD_CANCELLED && - e.message !== CustomError.UNSUPPORTED_FILE_FORMAT - ) { - log.error( - `file upload failed - ${JSON.stringify({ - fileFormat: fileTypeInfo?.exactType, - fileSize: convertBytesToHumanReadable(fileSize), - })}`, - e, - ); - } - const error = handleUploadError(e); - switch (error.message) { - case CustomError.ETAG_MISSING: - return { fileUploadResult: UPLOAD_RESULT.BLOCKED }; - case CustomError.UNSUPPORTED_FILE_FORMAT: - return { fileUploadResult: UPLOAD_RESULT.UNSUPPORTED }; - case CustomError.FILE_TOO_LARGE: - return { - fileUploadResult: - UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE, - }; - default: - return { fileUploadResult: UPLOAD_RESULT.FAILED }; - } - } -} diff --git a/web/apps/photos/src/services/upload/videoMetadataService.ts b/web/apps/photos/src/services/upload/videoMetadataService.ts deleted file mode 100644 index 947bd538c0..0000000000 --- a/web/apps/photos/src/services/upload/videoMetadataService.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { getFileNameSize } from "@/next/file"; -import log from "@/next/log"; -import { NULL_EXTRACTED_METADATA } from "constants/upload"; -import * as ffmpegService from "services/ffmpeg/ffmpegService"; -import { ElectronFile } from "types/upload"; - -export async function getVideoMetadata(file: File | ElectronFile) { - let videoMetadata = NULL_EXTRACTED_METADATA; - try { - log.info(`getVideoMetadata called for ${getFileNameSize(file)}`); - videoMetadata = await ffmpegService.extractVideoMetadata(file); - log.info( - `videoMetadata successfully extracted ${getFileNameSize(file)}`, - ); - } catch (e) { - log.error("failed to get video metadata", e); - log.info( - `videoMetadata extracted failed ${getFileNameSize(file)} ,${ - e.message - } `, - ); - } - - return videoMetadata; -} 
diff --git a/web/apps/photos/src/services/wasm/ffmpeg.ts b/web/apps/photos/src/services/wasm/ffmpeg.ts deleted file mode 100644 index 10c5a5c05c..0000000000 --- a/web/apps/photos/src/services/wasm/ffmpeg.ts +++ /dev/null @@ -1,115 +0,0 @@ -import log from "@/next/log"; -import { promiseWithTimeout } from "@ente/shared/utils"; -import QueueProcessor from "@ente/shared/utils/queueProcessor"; -import { generateTempName } from "@ente/shared/utils/temp"; -import { createFFmpeg, FFmpeg } from "ffmpeg-wasm"; -import { getUint8ArrayView } from "services/readerService"; - -const INPUT_PATH_PLACEHOLDER = "INPUT"; -const FFMPEG_PLACEHOLDER = "FFMPEG"; -const OUTPUT_PATH_PLACEHOLDER = "OUTPUT"; - -const FFMPEG_EXECUTION_WAIT_TIME = 30 * 1000; - -export class WasmFFmpeg { - private ffmpeg: FFmpeg; - private ready: Promise = null; - private ffmpegTaskQueue = new QueueProcessor(); - - constructor() { - this.ffmpeg = createFFmpeg({ - corePath: "/js/ffmpeg/ffmpeg-core.js", - mt: false, - }); - - this.ready = this.init(); - } - - private async init() { - if (!this.ffmpeg.isLoaded()) { - await this.ffmpeg.load(); - } - } - - async run( - cmd: string[], - inputFile: File, - outputFileName: string, - dontTimeout = false, - ) { - const response = this.ffmpegTaskQueue.queueUpRequest(() => { - if (dontTimeout) { - return this.execute(cmd, inputFile, outputFileName); - } else { - return promiseWithTimeout( - this.execute(cmd, inputFile, outputFileName), - FFMPEG_EXECUTION_WAIT_TIME, - ); - } - }); - try { - return await response.promise; - } catch (e) { - log.error("ffmpeg run failed", e); - throw e; - } - } - - private async execute( - cmd: string[], - inputFile: File, - outputFileName: string, - ) { - let tempInputFilePath: string; - let tempOutputFilePath: string; - try { - await this.ready; - const extension = getFileExtension(inputFile.name); - const tempNameSuffix = extension ? 
`input.${extension}` : "input"; - tempInputFilePath = `${generateTempName(10, tempNameSuffix)}`; - this.ffmpeg.FS( - "writeFile", - tempInputFilePath, - await getUint8ArrayView(inputFile), - ); - tempOutputFilePath = `${generateTempName(10, outputFileName)}`; - - cmd = cmd.map((cmdPart) => { - if (cmdPart === FFMPEG_PLACEHOLDER) { - return ""; - } else if (cmdPart === INPUT_PATH_PLACEHOLDER) { - return tempInputFilePath; - } else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) { - return tempOutputFilePath; - } else { - return cmdPart; - } - }); - log.info(`${cmd}`); - await this.ffmpeg.run(...cmd); - return new File( - [this.ffmpeg.FS("readFile", tempOutputFilePath)], - outputFileName, - ); - } finally { - try { - this.ffmpeg.FS("unlink", tempInputFilePath); - } catch (e) { - log.error("unlink input file failed", e); - } - try { - this.ffmpeg.FS("unlink", tempOutputFilePath); - } catch (e) { - log.error("unlink output file failed", e); - } - } - } -} - -function getFileExtension(filename: string) { - const lastDotPosition = filename.lastIndexOf("."); - if (lastDotPosition === -1) return null; - else { - return filename.slice(lastDotPosition + 1); - } -} diff --git a/web/apps/photos/src/services/watch.ts b/web/apps/photos/src/services/watch.ts index 2d5ef02287..82d3b2f4ec 100644 --- a/web/apps/photos/src/services/watch.ts +++ b/web/apps/photos/src/services/watch.ts @@ -4,283 +4,330 @@ */ import { ensureElectron } from "@/next/electron"; +import { basename, dirname } from "@/next/file"; import log from "@/next/log"; -import { UPLOAD_RESULT, UPLOAD_STRATEGY } from "constants/upload"; +import type { + CollectionMapping, + FolderWatch, + FolderWatchSyncedFile, +} from "@/next/types/ipc"; +import { ensureString } from "@/utils/ensure"; +import { UPLOAD_RESULT } from "constants/upload"; import debounce from "debounce"; -import uploadManager from "services/upload/uploadManager"; +import uploadManager, { + type UploadItemWithCollection, +} from "services/upload/uploadManager"; 
import { Collection } from "types/collection"; import { EncryptedEnteFile } from "types/file"; -import { ElectronFile, FileWithCollection } from "types/upload"; -import { - EventQueueItem, - WatchMapping, - WatchMappingSyncedFile, -} from "types/watchFolder"; import { groupFilesBasedOnCollectionID } from "utils/file"; -import { isSystemFile } from "utils/upload"; import { removeFromCollection } from "./collectionService"; import { getLocalFiles } from "./fileService"; -class WatchFolderService { - private eventQueue: EventQueueItem[] = []; - private currentEvent: EventQueueItem; - private currentlySyncedMapping: WatchMapping; - private trashingDirQueue: string[] = []; - private isEventRunning: boolean = false; - private uploadRunning: boolean = false; - private filePathToUploadedFileIDMap = new Map(); - private unUploadableFilePaths = new Set(); +/** + * Watch for file system folders and automatically update the corresponding Ente + * collections. + * + * This class relies on APIs exposed over the Electron IPC layer, and thus only + * works when we're running inside our desktop app. + */ +class FolderWatcher { + /** Pending file system events that we need to process. */ + private eventQueue: WatchEvent[] = []; + /** The folder watch whose event we're currently processing */ + private activeWatch: FolderWatch | undefined; + /** + * If the file system directory corresponding to the (root) folder path of a + * folder watch is deleted on disk, we note down that in this queue so that + * we can ignore any file system events that come for it next. + */ + private deletedFolderPaths: string[] = []; + /** `true` if we are using the uploader. */ + private uploadRunning = false; + /** `true` if we are temporarily paused to let a user upload go through. 
*/ private isPaused = false; - private setElectronFiles: (files: ElectronFile[]) => void; - private setCollectionName: (collectionName: string) => void; - private syncWithRemote: () => void; - private setWatchFolderServiceIsRunning: (isRunning: boolean) => void; + /** + * A map from file paths to an Ente file for files that were uploaded (or + * symlinked) as part of the most recent upload attempt. + */ + private uploadedFileForPath = new Map(); + /** + * A set of file paths that could not be uploaded in the most recent upload + * attempt. These are the uploads that failed due to a permanent error that + * a retry will not fix. + */ + private unUploadableFilePaths = new Set(); + + /** + * A function to call when we want to enqueue a new upload of the given list + * of file paths to the given Ente collection. + * + * This is passed as a param to {@link init}. + */ + private upload: (collectionName: string, filePaths: string[]) => void; + /** + * A function to call when we want to sync with the backend. It will + * initiate the sync but will not await its completion. + * + * This is passed as a param to {@link init}. + */ + private requestSyncWithRemote: () => void; + + /** A helper function that debounces invocations of {@link runNextEvent}. */ private debouncedRunNextEvent: () => void; constructor() { this.debouncedRunNextEvent = debounce(() => this.runNextEvent(), 1000); } + /** + * Initialize the watcher and start processing file system events. + * + * This is only called when we're running in the context of our desktop app. + * + * The caller provides us with the hooks we can use to actually upload the + * files, and to sync with remote (say after deletion). 
+ */ + init( + upload: (collectionName: string, filePaths: string[]) => void, + requestSyncWithRemote: () => void, + ) { + this.upload = upload; + this.requestSyncWithRemote = requestSyncWithRemote; + this.registerListeners(); + this.syncWithDisk(); + } + + /** Return `true` if we are currently using the uploader. */ isUploadRunning() { return this.uploadRunning; } + /** Return `true` if syncing has been temporarily paused. */ isSyncPaused() { return this.isPaused; } - async init( - setElectronFiles: (files: ElectronFile[]) => void, - setCollectionName: (collectionName: string) => void, - syncWithRemote: () => void, - setWatchFolderServiceIsRunning: (isRunning: boolean) => void, - ) { - try { - this.setElectronFiles = setElectronFiles; - this.setCollectionName = setCollectionName; - this.syncWithRemote = syncWithRemote; - this.setWatchFolderServiceIsRunning = - setWatchFolderServiceIsRunning; - this.setupWatcherFunctions(); - await this.getAndSyncDiffOfFiles(); - } catch (e) { - log.error("error while initializing watch service", e); - } + /** + * Temporarily pause syncing and cancel any running uploads. + * + * This frees up the uploader for handling user initated uploads. + */ + pauseRunningSync() { + this.isPaused = true; + uploadManager.cancelRunningUpload(); } - async getAndSyncDiffOfFiles() { + /** + * Resume from a temporary pause, resyncing from disk. + * + * Sibling of {@link pauseRunningSync}. + */ + resumePausedSync() { + this.isPaused = false; + this.syncWithDisk(); + } + + /** Return the list of folders we are watching for changes. */ + async getWatches(): Promise { + return await ensureElectron().watch.get(); + } + + /** + * Return true if we are currently syncing files that belong to the given + * {@link folderPath}. 
+ */ + isSyncingFolder(folderPath: string) { + return this.activeWatch?.folderPath == folderPath; + } + + /** + * Add a new folder watch for the given root {@link folderPath} + * + * @param mapping The {@link CollectionMapping} to use to decide which + * collection do files belonging to nested directories go to. + * + * @returns The updated list of watches. + */ + async addWatch(folderPath: string, mapping: CollectionMapping) { + const watches = await ensureElectron().watch.add(folderPath, mapping); + this.syncWithDisk(); + return watches; + } + + /** + * Remove the folder watch for the given root {@link folderPath}. + * + * @returns The updated list of watches. + */ + async removeWatch(folderPath: string) { + return await ensureElectron().watch.remove(folderPath); + } + + private async syncWithDisk() { try { - let mappings = await this.getWatchMappings(); - - if (!mappings?.length) { - return; - } - - mappings = await this.filterOutDeletedMappings(mappings); + const watches = await this.getWatches(); + if (!watches) return; this.eventQueue = []; + const events = await deduceEvents(watches); + log.info(`Folder watch deduced ${events.length} events`); + this.eventQueue = this.eventQueue.concat(events); - for (const mapping of mappings) { - const filesOnDisk: ElectronFile[] = - await ensureElectron().getDirFiles(mapping.folderPath); - - this.uploadDiffOfFiles(mapping, filesOnDisk); - this.trashDiffOfFiles(mapping, filesOnDisk); - } + this.debouncedRunNextEvent(); } catch (e) { - log.error("error while getting and syncing diff of files", e); + log.error("Ignoring error while syncing watched folders", e); } } - isMappingSyncInProgress(mapping: WatchMapping) { - return this.currentEvent?.folderPath === mapping.folderPath; - } - - private uploadDiffOfFiles( - mapping: WatchMapping, - filesOnDisk: ElectronFile[], - ) { - const filesToUpload = getValidFilesToUpload(filesOnDisk, mapping); - - if (filesToUpload.length > 0) { - for (const file of filesToUpload) { - const 
event: EventQueueItem = { - type: "upload", - collectionName: this.getCollectionNameForMapping( - mapping, - file.path, - ), - folderPath: mapping.folderPath, - files: [file], - }; - this.pushEvent(event); - } - } - } - - private trashDiffOfFiles( - mapping: WatchMapping, - filesOnDisk: ElectronFile[], - ) { - const filesToRemove = mapping.syncedFiles.filter((file) => { - return !filesOnDisk.find( - (electronFile) => electronFile.path === file.path, - ); - }); - - if (filesToRemove.length > 0) { - for (const file of filesToRemove) { - const event: EventQueueItem = { - type: "trash", - collectionName: this.getCollectionNameForMapping( - mapping, - file.path, - ), - folderPath: mapping.folderPath, - paths: [file.path], - }; - this.pushEvent(event); - } - } - } - - private async filterOutDeletedMappings( - mappings: WatchMapping[], - ): Promise { - const notDeletedMappings = []; - for (const mapping of mappings) { - const mappingExists = await ensureElectron().isFolder( - mapping.folderPath, - ); - if (!mappingExists) { - ensureElectron().removeWatchMapping(mapping.folderPath); - } else { - notDeletedMappings.push(mapping); - } - } - return notDeletedMappings; - } - - pushEvent(event: EventQueueItem) { + pushEvent(event: WatchEvent) { this.eventQueue.push(event); + log.info("Folder watch event", event); this.debouncedRunNextEvent(); } - async pushTrashedDir(path: string) { - this.trashingDirQueue.push(path); - } + private registerListeners() { + const watch = ensureElectron().watch; - private setupWatcherFunctions() { - ensureElectron().registerWatcherFunctions( - diskFileAddedCallback, - diskFileRemovedCallback, - diskFolderRemovedCallback, - ); - } + // [Note: File renames during folder watch] + // + // Renames come as two file system events - an `onAddFile` + an + // `onRemoveFile` - in an arbitrary order. 
- async addWatchMapping( - rootFolderName: string, - folderPath: string, - uploadStrategy: UPLOAD_STRATEGY, - ) { - try { - await ensureElectron().addWatchMapping( - rootFolderName, - folderPath, - uploadStrategy, - ); - this.getAndSyncDiffOfFiles(); - } catch (e) { - log.error("error while adding watch mapping", e); - } - } + watch.onAddFile((path: string, watch: FolderWatch) => { + this.pushEvent({ + action: "upload", + collectionName: collectionNameForPath(path, watch), + folderPath: watch.folderPath, + filePath: path, + }); + }); - async mappingsAfterRemovingFolder(folderPath: string) { - await ensureElectron().removeWatchMapping(folderPath); - return await this.getWatchMappings(); - } + watch.onRemoveFile((path: string, watch: FolderWatch) => { + this.pushEvent({ + action: "trash", + collectionName: collectionNameForPath(path, watch), + folderPath: watch.folderPath, + filePath: path, + }); + }); - async getWatchMappings(): Promise { - try { - return (await ensureElectron().getWatchMappings()) ?? 
[]; - } catch (e) { - log.error("error while getting watch mappings", e); - return []; - } - } - - private setIsEventRunning(isEventRunning: boolean) { - this.isEventRunning = isEventRunning; - this.setWatchFolderServiceIsRunning(isEventRunning); + watch.onRemoveDir((path: string, watch: FolderWatch) => { + if (path == watch.folderPath) { + log.info( + `Received file system delete event for a watched folder at ${path}`, + ); + this.deletedFolderPaths.push(path); + } + }); } private async runNextEvent() { - try { - if ( - this.eventQueue.length === 0 || - this.isEventRunning || - this.isPaused - ) { + if (this.eventQueue.length == 0 || this.activeWatch || this.isPaused) + return; + + const skip = (reason: string) => { + log.info(`Ignoring event since ${reason}`); + this.debouncedRunNextEvent(); + }; + + const event = this.dequeueClubbedEvent(); + log.info( + `Processing ${event.action} event for folder watch ${event.folderPath} (collectionName ${event.collectionName}, ${event.filePaths.length} files)`, + ); + + const watch = (await this.getWatches()).find( + (watch) => watch.folderPath == event.folderPath, + ); + if (!watch) { + // Possibly stale + skip(`no folder watch for found for ${event.folderPath}`); + return; + } + + if (event.action === "upload") { + const paths = pathsToUpload(event.filePaths, watch); + if (paths.length == 0) { + skip("none of the files need uploading"); return; } - const event = this.clubSameCollectionEvents(); - log.info( - `running event type:${event.type} collectionName:${event.collectionName} folderPath:${event.folderPath} , fileCount:${event.files?.length} pathsCount: ${event.paths?.length}`, - ); - const mappings = await this.getWatchMappings(); - const mapping = mappings.find( - (mapping) => mapping.folderPath === event.folderPath, - ); - if (!mapping) { - throw Error("no Mapping found for event"); - } - log.info( - `mapping for event rootFolder: ${mapping.rootFolderName} folderPath: ${mapping.folderPath} uploadStrategy: 
${mapping.uploadStrategy} syncedFilesCount: ${mapping.syncedFiles.length} ignoredFilesCount ${mapping.ignoredFiles.length}`, - ); - if (event.type === "upload") { - event.files = getValidFilesToUpload(event.files, mapping); - log.info(`valid files count: ${event.files?.length}`); - if (event.files.length === 0) { - return; - } - } - this.currentEvent = event; - this.currentlySyncedMapping = mapping; + // Here we pass control to the uploader. When the upload is done, + // the uploader will notify us by calling allFileUploadsDone. - this.setIsEventRunning(true); - if (event.type === "upload") { - this.processUploadEvent(); - } else { - await this.processTrashEvent(); - this.setIsEventRunning(false); - setTimeout(() => this.runNextEvent(), 0); - } - } catch (e) { - log.error("runNextEvent failed", e); - } - } - - private async processUploadEvent() { - try { + this.activeWatch = watch; this.uploadRunning = true; - this.setCollectionName(this.currentEvent.collectionName); - this.setElectronFiles(this.currentEvent.files); - } catch (e) { - log.error("error while running next upload", e); + const collectionName = event.collectionName; + log.info( + `Folder watch requested upload of ${paths.length} files to collection ${collectionName}`, + ); + + this.upload(collectionName, paths); + } else { + if (this.pruneFileEventsFromDeletedFolderPaths()) { + skip("event was from a deleted folder path"); + return; + } + + const [removed, rest] = watch.syncedFiles.reduce( + ([removed, rest], { path }) => { + (event.filePaths.includes(path) ? 
rest : removed).push( + watch, + ); + return [removed, rest]; + }, + [[], []], + ); + + this.activeWatch = watch; + + await this.moveToTrash(removed); + + await ensureElectron().watch.updateSyncedFiles( + rest, + watch.folderPath, + ); + + this.activeWatch = undefined; + + this.debouncedRunNextEvent(); } } + /** + * Batch the next run of events with the same action, collection and folder + * path into a single clubbed event that contains the list of all effected + * file paths from the individual events. + */ + private dequeueClubbedEvent(): ClubbedWatchEvent | undefined { + const event = this.eventQueue.shift(); + if (!event) return undefined; + + const filePaths = [event.filePath]; + while ( + this.eventQueue.length > 0 && + event.action === this.eventQueue[0].action && + event.folderPath === this.eventQueue[0].folderPath && + event.collectionName === this.eventQueue[0].collectionName + ) { + filePaths.push(this.eventQueue[0].filePath); + this.eventQueue.shift(); + } + return { ...event, filePaths }; + } + + /** + * Callback invoked by the uploader whenever a item we requested to + * {@link upload} gets uploaded. + */ async onFileUpload( fileUploadResult: UPLOAD_RESULT, - fileWithCollection: FileWithCollection, + item: UploadItemWithCollection, file: EncryptedEnteFile, ) { - log.debug(() => `onFileUpload called`); - if (!this.isUploadRunning()) { - return; - } + // Re the usage of ensureString: For desktop watch, the only possibility + // for a UploadItem is for it to be a string (the absolute path to a + // file on disk). 
if ( [ UPLOAD_RESULT.ADDED_SYMLINK, @@ -289,20 +336,18 @@ class WatchFolderService { UPLOAD_RESULT.ALREADY_UPLOADED, ].includes(fileUploadResult) ) { - if (fileWithCollection.isLivePhoto) { - this.filePathToUploadedFileIDMap.set( - (fileWithCollection.livePhotoAssets.image as ElectronFile) - .path, + if (item.isLivePhoto) { + this.uploadedFileForPath.set( + ensureString(item.livePhotoAssets.image), file, ); - this.filePathToUploadedFileIDMap.set( - (fileWithCollection.livePhotoAssets.video as ElectronFile) - .path, + this.uploadedFileForPath.set( + ensureString(item.livePhotoAssets.video), file, ); } else { - this.filePathToUploadedFileIDMap.set( - (fileWithCollection.file as ElectronFile).path, + this.uploadedFileForPath.set( + ensureString(item.uploadItem), file, ); } @@ -311,430 +356,269 @@ class WatchFolderService { fileUploadResult, ) ) { - if (fileWithCollection.isLivePhoto) { + if (item.isLivePhoto) { this.unUploadableFilePaths.add( - (fileWithCollection.livePhotoAssets.image as ElectronFile) - .path, + ensureString(item.livePhotoAssets.image), ); this.unUploadableFilePaths.add( - (fileWithCollection.livePhotoAssets.video as ElectronFile) - .path, + ensureString(item.livePhotoAssets.video), ); } else { - this.unUploadableFilePaths.add( - (fileWithCollection.file as ElectronFile).path, - ); + this.unUploadableFilePaths.add(ensureString(item.uploadItem)); } } } + /** + * Callback invoked by the uploader whenever all the files we requested to + * {@link upload} get uploaded. 
+ */ async allFileUploadsDone( - filesWithCollection: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], ) { - try { - log.debug( - () => - `allFileUploadsDone,${JSON.stringify( - filesWithCollection, - )} ${JSON.stringify(collections)}`, + const electron = ensureElectron(); + const watch = this.activeWatch; + + log.debug(() => + JSON.stringify({ + f: "watch/allFileUploadsDone", + uploadItemsWithCollection, + collections, + watch, + }), + ); + + const { syncedFiles, ignoredFiles } = this.deduceSyncedAndIgnored( + uploadItemsWithCollection, + ); + + if (syncedFiles.length > 0) + await electron.watch.updateSyncedFiles( + watch.syncedFiles.concat(syncedFiles), + watch.folderPath, ); - const collection = collections.find( - (collection) => - collection.id === filesWithCollection[0].collectionID, + + if (ignoredFiles.length > 0) + await electron.watch.updateIgnoredFiles( + watch.ignoredFiles.concat(ignoredFiles), + watch.folderPath, ); - log.debug(() => `got collection ${!!collection}`); - log.debug( - () => - `${this.isEventRunning} ${this.currentEvent.collectionName} ${collection?.name}`, - ); - if ( - !this.isEventRunning || - this.currentEvent.collectionName !== collection?.name - ) { - return; - } - const syncedFiles: WatchMapping["syncedFiles"] = []; - const ignoredFiles: WatchMapping["ignoredFiles"] = []; - - for (const fileWithCollection of filesWithCollection) { - this.handleUploadedFile( - fileWithCollection, - syncedFiles, - ignoredFiles, - ); - } - - log.debug(() => `syncedFiles ${JSON.stringify(syncedFiles)}`); - log.debug(() => `ignoredFiles ${JSON.stringify(ignoredFiles)}`); - - if (syncedFiles.length > 0) { - this.currentlySyncedMapping.syncedFiles = [ - ...this.currentlySyncedMapping.syncedFiles, - ...syncedFiles, - ]; - await ensureElectron().updateWatchMappingSyncedFiles( - this.currentlySyncedMapping.folderPath, - this.currentlySyncedMapping.syncedFiles, - ); - } - if (ignoredFiles.length > 0) { 
- this.currentlySyncedMapping.ignoredFiles = [ - ...this.currentlySyncedMapping.ignoredFiles, - ...ignoredFiles, - ]; - await ensureElectron().updateWatchMappingIgnoredFiles( - this.currentlySyncedMapping.folderPath, - this.currentlySyncedMapping.ignoredFiles, - ); - } - - this.runPostUploadsAction(); - } catch (e) { - log.error("error while running all file uploads done", e); - } - } - - private runPostUploadsAction() { - this.setIsEventRunning(false); + this.activeWatch = undefined; this.uploadRunning = false; - this.runNextEvent(); + + this.debouncedRunNextEvent(); } - private handleUploadedFile( - fileWithCollection: FileWithCollection, - syncedFiles: WatchMapping["syncedFiles"], - ignoredFiles: WatchMapping["ignoredFiles"], + private deduceSyncedAndIgnored( + uploadItemsWithCollection: UploadItemWithCollection[], ) { - if (fileWithCollection.isLivePhoto) { - const imagePath = ( - fileWithCollection.livePhotoAssets.image as ElectronFile - ).path; - const videoPath = ( - fileWithCollection.livePhotoAssets.video as ElectronFile - ).path; + const syncedFiles: FolderWatch["syncedFiles"] = []; + const ignoredFiles: FolderWatch["ignoredFiles"] = []; - if ( - this.filePathToUploadedFileIDMap.has(imagePath) && - this.filePathToUploadedFileIDMap.has(videoPath) - ) { - const imageFile = { - path: imagePath, - uploadedFileID: - this.filePathToUploadedFileIDMap.get(imagePath).id, - collectionID: - this.filePathToUploadedFileIDMap.get(imagePath) - .collectionID, - }; - const videoFile = { - path: videoPath, - uploadedFileID: - this.filePathToUploadedFileIDMap.get(videoPath).id, - collectionID: - this.filePathToUploadedFileIDMap.get(videoPath) - .collectionID, - }; - syncedFiles.push(imageFile); - syncedFiles.push(videoFile); - log.debug( - () => - `added image ${JSON.stringify( - imageFile, - )} and video file ${JSON.stringify( - videoFile, - )} to uploadedFiles`, - ); - } else if ( - this.unUploadableFilePaths.has(imagePath) && - this.unUploadableFilePaths.has(videoPath) - 
) { - ignoredFiles.push(imagePath); - ignoredFiles.push(videoPath); - log.debug( - () => - `added image ${imagePath} and video file ${videoPath} to rejectedFiles`, - ); - } - this.filePathToUploadedFileIDMap.delete(imagePath); - this.filePathToUploadedFileIDMap.delete(videoPath); - } else { - const filePath = (fileWithCollection.file as ElectronFile).path; - - if (this.filePathToUploadedFileIDMap.has(filePath)) { - const file = { - path: filePath, - uploadedFileID: - this.filePathToUploadedFileIDMap.get(filePath).id, - collectionID: - this.filePathToUploadedFileIDMap.get(filePath) - .collectionID, - }; - syncedFiles.push(file); - log.debug(() => `added file ${JSON.stringify(file)}`); - } else if (this.unUploadableFilePaths.has(filePath)) { - ignoredFiles.push(filePath); - log.debug(() => `added file ${filePath} to rejectedFiles`); - } - this.filePathToUploadedFileIDMap.delete(filePath); - } - } - - private async processTrashEvent() { - try { - if (this.checkAndIgnoreIfFileEventsFromTrashedDir()) { - return; - } - - const { paths } = this.currentEvent; - const filePathsToRemove = new Set(paths); - - const files = this.currentlySyncedMapping.syncedFiles.filter( - (file) => filePathsToRemove.has(file.path), - ); - - await this.trashByIDs(files); - - this.currentlySyncedMapping.syncedFiles = - this.currentlySyncedMapping.syncedFiles.filter( - (file) => !filePathsToRemove.has(file.path), - ); - await ensureElectron().updateWatchMappingSyncedFiles( - this.currentlySyncedMapping.folderPath, - this.currentlySyncedMapping.syncedFiles, - ); - } catch (e) { - log.error("error while running next trash", e); - } - } - - private async trashByIDs(toTrashFiles: WatchMapping["syncedFiles"]) { - try { - const files = await getLocalFiles(); - const toTrashFilesMap = new Map(); - for (const file of toTrashFiles) { - toTrashFilesMap.set(file.uploadedFileID, file); - } - const filesToTrash = files.filter((file) => { - if (toTrashFilesMap.has(file.id)) { - const fileToTrash = 
toTrashFilesMap.get(file.id); - if (fileToTrash.collectionID === file.collectionID) { - return true; - } - } + const markSynced = (file: EncryptedEnteFile, path: string) => { + syncedFiles.push({ + path, + uploadedFileID: file.id, + collectionID: file.collectionID, }); - const groupFilesByCollectionId = - groupFilesBasedOnCollectionID(filesToTrash); + this.uploadedFileForPath.delete(path); + }; - for (const [ - collectionID, - filesToTrash, - ] of groupFilesByCollectionId.entries()) { - await removeFromCollection(collectionID, filesToTrash); - } - this.syncWithRemote(); - } catch (e) { - log.error("error while trashing by IDs", e); - } - } + const markIgnored = (path: string) => { + log.debug(() => `Permanently ignoring file at ${path}`); + ignoredFiles.push(path); + this.unUploadableFilePaths.delete(path); + }; - private checkAndIgnoreIfFileEventsFromTrashedDir() { - if (this.trashingDirQueue.length !== 0) { - this.ignoreFileEventsFromTrashedDir(this.trashingDirQueue[0]); - this.trashingDirQueue.shift(); - return true; - } - return false; - } + for (const item of uploadItemsWithCollection) { + // Re the usage of ensureString: For desktop watch, the only + // possibility for a UploadItem is for it to be a string (the + // absolute path to a file on disk). 
+ if (item.isLivePhoto) { + const imagePath = ensureString(item.livePhotoAssets.image); + const videoPath = ensureString(item.livePhotoAssets.video); - private ignoreFileEventsFromTrashedDir(trashingDir: string) { - this.eventQueue = this.eventQueue.filter((event) => - event.paths.every((path) => !path.startsWith(trashingDir)), - ); - } + const imageFile = this.uploadedFileForPath.get(imagePath); + const videoFile = this.uploadedFileForPath.get(videoPath); - async getCollectionNameAndFolderPath(filePath: string) { - try { - const mappings = await this.getWatchMappings(); - - const mapping = mappings.find( - (mapping) => - filePath.length > mapping.folderPath.length && - filePath.startsWith(mapping.folderPath) && - filePath[mapping.folderPath.length] === "/", - ); - - if (!mapping) { - throw Error(`no mapping found`); - } - - return { - collectionName: this.getCollectionNameForMapping( - mapping, - filePath, - ), - folderPath: mapping.folderPath, - }; - } catch (e) { - log.error("error while getting collection name", e); - } - } - - private getCollectionNameForMapping( - mapping: WatchMapping, - filePath: string, - ) { - return mapping.uploadStrategy === UPLOAD_STRATEGY.COLLECTION_PER_FOLDER - ? 
getParentFolderName(filePath) - : mapping.rootFolderName; - } - - async selectFolder(): Promise { - try { - const folderPath = await ensureElectron().selectDirectory(); - return folderPath; - } catch (e) { - log.error("error while selecting folder", e); - } - } - - // Batches all the files to be uploaded (or trashed) from the - // event queue of same collection as the next event - private clubSameCollectionEvents(): EventQueueItem { - const event = this.eventQueue.shift(); - while ( - this.eventQueue.length > 0 && - event.collectionName === this.eventQueue[0].collectionName && - event.type === this.eventQueue[0].type - ) { - if (event.type === "trash") { - event.paths = [...event.paths, ...this.eventQueue[0].paths]; + if (imageFile && videoFile) { + markSynced(imageFile, imagePath); + markSynced(videoFile, videoPath); + } else if ( + this.unUploadableFilePaths.has(imagePath) && + this.unUploadableFilePaths.has(videoPath) + ) { + markIgnored(imagePath); + markIgnored(videoPath); + } } else { - event.files = [...event.files, ...this.eventQueue[0].files]; + const path = ensureString(item.uploadItem); + const file = this.uploadedFileForPath.get(path); + if (file) { + markSynced(file, path); + } else if (this.unUploadableFilePaths.has(path)) { + markIgnored(path); + } } - this.eventQueue.shift(); - } - return event; - } - - async isFolder(folderPath: string) { - try { - const isFolder = await ensureElectron().isFolder(folderPath); - return isFolder; - } catch (e) { - log.error("error while checking if folder exists", e); - } - } - - pauseRunningSync() { - this.isPaused = true; - uploadManager.cancelRunningUpload(); - } - - resumePausedSync() { - this.isPaused = false; - this.getAndSyncDiffOfFiles(); - } -} - -const watchFolderService = new WatchFolderService(); - -export default watchFolderService; - -const getParentFolderName = (filePath: string) => { - const folderPath = filePath.substring(0, filePath.lastIndexOf("/")); - const folderName = 
folderPath.substring(folderPath.lastIndexOf("/") + 1); - return folderName; -}; - -async function diskFileAddedCallback(file: ElectronFile) { - try { - const collectionNameAndFolderPath = - await watchFolderService.getCollectionNameAndFolderPath(file.path); - - if (!collectionNameAndFolderPath) { - return; } - const { collectionName, folderPath } = collectionNameAndFolderPath; + return { syncedFiles, ignoredFiles }; + } - const event: EventQueueItem = { - type: "upload", - collectionName, - folderPath, - files: [file], - }; - watchFolderService.pushEvent(event); - log.info( - `added (upload) to event queue, collectionName:${event.collectionName} folderPath:${event.folderPath}, filesCount: ${event.files.length}`, + private pruneFileEventsFromDeletedFolderPaths() { + const deletedFolderPath = this.deletedFolderPaths.shift(); + if (!deletedFolderPath) return false; + + this.eventQueue = this.eventQueue.filter( + (event) => !event.filePath.startsWith(deletedFolderPath), ); - } catch (e) { - log.error("error while calling diskFileAddedCallback", e); + + return true; } -} -async function diskFileRemovedCallback(filePath: string) { - try { - const collectionNameAndFolderPath = - await watchFolderService.getCollectionNameAndFolderPath(filePath); + private async moveToTrash(syncedFiles: FolderWatch["syncedFiles"]) { + const syncedFileForID = new Map(); + for (const file of syncedFiles) + syncedFileForID.set(file.uploadedFileID, file); - if (!collectionNameAndFolderPath) { - return; - } - - const { collectionName, folderPath } = collectionNameAndFolderPath; - - const event: EventQueueItem = { - type: "trash", - collectionName, - folderPath, - paths: [filePath], - }; - watchFolderService.pushEvent(event); - log.info( - `added (trash) to event queue collectionName:${event.collectionName} folderPath:${event.folderPath} , pathsCount: ${event.paths.length}`, - ); - } catch (e) { - log.error("error while calling diskFileRemovedCallback", e); - } -} - -async function 
diskFolderRemovedCallback(folderPath: string) { - try { - const mappings = await watchFolderService.getWatchMappings(); - const mapping = mappings.find( - (mapping) => mapping.folderPath === folderPath, - ); - if (!mapping) { - log.info(`folder not found in mappings, ${folderPath}`); - throw Error(`Watch mapping not found`); - } - watchFolderService.pushTrashedDir(folderPath); - log.info(`added trashedDir, ${folderPath}`); - } catch (e) { - log.error("error while calling diskFolderRemovedCallback", e); - } -} - -export function getValidFilesToUpload( - files: ElectronFile[], - mapping: WatchMapping, -) { - const uniqueFilePaths = new Set(); - return files.filter((file) => { - if (!isSystemFile(file) && !isSyncedOrIgnoredFile(file, mapping)) { - if (!uniqueFilePaths.has(file.path)) { - uniqueFilePaths.add(file.path); + const files = await getLocalFiles(); + const filesToTrash = files.filter((file) => { + const correspondingSyncedFile = syncedFileForID.get(file.id); + if ( + correspondingSyncedFile && + correspondingSyncedFile.collectionID == file.collectionID + ) { return true; } + return false; + }); + + const filesByCollectionID = groupFilesBasedOnCollectionID(filesToTrash); + for (const [id, files] of filesByCollectionID.entries()) { + await removeFromCollection(id, files); } - return false; - }); + + this.requestSyncWithRemote(); + } } -function isSyncedOrIgnoredFile(file: ElectronFile, mapping: WatchMapping) { - return ( - mapping.ignoredFiles.includes(file.path) || - mapping.syncedFiles.find((f) => f.path === file.path) - ); +/** The singleton instance of {@link FolderWatcher}. */ +const watcher = new FolderWatcher(); + +export default watcher; + +/** + * A file system watch event encapsulates a change that has occurred on disk + * that needs us to take some action within Ente to synchronize with the user's + * Ente collections. 
+ * + * Events get added in two ways: + * + * - When the app starts, it reads the current state of files on disk and + * compares that with its last known state to determine what all events it + * missed. This is easier than it sounds as we have only two events: add and + * remove. + * + * - When the app is running, it gets live notifications from our file system + * watcher (from the Node.js layer) about changes that have happened on disk, + * which the app then enqueues onto the event queue if they pertain to the + * files we're interested in. + */ +interface WatchEvent { + /** The action to take */ + action: "upload" | "trash"; + /** The path of the root folder corresponding to the {@link FolderWatch}. */ + folderPath: string; + /** The name of the Ente collection the file belongs to. */ + collectionName: string; + /** The absolute path to the file under consideration. */ + filePath: string; } + +/** + * A composite of multiple {@link WatchEvent}s that only differ in their + * {@link filePath}. + * + * When processing events, we combine a run of events with the same + * {@link action}, {@link folderPath} and {@link collectionName}. This allows us + * to process all the affected {@link filePaths} in one shot. + */ +type ClubbedWatchEvent = Omit & { + filePaths: string[]; +}; + +/** + * Determine which events we need to process to synchronize the watched on-disk + * folders to their corresponding collections. + */ +const deduceEvents = async (watches: FolderWatch[]): Promise => { + const electron = ensureElectron(); + const events: WatchEvent[] = []; + + for (const watch of watches) { + const folderPath = watch.folderPath; + + const filePaths = await electron.watch.findFiles(folderPath); + + // Files that are on disk but not yet synced. 
+ for (const filePath of pathsToUpload(filePaths, watch)) + events.push({ + action: "upload", + folderPath, + collectionName: collectionNameForPath(filePath, watch), + filePath, + }); + + // Previously synced files that are no longer on disk. + for (const filePath of pathsToRemove(filePaths, watch)) + events.push({ + action: "trash", + folderPath, + collectionName: collectionNameForPath(filePath, watch), + filePath, + }); + } + + return events; +}; + +/** + * Filter out hidden files and previously synced or ignored paths from + * {@link paths} to get the list of paths that need to be uploaded to the Ente + * collection. + */ +const pathsToUpload = (paths: string[], watch: FolderWatch) => + paths + // Filter out hidden files (files whose names begins with a dot) + .filter((path) => !isHiddenFile(path)) + // Files that are on disk but not yet synced or ignored. + .filter((path) => !isSyncedOrIgnoredPath(path, watch)); + +/** + * Return true if the file at the given {@link path} is hidden. + * + * Hidden files are those whose names begin with a "." (dot). + */ +const isHiddenFile = (path: string) => basename(path).startsWith("."); + +/** + * Return the paths to previously synced files that are no longer on disk and so + * must be removed from the Ente collection. + */ +const pathsToRemove = (paths: string[], watch: FolderWatch) => + watch.syncedFiles + .map((f) => f.path) + .filter((path) => !paths.includes(path)); + +const isSyncedOrIgnoredPath = (path: string, watch: FolderWatch) => + watch.ignoredFiles.includes(path) || + watch.syncedFiles.find((f) => f.path === path); + +const collectionNameForPath = (path: string, watch: FolderWatch) => + watch.collectionMapping == "root" + ? 
basename(watch.folderPath) + : parentDirectoryName(path); + +const parentDirectoryName = (path: string) => basename(dirname(path)); diff --git a/web/apps/photos/src/types/entity.ts b/web/apps/photos/src/types/entity.ts index 9580bf3332..60844ce466 100644 --- a/web/apps/photos/src/types/entity.ts +++ b/web/apps/photos/src/types/entity.ts @@ -1,4 +1,4 @@ -import { Location } from "types/upload"; +import { Location } from "types/metadata"; export enum EntityType { LOCATION_TAG = "location", diff --git a/web/apps/photos/src/types/file/index.ts b/web/apps/photos/src/types/file/index.ts index 2991e1f8b0..c3d4cca440 100644 --- a/web/apps/photos/src/types/file/index.ts +++ b/web/apps/photos/src/types/file/index.ts @@ -1,10 +1,10 @@ +import type { Metadata } from "@/media/types/file"; import { SourceURLs } from "services/download"; import { EncryptedMagicMetadata, MagicMetadataCore, VISIBILITY_STATE, } from "types/magicMetadata"; -import { Metadata } from "types/upload"; export interface MetadataFileAttributes { encryptedData: string; diff --git a/web/apps/photos/src/types/machineLearning/index.ts b/web/apps/photos/src/types/machineLearning/index.ts index 7fee94815b..2c3961cdf6 100644 --- a/web/apps/photos/src/types/machineLearning/index.ts +++ b/web/apps/photos/src/types/machineLearning/index.ts @@ -290,7 +290,7 @@ export interface FaceEmbeddingService { export interface BlurDetectionService { method: Versioned; - detectBlur(alignedFaces: Float32Array): number[]; + detectBlur(alignedFaces: Float32Array, faces: Face[]): number[]; } export interface ClusteringService { diff --git a/web/apps/photos/src/types/metadata.ts b/web/apps/photos/src/types/metadata.ts new file mode 100644 index 0000000000..7994e62479 --- /dev/null +++ b/web/apps/photos/src/types/metadata.ts @@ -0,0 +1,11 @@ +export interface Location { + latitude: number; + longitude: number; +} + +export interface ParsedExtractedMetadata { + location: Location; + creationTime: number; + width: number; + height: 
number; +} diff --git a/web/apps/photos/src/types/search/index.ts b/web/apps/photos/src/types/search/index.ts index 29a1cffef2..cf50f4a060 100644 --- a/web/apps/photos/src/types/search/index.ts +++ b/web/apps/photos/src/types/search/index.ts @@ -1,4 +1,4 @@ -import { FILE_TYPE } from "constants/file"; +import { FILE_TYPE } from "@/media/file-type"; import { City } from "services/locationSearchService"; import { LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; diff --git a/web/apps/photos/src/types/upload/index.ts b/web/apps/photos/src/types/upload/index.ts deleted file mode 100644 index 0d38f6190f..0000000000 --- a/web/apps/photos/src/types/upload/index.ts +++ /dev/null @@ -1,170 +0,0 @@ -import { - B64EncryptionResult, - LocalFileAttributes, -} from "@ente/shared/crypto/types"; -import { FILE_TYPE } from "constants/file"; -import { Collection } from "types/collection"; -import { - FilePublicMagicMetadata, - FilePublicMagicMetadataProps, - MetadataFileAttributes, - S3FileAttributes, -} from "types/file"; -import { EncryptedMagicMetadata } from "types/magicMetadata"; - -export interface DataStream { - stream: ReadableStream; - chunkCount: number; -} - -export function isDataStream(object: any): object is DataStream { - return "stream" in object; -} - -export type Logger = (message: string) => void; - -export interface Metadata { - title: string; - creationTime: number; - modificationTime: number; - latitude: number; - longitude: number; - fileType: FILE_TYPE; - hasStaticThumbnail?: boolean; - hash?: string; - imageHash?: string; - videoHash?: string; - localID?: number; - version?: number; - deviceFolder?: string; -} - -export interface Location { - latitude: number; - longitude: number; -} - -export interface ParsedMetadataJSON { - creationTime: number; - modificationTime: number; - latitude: number; - longitude: number; -} - -export interface MultipartUploadURLs { - objectKey: string; - partURLs: string[]; - completeURL: string; -} - 
-export interface FileTypeInfo { - fileType: FILE_TYPE; - exactType: string; - mimeType?: string; - imageType?: string; - videoType?: string; -} - -/* - * ElectronFile is a custom interface that is used to represent - * any file on disk as a File-like object in the Electron desktop app. - * - * This was added to support the auto-resuming of failed uploads - * which needed absolute paths to the files which the - * normal File interface does not provide. - */ -export interface ElectronFile { - name: string; - path: string; - size: number; - lastModified: number; - stream: () => Promise>; - blob: () => Promise; - arrayBuffer: () => Promise; -} - -export interface UploadAsset { - isLivePhoto?: boolean; - file?: File | ElectronFile; - livePhotoAssets?: LivePhotoAssets; - isElectron?: boolean; -} -export interface LivePhotoAssets { - image: globalThis.File | ElectronFile; - video: globalThis.File | ElectronFile; -} - -export interface FileWithCollection extends UploadAsset { - localID: number; - collection?: Collection; - collectionID?: number; -} - -export type ParsedMetadataJSONMap = Map; - -export interface UploadURL { - url: string; - objectKey: string; -} - -export interface FileInMemory { - filedata: Uint8Array | DataStream; - thumbnail: Uint8Array; - hasStaticThumbnail: boolean; -} - -export interface FileWithMetadata - extends Omit { - metadata: Metadata; - localID: number; - pubMagicMetadata: FilePublicMagicMetadata; -} - -export interface EncryptedFile { - file: ProcessedFile; - fileKey: B64EncryptionResult; -} -export interface ProcessedFile { - file: LocalFileAttributes; - thumbnail: LocalFileAttributes; - metadata: LocalFileAttributes; - pubMagicMetadata: EncryptedMagicMetadata; - localID: number; -} -export interface BackupedFile { - file: S3FileAttributes; - thumbnail: S3FileAttributes; - metadata: MetadataFileAttributes; - pubMagicMetadata: EncryptedMagicMetadata; -} - -export interface UploadFile extends BackupedFile { - collectionID: number; - 
encryptedKey: string; - keyDecryptionNonce: string; -} - -export interface ParsedExtractedMetadata { - location: Location; - creationTime: number; - width: number; - height: number; -} - -// This is used to prompt the user the make upload strategy choice -export interface ImportSuggestion { - rootFolderName: string; - hasNestedFolders: boolean; - hasRootLevelFileWithFolder: boolean; -} - -export interface PublicUploadProps { - token: string; - passwordToken: string; - accessedThroughSharedURL: boolean; -} - -export interface ExtractMetadataResult { - metadata: Metadata; - publicMagicMetadata: FilePublicMagicMetadataProps; -} diff --git a/web/apps/photos/src/types/upload/ui.ts b/web/apps/photos/src/types/upload/ui.ts deleted file mode 100644 index bce381213f..0000000000 --- a/web/apps/photos/src/types/upload/ui.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload"; - -export type FileID = number; -export type FileName = string; - -export type PercentageUploaded = number; -export type UploadFileNames = Map; - -export interface UploadCounter { - finished: number; - total: number; -} - -export interface InProgressUpload { - localFileID: FileID; - progress: PercentageUploaded; -} - -export interface FinishedUpload { - localFileID: FileID; - result: UPLOAD_RESULT; -} - -export type InProgressUploads = Map; - -export type FinishedUploads = Map; - -export type SegregatedFinishedUploads = Map; - -export interface ProgressUpdater { - setPercentComplete: React.Dispatch>; - setUploadCounter: React.Dispatch>; - setUploadStage: React.Dispatch>; - setInProgressUploads: React.Dispatch< - React.SetStateAction - >; - setFinishedUploads: React.Dispatch< - React.SetStateAction - >; - setUploadFilenames: React.Dispatch>; - setHasLivePhotos: React.Dispatch>; - setUploadProgressView: React.Dispatch>; -} diff --git a/web/apps/photos/src/types/watchFolder/index.ts b/web/apps/photos/src/types/watchFolder/index.ts deleted file mode 100644 index 
bd55704de9..0000000000 --- a/web/apps/photos/src/types/watchFolder/index.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { UPLOAD_STRATEGY } from "constants/upload"; -import { ElectronFile } from "types/upload"; - -export interface WatchMappingSyncedFile { - path: string; - uploadedFileID: number; - collectionID: number; -} - -export interface WatchMapping { - rootFolderName: string; - folderPath: string; - uploadStrategy: UPLOAD_STRATEGY; - syncedFiles: WatchMappingSyncedFile[]; - ignoredFiles: string[]; -} - -export interface EventQueueItem { - type: "upload" | "trash"; - folderPath: string; - collectionName?: string; - paths?: string[]; - files?: ElectronFile[]; -} diff --git a/web/apps/photos/src/utils/billing/index.ts b/web/apps/photos/src/utils/billing/index.ts index 3dfde5384b..d2e593e9e1 100644 --- a/web/apps/photos/src/utils/billing/index.ts +++ b/web/apps/photos/src/utils/billing/index.ts @@ -31,44 +31,6 @@ enum RESPONSE_STATUS { fail = "fail", } -const StorageUnits = ["B", "KB", "MB", "GB", "TB"]; - -const ONE_GB = 1024 * 1024 * 1024; - -export function convertBytesToGBs(bytes: number, precision = 0): string { - return (bytes / (1024 * 1024 * 1024)).toFixed(precision); -} - -export function makeHumanReadableStorage( - bytes: number, - { roundUp } = { roundUp: false }, -): string { - if (bytes <= 0) { - return `0 ${t("STORAGE_UNITS.MB")}`; - } - const i = Math.floor(Math.log(bytes) / Math.log(1024)); - - let quantity = bytes / Math.pow(1024, i); - let unit = StorageUnits[i]; - - if (quantity > 100 && unit !== "GB") { - quantity /= 1024; - unit = StorageUnits[i + 1]; - } - - quantity = Number(quantity.toFixed(1)); - - if (bytes >= 10 * ONE_GB) { - if (roundUp) { - quantity = Math.ceil(quantity); - } else { - quantity = Math.round(quantity); - } - } - - return `${quantity} ${t(`STORAGE_UNITS.${unit}`)}`; -} - export function hasPaidSubscription(subscription: Subscription) { return ( subscription && @@ -160,9 +122,8 @@ export function 
isSubscriptionPastDue(subscription: Subscription) { ); } -export function isPopularPlan(plan: Plan) { - return plan.storage === 100 * ONE_GB; -} +export const isPopularPlan = (plan: Plan) => + plan.storage === 100 * 1024 * 1024 * 1024; /* 100 GB */ export async function updateSubscription( plan: Plan, diff --git a/web/apps/photos/src/utils/comlink/ComlinkConvertWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkConvertWorker.ts deleted file mode 100644 index 8603171586..0000000000 --- a/web/apps/photos/src/utils/comlink/ComlinkConvertWorker.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { haveWindow } from "@/next/env"; -import { ComlinkWorker } from "@/next/worker/comlink-worker"; -import { Remote } from "comlink"; -import { DedicatedConvertWorker } from "worker/convert.worker"; - -class ComlinkConvertWorker { - private comlinkWorkerInstance: Remote; - - async getInstance() { - if (!this.comlinkWorkerInstance) { - this.comlinkWorkerInstance = - await getDedicatedConvertWorker().remote; - } - return this.comlinkWorkerInstance; - } -} - -export const getDedicatedConvertWorker = () => { - if (haveWindow()) { - const cryptoComlinkWorker = new ComlinkWorker< - typeof DedicatedConvertWorker - >( - "ente-convert-worker", - new Worker(new URL("worker/convert.worker.ts", import.meta.url)), - ); - return cryptoComlinkWorker; - } -}; - -export default new ComlinkConvertWorker(); diff --git a/web/apps/photos/src/utils/comlink/ComlinkFFmpegWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkFFmpegWorker.ts deleted file mode 100644 index 29d19d6fa0..0000000000 --- a/web/apps/photos/src/utils/comlink/ComlinkFFmpegWorker.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { ComlinkWorker } from "@/next/worker/comlink-worker"; -import { Remote } from "comlink"; -import { DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; - -class ComlinkFFmpegWorker { - private comlinkWorkerInstance: Promise>; - - async getInstance() { - if (!this.comlinkWorkerInstance) { - const comlinkWorker = 
getDedicatedFFmpegWorker(); - this.comlinkWorkerInstance = comlinkWorker.remote; - } - return this.comlinkWorkerInstance; - } -} - -const getDedicatedFFmpegWorker = () => { - const cryptoComlinkWorker = new ComlinkWorker( - "ente-ffmpeg-worker", - new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)), - ); - return cryptoComlinkWorker; -}; - -export default new ComlinkFFmpegWorker(); diff --git a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts index c1ed53f7b3..f312a2c5c0 100644 --- a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts +++ b/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts @@ -1,6 +1,6 @@ import { haveWindow } from "@/next/env"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; -import { DedicatedMLWorker } from "worker/ml.worker"; +import { type DedicatedMLWorker } from "worker/ml.worker"; export const getDedicatedMLWorker = (name: string) => { if (haveWindow()) { diff --git a/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts index bc65066056..4886bacda5 100644 --- a/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts +++ b/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts @@ -1,7 +1,7 @@ import { haveWindow } from "@/next/env"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { Remote } from "comlink"; -import { DedicatedSearchWorker } from "worker/search.worker"; +import { type DedicatedSearchWorker } from "worker/search.worker"; class ComlinkSearchWorker { private comlinkWorkerInstance: Remote; diff --git a/web/apps/photos/src/utils/ffmpeg/index.ts b/web/apps/photos/src/utils/ffmpeg/index.ts deleted file mode 100644 index 1b3445976a..0000000000 --- a/web/apps/photos/src/utils/ffmpeg/index.ts +++ /dev/null @@ -1,77 +0,0 @@ -import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time"; -import { NULL_LOCATION } from "constants/upload"; -import { 
ParsedExtractedMetadata } from "types/upload"; - -enum MetadataTags { - CREATION_TIME = "creation_time", - APPLE_CONTENT_IDENTIFIER = "com.apple.quicktime.content.identifier", - APPLE_LIVE_PHOTO_IDENTIFIER = "com.apple.quicktime.live-photo.auto", - APPLE_CREATION_DATE = "com.apple.quicktime.creationdate", - APPLE_LOCATION_ISO = "com.apple.quicktime.location.ISO6709", - LOCATION = "location", -} - -export function parseFFmpegExtractedMetadata(encodedMetadata: Uint8Array) { - const metadataString = new TextDecoder().decode(encodedMetadata); - const metadataPropertyArray = metadataString.split("\n"); - const metadataKeyValueArray = metadataPropertyArray.map((property) => - property.split("="), - ); - const validKeyValuePairs = metadataKeyValueArray.filter( - (keyValueArray) => keyValueArray.length === 2, - ) as Array<[string, string]>; - - const metadataMap = Object.fromEntries(validKeyValuePairs); - - const location = parseAppleISOLocation( - metadataMap[MetadataTags.APPLE_LOCATION_ISO] ?? - metadataMap[MetadataTags.LOCATION], - ); - - const creationTime = parseCreationTime( - metadataMap[MetadataTags.APPLE_CREATION_DATE] ?? 
- metadataMap[MetadataTags.CREATION_TIME], - ); - const parsedMetadata: ParsedExtractedMetadata = { - creationTime, - location: { - latitude: location.latitude, - longitude: location.longitude, - }, - width: null, - height: null, - }; - return parsedMetadata; -} - -function parseAppleISOLocation(isoLocation: string) { - let location = NULL_LOCATION; - if (isoLocation) { - const [latitude, longitude] = isoLocation - .match(/(\+|-)\d+\.*\d+/g) - .map((x) => parseFloat(x)); - - location = { latitude, longitude }; - } - return location; -} - -function parseCreationTime(creationTime: string) { - let dateTime = null; - if (creationTime) { - dateTime = validateAndGetCreationUnixTimeInMicroSeconds( - new Date(creationTime), - ); - } - return dateTime; -} - -export function splitFilenameAndExtension(filename: string): [string, string] { - const lastDotPosition = filename.lastIndexOf("."); - if (lastDotPosition === -1) return [filename, null]; - else - return [ - filename.slice(0, lastDotPosition), - filename.slice(lastDotPosition + 1), - ]; -} diff --git a/web/apps/photos/src/utils/file/index.ts b/web/apps/photos/src/utils/file/index.ts index 785921cc91..98a8dd9481 100644 --- a/web/apps/photos/src/utils/file/index.ts +++ b/web/apps/photos/src/utils/file/index.ts @@ -1,40 +1,27 @@ -import { convertBytesToHumanReadable } from "@/next/file"; +import { FILE_TYPE } from "@/media/file-type"; +import { isNonWebImageFileExtension } from "@/media/formats"; +import { decodeLivePhoto } from "@/media/live-photo"; +import { lowercaseExtension } from "@/next/file"; import log from "@/next/log"; -import type { Electron } from "@/next/types/ipc"; +import { CustomErrorMessage, type Electron } from "@/next/types/ipc"; import { workerBridge } from "@/next/worker/worker-bridge"; import ComlinkCryptoWorker from "@ente/shared/crypto"; -import { CustomError } from "@ente/shared/error"; -import { isPlaybackPossible } from "@ente/shared/media/video-playback"; import { LS_KEYS, getData } from 
"@ente/shared/storage/localStorage"; import { User } from "@ente/shared/user/types"; -import { downloadUsingAnchor } from "@ente/shared/utils"; -import { - FILE_TYPE, - RAW_FORMATS, - SUPPORTED_RAW_FORMATS, - TYPE_HEIC, - TYPE_HEIF, - TYPE_JPEG, - TYPE_JPG, -} from "constants/file"; +import { downloadUsingAnchor, withTimeout } from "@ente/shared/utils"; import { t } from "i18next"; import isElectron from "is-electron"; import { moveToHiddenCollection } from "services/collectionService"; -import DownloadManager, { - LivePhotoSourceURL, - SourceURLs, -} from "services/download"; -import * as ffmpegService from "services/ffmpeg/ffmpegService"; +import { detectFileTypeInfo } from "services/detect-type"; +import DownloadManager from "services/download"; +import { updateFileCreationDateInEXIF } from "services/exif"; import { deleteFromTrash, trashFiles, updateFileMagicMetadata, updateFilePublicMagicMetadata, } from "services/fileService"; -import heicConversionService from "services/heicConversionService"; -import { decodeLivePhoto } from "services/livePhotoService"; -import { getFileType } from "services/typeDetectionService"; -import { updateFileCreationDateInEXIF } from "services/upload/exifService"; +import { heicToJPEG } from "services/heic-convert"; import { EncryptedEnteFile, EnteFile, @@ -50,12 +37,22 @@ import { SetFilesDownloadProgressAttributesCreator, } from "types/gallery"; import { VISIBILITY_STATE } from "types/magicMetadata"; -import { FileTypeInfo } from "types/upload"; import { isArchivedFile, updateMagicMetadata } from "utils/magicMetadata"; import { safeFileName } from "utils/native-fs"; import { writeStream } from "utils/native-stream"; -const WAIT_TIME_IMAGE_CONVERSION = 30 * 1000; +const SUPPORTED_RAW_FORMATS = [ + "heic", + "rw2", + "tiff", + "arw", + "cr3", + "cr2", + "nef", + "psd", + "dng", + "tif", +]; export enum FILE_OPS_TYPE { DOWNLOAD, @@ -67,16 +64,32 @@ export enum FILE_OPS_TYPE { DELETE_PERMANENTLY, } +class ModuleState { + /** + * This 
will be set to true if we get an error from the Node.js side of our + * desktop app telling us that native JPEG conversion is not available for + * the current OS/arch combination. + * + * That way, we can stop pestering it again and again (saving an IPC + * round-trip). + * + * Note the double negative when it is used. + */ + isNativeJPEGConversionNotAvailable = false; +} + +const moduleState = new ModuleState(); + export async function getUpdatedEXIFFileForDownload( fileReader: FileReader, file: EnteFile, fileStream: ReadableStream, ): Promise> { - const extension = getFileExtension(file.metadata.title); + const extension = lowercaseExtension(file.metadata.title); if ( file.metadata.fileType === FILE_TYPE.IMAGE && file.pubMagicMetadata?.data.editedTime && - (extension === TYPE_JPEG || extension === TYPE_JPG) + (extension == "jpeg" || extension == "jpg") ) { const fileBlob = await new Response(fileStream).blob(); const updatedFileBlob = await updateFileCreationDateInEXIF( @@ -97,21 +110,22 @@ export async function downloadFile(file: EnteFile) { await DownloadManager.getFile(file), ).blob(); if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - const livePhoto = await decodeLivePhoto(file, fileBlob); - const image = new File([livePhoto.image], livePhoto.imageNameTitle); - const imageType = await getFileType(image); + const { imageFileName, imageData, videoFileName, videoData } = + await decodeLivePhoto(file.metadata.title, fileBlob); + const image = new File([imageData], imageFileName); + const imageType = await detectFileTypeInfo(image); const tempImageURL = URL.createObjectURL( - new Blob([livePhoto.image], { type: imageType.mimeType }), + new Blob([imageData], { type: imageType.mimeType }), ); - const video = new File([livePhoto.video], livePhoto.videoNameTitle); - const videoType = await getFileType(video); + const video = new File([videoData], videoFileName); + const videoType = await detectFileTypeInfo(video); const tempVideoURL = URL.createObjectURL( - 
new Blob([livePhoto.video], { type: videoType.mimeType }), + new Blob([videoData], { type: videoType.mimeType }), ); - downloadUsingAnchor(tempImageURL, livePhoto.imageNameTitle); - downloadUsingAnchor(tempVideoURL, livePhoto.videoNameTitle); + downloadUsingAnchor(tempImageURL, imageFileName); + downloadUsingAnchor(tempVideoURL, videoFileName); } else { - const fileType = await getFileType( + const fileType = await detectFileTypeInfo( new File([fileBlob], file.metadata.title), ); fileBlob = await new Response( @@ -131,16 +145,16 @@ export async function downloadFile(file: EnteFile) { } } -export function groupFilesBasedOnCollectionID(files: EnteFile[]) { - const collectionWiseFiles = new Map(); +/** Segment the given {@link files} into lists indexed by their collection ID */ +export const groupFilesBasedOnCollectionID = (files: EnteFile[]) => { + const result = new Map(); for (const file of files) { - if (!collectionWiseFiles.has(file.collectionID)) { - collectionWiseFiles.set(file.collectionID, []); - } - collectionWiseFiles.get(file.collectionID).push(file); + const id = file.collectionID; + if (!result.has(id)) result.set(id, []); + result.get(id).push(file); } - return collectionWiseFiles; -} + return result; +}; function getSelectedFileIds(selectedFiles: SelectedState) { const filesIDs: number[] = []; @@ -247,32 +261,6 @@ export async function decryptFile( } } -export function getFileNameWithoutExtension(filename: string) { - const lastDotPosition = filename.lastIndexOf("."); - if (lastDotPosition === -1) return filename; - else return filename.slice(0, lastDotPosition); -} - -export function getFileExtensionWithDot(filename: string) { - const lastDotPosition = filename.lastIndexOf("."); - if (lastDotPosition === -1) return ""; - else return filename.slice(lastDotPosition); -} - -export function splitFilenameAndExtension(filename: string): [string, string] { - const lastDotPosition = filename.lastIndexOf("."); - if (lastDotPosition === -1) return [filename, 
null]; - else - return [ - filename.slice(0, lastDotPosition), - filename.slice(lastDotPosition + 1), - ]; -} - -export function getFileExtension(filename: string) { - return splitFilenameAndExtension(filename)[1]?.toLocaleLowerCase(); -} - export function generateStreamFromArrayBuffer(data: Uint8Array) { return new ReadableStream({ async start(controller: ReadableStreamDefaultController) { @@ -282,247 +270,62 @@ export function generateStreamFromArrayBuffer(data: Uint8Array) { }); } -export async function getRenderableFileURL( - file: EnteFile, - fileBlob: Blob, - originalFileURL: string, - forceConvert: boolean, -): Promise { - let srcURLs: SourceURLs["url"]; - switch (file.metadata.fileType) { - case FILE_TYPE.IMAGE: { - const convertedBlob = await getRenderableImage( - file.metadata.title, - fileBlob, - ); - const convertedURL = getFileObjectURL( - originalFileURL, - fileBlob, - convertedBlob, - ); - srcURLs = convertedURL; - break; - } - case FILE_TYPE.LIVE_PHOTO: { - srcURLs = await getRenderableLivePhotoURL( - file, - fileBlob, - forceConvert, - ); - break; - } - case FILE_TYPE.VIDEO: { - const convertedBlob = await getPlayableVideo( - file.metadata.title, - fileBlob, - forceConvert, - ); - const convertedURL = getFileObjectURL( - originalFileURL, - fileBlob, - convertedBlob, - ); - srcURLs = convertedURL; - break; - } - default: { - srcURLs = originalFileURL; - break; - } - } - - let isOriginal: boolean; - if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - isOriginal = false; - } else { - isOriginal = (srcURLs as string) === (originalFileURL as string); - } - - return { - url: srcURLs, - isOriginal, - isRenderable: - file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs, - type: - file.metadata.fileType === FILE_TYPE.LIVE_PHOTO - ? 
"livePhoto" - : "normal", - }; -} - -async function getRenderableLivePhotoURL( - file: EnteFile, - fileBlob: Blob, - forceConvert: boolean, -): Promise { - const livePhoto = await decodeLivePhoto(file, fileBlob); - - const getRenderableLivePhotoImageURL = async () => { - try { - const imageBlob = new Blob([livePhoto.image]); - const convertedImageBlob = await getRenderableImage( - livePhoto.imageNameTitle, - imageBlob, - ); - - return URL.createObjectURL(convertedImageBlob); - } catch (e) { - //ignore and return null - return null; - } - }; - - const getRenderableLivePhotoVideoURL = async () => { - try { - const videoBlob = new Blob([livePhoto.video]); - - const convertedVideoBlob = await getPlayableVideo( - livePhoto.videoNameTitle, - videoBlob, - forceConvert, - true, - ); - return URL.createObjectURL(convertedVideoBlob); - } catch (e) { - //ignore and return null - return null; - } - }; - - return { - image: getRenderableLivePhotoImageURL, - video: getRenderableLivePhotoVideoURL, - }; -} - -export async function getPlayableVideo( - videoNameTitle: string, - videoBlob: Blob, - forceConvert = false, - runOnWeb = false, -) { - try { - const isPlayable = await isPlaybackPossible( - URL.createObjectURL(videoBlob), - ); - if (isPlayable && !forceConvert) { - return videoBlob; - } else { - if (!forceConvert && !runOnWeb && !isElectron()) { - return null; - } - log.info( - `video format not supported, converting it name: ${videoNameTitle}`, - ); - const mp4ConvertedVideo = await ffmpegService.convertToMP4( - new File([videoBlob], videoNameTitle), - ); - log.info(`video successfully converted ${videoNameTitle}`); - return new Blob([await mp4ConvertedVideo.arrayBuffer()]); - } - } catch (e) { - log.error("video conversion failed", e); - return null; - } -} - -export async function getRenderableImage(fileName: string, imageBlob: Blob) { - let fileTypeInfo: FileTypeInfo; +export const getRenderableImage = async (fileName: string, imageBlob: Blob) => { try { const tempFile = 
new File([imageBlob], fileName); - fileTypeInfo = await getFileType(tempFile); - log.debug(() => `file type info: ${JSON.stringify(fileTypeInfo)}`); - const { exactType } = fileTypeInfo; - let convertedImageBlob: Blob; - if (isRawFile(exactType)) { - try { - if (!isSupportedRawFormat(exactType)) { - throw Error(CustomError.UNSUPPORTED_RAW_FORMAT); - } + const fileTypeInfo = await detectFileTypeInfo(tempFile); + log.debug( + () => + `Need renderable image for ${JSON.stringify({ fileName, ...fileTypeInfo })}`, + ); + const { extension } = fileTypeInfo; - if (!isElectron()) { - throw new Error("not available on web"); - } - log.info( - `RawConverter called for ${fileName}-${convertBytesToHumanReadable( - imageBlob.size, - )}`, - ); - convertedImageBlob = await convertToJPEGInElectron( - imageBlob, - fileName, - ); - log.info(`${fileName} successfully converted`); - } catch (e) { - try { - if (!isFileHEIC(exactType)) { - throw e; - } - log.info( - `HEICConverter called for ${fileName}-${convertBytesToHumanReadable( - imageBlob.size, - )}`, - ); - convertedImageBlob = - await heicConversionService.convert(imageBlob); - log.info(`${fileName} successfully converted`); - } catch (e) { - throw Error(CustomError.NON_PREVIEWABLE_FILE); - } - } - return convertedImageBlob; - } else { + if (!isNonWebImageFileExtension(extension)) { + // Either it is something that the browser already knows how to + // render, or something we don't even about yet. return imageBlob; } - } catch (e) { - log.error( - `Failed to get renderable image for ${JSON.stringify(fileTypeInfo)}`, - e, - ); - return null; - } -} -const convertToJPEGInElectron = async ( - fileBlob: Blob, - filename: string, -): Promise => { - try { - const startTime = Date.now(); - const inputFileData = new Uint8Array(await fileBlob.arrayBuffer()); - const electron = globalThis.electron; - const convertedFileData = electron - ? 
await electron.convertToJPEG(inputFileData, filename) - : await workerBridge.convertToJPEG(inputFileData, filename); - log.info( - `originalFileSize:${convertBytesToHumanReadable( - fileBlob?.size, - )},convertedFileSize:${convertBytesToHumanReadable( - convertedFileData?.length, - )}, native conversion time: ${Date.now() - startTime}ms `, - ); - return new Blob([convertedFileData]); - } catch (e) { - if ( - e.message !== - CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED - ) { - log.error("failed to convert to jpeg natively", e); + const available = !moduleState.isNativeJPEGConversionNotAvailable; + if (isElectron() && available && isSupportedRawFormat(extension)) { + // If we're running in our desktop app, see if our Node.js layer can + // convert this into a JPEG using native tools for us. + try { + return await nativeConvertToJPEG(imageBlob); + } catch (e) { + if (e.message.endsWith(CustomErrorMessage.NotAvailable)) { + moduleState.isNativeJPEGConversionNotAvailable = true; + } else { + log.error("Native conversion to JPEG failed", e); + } + } } - throw e; + + if (extension == "heic" || extension == "heif") { + // For HEIC/HEIF files we can use our web HEIC converter. 
+ return await heicToJPEG(imageBlob); + } + + return undefined; + } catch (e) { + log.error(`Failed to get renderable image for ${fileName}`, e); + return undefined; } }; -export function isFileHEIC(exactType: string) { - return ( - exactType.toLowerCase().endsWith(TYPE_HEIC) || - exactType.toLowerCase().endsWith(TYPE_HEIF) - ); -} - -export function isRawFile(exactType: string) { - return RAW_FORMATS.includes(exactType.toLowerCase()); -} +const nativeConvertToJPEG = async (imageBlob: Blob) => { + const startTime = Date.now(); + const imageData = new Uint8Array(await imageBlob.arrayBuffer()); + const electron = globalThis.electron; + // If we're running in a worker, we need to reroute the request back to + // the main thread since workers don't have access to the `window` (and + // thus, to the `window.electron`) object. + const jpegData = electron + ? await electron.convertToJPEG(imageData) + : await workerBridge.convertToJPEG(imageData); + log.debug(() => `Native JPEG conversion took ${Date.now() - startTime} ms`); + return new Blob([jpegData]); +}; export function isSupportedRawFormat(exactType: string) { return SUPPORTED_RAW_FORMATS.includes(exactType.toLowerCase()); @@ -616,6 +419,18 @@ export function isSharedFile(user: User, file: EnteFile) { return file.ownerID !== user.id; } +/** + * [Note: File name for local EnteFile objects] + * + * The title property in a file's metadata is the original file's name. The + * metadata of a file cannot be edited. So if later on the file's name is + * changed, then the edit is stored in the `editedName` property of the public + * metadata of the file. + * + * This function merges these edits onto the file object that we use locally. + * Effectively, post this step, the file's metadata.title can be used in lieu of + * its filename. 
+ */ export function mergeMetadata(files: EnteFile[]): EnteFile[] { return files.map((file) => { if (file.pubMagicMetadata?.data.editedTime) { @@ -813,22 +628,31 @@ async function downloadFileDesktop( if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { const fileBlob = await new Response(updatedStream).blob(); - const livePhoto = await decodeLivePhoto(file, fileBlob); + const { imageFileName, imageData, videoFileName, videoData } = + await decodeLivePhoto(file.metadata.title, fileBlob); const imageExportName = await safeFileName( downloadDir, - livePhoto.imageNameTitle, + imageFileName, fs.exists, ); - const imageStream = generateStreamFromArrayBuffer(livePhoto.image); - await writeStream(`${downloadDir}/${imageExportName}`, imageStream); + const imageStream = generateStreamFromArrayBuffer(imageData); + await writeStream( + electron, + `${downloadDir}/${imageExportName}`, + imageStream, + ); try { const videoExportName = await safeFileName( downloadDir, - livePhoto.videoNameTitle, + videoFileName, fs.exists, ); - const videoStream = generateStreamFromArrayBuffer(livePhoto.video); - await writeStream(`${downloadDir}/${videoExportName}`, videoStream); + const videoStream = generateStreamFromArrayBuffer(videoData); + await writeStream( + electron, + `${downloadDir}/${videoExportName}`, + videoStream, + ); } catch (e) { await fs.rm(`${downloadDir}/${imageExportName}`); throw e; @@ -839,7 +663,11 @@ async function downloadFileDesktop( file.metadata.title, fs.exists, ); - await writeStream(`${downloadDir}/${fileExportName}`, updatedStream); + await writeStream( + electron, + `${downloadDir}/${fileExportName}`, + updatedStream, + ); } } @@ -851,7 +679,7 @@ export const getArchivedFiles = (files: EnteFile[]) => { }; export const createTypedObjectURL = async (blob: Blob, fileName: string) => { - const type = await getFileType(new File([blob], fileName)); + const type = await detectFileTypeInfo(new File([blob], fileName)); return URL.createObjectURL(new Blob([blob], { 
type: type.mimeType })); }; @@ -864,15 +692,14 @@ export const getUserOwnedFiles = (files: EnteFile[]) => { }; // doesn't work on firefox -export const copyFileToClipboard = async (fileUrl: string) => { +export const copyFileToClipboard = async (fileURL: string) => { const canvas = document.createElement("canvas"); const canvasCTX = canvas.getContext("2d"); const image = new Image(); const blobPromise = new Promise((resolve, reject) => { - let timeout: NodeJS.Timeout = null; try { - image.setAttribute("src", fileUrl); + image.setAttribute("src", fileURL); image.onload = () => { canvas.width = image.width; canvas.height = image.height; @@ -884,26 +711,17 @@ export const copyFileToClipboard = async (fileUrl: string) => { "image/png", 1, ); - - clearTimeout(timeout); }; } catch (e) { - log.error("failed to copy to clipboard", e); + log.error("Failed to copy to clipboard", e); reject(e); - } finally { - clearTimeout(timeout); } - timeout = setTimeout( - () => reject(new Error("Operation timed out")), - WAIT_TIME_IMAGE_CONVERSION, - ); }); - const { ClipboardItem } = window; + const blob = await withTimeout(blobPromise, 30 * 1000); - await navigator.clipboard - .write([new ClipboardItem({ "image/png": blobPromise })]) - .catch((e) => log.error("failed to copy to clipboard", e)); + const { ClipboardItem } = window; + await navigator.clipboard.write([new ClipboardItem({ "image/png": blob })]); }; export function getLatestVersionFiles(files: EnteFile[]) { @@ -1072,16 +890,3 @@ const fixTimeHelper = async ( ) => { setFixCreationTimeAttributes({ files: selectedFiles }); }; - -const getFileObjectURL = ( - originalFileURL: string, - originalBlob: Blob, - convertedBlob: Blob, -) => { - const convertedURL = convertedBlob - ? convertedBlob === originalBlob - ? 
originalFileURL - : URL.createObjectURL(convertedBlob) - : null; - return convertedURL; -}; diff --git a/web/apps/photos/src/utils/file/livePhoto.ts b/web/apps/photos/src/utils/file/livePhoto.ts deleted file mode 100644 index 7d687217ce..0000000000 --- a/web/apps/photos/src/utils/file/livePhoto.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { FILE_TYPE } from "constants/file"; -import { getFileExtension } from "utils/file"; - -const IMAGE_EXTENSIONS = [ - "heic", - "heif", - "jpeg", - "jpg", - "png", - "gif", - "bmp", - "tiff", - "webp", -]; - -const VIDEO_EXTENSIONS = [ - "mov", - "mp4", - "m4v", - "avi", - "wmv", - "flv", - "mkv", - "webm", - "3gp", - "3g2", - "avi", - "ogv", - "mpg", - "mp", -]; - -export function getFileTypeFromExtensionForLivePhotoClustering( - filename: string, -) { - const extension = getFileExtension(filename)?.toLowerCase(); - if (IMAGE_EXTENSIONS.includes(extension)) { - return FILE_TYPE.IMAGE; - } else if (VIDEO_EXTENSIONS.includes(extension)) { - return FILE_TYPE.VIDEO; - } -} diff --git a/web/apps/photos/src/utils/machineLearning/config.ts b/web/apps/photos/src/utils/machineLearning/config.ts index 4d2030ca3e..0c25356aba 100644 --- a/web/apps/photos/src/utils/machineLearning/config.ts +++ b/web/apps/photos/src/utils/machineLearning/config.ts @@ -10,6 +10,7 @@ import mlIDbStorage, { ML_SYNC_CONFIG_NAME, ML_SYNC_JOB_CONFIG_NAME, } from "utils/storage/mlIDbStorage"; +import { isInternalUserForML } from "utils/user"; export async function getMLSyncJobConfig() { return mlIDbStorage.getConfig( @@ -23,10 +24,15 @@ export async function getMLSyncConfig() { } export async function getMLSearchConfig() { - return mlIDbStorage.getConfig( - ML_SEARCH_CONFIG_NAME, - DEFAULT_ML_SEARCH_CONFIG, - ); + if (isInternalUserForML()) { + return mlIDbStorage.getConfig( + ML_SEARCH_CONFIG_NAME, + DEFAULT_ML_SEARCH_CONFIG, + ); + } + // Force disabled for everyone else while we finalize it to avoid redundant + // reindexing for users. 
+ return DEFAULT_ML_SEARCH_CONFIG; } export async function updateMLSyncJobConfig(newConfig: JobConfig) { diff --git a/web/apps/photos/src/utils/machineLearning/index.ts b/web/apps/photos/src/utils/machineLearning/index.ts index 2c199981a1..bc9ae39749 100644 --- a/web/apps/photos/src/utils/machineLearning/index.ts +++ b/web/apps/photos/src/utils/machineLearning/index.ts @@ -1,9 +1,9 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { decodeLivePhoto } from "@/media/live-photo"; import log from "@/next/log"; -import { FILE_TYPE } from "constants/file"; import PQueue from "p-queue"; import DownloadManager from "services/download"; import { getLocalFiles } from "services/fileService"; -import { decodeLivePhoto } from "services/livePhotoService"; import { EnteFile } from "types/file"; import { Dimensions } from "types/image"; import { @@ -134,11 +134,11 @@ async function getOriginalConvertedFile(file: EnteFile, queue?: PQueue) { if (file.metadata.fileType === FILE_TYPE.IMAGE) { return await getRenderableImage(file.metadata.title, fileBlob); } else { - const livePhoto = await decodeLivePhoto(file, fileBlob); - return await getRenderableImage( - livePhoto.imageNameTitle, - new Blob([livePhoto.image]), + const { imageFileName, imageData } = await decodeLivePhoto( + file.metadata.title, + fileBlob, ); + return await getRenderableImage(imageFileName, new Blob([imageData])); } } diff --git a/web/apps/photos/src/utils/native-fs.ts b/web/apps/photos/src/utils/native-fs.ts index 2ef8963022..27ebdd1c12 100644 --- a/web/apps/photos/src/utils/native-fs.ts +++ b/web/apps/photos/src/utils/native-fs.ts @@ -1,5 +1,5 @@ /** - * @file Utilities for native filesystem access. + * @file Utilities for native file system access. * * While they don't have any direct dependencies to our desktop app, they were * written for use by the code that runs in our desktop app. 
diff --git a/web/apps/photos/src/utils/native-stream.ts b/web/apps/photos/src/utils/native-stream.ts
index 7dba1acf9c..4ed9da753a 100644
--- a/web/apps/photos/src/utils/native-stream.ts
+++ b/web/apps/photos/src/utils/native-stream.ts
@@ -2,39 +2,104 @@
 * @file Streaming IPC communication with the Node.js layer of our desktop app.
 *
 * NOTE: These functions only work when we're running in our desktop app.
+ *
+ * See: [Note: IPC streams].
 */
+import type { Electron, ZipItem } from "@/next/types/ipc";
+
+/**
+ * Stream the given file or zip entry from the user's local file system.
+ *
+ * This only works when we're running in our desktop app since it uses the
+ * "stream://" protocol handler exposed by our custom code in the Node.js layer.
+ * See: [Note: IPC streams].
+ *
+ * To avoid accidentally invoking it in a non-desktop app context, it requires
+ * the {@link Electron} object as a parameter (even though it doesn't use it).
+ *
+ * @param pathOrZipItem Either the path of the file on the user's local file
+ * system whose contents we want to stream. Or a tuple containing the path to a
+ * zip file and the name of the entry within it.
+ *
+ * @return A ({@link Response}, size, lastModifiedMs) triple.
+ *
+ * * The response contains the contents of the file. In particular, the `body`
+ * {@link ReadableStream} property of this response can be used to read the
+ * file's contents in a streaming manner.
+ *
+ * * The size is the size of the file that we'll be reading from disk.
+ *
+ * * The lastModifiedMs value is the last modified time of the file that we're
+ * reading, expressed as epoch milliseconds.
+ */ +export const readStream = async ( + _: Electron, + pathOrZipItem: string | ZipItem, +): Promise<{ response: Response; size: number; lastModifiedMs: number }> => { + let url: URL; + if (typeof pathOrZipItem == "string") { + const params = new URLSearchParams({ path: pathOrZipItem }); + url = new URL(`stream://read?${params.toString()}`); + } else { + const [zipPath, entryName] = pathOrZipItem; + const params = new URLSearchParams({ zipPath, entryName }); + url = new URL(`stream://read-zip?${params.toString()}`); + } + + const req = new Request(url, { method: "GET" }); + + const res = await fetch(req); + if (!res.ok) + throw new Error( + `Failed to read stream from ${url}: HTTP ${res.status}`, + ); + + const size = readNumericHeader(res, "Content-Length"); + const lastModifiedMs = readNumericHeader(res, "X-Last-Modified-Ms"); + + return { response: res, size, lastModifiedMs }; +}; + +const readNumericHeader = (res: Response, key: string) => { + const valueText = res.headers.get(key); + const value = +valueText; + if (isNaN(value)) + throw new Error( + `Expected a numeric ${key} when reading a stream response, instead got ${valueText}`, + ); + return value; +}; + /** * Write the given stream to a file on the local machine. * - * **This only works when we're running in our desktop app**. It uses the + * This only works when we're running in our desktop app since it uses the * "stream://" protocol handler exposed by our custom code in the Node.js layer. * See: [Note: IPC streams]. * + * To avoid accidentally invoking it in a non-desktop app context, it requires + * the {@link Electron} object as a parameter (even though it doesn't use it). + * * @param path The path on the local machine where to write the file to. + * * @param stream The stream which should be written into the file. - * */ -export const writeStream = async (path: string, stream: ReadableStream) => { - // TODO(MR): This doesn't currently work. 
- // - // Not sure what I'm doing wrong here; I've opened an issue upstream - // https://github.com/electron/electron/issues/41872 - // - // A gist with a minimal reproduction - // https://gist.github.com/mnvr/e08d9f4876fb8400b7615347b4d268eb - // - // Meanwhile, write the complete body in one go (this'll eventually run into - // memory failures with large files - just a temporary stopgap to get the - // code to work). + */ +export const writeStream = async ( + _: Electron, + path: string, + stream: ReadableStream, +) => { + const params = new URLSearchParams({ path }); + const url = new URL(`stream://write?${params.toString()}`); - /* // The duplex parameter needs to be set to 'half' when streaming requests. // // Currently browsers, and specifically in our case, since this code runs // only within our desktop (Electron) app, Chromium, don't support 'full' // duplex mode (i.e. streaming both the request and the response). // https://developer.chrome.com/docs/capabilities/web-apis/fetch-streaming-requests - const req = new Request(`stream://write${path}`, { + const req = new Request(url, { // GET can't have a body method: "POST", body: stream, @@ -43,12 +108,6 @@ export const writeStream = async (path: string, stream: ReadableStream) => { // https://github.com/node-fetch/node-fetch/issues/1769. 
duplex: "half", }); - */ - - const req = new Request(`stream://write${path}`, { - method: "POST", - body: await new Response(stream).blob(), - }); const res = await fetch(req); if (!res.ok) diff --git a/web/apps/photos/src/utils/photoFrame/index.ts b/web/apps/photos/src/utils/photoFrame/index.ts index faf0679e7f..93b680149f 100644 --- a/web/apps/photos/src/utils/photoFrame/index.ts +++ b/web/apps/photos/src/utils/photoFrame/index.ts @@ -1,5 +1,5 @@ +import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; -import { FILE_TYPE } from "constants/file"; import { LivePhotoSourceURL, SourceURLs } from "services/download"; import { EnteFile } from "types/file"; import { SetSelectedState } from "types/gallery"; diff --git a/web/apps/photos/src/utils/storage/mlIDbStorage.ts b/web/apps/photos/src/utils/storage/mlIDbStorage.ts index 6dccbb89d1..766c3ac9a9 100644 --- a/web/apps/photos/src/utils/storage/mlIDbStorage.ts +++ b/web/apps/photos/src/utils/storage/mlIDbStorage.ts @@ -97,10 +97,8 @@ class MLIDbStorage { wasMLSearchEnabled = searchConfig.enabled; } } catch (e) { - log.info( - "Ignoring likely harmless error while trying to determine ML search status during migration", - e, - ); + // The configs store might not exist (e.g. during logout). + // Ignore. } log.info( `Previous ML database v${oldVersion} had ML search ${wasMLSearchEnabled ? "enabled" : "disabled"}`, @@ -146,7 +144,13 @@ class MLIDbStorage { .objectStore("configs") .add(DEFAULT_ML_SEARCH_CONFIG, ML_SEARCH_CONFIG_NAME); } + /* + This'll go in version 5. Note that version 4 was never released, + but it was in main for a while, so we'll just skip it to avoid + breaking the upgrade path for people who ran the mainline. + */ if (oldVersion < 4) { + /* try { await tx .objectStore("configs") @@ -165,8 +169,8 @@ class MLIDbStorage { // the shipped implementation should have a more // deterministic migration. 
} + */ } - log.info( `ML DB upgraded from version ${oldVersion} to version ${newVersion}`, ); diff --git a/web/apps/photos/src/utils/ui/index.tsx b/web/apps/photos/src/utils/ui/index.tsx index 1b01116d33..8f4895ead5 100644 --- a/web/apps/photos/src/utils/ui/index.tsx +++ b/web/apps/photos/src/utils/ui/index.tsx @@ -1,5 +1,5 @@ import { ensureElectron } from "@/next/electron"; -import { AppUpdateInfo } from "@/next/types/ipc"; +import { AppUpdate } from "@/next/types/ipc"; import { logoutUser } from "@ente/accounts/services/user"; import { DialogBoxAttributes } from "@ente/shared/components/DialogBox/types"; import AutoAwesomeOutlinedIcon from "@mui/icons-material/AutoAwesomeOutlined"; @@ -55,7 +55,7 @@ export const getTrashFileMessage = (deleteFileHelper): DialogBoxAttributes => ({ export const getUpdateReadyToInstallMessage = ({ version, -}: AppUpdateInfo): DialogBoxAttributes => ({ +}: AppUpdate): DialogBoxAttributes => ({ icon: , title: t("UPDATE_AVAILABLE"), content: t("UPDATE_INSTALLABLE_MESSAGE"), @@ -73,7 +73,7 @@ export const getUpdateReadyToInstallMessage = ({ export const getUpdateAvailableForDownloadMessage = ({ version, -}: AppUpdateInfo): DialogBoxAttributes => ({ +}: AppUpdate): DialogBoxAttributes => ({ icon: , title: t("UPDATE_AVAILABLE"), content: t("UPDATE_AVAILABLE_MESSAGE"), diff --git a/web/apps/photos/src/utils/units.ts b/web/apps/photos/src/utils/units.ts new file mode 100644 index 0000000000..4cb875b4dc --- /dev/null +++ b/web/apps/photos/src/utils/units.ts @@ -0,0 +1,85 @@ +import { t } from "i18next"; + +const StorageUnits = ["B", "KB", "MB", "GB", "TB"]; + +/** + * Convert the given number of {@link bytes} to their equivalent GB string with + * {@link precision}. + * + * The returned string does not have the GB suffix. 
+ */
+export const bytesInGB = (bytes: number, precision = 0): string =>
+    (bytes / (1024 * 1024 * 1024)).toFixed(precision);
+
+/**
+ * Convert the given number of {@link bytes} to a user visible string in an
+ * appropriately sized unit.
+ *
+ * The returned string includes the (localized) unit suffix, e.g. "TB".
+ *
+ * @param precision Modify the number of digits after the decimal point.
+ * Defaults to 2.
+ */
+export function formattedByteSize(bytes: number, precision = 2): string {
+    if (bytes === 0 || isNaN(bytes)) {
+        return "0 MB";
+    }
+
+    const i = Math.floor(Math.log(bytes) / Math.log(1024));
+    const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
+    return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i];
+}
+
+interface FormattedStorageByteSizeOptions {
+    /**
+     * If `true` then round up the fractional quantity we obtain when dividing
+     * the number of bytes by the number of bytes in the unit that got chosen.
+     *
+     * The default behaviour is to round to the nearest integer.
+     */
+    round?: boolean;
+}
+
+/**
+ * Convert the given number of storage {@link bytes} to a user visible string in
+ * an appropriately sized unit.
+ *
+ * This differs from {@link formattedByteSize} in that while
+ * {@link formattedByteSize} is meant for arbitrary byte sizes, this function
+ * has a few additional beautification heuristics that we want to apply when
+ * displaying the "storage size" (in different contexts) as opposed to, say, a
+ * generic "file size".
+ *
+ * @param options
+ *
+ * @return A user visible string, including the localized unit suffix.
+ */ +export const formattedStorageByteSize = ( + bytes: number, + options?: FormattedStorageByteSizeOptions, +): string => { + if (bytes <= 0) { + return `0 ${t("STORAGE_UNITS.MB")}`; + } + const i = Math.floor(Math.log(bytes) / Math.log(1024)); + + let quantity = bytes / Math.pow(1024, i); + let unit = StorageUnits[i]; + + if (quantity > 100 && unit !== "GB") { + quantity /= 1024; + unit = StorageUnits[i + 1]; + } + + quantity = Number(quantity.toFixed(1)); + + if (bytes >= 10 * 1024 * 1024 * 1024 /* 10 GB */) { + if (options?.round) { + quantity = Math.ceil(quantity); + } else { + quantity = Math.round(quantity); + } + } + + return `${quantity} ${t(`STORAGE_UNITS.${unit}`)}`; +}; diff --git a/web/apps/photos/src/utils/upload/index.ts b/web/apps/photos/src/utils/upload/index.ts deleted file mode 100644 index 643c931fe5..0000000000 --- a/web/apps/photos/src/utils/upload/index.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { FILE_TYPE } from "constants/file"; -import { - A_SEC_IN_MICROSECONDS, - DEFAULT_IMPORT_SUGGESTION, - PICKED_UPLOAD_TYPE, -} from "constants/upload"; -import isElectron from "is-electron"; -import { exportMetadataDirectoryName } from "services/export"; -import { EnteFile } from "types/file"; -import { - ElectronFile, - FileWithCollection, - ImportSuggestion, - Metadata, -} from "types/upload"; - -const TYPE_JSON = "json"; -const DEDUPE_COLLECTION = new Set(["icloud library", "icloudlibrary"]); - -export function findMatchingExistingFiles( - existingFiles: EnteFile[], - newFileMetadata: Metadata, -): EnteFile[] { - const matchingFiles: EnteFile[] = []; - for (const existingFile of existingFiles) { - if (areFilesSame(existingFile.metadata, newFileMetadata)) { - matchingFiles.push(existingFile); - } - } - return matchingFiles; -} - -export function shouldDedupeAcrossCollection(collectionName: string): boolean { - // using set to avoid unnecessary regex for removing spaces for each upload - return 
DEDUPE_COLLECTION.has(collectionName.toLocaleLowerCase()); -} - -export function areFilesSame( - existingFile: Metadata, - newFile: Metadata, -): boolean { - if (hasFileHash(existingFile) && hasFileHash(newFile)) { - return areFilesWithFileHashSame(existingFile, newFile); - } else { - /* - * The maximum difference in the creation/modification times of two similar files is set to 1 second. - * This is because while uploading files in the web - browsers and users could have set reduced - * precision of file times to prevent timing attacks and fingerprinting. - * Context: https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision - */ - if ( - existingFile.fileType === newFile.fileType && - Math.abs(existingFile.creationTime - newFile.creationTime) < - A_SEC_IN_MICROSECONDS && - Math.abs(existingFile.modificationTime - newFile.modificationTime) < - A_SEC_IN_MICROSECONDS && - existingFile.title === newFile.title - ) { - return true; - } else { - return false; - } - } -} - -export function hasFileHash(file: Metadata) { - return file.hash || (file.imageHash && file.videoHash); -} - -export function areFilesWithFileHashSame( - existingFile: Metadata, - newFile: Metadata, -): boolean { - if ( - existingFile.fileType !== newFile.fileType || - existingFile.title !== newFile.title - ) { - return false; - } - if (existingFile.fileType === FILE_TYPE.LIVE_PHOTO) { - return ( - existingFile.imageHash === newFile.imageHash && - existingFile.videoHash === newFile.videoHash - ); - } else { - return existingFile.hash === newFile.hash; - } -} - -export function segregateMetadataAndMediaFiles( - filesWithCollectionToUpload: FileWithCollection[], -) { - const metadataJSONFiles: FileWithCollection[] = []; - const mediaFiles: FileWithCollection[] = []; - filesWithCollectionToUpload.forEach((fileWithCollection) => { - const file = fileWithCollection.file; - if (file.name.toLowerCase().endsWith(TYPE_JSON)) { - metadataJSONFiles.push(fileWithCollection); - } 
else { - mediaFiles.push(fileWithCollection); - } - }); - return { mediaFiles, metadataJSONFiles }; -} - -export function areFileWithCollectionsSame( - firstFile: FileWithCollection, - secondFile: FileWithCollection, -): boolean { - return firstFile.localID === secondFile.localID; -} - -export function getImportSuggestion( - uploadType: PICKED_UPLOAD_TYPE, - toUploadFiles: File[] | ElectronFile[], -): ImportSuggestion { - if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) { - return DEFAULT_IMPORT_SUGGESTION; - } - - const paths: string[] = toUploadFiles.map((file) => file["path"]); - const getCharCount = (str: string) => (str.match(/\//g) ?? []).length; - paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2)); - const firstPath = paths[0]; - const lastPath = paths[paths.length - 1]; - - const L = firstPath.length; - let i = 0; - const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/")); - const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/")); - - while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++; - let commonPathPrefix = firstPath.substring(0, i); - - if (commonPathPrefix) { - commonPathPrefix = commonPathPrefix.substring( - 0, - commonPathPrefix.lastIndexOf("/"), - ); - if (commonPathPrefix) { - commonPathPrefix = commonPathPrefix.substring( - commonPathPrefix.lastIndexOf("/") + 1, - ); - } - } - return { - rootFolderName: commonPathPrefix || null, - hasNestedFolders: firstFileFolder !== lastFileFolder, - hasRootLevelFileWithFolder: firstFileFolder === "", - }; -} - -// This function groups files that are that have the same parent folder into collections -// For Example, for user files have a directory structure like this -// a -// / | \ -// b j c -// /|\ / \ -// e f g h i -// -// The files will grouped into 3 collections. 
-// [a => [j], -// b => [e,f,g], -// c => [h, i]] -export function groupFilesBasedOnParentFolder( - toUploadFiles: File[] | ElectronFile[], -) { - const collectionNameToFilesMap = new Map(); - for (const file of toUploadFiles) { - const filePath = file["path"] as string; - - let folderPath = filePath.substring(0, filePath.lastIndexOf("/")); - // If the parent folder of a file is "metadata" - // we consider it to be part of the parent folder - // For Eg,For FileList -> [a/x.png, a/metadata/x.png.json] - // they will both we grouped into the collection "a" - // This is cluster the metadata json files in the same collection as the file it is for - if (folderPath.endsWith(exportMetadataDirectoryName)) { - folderPath = folderPath.substring(0, folderPath.lastIndexOf("/")); - } - const folderName = folderPath.substring( - folderPath.lastIndexOf("/") + 1, - ); - if (!folderName?.length) { - throw Error("folderName can't be null"); - } - if (!collectionNameToFilesMap.has(folderName)) { - collectionNameToFilesMap.set(folderName, []); - } - collectionNameToFilesMap.get(folderName).push(file); - } - return collectionNameToFilesMap; -} - -export function filterOutSystemFiles(files: File[] | ElectronFile[]) { - if (files[0] instanceof File) { - const browserFiles = files as File[]; - return browserFiles.filter((file) => { - return !isSystemFile(file); - }); - } else { - const electronFiles = files as ElectronFile[]; - return electronFiles.filter((file) => { - return !isSystemFile(file); - }); - } -} - -export function isSystemFile(file: File | ElectronFile) { - return file.name.startsWith("."); -} diff --git a/web/apps/photos/src/utils/upload/uploadRetrier.ts b/web/apps/photos/src/utils/upload/uploadRetrier.ts deleted file mode 100644 index 3d314fd141..0000000000 --- a/web/apps/photos/src/utils/upload/uploadRetrier.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { sleep } from "@ente/shared/utils"; - -const retrySleepTimeInMilliSeconds = [2000, 5000, 10000]; - -export async function 
retryHTTPCall( - func: () => Promise, - checkForBreakingError?: (error) => void, -): Promise { - const retrier = async ( - func: () => Promise, - attemptNumber: number = 0, - ) => { - try { - const resp = await func(); - return resp; - } catch (e) { - if (checkForBreakingError) { - checkForBreakingError(e); - } - if (attemptNumber < retrySleepTimeInMilliSeconds.length) { - await sleep(retrySleepTimeInMilliSeconds[attemptNumber]); - return await retrier(func, attemptNumber + 1); - } else { - throw e; - } - } - }; - return await retrier(func); -} diff --git a/web/apps/photos/src/utils/user/index.ts b/web/apps/photos/src/utils/user/index.ts index 17551014d0..68ffc9bbd7 100644 --- a/web/apps/photos/src/utils/user/index.ts +++ b/web/apps/photos/src/utils/user/index.ts @@ -1,4 +1,5 @@ import { getData, LS_KEYS } from "@ente/shared/storage/localStorage"; +import type { User } from "@ente/shared/user/types"; import { UserDetails } from "types/user"; export function getLocalUserDetails(): UserDetails { @@ -9,7 +10,12 @@ export const isInternalUser = () => { const userEmail = getData(LS_KEYS.USER)?.email; if (!userEmail) return false; - return ( - userEmail.endsWith("@ente.io") || userEmail === "kr.anand619@gmail.com" - ); + return userEmail.endsWith("@ente.io"); +}; + +export const isInternalUserForML = () => { + const userId = (getData(LS_KEYS.USER) as User)?.id; + if (userId == 1) return true; + + return isInternalUser(); }; diff --git a/web/apps/photos/src/worker/convert.worker.ts b/web/apps/photos/src/worker/convert.worker.ts deleted file mode 100644 index d8ab22d3ae..0000000000 --- a/web/apps/photos/src/worker/convert.worker.ts +++ /dev/null @@ -1,24 +0,0 @@ -import * as Comlink from "comlink"; -import HeicConvert from "heic-convert"; -import { getUint8ArrayView } from "services/readerService"; - -export class DedicatedConvertWorker { - async convertHEICToJPEG(fileBlob: Blob) { - return convertHEICToJPEG(fileBlob); - } -} - -Comlink.expose(DedicatedConvertWorker, 
self); - -/** - * Convert a HEIC file to a JPEG file. - * - * Both the input and output are blobs. - */ -export const convertHEICToJPEG = async (heicBlob: Blob): Promise => { - const filedata = await getUint8ArrayView(heicBlob); - const result = await HeicConvert({ buffer: filedata, format: "JPEG" }); - const convertedFileData = new Uint8Array(result); - const convertedFileBlob = new Blob([convertedFileData]); - return convertedFileBlob; -}; diff --git a/web/apps/photos/src/worker/ffmpeg.worker.ts b/web/apps/photos/src/worker/ffmpeg.worker.ts index d3f503abb9..946a2090f0 100644 --- a/web/apps/photos/src/worker/ffmpeg.worker.ts +++ b/web/apps/photos/src/worker/ffmpeg.worker.ts @@ -1,15 +1,117 @@ -import * as Comlink from "comlink"; -import { WasmFFmpeg } from "services/wasm/ffmpeg"; +import log from "@/next/log"; +import { withTimeout } from "@ente/shared/utils"; +import QueueProcessor from "@ente/shared/utils/queueProcessor"; +import { expose } from "comlink"; +import { + ffmpegPathPlaceholder, + inputPathPlaceholder, + outputPathPlaceholder, +} from "constants/ffmpeg"; +import { FFmpeg, createFFmpeg } from "ffmpeg-wasm"; export class DedicatedFFmpegWorker { - wasmFFmpeg: WasmFFmpeg; + private ffmpeg: FFmpeg; + private ffmpegTaskQueue = new QueueProcessor(); + constructor() { - this.wasmFFmpeg = new WasmFFmpeg(); + this.ffmpeg = createFFmpeg({ + corePath: "/js/ffmpeg/ffmpeg-core.js", + mt: false, + }); } - run(cmd, inputFile, outputFileName, dontTimeout) { - return this.wasmFFmpeg.run(cmd, inputFile, outputFileName, dontTimeout); + /** + * Execute a FFmpeg {@link command} on {@link blob}. + * + * This is a sibling of {@link ffmpegExec} exposed by the desktop app in + * `ipc.ts`. See [Note: FFmpeg in Electron]. 
+ */ + async exec( + command: string[], + blob: Blob, + outputFileExtension: string, + timeoutMs, + ): Promise { + if (!this.ffmpeg.isLoaded()) await this.ffmpeg.load(); + + const go = () => + ffmpegExec(this.ffmpeg, command, outputFileExtension, blob); + + const request = this.ffmpegTaskQueue.queueUpRequest(() => + timeoutMs ? withTimeout(go(), timeoutMs) : go(), + ); + + return await request.promise; } } -Comlink.expose(DedicatedFFmpegWorker, self); +expose(DedicatedFFmpegWorker, self); + +const ffmpegExec = async ( + ffmpeg: FFmpeg, + command: string[], + outputFileExtension: string, + blob: Blob, +) => { + const inputPath = randomPrefix(); + const outputSuffix = outputFileExtension ? "." + outputFileExtension : ""; + const outputPath = randomPrefix() + outputSuffix; + + const cmd = substitutePlaceholders(command, inputPath, outputPath); + + const inputData = new Uint8Array(await blob.arrayBuffer()); + + try { + const startTime = Date.now(); + + ffmpeg.FS("writeFile", inputPath, inputData); + await ffmpeg.run(...cmd); + + const result = ffmpeg.FS("readFile", outputPath); + + const ms = Math.round(Date.now() - startTime); + log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`); + return result; + } finally { + try { + ffmpeg.FS("unlink", inputPath); + } catch (e) { + log.error(`Failed to remove input ${inputPath}`, e); + } + try { + ffmpeg.FS("unlink", outputPath); + } catch (e) { + log.error(`Failed to remove output ${outputPath}`, e); + } + } +}; + +/** Generate a random string suitable for being used as a file name prefix */ +const randomPrefix = () => { + const alphabet = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + + let result = ""; + for (let i = 0; i < 10; i++) + result += alphabet[Math.floor(Math.random() * alphabet.length)]; + return result; +}; + +const substitutePlaceholders = ( + command: string[], + inputFilePath: string, + outputFilePath: string, +) => + command + .map((segment) => { + if (segment == 
ffmpegPathPlaceholder) { + return undefined; + } else if (segment == inputPathPlaceholder) { + return inputFilePath; + } else if (segment == outputPathPlaceholder) { + return outputFilePath; + } else { + return segment; + } + }) + .filter((c) => !!c); diff --git a/web/apps/photos/src/worker/heic-convert.worker.ts b/web/apps/photos/src/worker/heic-convert.worker.ts new file mode 100644 index 0000000000..96a1a94684 --- /dev/null +++ b/web/apps/photos/src/worker/heic-convert.worker.ts @@ -0,0 +1,22 @@ +import { expose } from "comlink"; +import HeicConvert from "heic-convert"; + +export class DedicatedHEICConvertWorker { + async heicToJPEG(heicBlob: Blob) { + return heicToJPEG(heicBlob); + } +} + +expose(DedicatedHEICConvertWorker, self); + +/** + * Convert a HEIC file to a JPEG file. + * + * Both the input and output are blobs. + */ +export const heicToJPEG = async (heicBlob: Blob): Promise => { + const buffer = new Uint8Array(await heicBlob.arrayBuffer()); + const result = await HeicConvert({ buffer, format: "JPEG" }); + const convertedData = new Uint8Array(result); + return new Blob([convertedData]); +}; diff --git a/web/apps/photos/tests/upload.test.ts b/web/apps/photos/tests/upload.test.ts index 6e58cf0c2d..c4d76d5240 100644 --- a/web/apps/photos/tests/upload.test.ts +++ b/web/apps/photos/tests/upload.test.ts @@ -1,13 +1,13 @@ -import { tryToParseDateTime } from "@ente/shared/time"; -import { FILE_TYPE } from "constants/file"; +import { FILE_TYPE } from "@/media/file-type"; import { getLocalCollections } from "services/collectionService"; import { getLocalFiles } from "services/fileService"; +import { tryToParseDateTime } from "services/upload/date"; import { MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT, getClippedMetadataJSONMapKeyForFile, getMetadataJSONMapKeyForFile, getMetadataJSONMapKeyForJSON, -} from "services/upload/metadataService"; +} from "services/upload/takeout"; import { getUserDetailsV2 } from "services/userService"; import { groupFilesBasedOnCollectionID } 
from "utils/file"; diff --git a/web/apps/photos/tests/zip-file-reading.test.ts b/web/apps/photos/tests/zip-file-reading.test.ts deleted file mode 100644 index 6ac20bfeed..0000000000 --- a/web/apps/photos/tests/zip-file-reading.test.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { getFileNameSize } from "@/next/file"; -import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload"; -import { getElectronFileStream, getFileStream } from "services/readerService"; -import { DataStream } from "types/upload"; -import { getImportSuggestion } from "utils/upload"; - -// This was for used to verify that converting from the browser readable stream -// to the node readable stream correctly handles files that align on the 4 MB -// data boundary. This expects a zip file containing random files of various -// sizes starting from 1M to 20M. -export const testZipFileReading = async () => { - try { - const electron = globalThis.electron; - if (!electron) { - console.log("testZipFileReading Check is for desktop only"); - return; - } - if (!process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH) { - throw Error( - "upload test failed NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH missing", - ); - } - const files = await electron.getElectronFilesFromGoogleZip( - process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH, - ); - if (!files?.length) { - throw Error( - `testZipFileReading Check failed ❌ - No files selected`, - ); - } - console.log("test zip file reading check started"); - let i = 0; - for (const file of files) { - i++; - let filedata: DataStream; - if (file instanceof File) { - filedata = getFileStream(file, FILE_READER_CHUNK_SIZE); - } else { - filedata = await getElectronFileStream( - file, - FILE_READER_CHUNK_SIZE, - ); - } - const streamReader = filedata.stream.getReader(); - for (let i = 0; i < filedata.chunkCount; i++) { - const { done } = await streamReader.read(); - if (done) { - throw Error( - `testZipFileReading Check failed ❌ - ${getFileNameSize( - file, - )} less than 
expected chunks, expected: ${ - filedata.chunkCount - }, got ${i - 1}`, - ); - } - } - const { done } = await streamReader.read(); - - if (!done) { - throw Error( - `testZipFileReading Check failed ❌ - ${getFileNameSize( - file, - )} more than expected chunks, expected: ${ - filedata.chunkCount - }`, - ); - } - console.log(`${i}/${files.length} passed ✅`); - } - console.log("test zip file reading check passed ✅"); - } catch (e) { - console.log(e); - } -}; - -// This was used when fixing a bug around handling a zip file that has a photo -// at the root. -export const testZipWithRootFileReadingTest = async () => { - try { - const electron = globalThis.electron; - if (!electron) { - console.log("testZipFileReading Check is for desktop only"); - return; - } - if (!process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH) { - throw Error( - "upload test failed NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH missing", - ); - } - const files = await electron.getElectronFilesFromGoogleZip( - process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH, - ); - - const importSuggestion = getImportSuggestion( - PICKED_UPLOAD_TYPE.ZIPS, - files, - ); - if (!importSuggestion.rootFolderName) { - throw Error( - `testZipWithRootFileReadingTest Check failed ❌ - rootFolderName is missing`, - ); - } - console.log("testZipWithRootFileReadingTest passed ✅"); - } catch (e) { - console.log(e); - } -}; diff --git a/web/apps/staff/src/App.tsx b/web/apps/staff/src/App.tsx index f8984fecbd..01d79b18cc 100644 --- a/web/apps/staff/src/App.tsx +++ b/web/apps/staff/src/App.tsx @@ -9,7 +9,7 @@ export const App: React.FC = () => { .then((userDetails) => { console.log("Fetched user details", userDetails); }) - .catch((e) => { + .catch((e: unknown) => { console.error("Failed to fetch user details", e); }); }; diff --git a/web/docs/dependencies.md b/web/docs/dependencies.md index d0660bb3e8..83c4c16c84 100644 --- a/web/docs/dependencies.md +++ b/web/docs/dependencies.md @@ -110,7 +110,7 @@ with Next.js. 
For more details, see [translations.md](translations.md). -## Meta Frameworks +## Meta frameworks ### Next.js @@ -131,7 +131,17 @@ It is more lower level than Next, but the bells and whistles it doesn't have are the bells and whistles (and the accompanying complexity) that we don't need in some cases. -## Photos +## Media + +- ["jszip"](https://github.com/Stuk/jszip) is used for reading zip files in + JavaScript (Live photos are zip files under the hood). + +- ["file-type"](https://github.com/sindresorhus/file-type) is used for MIME + type detection. We are at an old version 16.5.4 because v17 onwards the + package became ESM only - for our limited use case, the custom Webpack + configuration that entails is not worth the upgrade. + +## Photos app specific ### Misc diff --git a/web/docs/storage.md b/web/docs/storage.md index d01654b234..9f19a6a46d 100644 --- a/web/docs/storage.md +++ b/web/docs/storage.md @@ -34,6 +34,6 @@ meant for larger, tabular data. OPFS is used for caching entire files when we're running under Electron (the Web Cache API is used in the browser). -As it name suggests, it is an entire filesystem, private for us ("origin"). In +As its name suggests, it is an entire file system, private for us ("origin"). It is not undbounded though, and the storage is not guaranteed to be persistent (at least with the APIs we use), hence the cache designation. diff --git a/web/package.json b/web/package.json index 2d5919eb1a..647ee3ba3a 100644 --- a/web/package.json +++ b/web/package.json @@ -27,8 +27,8 @@ "dev:payments": "yarn workspace payments dev", "dev:photos": "yarn workspace photos next dev", "dev:staff": "yarn workspace staff dev", - "lint": "yarn prettier --check . && yarn workspaces run eslint --report-unused-disable-directives .", - "lint-fix": "yarn prettier --write . && yarn workspaces run eslint --fix .", + "lint": "yarn prettier --check --log-level warn . 
&& yarn workspaces run eslint --report-unused-disable-directives .", + "lint-fix": "yarn prettier --write --log-level warn . && yarn workspaces run eslint --fix .", "preview": "yarn preview:photos", "preview:accounts": "yarn build:accounts && python3 -m http.server -d apps/accounts/out 3001", "preview:auth": "yarn build:auth && python3 -m http.server -d apps/auth/out 3000", diff --git a/web/packages/accounts/components/ChangeEmail.tsx b/web/packages/accounts/components/ChangeEmail.tsx index 3f47be8a11..ec647e6712 100644 --- a/web/packages/accounts/components/ChangeEmail.tsx +++ b/web/packages/accounts/components/ChangeEmail.tsx @@ -6,7 +6,7 @@ import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer"; import LinkButton from "@ente/shared/components/LinkButton"; import SubmitButton from "@ente/shared/components/SubmitButton"; import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage"; -import { sleep } from "@ente/shared/utils"; +import { wait } from "@ente/shared/utils"; import { Alert, Box, TextField } from "@mui/material"; import { Formik, FormikHelpers } from "formik"; import { t } from "i18next"; @@ -59,7 +59,7 @@ function ChangeEmailForm({ appName }: PageProps) { setData(LS_KEYS.USER, { ...getData(LS_KEYS.USER), email }); setLoading(false); setSuccess(true); - await sleep(1000); + await wait(1000); goToApp(); } catch (e) { setLoading(false); diff --git a/web/packages/accounts/components/two-factor/VerifyForm.tsx b/web/packages/accounts/components/two-factor/VerifyForm.tsx index 810a6c010f..b7f7fc2781 100644 --- a/web/packages/accounts/components/two-factor/VerifyForm.tsx +++ b/web/packages/accounts/components/two-factor/VerifyForm.tsx @@ -9,7 +9,7 @@ import { VerticallyCentered, } from "@ente/shared/components/Container"; import SubmitButton from "@ente/shared/components/SubmitButton"; -import { sleep } from "@ente/shared/utils"; +import { wait } from "@ente/shared/utils"; import { Box, Typography } from "@mui/material"; 
interface formValues { @@ -33,7 +33,7 @@ export default function VerifyTwoFactor(props: Props) { const markSuccessful = async () => { setWaiting(false); setSuccess(true); - await sleep(1000); + await wait(1000); }; const submitForm = async ( diff --git a/web/packages/accounts/services/user.ts b/web/packages/accounts/services/user.ts index fb0e1c9290..8f6d6609a1 100644 --- a/web/packages/accounts/services/user.ts +++ b/web/packages/accounts/services/user.ts @@ -40,10 +40,18 @@ export const logoutUser = async () => { } catch (e) { log.error("Ignoring error when clearing files", e); } - try { - globalThis.electron?.clearStores(); - } catch (e) { - log.error("Ignoring error when clearing electron stores", e); + const electron = globalThis.electron; + if (electron) { + try { + await electron.watch.reset(); + } catch (e) { + log.error("Ignoring error when resetting native folder watches", e); + } + try { + await electron.clearStores(); + } catch (e) { + log.error("Ignoring error when clearing native stores", e); + } } try { eventBus.emit(Events.LOGOUT); diff --git a/web/packages/build-config/eslintrc-base.js b/web/packages/build-config/eslintrc-base.js index b302be36d4..3e65638c1b 100644 --- a/web/packages/build-config/eslintrc-base.js +++ b/web/packages/build-config/eslintrc-base.js @@ -10,4 +10,20 @@ module.exports = { parserOptions: { project: true }, parser: "@typescript-eslint/parser", ignorePatterns: [".eslintrc.js"], + rules: { + /* Allow numbers to be used in template literals */ + "@typescript-eslint/restrict-template-expressions": [ + "error", + { + allowNumber: true, + }, + ], + /* Allow void expressions as the entire body of an arrow function */ + "@typescript-eslint/no-confusing-void-expression": [ + "error", + { + ignoreArrowShorthand: true, + }, + ], + }, }; diff --git a/web/packages/media/.eslintrc.js b/web/packages/media/.eslintrc.js new file mode 100644 index 0000000000..348075cd4f --- /dev/null +++ b/web/packages/media/.eslintrc.js @@ -0,0 +1,3 @@ 
+module.exports = { + extends: ["@/build-config/eslintrc-next"], +}; diff --git a/web/packages/media/README.md b/web/packages/media/README.md new file mode 100644 index 0000000000..70d6424f29 --- /dev/null +++ b/web/packages/media/README.md @@ -0,0 +1,11 @@ +## @/media + +A package for sharing code between our apps that show media (photos, videos). + +Specifically, this is the intersection of code required by both the photos and +cast apps. + +### Packaging + +This (internal) package exports a React TypeScript library. We rely on the +importing project to transpile and bundle it. diff --git a/web/packages/media/file-type.ts b/web/packages/media/file-type.ts new file mode 100644 index 0000000000..b180918cda --- /dev/null +++ b/web/packages/media/file-type.ts @@ -0,0 +1,63 @@ +export enum FILE_TYPE { + IMAGE, + VIDEO, + LIVE_PHOTO, + OTHERS, +} + +export interface FileTypeInfo { + fileType: FILE_TYPE; + /** + * A lowercased, standardized extension for files of the current type. + * + * TODO(MR): This in not valid for LIVE_PHOTO. + */ + extension: string; + mimeType?: string; + imageType?: string; + videoType?: string; +} + +// list of format that were missed by type-detection for some files. 
+export const KnownFileTypeInfos: FileTypeInfo[] = [ + { fileType: FILE_TYPE.IMAGE, extension: "jpeg", mimeType: "image/jpeg" }, + { fileType: FILE_TYPE.IMAGE, extension: "jpg", mimeType: "image/jpeg" }, + { fileType: FILE_TYPE.VIDEO, extension: "webm", mimeType: "video/webm" }, + { fileType: FILE_TYPE.VIDEO, extension: "mod", mimeType: "video/mpeg" }, + { fileType: FILE_TYPE.VIDEO, extension: "mp4", mimeType: "video/mp4" }, + { fileType: FILE_TYPE.IMAGE, extension: "gif", mimeType: "image/gif" }, + { fileType: FILE_TYPE.VIDEO, extension: "dv", mimeType: "video/x-dv" }, + { + fileType: FILE_TYPE.VIDEO, + extension: "wmv", + mimeType: "video/x-ms-asf", + }, + { + fileType: FILE_TYPE.VIDEO, + extension: "hevc", + mimeType: "video/hevc", + }, + { + fileType: FILE_TYPE.IMAGE, + extension: "raf", + mimeType: "image/x-fuji-raf", + }, + { + fileType: FILE_TYPE.IMAGE, + extension: "orf", + mimeType: "image/x-olympus-orf", + }, + + { + fileType: FILE_TYPE.IMAGE, + extension: "crw", + mimeType: "image/x-canon-crw", + }, + { + fileType: FILE_TYPE.VIDEO, + extension: "mov", + mimeType: "video/quicktime", + }, +]; + +export const KnownNonMediaFileExtensions = ["xmp", "html", "txt"]; diff --git a/web/packages/media/file.ts b/web/packages/media/file.ts new file mode 100644 index 0000000000..c840500498 --- /dev/null +++ b/web/packages/media/file.ts @@ -0,0 +1,4 @@ +import type { Metadata } from "./types/file"; + +export const hasFileHash = (file: Metadata) => + !!file.hash || (!!file.imageHash && !!file.videoHash); diff --git a/web/packages/media/formats.ts b/web/packages/media/formats.ts new file mode 100644 index 0000000000..24d2c7c877 --- /dev/null +++ b/web/packages/media/formats.ts @@ -0,0 +1,26 @@ +/** + * Image file extensions that we know the browser is unlikely to have native + * support for. 
+ */ +const nonWebImageFileExtensions = [ + "heic", + "rw2", + "tiff", + "arw", + "cr3", + "cr2", + "raf", + "nef", + "psd", + "dng", + "tif", +]; + +/** + * Return `true` if {@link extension} is from amongst a known set of image file + * extensions that we know that the browser is unlikely to have native support + * for. If we want to display such files in the browser, we'll need to convert + * them to some other format first. + */ +export const isNonWebImageFileExtension = (extension: string) => + nonWebImageFileExtensions.includes(extension.toLowerCase()); diff --git a/web/packages/media/live-photo.ts b/web/packages/media/live-photo.ts new file mode 100644 index 0000000000..35a186a418 --- /dev/null +++ b/web/packages/media/live-photo.ts @@ -0,0 +1,144 @@ +import { + fileNameFromComponents, + lowercaseExtension, + nameAndExtension, +} from "@/next/file"; +import JSZip from "jszip"; +import { FILE_TYPE } from "./file-type"; + +const potentialImageExtensions = [ + "heic", + "heif", + "jpeg", + "jpg", + "png", + "gif", + "bmp", + "tiff", + "webp", +]; + +const potentialVideoExtensions = [ + "mov", + "mp4", + "m4v", + "avi", + "wmv", + "flv", + "mkv", + "webm", + "3gp", + "3g2", + "avi", + "ogv", + "mpg", + "mp", +]; + +/** + * Use the file extension of the given {@link fileName} to deduce if it is + * potentially the image or the video part of a Live Photo. + */ +export const potentialFileTypeFromExtension = ( + fileName: string, +): FILE_TYPE | undefined => { + const ext = lowercaseExtension(fileName); + if (!ext) return undefined; + + if (potentialImageExtensions.includes(ext)) return FILE_TYPE.IMAGE; + else if (potentialVideoExtensions.includes(ext)) return FILE_TYPE.VIDEO; + else return undefined; +}; + +/** + * An in-memory representation of a live photo. 
+ */ +interface LivePhoto { + imageFileName: string; + imageData: Uint8Array; + videoFileName: string; + videoData: Uint8Array; +} + +/** + * Convert a binary serialized representation of a live photo to an in-memory + * {@link LivePhoto}. + * + * A live photo is a zip file containing two files - an image and a video. This + * function reads that zip file (blob), and returns separate bytes (and + * filenames) for the image and video parts. + * + * @param fileName The name of the overall live photo. Both the image and video + * parts of the decompressed live photo use this as their name, combined with + * their original extensions. + * + * @param zipBlob A blob containing the zipped data (i.e. the binary serialized + * live photo). + */ +export const decodeLivePhoto = async ( + fileName: string, + zipBlob: Blob, +): Promise<LivePhoto> => { + let imageFileName, videoFileName: string | undefined; + let imageData, videoData: Uint8Array | undefined; + + const [name] = nameAndExtension(fileName); + const zip = await JSZip.loadAsync(zipBlob, { createFolders: true }); + + for (const zipFileName in zip.files) { + if (zipFileName.startsWith("image")) { + const [, imageExt] = nameAndExtension(zipFileName); + imageFileName = fileNameFromComponents([name, imageExt]); + imageData = await zip.files[zipFileName]?.async("uint8array"); + } else if (zipFileName.startsWith("video")) { + const [, videoExt] = nameAndExtension(zipFileName); + videoFileName = fileNameFromComponents([name, videoExt]); + videoData = await zip.files[zipFileName]?.async("uint8array"); + } + } + + if (!imageFileName || !imageData) + throw new Error( + `Decoded live photo ${fileName} does not have an image`, + ); + + if (!videoFileName || !videoData) + throw new Error( + `Decoded live photo ${fileName} does not have a video`, + ); + + return { imageFileName, imageData, videoFileName, videoData }; +}; + +/** Variant of {@link LivePhoto}, but one that allows files and data. 
*/ +interface EncodeLivePhotoInput { + imageFileName: string; + imageFileOrData: File | Uint8Array; + videoFileName: string; + videoFileOrData: File | Uint8Array; +} + +/** + * Return a binary serialized representation of a live photo. + * + * This function takes the (in-memory) image and video data from the + * {@link livePhoto} object, writes them to a zip file (using the respective + * filenames), and returns the {@link Uint8Array} that represent the bytes of + * this zip file. + * + * @param livePhoto The in-mem photo to serialized. + */ +export const encodeLivePhoto = async ({ + imageFileName, + imageFileOrData, + videoFileName, + videoFileOrData, +}: EncodeLivePhotoInput) => { + const [, imageExt] = nameAndExtension(imageFileName); + const [, videoExt] = nameAndExtension(videoFileName); + + const zip = new JSZip(); + zip.file(fileNameFromComponents(["image", imageExt]), imageFileOrData); + zip.file(fileNameFromComponents(["video", videoExt]), videoFileOrData); + return await zip.generateAsync({ type: "uint8array" }); +}; diff --git a/web/packages/media/package.json b/web/packages/media/package.json new file mode 100644 index 0000000000..8be7e8bb6c --- /dev/null +++ b/web/packages/media/package.json @@ -0,0 +1,10 @@ +{ + "name": "@/media", + "version": "0.0.0", + "private": true, + "dependencies": { + "@/next": "*", + "file-type": "16.5.4", + "jszip": "^3.10" + } +} diff --git a/web/packages/media/tsconfig.json b/web/packages/media/tsconfig.json new file mode 100644 index 0000000000..f29c348113 --- /dev/null +++ b/web/packages/media/tsconfig.json @@ -0,0 +1,5 @@ +{ + "extends": "@/build-config/tsconfig-typecheck.json", + /* Typecheck all files with the given extensions (here or in subfolders) */ + "include": ["**/*.ts", "**/*.tsx"] +} diff --git a/web/packages/media/types/file.ts b/web/packages/media/types/file.ts new file mode 100644 index 0000000000..b6314b7cdd --- /dev/null +++ b/web/packages/media/types/file.ts @@ -0,0 +1,73 @@ +import type { FILE_TYPE } 
from "../file-type"; + +/** + * Information about the file that never changes post upload. + * + * [Note: Metadatum] + * + * There are three different sources of metadata relating to a file. + * + * 1. Metadata + * 2. Magic Metadata + * 3. Public Magic Metadata + * + * The names of API entities are such for historical reasons, but we can think + * of them as: + * + * 1. Metadata + * 2. Private Mutable Metadata + * 3. Shared Mutable Metadata + * + * Metadata is the original metadata that we attached to the file when it was + * uploaded. It is immutable, and it never changes. + * + * Later on, the user might make changes to the file's metadata. Since the + * metadata is immutable, we need a place to keep these mutations. + * + * Some mutations are "private" to the user who owns the file. For example, the + * user might archive the file. Such modifications get written to (2), Private + * Mutable Metadata. + * + * Other mutations are "public" across all the users with whom the file is + * shared. For example, if the user (owner) edits the name of the file, all + * people with whom this file is shared can see the new edited name. Such + * modifications get written to (3), Shared Mutable Metadata. + * + * When the client needs to show a file, it needs to "merge" in 2 or 3 of these + * sources. + * + * - When showing a shared file, (1) and (3) are merged, with changes from (3) + * taking precedence, to obtain the full metadata pertinent to the file. + * - When showing a normal (un-shared) file, (1), (2) and (3) are merged, with + * changes from (2) and (3) taking precedence, to obtain the full metadata. + * (2) and (3) have no intersection of keys, so they can be merged in any + * order. + * + * While these sources can be conceptually merged, it is important for the + * client to also retain the original sources unchanged. 
This is because the + * metadatas (any of the three) might have keys that the current client does not + * yet understand, so when updating some key, say filename in (3), it should + * only edit the key it knows about but retain the rest of the source JSON + * unchanged. + */ +export interface Metadata { + /** + * The file name. + * + * See: [Note: File name for local EnteFile objects] + */ + title: string; + creationTime: number; + modificationTime: number; + latitude: number; + longitude: number; + /** The "Ente" file type. */ + fileType: FILE_TYPE; + hasStaticThumbnail?: boolean; + hash?: string; + imageHash?: string; + videoHash?: string; + localID?: number; + version?: number; + deviceFolder?: string; +} diff --git a/web/packages/next/blob-cache.ts b/web/packages/next/blob-cache.ts index 8789a50786..e6c3734df2 100644 --- a/web/packages/next/blob-cache.ts +++ b/web/packages/next/blob-cache.ts @@ -50,8 +50,6 @@ export type BlobCacheNamespace = (typeof blobCacheNames)[number]; * ([the WebKit bug](https://bugs.webkit.org/show_bug.cgi?id=231706)), so it's * not trivial to use this as a full on replacement of the Web Cache in the * browser. So for now we go with this split implementation. - * - * See also: [Note: Increased disk cache for the desktop app]. */ export interface BlobCache { /** @@ -113,6 +111,10 @@ export const openCache = async ( * * await blob.arrayBuffer() * + * To convert from a Blob to Uint8Array, chain the two steps + * + * new Uint8Array(await blob.arrayBuffer()) + * * To convert from an ArrayBuffer or Uint8Array to Blob * * new Blob([arrayBuffer, andOrAnyArray, andOrstring]) diff --git a/web/packages/next/file.ts b/web/packages/next/file.ts index b69fece505..bd2c043930 100644 --- a/web/packages/next/file.ts +++ b/web/packages/next/file.ts @@ -1,32 +1,82 @@ -import type { ElectronFile } from "./types/file"; +/** + * The two parts of a file name - the name itself, and an (optional) extension. + * + * The extension does not include the dot. 
+ */ +type FileNameComponents = [name: string, extension: string | undefined]; /** * Split a filename into its components - the name itself, and the extension (if * any) - returning both. The dot is not included in either. * * For example, `foo-bar.png` will be split into ["foo-bar", "png"]. + * + * See {@link fileNameFromComponents} for the inverse operation. */ -export const nameAndExtension = ( - fileName: string, -): [string, string | undefined] => { +export const nameAndExtension = (fileName: string): FileNameComponents => { const i = fileName.lastIndexOf("."); + // No extension if (i == -1) return [fileName, undefined]; - else return [fileName.slice(0, i), fileName.slice(i + 1)]; + // A hidden file without an extension, e.g. ".gitignore" + if (i == 0) return [fileName, undefined]; + // Both components present, just omit the dot. + return [fileName.slice(0, i), fileName.slice(i + 1)]; }; -export function getFileNameSize(file: File | ElectronFile) { - return `${file.name}_${convertBytesToHumanReadable(file.size)}`; -} +/** + * If the file name or path has an extension, return a lowercased version of it. + * + * This is handy when comparing the extension to a known set without worrying + * about case sensitivity. + * + * See {@link nameAndExtension} for its more generic sibling. + */ +export const lowercaseExtension = ( + fileNameOrPath: string, +): string | undefined => { + // We rely on the implementation of nameAndExtension using lastIndexOf to + // allow us to also work on paths. + const [, ext] = nameAndExtension(fileNameOrPath); + return ext?.toLowerCase(); +}; -export function convertBytesToHumanReadable( - bytes: number, - precision = 2, -): string { - if (bytes === 0 || isNaN(bytes)) { - return "0 MB"; +/** + * Construct a file name from its components (name and extension). + * + * Inverse of {@link nameAndExtension}. 
+ */ +export const fileNameFromComponents = (components: FileNameComponents) => + components.filter((x) => !!x).join("."); + +/** + * Return the file name portion from the given {@link path}. + * + * This tries to emulate the UNIX `basename` command. In particular, any + * trailing slashes on the path are trimmed, so this function can be used to get + * the name of the directory too. + * + * The path is assumed to use POSIX separators ("/"). + */ +export const basename = (path: string) => { + const pathComponents = path.split("/"); + for (let i = pathComponents.length - 1; i >= 0; i--) + if (pathComponents[i] !== "") return pathComponents[i]; + return path; +}; + +/** + * Return the directory portion from the given {@link path}. + * + * This tries to emulate the UNIX `dirname` command. In particular, any trailing + * slashes on the path are trimmed, so this function can be used to get the path + * leading up to a directory too. + * + * The path is assumed to use POSIX separators ("/"). + */ +export const dirname = (path: string) => { + const pathComponents = path.split("/"); + while (pathComponents.pop() == "") { + /* no-op */ } - - const i = Math.floor(Math.log(bytes) / Math.log(1024)); - const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; - return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i]; -} + return pathComponents.join("/"); +}; diff --git a/web/packages/next/i18n.ts b/web/packages/next/i18n.ts index 913ecf746e..cdc60e27ca 100644 --- a/web/packages/next/i18n.ts +++ b/web/packages/next/i18n.ts @@ -22,6 +22,7 @@ import { object, string } from "yup"; export const supportedLocales = [ "en-US" /* English */, "fr-FR" /* French */, + "de-DE" /* German */, "zh-CN" /* Simplified Chinese */, "nl-NL" /* Dutch */, "es-ES" /* Spanish */, @@ -209,6 +210,8 @@ const closestSupportedLocale = ( return "en-US"; } else if (ls.startsWith("fr")) { return "fr-FR"; + } else if (ls.startsWith("de")) { + return "de-DE"; } else if 
(ls.startsWith("zh")) { return "zh-CN"; } else if (ls.startsWith("nl")) { diff --git a/web/packages/next/locales/bg-BG/translation.json b/web/packages/next/locales/bg-BG/translation.json index 1661e8fac0..6960de7945 100644 --- a/web/packages/next/locales/bg-BG/translation.json +++ b/web/packages/next/locales/bg-BG/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/de-DE/translation.json b/web/packages/next/locales/de-DE/translation.json index 38b877fd43..f2151c8c83 100644 --- a/web/packages/next/locales/de-DE/translation.json +++ b/web/packages/next/locales/de-DE/translation.json @@ -340,11 +340,11 @@ "UPDATE_CREATION_TIME_COMPLETED": "Alle Dateien erfolgreich aktualisiert", "UPDATE_CREATION_TIME_COMPLETED_WITH_ERROR": "Aktualisierung der Dateizeit für einige Dateien fehlgeschlagen, bitte versuche es erneut", "CAPTION_CHARACTER_LIMIT": "Maximal 5000 Zeichen", - "DATE_TIME_ORIGINAL": "", - "DATE_TIME_DIGITIZED": "", - "METADATA_DATE": "", + "DATE_TIME_ORIGINAL": "EXIF:DateTimeOriginal", + "DATE_TIME_DIGITIZED": "EXIF:DateTimeDigitized", + "METADATA_DATE": "EXIF:MetadataDate", "CUSTOM_TIME": "Benutzerdefinierte Zeit", - "REOPEN_PLAN_SELECTOR_MODAL": "", + 
"REOPEN_PLAN_SELECTOR_MODAL": "Aboauswahl erneut öffnen", "OPEN_PLAN_SELECTOR_MODAL_FAILED": "Fehler beim Öffnen der Pläne", "INSTALL": "Installieren", "SHARING_DETAILS": "Details teilen", @@ -374,7 +374,7 @@ "ADD_MORE": "Mehr hinzufügen", "VIEWERS": "Zuschauer", "OR_ADD_EXISTING": "Oder eine Vorherige auswählen", - "REMOVE_PARTICIPANT_MESSAGE": "", + "REMOVE_PARTICIPANT_MESSAGE": "

{{selectedEmail}} wird vom Album entfernt

Alle Bilder von {{selectedEmail}} werden ebenfalls aus dem Album entfernt

", "NOT_FOUND": "404 - Nicht gefunden", "LINK_EXPIRED": "Link ist abgelaufen", "LINK_EXPIRED_MESSAGE": "Dieser Link ist abgelaufen oder wurde deaktiviert!", @@ -388,9 +388,9 @@ "LINK_EXPIRY": "Ablaufdatum des Links", "NEVER": "Niemals", "DISABLE_FILE_DOWNLOAD": "Download deaktivieren", - "DISABLE_FILE_DOWNLOAD_MESSAGE": "", + "DISABLE_FILE_DOWNLOAD_MESSAGE": "

Bist du sicher, dass du den Downloadbutton für Dateien deaktivieren möchtest?

Betrachter können weiterhin Screenshots machen oder die Bilder mithilfe externer Werkzeuge speichern

", "SHARED_USING": "Freigegeben über ", - "SHARING_REFERRAL_CODE": "", + "SHARING_REFERRAL_CODE": "Benutze den code {{referralCode}} für 10GB extra", "LIVE": "LIVE", "DISABLE_PASSWORD": "Passwort-Sperre deaktivieren", "DISABLE_PASSWORD_MESSAGE": "Sind Sie sicher, dass Sie die Passwort-Sperre deaktivieren möchten?", @@ -400,12 +400,12 @@ "UPLOAD_FILES": "Datei", "UPLOAD_DIRS": "Ordner", "UPLOAD_GOOGLE_TAKEOUT": "Google Takeout", - "DEDUPLICATE_FILES": "", + "DEDUPLICATE_FILES": "Duplikate bereinigen", "NO_DUPLICATES_FOUND": "Du hast keine Duplikate, die gelöscht werden können", "FILES": "dateien", - "EACH": "", - "DEDUPLICATE_BASED_ON_SIZE": "", - "STOP_ALL_UPLOADS_MESSAGE": "", + "EACH": "pro Datei", + "DEDUPLICATE_BASED_ON_SIZE": "Die folgenden Dateien wurden aufgrund ihrer Größe zusammengefasst. Bitte prüfe und lösche Dateien, die du für duplikate hälst", + "STOP_ALL_UPLOADS_MESSAGE": "Bist du sicher, dass du alle laufenden Uploads abbrechen möchtest?", "STOP_UPLOADS_HEADER": "Hochladen stoppen?", "YES_STOP_UPLOADS": "Ja, Hochladen stoppen", "STOP_DOWNLOADS_HEADER": "Downloads anhalten?", @@ -415,14 +415,13 @@ "albums_other": "{{count, number}} Alben", "ALL_ALBUMS": "Alle Alben", "ALBUMS": "Alben", - "ALL_HIDDEN_ALBUMS": "", - "HIDDEN_ALBUMS": "", - "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", + "ALL_HIDDEN_ALBUMS": "Alle versteckten Alben", + "HIDDEN_ALBUMS": "Versteckte Alben", + "HIDDEN_ITEMS": "Versteckte Dateien", "ENTER_TWO_FACTOR_OTP": "Gib den 6-stelligen Code aus\ndeiner Authentifizierungs-App ein.", "CREATE_ACCOUNT": "Account erstellen", "COPIED": "Kopiert", - "WATCH_FOLDERS": "", + "WATCH_FOLDERS": "Überwachte Ordner", "UPGRADE_NOW": "Jetzt upgraden", "RENEW_NOW": "Jetzt erneuern", "STORAGE": "Speicher", @@ -431,21 +430,21 @@ "FAMILY": "Familie", "FREE": "frei", "OF": "von", - "WATCHED_FOLDERS": "", + "WATCHED_FOLDERS": "Überwachte Ordner", "NO_FOLDERS_ADDED": "Noch keine Ordner hinzugefügt!", - "FOLDERS_AUTOMATICALLY_MONITORED": "", - 
"UPLOAD_NEW_FILES_TO_ENTE": "", + "FOLDERS_AUTOMATICALLY_MONITORED": "Die Ordner, die du hier hinzufügst, werden überwacht, um automatisch", + "UPLOAD_NEW_FILES_TO_ENTE": "Neue Dateien bei Ente zu sichern", "REMOVE_DELETED_FILES_FROM_ENTE": "Gelöschte Dateien aus Ente entfernen", "ADD_FOLDER": "Ordner hinzufügen", - "STOP_WATCHING": "", - "STOP_WATCHING_FOLDER": "", - "STOP_WATCHING_DIALOG_MESSAGE": "", + "STOP_WATCHING": "Nicht mehr überwachen", + "STOP_WATCHING_FOLDER": "Ordner nicht mehr überwachen?", + "STOP_WATCHING_DIALOG_MESSAGE": "Deine bestehenden Dateien werden nicht gelöscht, aber das verknüpfte Ente-Album wird bei Änderungen in diesem Ordner nicht mehr aktualisiert.", "YES_STOP": "Ja, Stopp", - "MONTH_SHORT": "", + "MONTH_SHORT": "M", "YEAR": "Jahr", "FAMILY_PLAN": "Familientarif", "DOWNLOAD_LOGS": "Logs herunterladen", - "DOWNLOAD_LOGS_MESSAGE": "", + "DOWNLOAD_LOGS_MESSAGE": "

Hier kannst du Debug-Logs herunterladen, die du uns zur Fehleranalyse zusenden kannst.

Beachte bitte, dass die Logs Dateinamen enthalten, um Probleme mit bestimmten Dateien nachvollziehen zu können.

", "CHANGE_FOLDER": "Ordner ändern", "TWO_MONTHS_FREE": "Erhalte 2 Monate kostenlos bei Jahresabonnements", "GB": "GB", @@ -453,12 +452,12 @@ "FREE_PLAN_OPTION_LABEL": "Mit kostenloser Testversion fortfahren", "FREE_PLAN_DESCRIPTION": "1 GB für 1 Jahr", "CURRENT_USAGE": "Aktuelle Nutzung ist {{usage}}", - "WEAK_DEVICE": "", - "DRAG_AND_DROP_HINT": "", - "CONFIRM_ACCOUNT_DELETION_MESSAGE": "Ihre hochgeladenen Daten werden zur Löschung vorgemerkt, und Ihr Konto wird endgültig gelöscht.

Dieser Vorgang kann nicht rückgängig gemacht werden.", + "WEAK_DEVICE": "Dein Browser ist nicht leistungsstark genug, um deine Bilder zu verschlüsseln. Versuche, dich an einem Computer bei Ente anzumelden, oder lade dir die Ente-App für dein Gerät (Handy oder Desktop) herunter.", + "DRAG_AND_DROP_HINT": "Oder ziehe Dateien per Drag-and-Drop in das Ente-Fenster", + "CONFIRM_ACCOUNT_DELETION_MESSAGE": "Deine hochgeladenen Daten werden zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht.

Dieser Vorgang kann nicht rückgängig gemacht werden.", "AUTHENTICATE": "Authentifizieren", - "UPLOADED_TO_SINGLE_COLLECTION": "", - "UPLOADED_TO_SEPARATE_COLLECTIONS": "", + "UPLOADED_TO_SINGLE_COLLECTION": "In einzelnes Album hochgeladen", + "UPLOADED_TO_SEPARATE_COLLECTIONS": "In separate Alben hochgeladen", "NEVERMIND": "Egal", "UPDATE_AVAILABLE": "Neue Version verfügbar", "UPDATE_INSTALLABLE_MESSAGE": "Eine neue Version von Ente ist für die Installation bereit.", @@ -471,10 +470,10 @@ "YESTERDAY": "Gestern", "NAME_PLACEHOLDER": "Name...", "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED": "Alben können nicht aus Datei/Ordnermix erstellt werden", - "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED_MESSAGE": "", + "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED_MESSAGE": "

Du hast sowohl Dateien als auch Ordner in das Ente-Fenster gezogen.

Bitte wähle entweder nur Dateien oder nur Ordner aus, wenn separate Alben erstellt werden sollen.

", "CHOSE_THEME": "Design auswählen", "ML_SEARCH": "Gesichtserkennung", - "ENABLE_ML_SEARCH_DESCRIPTION": "", + "ENABLE_ML_SEARCH_DESCRIPTION": "

Hiermit wird maschinelles Lernen direkt auf deinem Gerät aktiviert, und die Gesichtserkennung beginnt damit, die Fotos auf deinem Gerät zu analysieren.

Beim ersten Durchlauf nach der Anmeldung oder Aktivierung der Funktion werden alle Bilder auf dein Gerät heruntergeladen, um analysiert zu werden. Bitte aktiviere diese Funktion nur, wenn du einverstanden bist, dass dein Gerät die dafür benötigte Bandbreite und Rechenleistung aufbringt.

Falls dies das erste Mal ist, dass du diese Funktion aktivierst, werden wir deine Erlaubnis zur Verarbeitung von Gesichtsdaten einholen.

", "ML_MORE_DETAILS": "Weitere Details", "ENABLE_FACE_SEARCH": "Gesichtserkennung aktivieren", "ENABLE_FACE_SEARCH_TITLE": "Gesichtserkennung aktivieren?", @@ -482,18 +481,18 @@ "DISABLE_BETA": "Beta deaktivieren", "DISABLE_FACE_SEARCH": "Gesichtserkennung deaktivieren", "DISABLE_FACE_SEARCH_TITLE": "Gesichtserkennung deaktivieren?", - "DISABLE_FACE_SEARCH_DESCRIPTION": "", + "DISABLE_FACE_SEARCH_DESCRIPTION": "

Ente wird aufhören, Gesichtsdaten zu verarbeiten.

Du kannst die Gesichtserkennung jederzeit wieder aktivieren, wenn du möchtest, daher ist dieser Vorgang risikofrei.

", "ADVANCED": "Erweitert", "FACE_SEARCH_CONFIRMATION": "Ich verstehe und möchte Ente erlauben, Gesichtsgeometrie zu verarbeiten", "LABS": "Experimente", - "YOURS": "", + "YOURS": "von dir", "PASSPHRASE_STRENGTH_WEAK": "Passwortstärke: Schwach", "PASSPHRASE_STRENGTH_MODERATE": "Passwortstärke: Moderat", "PASSPHRASE_STRENGTH_STRONG": "Passwortstärke: Stark", "PREFERENCES": "Einstellungen", "LANGUAGE": "Sprache", "EXPORT_DIRECTORY_DOES_NOT_EXIST": "Ungültiges Exportverzeichnis", - "EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "", + "EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "

Das von dir gewählte Exportverzeichnis existiert nicht.

Bitte wähle einen gültigen Ordner.

", "SUBSCRIPTION_VERIFICATION_ERROR": "Verifizierung des Abonnements fehlgeschlagen", "STORAGE_UNITS": { "B": "B", @@ -516,39 +515,39 @@ "CREATE_PUBLIC_SHARING": "Öffentlichen Link erstellen", "PUBLIC_LINK_CREATED": "Öffentlicher Link erstellt", "PUBLIC_LINK_ENABLED": "Öffentlicher Link aktiviert", - "COLLECT_PHOTOS": "", - "PUBLIC_COLLECT_SUBTEXT": "", + "COLLECT_PHOTOS": "Bilder sammeln", + "PUBLIC_COLLECT_SUBTEXT": "Erlaube Personen mit diesem Link, Fotos zum gemeinsamen Album hinzuzufügen.", "STOP_EXPORT": "Stop", - "EXPORT_PROGRESS": "", + "EXPORT_PROGRESS": "{{progress.success, number}} / {{progress.total, number}} Dateien synchronisiert", "MIGRATING_EXPORT": "Vorbereiten...", "RENAMING_COLLECTION_FOLDERS": "Albumordner umbenennen...", - "TRASHING_DELETED_FILES": "", - "TRASHING_DELETED_COLLECTIONS": "", - "CONTINUOUS_EXPORT": "", - "PENDING_ITEMS": "", - "EXPORT_STARTING": "", - "DELETE_ACCOUNT_REASON_LABEL": "", - "DELETE_ACCOUNT_REASON_PLACEHOLDER": "", + "TRASHING_DELETED_FILES": "Verschiebe gelöschte Dateien in den Trash-Ordner...", + "TRASHING_DELETED_COLLECTIONS": "Verschiebe gelöschte Alben in den Trash-Ordner...", + "CONTINUOUS_EXPORT": "Stets aktuell halten", + "PENDING_ITEMS": "Ausstehende Dateien", + "EXPORT_STARTING": "Starte Export...", + "DELETE_ACCOUNT_REASON_LABEL": "Was ist der Hauptgrund für die Löschung deines Kontos?", + "DELETE_ACCOUNT_REASON_PLACEHOLDER": "Wähle einen Grund aus", "DELETE_REASON": { - "MISSING_FEATURE": "", - "BROKEN_BEHAVIOR": "", - "FOUND_ANOTHER_SERVICE": "", - "NOT_LISTED": "" + "MISSING_FEATURE": "Es fehlt eine wichtige Funktion die ich benötige", + "BROKEN_BEHAVIOR": "Die App oder eine bestimmte Funktion verhält sich nicht so wie gedacht", + "FOUND_ANOTHER_SERVICE": "Ich habe einen anderen Dienst gefunden, der mir mehr zusagt", + "NOT_LISTED": "Mein Grund ist nicht aufgeführt" }, - "DELETE_ACCOUNT_FEEDBACK_LABEL": "", + "DELETE_ACCOUNT_FEEDBACK_LABEL": "Wir bedauern sehr, dass uns verlässt. 
Bitte hilf uns besser zu werden, indem du uns sagst, warum du gehst.", "DELETE_ACCOUNT_FEEDBACK_PLACEHOLDER": "Feedback", "CONFIRM_DELETE_ACCOUNT_CHECKBOX_LABEL": "Ja, ich möchte dieses Konto und alle enthaltenen Daten endgültig und unwiderruflich löschen", "CONFIRM_DELETE_ACCOUNT": "Kontolöschung bestätigen", - "FEEDBACK_REQUIRED": "", + "FEEDBACK_REQUIRED": "Bitte hilf uns durch das Angeben dieser Daten", "FEEDBACK_REQUIRED_FOUND_ANOTHER_SERVICE": "Was macht der andere Dienst besser?", "RECOVER_TWO_FACTOR": "Zwei-Faktor wiederherstellen", - "at": "", + "at": "um", "AUTH_NEXT": "Weiter", - "AUTH_DOWNLOAD_MOBILE_APP": "", + "AUTH_DOWNLOAD_MOBILE_APP": "Lade unsere Smartphone-App herunter, um deine Schlüssel zu verwalten", "HIDDEN": "Versteckt", "HIDE": "Ausblenden", "UNHIDE": "Einblenden", - "UNHIDE_TO_COLLECTION": "", + "UNHIDE_TO_COLLECTION": "In Album wieder sichtbar machen", "SORT_BY": "Sortieren nach", "NEWEST_FIRST": "Neueste zuerst", "OLDEST_FIRST": "Älteste zuerst", @@ -562,14 +561,14 @@ "DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} Dateien", "CHRISTMAS": "Weihnachten", "CHRISTMAS_EVE": "Heiligabend", - "NEW_YEAR": "", - "NEW_YEAR_EVE": "", + "NEW_YEAR": "Neujahr", + "NEW_YEAR_EVE": "Silvester", "IMAGE": "Bild", "VIDEO": "Video", "LIVE_PHOTO": "Live-Foto", "CONVERT": "Konvertieren", - "CONFIRM_EDITOR_CLOSE_MESSAGE": "", - "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "", + "CONFIRM_EDITOR_CLOSE_MESSAGE": "Editor wirklich schließen?", + "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Lade dein bearbeitetes Bild herunter oder speichere es in Ente, um die Änderungen nicht zu verlieren.", "BRIGHTNESS": "Helligkeit", "CONTRAST": "Kontrast", "SATURATION": "Sättigung", @@ -581,7 +580,7 @@ "ROTATE_RIGHT": "Nach rechts drehen", "FLIP_VERTICALLY": "Vertikal spiegeln", "FLIP_HORIZONTALLY": "Horizontal spiegeln", - "DOWNLOAD_EDITED": "", + "DOWNLOAD_EDITED": "Bearbeitetes Bild herunterladen", "SAVE_A_COPY_TO_ENTE": "Kopie in Ente speichern", "RESTORE_ORIGINAL": "Original 
wiederherstellen", "TRANSFORM": "Transformieren", @@ -590,24 +589,24 @@ "ROTATION": "Drehen", "RESET": "Zurücksetzen", "PHOTO_EDITOR": "Foto-Editor", - "FASTER_UPLOAD": "", - "FASTER_UPLOAD_DESCRIPTION": "", - "MAGIC_SEARCH_STATUS": "", + "FASTER_UPLOAD": "Schnelleres hochladen", + "FASTER_UPLOAD_DESCRIPTION": "Uploads über nahegelegene Server leiten", + "MAGIC_SEARCH_STATUS": "Status der magischen Suche", "INDEXED_ITEMS": "Indizierte Elemente", "CAST_ALBUM_TO_TV": "Album auf Fernseher wiedergeben", "ENTER_CAST_PIN_CODE": "Gib den Code auf dem Fernseher unten ein, um dieses Gerät zu koppeln.", "PAIR_DEVICE_TO_TV": "Geräte koppeln", "TV_NOT_FOUND": "Fernseher nicht gefunden. Hast du die PIN korrekt eingegeben?", - "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", - "PAIR_WITH_PIN": "", - "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", - "VISIT_CAST_ENTE_IO": "", - "CAST_AUTO_PAIR_FAILED": "", + "AUTO_CAST_PAIR": "Automatisch verbinden", + "AUTO_CAST_PAIR_DESC": "Automatisches Verbinden funktioniert nur mit Geräten, die Chromecast unterstützen.", + "PAIR_WITH_PIN": "Mit PIN verbinden", + "CHOOSE_DEVICE_FROM_BROWSER": "Wähle ein Cast-Gerät aus dem Browser-Popup aus.", + "PAIR_WITH_PIN_DESC": "\"Mit PIN verbinden\" funktioniert mit jedem Bildschirm, auf dem du dein Album sehen möchtest.", + "VISIT_CAST_ENTE_IO": "Besuche {{url}} auf dem Gerät, das du verbinden möchtest.", + "CAST_AUTO_PAIR_FAILED": "Das automatische Verbinden über Chromecast ist fehlgeschlagen. Bitte versuche es erneut.", "FREEHAND": "Freihand", - "APPLY_CROP": "", - "PHOTO_EDIT_REQUIRED_TO_SAVE": "", + "APPLY_CROP": "Zuschnitt anwenden", + "PHOTO_EDIT_REQUIRED_TO_SAVE": "Es muss mindestens eine Transformation oder Farbanpassung vorgenommen werden, bevor gespeichert werden kann.", "PASSKEYS": "Passkeys", "DELETE_PASSKEY": "Passkey löschen", "DELETE_PASSKEY_CONFIRMATION": "Bist du sicher, dass du diesen Passkey löschen willst? 
Dieser Vorgang ist nicht umkehrbar.", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "Ein Fehler trat auf beim Anmelden mit dem Passkey auf.", "TRY_AGAIN": "Erneut versuchen", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Folge den Schritten in deinem Browser, um mit dem Anmelden fortzufahren.", - "LOGIN_WITH_PASSKEY": "Mit Passkey anmelden" + "LOGIN_WITH_PASSKEY": "Mit Passkey anmelden", + "autogenerated_first_album_name": "Mein erstes Album", + "autogenerated_default_album_name": "Neues Album" } diff --git a/web/packages/next/locales/en-US/translation.json b/web/packages/next/locales/en-US/translation.json index 5fdb380d5b..3474f1b710 100644 --- a/web/packages/next/locales/en-US/translation.json +++ b/web/packages/next/locales/en-US/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "All hidden albums", "HIDDEN_ALBUMS": "Hidden albums", "HIDDEN_ITEMS": "Hidden items", - "HIDDEN_ITEMS_SECTION_NAME": "Hidden_items", "ENTER_TWO_FACTOR_OTP": "Enter the 6-digit code from your authenticator app.", "CREATE_ACCOUNT": "Create account", "COPIED": "Copied", @@ -598,13 +597,13 @@ "ENTER_CAST_PIN_CODE": "Enter the code you see on the TV below to pair this device.", "PAIR_DEVICE_TO_TV": "Pair devices", "TV_NOT_FOUND": "TV not found. Did you enter the PIN correctly?", - "AUTO_CAST_PAIR": "Auto Pair", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "Auto Pair requires connecting to Google servers and only works with Chromecast supported devices. 
Google will not receive sensitive data, such as your photos.", + "AUTO_CAST_PAIR": "Auto pair", + "AUTO_CAST_PAIR_DESC": "Auto pair works only with devices that support Chromecast.", "PAIR_WITH_PIN": "Pair with PIN", "CHOOSE_DEVICE_FROM_BROWSER": "Choose a cast-compatible device from the browser popup.", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "Pair with PIN works for any large screen device you want to play your album on.", + "PAIR_WITH_PIN_DESC": "Pair with PIN works with any screen you wish to view your album on.", "VISIT_CAST_ENTE_IO": "Visit {{url}} on the device you want to pair.", - "CAST_AUTO_PAIR_FAILED": "Chromecast Auto Pair failed. Please try again.", + "CAST_AUTO_PAIR_FAILED": "Chromecast auto pair failed. Please try again.", "FREEHAND": "Freehand", "APPLY_CROP": "Apply Crop", "PHOTO_EDIT_REQUIRED_TO_SAVE": "At least one transformation or color adjustment must be performed before saving.", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "An error occurred while logging in with passkey.", "TRY_AGAIN": "Try again", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Follow the steps from your browser to continue logging in.", - "LOGIN_WITH_PASSKEY": "Login with passkey" + "LOGIN_WITH_PASSKEY": "Login with passkey", + "autogenerated_first_album_name": "My First Album", + "autogenerated_default_album_name": "New Album" } diff --git a/web/packages/next/locales/es-ES/translation.json b/web/packages/next/locales/es-ES/translation.json index 5435514573..24b6c08797 100644 --- a/web/packages/next/locales/es-ES/translation.json +++ b/web/packages/next/locales/es-ES/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "Ingrese el código de seis dígitos de su aplicación de autenticación a continuación.", "CREATE_ACCOUNT": "Crear cuenta", "COPIED": "Copiado", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - 
"AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fa-IR/translation.json b/web/packages/next/locales/fa-IR/translation.json index 9dc5ccb7a8..05e3c47d6f 100644 --- a/web/packages/next/locales/fa-IR/translation.json +++ b/web/packages/next/locales/fa-IR/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fi-FI/translation.json b/web/packages/next/locales/fi-FI/translation.json index 2d2a56b54c..1408fbbe62 100644 --- a/web/packages/next/locales/fi-FI/translation.json +++ b/web/packages/next/locales/fi-FI/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - 
"HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fr-FR/translation.json b/web/packages/next/locales/fr-FR/translation.json index 308728b982..1c549b5a69 100644 --- a/web/packages/next/locales/fr-FR/translation.json +++ b/web/packages/next/locales/fr-FR/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "Tous les albums masqués", "HIDDEN_ALBUMS": "Albums masqués", "HIDDEN_ITEMS": "Éléments masqués", - "HIDDEN_ITEMS_SECTION_NAME": "Éléments masqués", "ENTER_TWO_FACTOR_OTP": "Saisir le code à 6 caractères de votre appli d'authentification.", "CREATE_ACCOUNT": "Créer un compte", "COPIED": "Copié", @@ -598,13 +597,13 @@ "ENTER_CAST_PIN_CODE": "Entrez le code que vous voyez sur la TV ci-dessous pour appairer cet appareil.", "PAIR_DEVICE_TO_TV": "Associer les appareils", "TV_NOT_FOUND": "TV introuvable. Avez-vous entré le code PIN correctement ?", - "AUTO_CAST_PAIR": "Paire automatique", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "La paire automatique nécessite la connexion aux serveurs Google et ne fonctionne qu'avec les appareils pris en charge par Chromecast. 
Google ne recevra pas de données sensibles, telles que vos photos.", + "AUTO_CAST_PAIR": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "Associer avec le code PIN", "CHOOSE_DEVICE_FROM_BROWSER": "Choisissez un périphérique compatible avec la caste à partir de la fenêtre pop-up du navigateur.", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "L'association avec le code PIN fonctionne pour tout appareil grand écran sur lequel vous voulez lire votre album.", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "Visitez {{url}} sur l'appareil que vous voulez associer.", - "CAST_AUTO_PAIR_FAILED": "La paire automatique de Chromecast a échoué. Veuillez réessayer.", + "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "Main levée", "APPLY_CROP": "Appliquer le recadrage", "PHOTO_EDIT_REQUIRED_TO_SAVE": "Au moins une transformation ou un ajustement de couleur doit être effectué avant de sauvegarder.", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "Une erreur s'est produite lors de la connexion avec le code d'accès.", "TRY_AGAIN": "Réessayer", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Suivez les étapes de votre navigateur pour poursuivre la connexion.", - "LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès" + "LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/it-IT/translation.json b/web/packages/next/locales/it-IT/translation.json index eb3e6bfa88..43898574a5 100644 --- a/web/packages/next/locales/it-IT/translation.json +++ b/web/packages/next/locales/it-IT/translation.json @@ -7,7 +7,7 @@ "HERO_SLIDE_3": "Android, iOS, Web, Desktop", "LOGIN": "Accedi", "SIGN_UP": "Registrati", - "NEW_USER": "", + "NEW_USER": "Prima volta con Ente", "EXISTING_USER": "Accedi", "ENTER_NAME": "Inserisci il nome", "PUBLIC_UPLOADER_NAME_MESSAGE": "Aggiungi un nome in modo che i tuoi amici sappiano chi ringraziare per queste fantastiche foto!", @@ -168,18 +168,18 
@@ "UPDATE_PAYMENT_METHOD": "Aggiorna metodo di pagamento", "MONTHLY": "Mensile", "YEARLY": "Annuale", - "update_subscription_title": "", + "update_subscription_title": "Conferma le modifiche al piano", "UPDATE_SUBSCRIPTION_MESSAGE": "Sei sicuro di voler cambiare il piano?", "UPDATE_SUBSCRIPTION": "Cambia piano", "CANCEL_SUBSCRIPTION": "Annulla abbonamento", "CANCEL_SUBSCRIPTION_MESSAGE": "

Tutti i tuoi dati saranno cancellati dai nostri server alla fine di questo periodo di fatturazione.

Sei sicuro di voler annullare il tuo abbonamento?

", - "CANCEL_SUBSCRIPTION_WITH_ADDON_MESSAGE": "", + "CANCEL_SUBSCRIPTION_WITH_ADDON_MESSAGE": "

Sei sicuro di voler annullare il tuo abbonamento?

", "SUBSCRIPTION_CANCEL_FAILED": "Impossibile annullare l'abbonamento", "SUBSCRIPTION_CANCEL_SUCCESS": "Abbonamento annullato con successo", "REACTIVATE_SUBSCRIPTION": "Riattiva abbonamento", "REACTIVATE_SUBSCRIPTION_MESSAGE": "Una volta riattivato, ti verrà addebitato il valore di {{date, dateTime}}", "SUBSCRIPTION_ACTIVATE_SUCCESS": "Iscrizione attivata con successo ", - "SUBSCRIPTION_ACTIVATE_FAILED": "", + "SUBSCRIPTION_ACTIVATE_FAILED": "Impossibile riattivare il rinnovo dell'abbonamento", "SUBSCRIPTION_PURCHASE_SUCCESS_TITLE": "Grazie", "CANCEL_SUBSCRIPTION_ON_MOBILE": "Annulla abbonamento mobile", "CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE": "", @@ -201,7 +201,7 @@ "CREATE_ALBUM_FAILED": "Operazione di creazione dell'album fallita, per favore riprova", "SEARCH": "Ricerca", "SEARCH_RESULTS": "Risultati della ricerca", - "NO_RESULTS": "", + "NO_RESULTS": "Nessun risultato trovato", "SEARCH_HINT": "", "SEARCH_TYPE": { "COLLECTION": "Album", @@ -219,7 +219,7 @@ "photos_count_other": "", "TERMS_AND_CONDITIONS": "", "ADD_TO_COLLECTION": "Aggiungi all'album", - "SELECTED": "", + "SELECTED": "selezionato", "PEOPLE": "Persone", "INDEXING_SCHEDULED": "", "ANALYZING_PHOTOS": "", @@ -241,8 +241,8 @@ "DISABLE_MAPS": "Disattivare Mappa?", "ENABLE_MAP_DESCRIPTION": "", "DISABLE_MAP_DESCRIPTION": "", - "DISABLE_MAP": "", - "DETAILS": "", + "DISABLE_MAP": "Disattivare Mappa", + "DETAILS": "Dettagli", "VIEW_EXIF": "", "NO_EXIF": "", "EXIF": "EXIF", @@ -258,23 +258,23 @@ "LOST_DEVICE": "", "INCORRECT_CODE": "Codice errato", "TWO_FACTOR_INFO": "Aggiungi un ulteriore livello di sicurezza richiedendo più informazioni rispetto a email e password per eseguire l'accesso al tuo account", - "DISABLE_TWO_FACTOR_LABEL": "", + "DISABLE_TWO_FACTOR_LABEL": "Disabilita l'autenticazione a due fattori", "UPDATE_TWO_FACTOR_LABEL": "", "DISABLE": "", "RECONFIGURE": "", "UPDATE_TWO_FACTOR": "", "UPDATE_TWO_FACTOR_MESSAGE": "", - "UPDATE": "", + "UPDATE": "Aggiorna", "DISABLE_TWO_FACTOR": "", 
"DISABLE_TWO_FACTOR_MESSAGE": "", "TWO_FACTOR_DISABLE_FAILED": "", "EXPORT_DATA": "Esporta dati", - "SELECT_FOLDER": "", - "DESTINATION": "", + "SELECT_FOLDER": "Seleziona cartella", + "DESTINATION": "Destinazione", "START": "", "LAST_EXPORT_TIME": "", - "EXPORT_AGAIN": "", - "LOCAL_STORAGE_NOT_ACCESSIBLE": "", + "EXPORT_AGAIN": "Risincronizza", + "LOCAL_STORAGE_NOT_ACCESSIBLE": "Archivio locale non accessibile", "LOCAL_STORAGE_NOT_ACCESSIBLE_MESSAGE": "", "SEND_OTT": "Invia OTP", "EMAIl_ALREADY_OWNED": "Email già in uso", @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "Crea account", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/ko-KR/translation.json b/web/packages/next/locales/ko-KR/translation.json index 63b6491def..8c37ab400e 100644 --- a/web/packages/next/locales/ko-KR/translation.json +++ b/web/packages/next/locales/ko-KR/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", 
"CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/nl-NL/translation.json b/web/packages/next/locales/nl-NL/translation.json index c12a38f8b3..bc33aec26e 100644 --- a/web/packages/next/locales/nl-NL/translation.json +++ b/web/packages/next/locales/nl-NL/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "Alle verborgen albums", "HIDDEN_ALBUMS": "Verborgen albums", "HIDDEN_ITEMS": "Verborgen bestanden", - "HIDDEN_ITEMS_SECTION_NAME": "Verborgen_items", "ENTER_TWO_FACTOR_OTP": "Voer de 6-cijferige code van uw verificatie app in.", "CREATE_ACCOUNT": "Account aanmaken", "COPIED": "Gekopieerd", @@ -599,12 +598,12 @@ "PAIR_DEVICE_TO_TV": "Koppel apparaten", "TV_NOT_FOUND": "TV niet gevonden. Heeft u de pincode correct ingevoerd?", "AUTO_CAST_PAIR": "Automatisch koppelen", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "Automatisch koppelen vereist verbinding met Google-servers en werkt alleen met apparaten die door Chromecast worden ondersteund. 
Google zal geen gevoelige gegevens ontvangen, zoals uw foto's.", + "AUTO_CAST_PAIR_DESC": "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen.", "PAIR_WITH_PIN": "Koppelen met PIN", "CHOOSE_DEVICE_FROM_BROWSER": "Kies een compatibel apparaat uit de browser popup.", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "Koppelen met PIN werkt op elk groot schermapparaat waarop u uw album wilt afspelen.", + "PAIR_WITH_PIN_DESC": "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien.", "VISIT_CAST_ENTE_IO": "Bezoek {{url}} op het apparaat dat je wilt koppelen.", - "CAST_AUTO_PAIR_FAILED": "Auto koppelen van Chromecast is mislukt. Probeer het opnieuw.", + "CAST_AUTO_PAIR_FAILED": "Automatisch koppelen van Chromecast mislukt. Probeer het opnieuw.", "FREEHAND": "Losse hand", "APPLY_CROP": "Bijsnijden toepassen", "PHOTO_EDIT_REQUIRED_TO_SAVE": "Tenminste één transformatie of kleuraanpassing moet worden uitgevoerd voordat u opslaat.", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "Er is een fout opgetreden tijdens het inloggen met een passkey.", "TRY_AGAIN": "Probeer opnieuw", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Volg de stappen van je browser om door te gaan met inloggen.", - "LOGIN_WITH_PASSKEY": "Inloggen met passkey" + "LOGIN_WITH_PASSKEY": "Inloggen met passkey", + "autogenerated_first_album_name": "Mijn eerste album", + "autogenerated_default_album_name": "Nieuw album" } diff --git a/web/packages/next/locales/pt-BR/translation.json b/web/packages/next/locales/pt-BR/translation.json index 5749591d17..717c4360fc 100644 --- a/web/packages/next/locales/pt-BR/translation.json +++ b/web/packages/next/locales/pt-BR/translation.json @@ -239,7 +239,7 @@ "ENABLE_MAPS": "Habilitar mapa?", "ENABLE_MAP": "Habilitar mapa", "DISABLE_MAPS": "Desativar Mapas?", - "ENABLE_MAP_DESCRIPTION": "Isto mostrará suas fotos em um mapa do mundo.

Este mapa é hospedado pelo OpenStreetMap , e os exatos locais de suas fotos nunca são compartilhados.

Você pode desativar esse recurso a qualquer momento nas Configurações.

", + "ENABLE_MAP_DESCRIPTION": "

Isto mostrará suas fotos em um mapa do mundo.

Este mapa é hospedado pelo OpenStreetMap, e os exatos locais de suas fotos nunca são compartilhados.

Você pode desativar esse recurso a qualquer momento nas Configurações.

", "DISABLE_MAP_DESCRIPTION": "

Isto irá desativar a exibição de suas fotos em um mapa mundial.

Você pode ativar este recurso a qualquer momento nas Configurações.

", "DISABLE_MAP": "Desabilitar mapa", "DETAILS": "Detalhes", @@ -380,14 +380,14 @@ "LINK_EXPIRED_MESSAGE": "Este link expirou ou foi desativado!", "MANAGE_LINK": "Gerenciar link", "LINK_TOO_MANY_REQUESTS": "Desculpe, este álbum foi visualizado em muitos dispositivos!", - "FILE_DOWNLOAD": "Permitir transferências", + "FILE_DOWNLOAD": "Permitir downloads", "LINK_PASSWORD_LOCK": "Bloqueio de senha", "PUBLIC_COLLECT": "Permitir adicionar fotos", "LINK_DEVICE_LIMIT": "Limite de dispositivos", "NO_DEVICE_LIMIT": "Nenhum", "LINK_EXPIRY": "Expiração do link", "NEVER": "Nunca", - "DISABLE_FILE_DOWNLOAD": "Desabilitar transferência", + "DISABLE_FILE_DOWNLOAD": "Desabilitar download", "DISABLE_FILE_DOWNLOAD_MESSAGE": "

Tem certeza de que deseja desativar o botão de download para arquivos?

Os visualizadores ainda podem capturar imagens da tela ou salvar uma cópia de suas fotos usando ferramentas externas.

", "SHARED_USING": "Compartilhar usando ", "SHARING_REFERRAL_CODE": "Use o código {{referralCode}} para obter 10 GB de graça", @@ -408,8 +408,8 @@ "STOP_ALL_UPLOADS_MESSAGE": "Tem certeza que deseja parar todos os envios em andamento?", "STOP_UPLOADS_HEADER": "Parar envios?", "YES_STOP_UPLOADS": "Sim, parar envios", - "STOP_DOWNLOADS_HEADER": "Parar transferências?", - "YES_STOP_DOWNLOADS": "Sim, parar transferências", + "STOP_DOWNLOADS_HEADER": "Parar downloads?", + "YES_STOP_DOWNLOADS": "Sim, parar downloads", "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos as transferências em andamento?", "albums_one": "1 Álbum", "albums_other": "{{count, number}} Álbuns", @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "Todos os álbuns ocultos", "HIDDEN_ALBUMS": "Álbuns ocultos", "HIDDEN_ITEMS": "Itens ocultos", - "HIDDEN_ITEMS_SECTION_NAME": "Itens_ocultos", "ENTER_TWO_FACTOR_OTP": "Digite o código de 6 dígitos de\nseu aplicativo autenticador.", "CREATE_ACCOUNT": "Criar uma conta", "COPIED": "Copiado", @@ -556,8 +555,8 @@ "SELECT_COLLECTION": "Selecionar álbum", "PIN_ALBUM": "Fixar álbum", "UNPIN_ALBUM": "Desafixar álbum", - "DOWNLOAD_COMPLETE": "Transferência concluída", - "DOWNLOADING_COLLECTION": "Transferindo {{name}}", + "DOWNLOAD_COMPLETE": "Download concluído", + "DOWNLOADING_COLLECTION": "Fazendo download de {{name}}", "DOWNLOAD_FAILED": "Falha ao baixar", "DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} arquivos", "CHRISTMAS": "Natal", @@ -599,12 +598,12 @@ "PAIR_DEVICE_TO_TV": "Parear dispositivos", "TV_NOT_FOUND": "TV não encontrada. Você inseriu o PIN correto?", "AUTO_CAST_PAIR": "Pareamento automático", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "O Auto Pair requer a conexão com servidores do Google e só funciona com dispositivos Chromecast. 
O Google não receberá dados confidenciais, como suas fotos.", + "AUTO_CAST_PAIR_DESC": "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast.", "PAIR_WITH_PIN": "Parear com PIN", "CHOOSE_DEVICE_FROM_BROWSER": "Escolha um dispositivo compatível com casts no navegador popup.", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "Parear com o PIN funciona para qualquer dispositivo de tela grande onde você deseja reproduzir seu álbum.", + "PAIR_WITH_PIN_DESC": "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado.", "VISIT_CAST_ENTE_IO": "Acesse
{{url}} no dispositivo que você deseja parear.", - "CAST_AUTO_PAIR_FAILED": "Chromecast Auto Pair falhou. Por favor, tente novamente.", + "CAST_AUTO_PAIR_FAILED": "Falha no pareamento automático do Chromecast. Por favor, tente novamente.", "FREEHAND": "Mão livre", "APPLY_CROP": "Aplicar Recorte", "PHOTO_EDIT_REQUIRED_TO_SAVE": "Pelo menos uma transformação ou ajuste de cor deve ser feito antes de salvar.", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "Ocorreu um erro ao entrar com a chave de acesso.", "TRY_AGAIN": "Tente novamente", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Siga os passos do seu navegador para continuar acessando.", - "LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso" + "LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso", + "autogenerated_first_album_name": "Meu Primeiro Álbum", + "autogenerated_default_album_name": "Novo Álbum" } diff --git a/web/packages/next/locales/pt-PT/translation.json b/web/packages/next/locales/pt-PT/translation.json index 20ec4d9ea9..44ec3361c7 100644 --- a/web/packages/next/locales/pt-PT/translation.json +++ b/web/packages/next/locales/pt-PT/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git 
a/web/packages/next/locales/ru-RU/translation.json b/web/packages/next/locales/ru-RU/translation.json index 95c4f6c58b..537ba0692a 100644 --- a/web/packages/next/locales/ru-RU/translation.json +++ b/web/packages/next/locales/ru-RU/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "Все скрытые альбомы", "HIDDEN_ALBUMS": "Скрытые альбомы", "HIDDEN_ITEMS": "Скрытые предметы", - "HIDDEN_ITEMS_SECTION_NAME": "Скрытые_элементы", "ENTER_TWO_FACTOR_OTP": "Введите 6-значный код из вашего приложения для проверки подлинности.", "CREATE_ACCOUNT": "Создать аккаунт", "COPIED": "Скопированный", @@ -598,13 +597,13 @@ "ENTER_CAST_PIN_CODE": "Введите код, который вы видите на экране телевизора ниже, чтобы выполнить сопряжение с этим устройством.", "PAIR_DEVICE_TO_TV": "Сопряжение устройств", "TV_NOT_FOUND": "Телевизор не найден. Вы правильно ввели PIN-код?", - "AUTO_CAST_PAIR": "Автоматическое сопряжение", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "Автоматическое сопряжение требует подключения к серверам Google и работает только с устройствами, поддерживающими Chromecast. Google не будет получать конфиденциальные данные, такие как ваши фотографии.", + "AUTO_CAST_PAIR": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "Соединение с помощью булавки", "CHOOSE_DEVICE_FROM_BROWSER": "Выберите устройство, совместимое с cast, во всплывающем окне браузера.", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "Сопряжение с помощью PIN-кода работает на любом устройстве с большим экраном, на котором вы хотите воспроизвести свой альбом.", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "Перейдите на страницу {{url}} на устройстве, которое вы хотите подключить.", - "CAST_AUTO_PAIR_FAILED": "Не удалось выполнить автоматическое сопряжение Chromecast. 
Пожалуйста, попробуйте снова.", + "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "От руки", "APPLY_CROP": "Применить обрезку", "PHOTO_EDIT_REQUIRED_TO_SAVE": "Перед сохранением необходимо выполнить по крайней мере одно преобразование или корректировку цвета.", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "При входе в систему с помощью пароля произошла ошибка.", "TRY_AGAIN": "Пробовать снова", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Следуйте инструкциям в вашем браузере, чтобы продолжить вход в систему.", - "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля" + "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/sv-SE/translation.json b/web/packages/next/locales/sv-SE/translation.json index 77462524d5..69a29a5bfa 100644 --- a/web/packages/next/locales/sv-SE/translation.json +++ b/web/packages/next/locales/sv-SE/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/th-TH/translation.json b/web/packages/next/locales/th-TH/translation.json index 2d2a56b54c..1408fbbe62 100644 --- 
a/web/packages/next/locales/th-TH/translation.json +++ b/web/packages/next/locales/th-TH/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/tr-TR/translation.json b/web/packages/next/locales/tr-TR/translation.json index 2d2a56b54c..1408fbbe62 100644 --- a/web/packages/next/locales/tr-TR/translation.json +++ b/web/packages/next/locales/tr-TR/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "", "HIDDEN_ALBUMS": "", "HIDDEN_ITEMS": "", - "HIDDEN_ITEMS_SECTION_NAME": "", "ENTER_TWO_FACTOR_OTP": "", "CREATE_ACCOUNT": "", "COPIED": "", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "", "TV_NOT_FOUND": "", "AUTO_CAST_PAIR": "", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "", + "AUTO_CAST_PAIR_DESC": "", "PAIR_WITH_PIN": "", "CHOOSE_DEVICE_FROM_BROWSER": "", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "", + "PAIR_WITH_PIN_DESC": "", "VISIT_CAST_ENTE_IO": "", "CAST_AUTO_PAIR_FAILED": "", "FREEHAND": "", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + 
"autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/zh-CN/translation.json b/web/packages/next/locales/zh-CN/translation.json index 7a76b58b60..3ac0e16e8f 100644 --- a/web/packages/next/locales/zh-CN/translation.json +++ b/web/packages/next/locales/zh-CN/translation.json @@ -418,7 +418,6 @@ "ALL_HIDDEN_ALBUMS": "所有隐藏的相册", "HIDDEN_ALBUMS": "隐藏的相册", "HIDDEN_ITEMS": "隐藏的项目", - "HIDDEN_ITEMS_SECTION_NAME": "隐藏的项目", "ENTER_TWO_FACTOR_OTP": "请输入您从身份验证应用上获得的6位数代码", "CREATE_ACCOUNT": "创建账户", "COPIED": "已复制", @@ -599,10 +598,10 @@ "PAIR_DEVICE_TO_TV": "配对设备", "TV_NOT_FOUND": "未找到电视。您输入的 PIN 码正确吗?", "AUTO_CAST_PAIR": "自动配对", - "AUTO_CAST_PAIR_REQUIRES_CONNECTION_TO_GOOGLE": "自动配对需要连接到 Google 服务器,且仅适用于支持 Chromecast 的设备。Google 不会接收敏感数据,例如您的照片。", + "AUTO_CAST_PAIR_DESC": "自动配对仅适用于支持 Chromecast 的设备。", "PAIR_WITH_PIN": "用 PIN 配对", "CHOOSE_DEVICE_FROM_BROWSER": "从浏览器弹出窗口中选择兼容 Cast 的设备。", - "PAIR_WITH_PIN_WORKS_FOR_ANY_LARGE_SCREEN_DEVICE": "用 PIN 配对适用于任何大屏幕设备,您可以在这些设备上播放您的相册。", + "PAIR_WITH_PIN_DESC": "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。", "VISIT_CAST_ENTE_IO": "在您要配对的设备上访问 {{url}} 。", "CAST_AUTO_PAIR_FAILED": "Chromecast 自动配对失败。请再试一次。", "FREEHAND": "手画", @@ -621,5 +620,7 @@ "PASSKEY_LOGIN_ERRORED": "使用通行密钥登录时出错。", "TRY_AGAIN": "重试", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "按照浏览器中提示的步骤继续登录。", - "LOGIN_WITH_PASSKEY": "使用通行密钥来登录" + "LOGIN_WITH_PASSKEY": "使用通行密钥来登录", + "autogenerated_first_album_name": "我的第一个相册", + "autogenerated_default_album_name": "新建相册" } diff --git a/web/packages/next/log.ts b/web/packages/next/log.ts index 3dadbd2887..f9ef7e5493 100644 --- a/web/packages/next/log.ts +++ b/web/packages/next/log.ts @@ -17,7 +17,7 @@ export const logToDisk = (message: string) => { }; const workerLogToDisk = (message: string) => { - workerBridge.logToDisk(message).catch((e) => { + workerBridge.logToDisk(message).catch((e: unknown) => { console.error( "Failed to log a message from worker", e, @@ -27,27 +27,30 @@ const workerLogToDisk = (message: string) => 
{ }); }; -const logError = (message: string, e?: unknown) => { - if (!e) { - logError_(message); - return; - } +const messageWithError = (message: string, e?: unknown) => { + if (!e) return message; let es: string; if (e instanceof Error) { // In practice, we expect ourselves to be called with Error objects, so // this is the happy path so to say. - es = `${e.name}: ${e.message}\n${e.stack}`; + es = [`${e.name}: ${e.message}`, e.stack].filter((x) => x).join("\n"); } else { // For the rest rare cases, use the default string serialization of e. es = String(e); } - logError_(`${message}: ${es}`); + return `${message}: ${es}`; }; -const logError_ = (message: string) => { - const m = `[error] ${message}`; +const logError = (message: string, e?: unknown) => { + const m = `[error] ${messageWithError(message, e)}`; + if (isDevBuild) console.error(m); + logToDisk(m); +}; + +const logWarn = (message: string, e?: unknown) => { + const m = `[warn] ${messageWithError(message, e)}`; if (isDevBuild) console.error(m); logToDisk(m); }; @@ -90,6 +93,11 @@ export default { * printed to the browser console. */ error: logError, + /** + * Sibling of {@link error}, with the same parameters and behaviour, except + * it gets prefixed with a warning instead of an error tag. + */ + warn: logWarn, /** * Log a message. * diff --git a/web/packages/next/types/file.ts b/web/packages/next/types/file.ts deleted file mode 100644 index dc8a148e93..0000000000 --- a/web/packages/next/types/file.ts +++ /dev/null @@ -1,35 +0,0 @@ -export enum UPLOAD_STRATEGY { - SINGLE_COLLECTION, - COLLECTION_PER_FOLDER, -} - -/* - * ElectronFile is a custom interface that is used to represent - * any file on disk as a File-like object in the Electron desktop app. - * - * This was added to support the auto-resuming of failed uploads - * which needed absolute paths to the files which the - * normal File interface does not provide. 
- */ -export interface ElectronFile { - name: string; - path: string; - size: number; - lastModified: number; - stream: () => Promise>; - blob: () => Promise; - arrayBuffer: () => Promise; -} - -export interface DataStream { - stream: ReadableStream; - chunkCount: number; -} - -export interface EventQueueItem { - type: "upload" | "trash"; - folderPath: string; - collectionName?: string; - paths?: string[]; - files?: ElectronFile[]; -} diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts index 85986b6391..b4ef2b6b24 100644 --- a/web/packages/next/types/ipc.ts +++ b/web/packages/next/types/ipc.ts @@ -3,24 +3,6 @@ // // See [Note: types.ts <-> preload.ts <-> ipc.ts] -import type { ElectronFile } from "./file"; - -export interface AppUpdateInfo { - autoUpdatable: boolean; - version: string; -} - -export enum FILE_PATH_TYPE { - FILES = "files", - ZIPS = "zips", -} - -export enum PICKED_UPLOAD_TYPE { - FILES = "files", - FOLDERS = "folders", - ZIPS = "zips", -} - /** * Extra APIs provided by our Node.js layer when our code is running inside our * desktop (Electron) app. @@ -67,6 +49,20 @@ export interface Electron { */ openLogDirectory: () => Promise; + /** + * Ask the user to select a directory on their local file system, and return + * it path. + * + * The returned path is guaranteed to use POSIX separators ('/'). + * + * We don't strictly need IPC for this, we can use a hidden element + * and trigger its click for the same behaviour (as we do for the + * `useFileInput` hook that we use for uploads). However, it's a bit + * cumbersome, and we anyways will need to IPC to get back its full path, so + * it is just convenient to expose this direct method. + */ + selectDirectory: () => Promise; + /** * Clear any stored data. * @@ -111,7 +107,7 @@ export interface Electron { * Note: Setting a callback clears any previous callbacks. 
*/ onAppUpdateAvailable: ( - cb?: ((updateInfo: AppUpdateInfo) => void) | undefined, + cb?: ((update: AppUpdate) => void) | undefined, ) => void; /** @@ -138,18 +134,20 @@ export interface Electron { */ skipAppUpdate: (version: string) => void; + // - FS + /** - * A subset of filesystem access APIs. + * A subset of file system access APIs. * * The renderer process, being a web process, does not have full access to - * the local filesystem apart from files explicitly dragged and dropped (or + * the local file system apart from files explicitly dragged and dropped (or * selected by the user in a native file open dialog). * - * The main process, however, has full filesystem access (limited only be an + * The main process, however, has full fil system access (limited only be an * OS level sandbox on the entire process). * * When we're running in the desktop app, we want to better utilize the - * local filesystem access to provide more integrated features to the user - + * local file system access to provide more integrated features to the user; * things that are not currently possible using web technologies. For * example, continuous exports to an arbitrary user chosen location on disk, * or watching some folders for changes and syncing them automatically. @@ -199,34 +197,101 @@ export interface Electron { * @param contents The string contents to write. */ writeFile: (path: string, contents: string) => Promise; - }; - /* - * TODO: AUDIT below this - Some of the types we use below are not copyable - * across process boundaries, and such functions will (expectedly) fail at - * runtime. For such functions, find an efficient alternative or refactor - * the dataflow. - */ + /** + * Return true if there is an item at {@link dirPath}, and it is as + * directory. 
+ */ + isDir: (dirPath: string) => Promise; + }; // - Conversion - convertToJPEG: ( - fileData: Uint8Array, - filename: string, - ) => Promise; + /** + * Try to convert an arbitrary image into JPEG using native layer tools. + * + * The behaviour is OS dependent. On macOS we use the `sips` utility, and on + * some Linux architectures we use an ImageMagick executable bundled with + * our desktop app. + * + * In other cases (primarily Windows), where native JPEG conversion is not + * yet possible, this function will throw an error with the + * {@link CustomErrorMessage.NotAvailable} message. + * + * @param imageData The raw image data (the contents of the image file). + * + * @returns JPEG data of the converted image. + */ + convertToJPEG: (imageData: Uint8Array) => Promise; + /** + * Generate a JPEG thumbnail for the given image. + * + * The behaviour is OS dependent. On macOS we use the `sips` utility, and on + * some Linux architectures we use an ImageMagick executable bundled with + * our desktop app. + * + * In other cases (primarily Windows), where native thumbnail generation is + * not yet possible, this function will throw an error with the + * {@link CustomErrorMessage.NotAvailable} message. + * + * @param dataOrPathOrZipItem The file whose thumbnail we want to generate. + * It can be provided as raw image data (the contents of the image file), or + * the path to the image file, or a tuple containing the path of the zip + * file along with the name of an entry in it. + * + * @param maxDimension The maximum width or height of the generated + * thumbnail. + * + * @param maxSize Maximum size (in bytes) of the generated thumbnail. + * + * @returns JPEG data of the generated thumbnail. 
+ */ generateImageThumbnail: ( - inputFile: File | ElectronFile, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, ) => Promise; - runFFmpegCmd: ( - cmd: string[], - inputFile: File | ElectronFile, - outputFileName: string, - dontTimeout?: boolean, - ) => Promise; + /** + * Execute a FFmpeg {@link command} on the given + * {@link dataOrPathOrZipItem}. + * + * This executes the command using a FFmpeg executable we bundle with our + * desktop app. We also have a wasm FFmpeg wasm implementation that we use + * when running on the web, which has a sibling function with the same + * parameters. See [Note: ffmpeg in Electron]. + * + * @param command An array of strings, each representing one positional + * parameter in the command to execute. Placeholders for the input, output + * and ffmpeg's own path are replaced before executing the command + * (respectively {@link inputPathPlaceholder}, + * {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}). + * + * @param dataOrPathOrZipItem The bytes of the input file, or the path to + * the input file on the user's local disk, or the path to a zip file on the + * user's disk and the name of an entry in it. In all three cases, the data + * gets serialized to a temporary file, and then that path gets substituted + * in the FFmpeg {@link command} in lieu of {@link inputPathPlaceholder}. + * + * @param outputFileExtension The extension (without the dot, e.g. "jpeg") + * to use for the output file that we ask FFmpeg to create in + * {@param command}. While this file will eventually get deleted, and we'll + * just return its contents, for some FFmpeg command the extension matters + * (e.g. conversion to a JPEG fails if the extension is arbitrary). + * + * @param timeoutMS If non-zero, then abort and throw a timeout error if the + * ffmpeg command takes more than the given number of milliseconds. 
+ * + * @returns The contents of the output file produced by the ffmpeg command + * (specified as {@link outputPathPlaceholder} in {@link command}). + */ + ffmpegExec: ( + command: string[], + dataOrPathOrZipItem: Uint8Array | string | ZipItem, + outputFileExtension: string, + timeoutMS: number, + ) => Promise; // - ML @@ -242,7 +307,18 @@ export interface Electron { clipImageEmbedding: (jpegImageData: Uint8Array) => Promise; /** - * Return a CLIP embedding of the given image. + * Return a CLIP embedding of the given image if we already have the model + * downloaded and prepped. If the model is not available return `undefined`. + * + * This differs from the other sibling ML functions in that it doesn't wait + * for the model download to finish. It does trigger a model download, but + * then immediately returns `undefined`. At some future point, when the + * model downloaded finishes, calls to this function will start returning + * the result we seek. + * + * The reason for doing it in this asymmetric way is because CLIP text + * embeddings are used as part of deducing user initiated search results, + * and we don't want to block that interaction on a large network request. * * See: [Note: CLIP based magic search] * @@ -250,7 +326,9 @@ export interface Electron { * * @returns A CLIP embedding. */ - clipTextEmbedding: (text: string) => Promise; + clipTextEmbeddingIfAvailable: ( + text: string, + ) => Promise; /** * Detect faces in the given image using YOLO. @@ -268,89 +346,308 @@ export interface Electron { */ faceEmbedding: (input: Float32Array) => Promise; - // - File selection - // TODO: Deprecated - use dialogs on the renderer process itself - - selectDirectory: () => Promise; - - showUploadFilesDialog: () => Promise; - - showUploadDirsDialog: () => Promise; - - showUploadZipDialog: () => Promise<{ - zipPaths: string[]; - files: ElectronFile[]; - }>; + /** + * Return a face crop stored by a previous version of ML. 
+ * + * [Note: Legacy face crops] + * + * Older versions of ML generated and stored face crops in a "face-crops" + * cache directory on the Electron side. For the time being, we have + * disabled the face search whilst we put finishing touches to it. However, + * it'll be nice to still show the existing faces that have been clustered + * for people who opted in to the older beta. + * + * So we retain the older "face-crops" disk cache, and use this method to + * serve faces from it when needed. + * + * @param faceID An identifier corresponding to which the face crop had been + * stored by the older version of our app. + * + * @returns the JPEG data of the face crop if a file is found for the given + * {@link faceID}, otherwise undefined. + */ + legacyFaceCrop: (faceID: string) => Promise; // - Watch - registerWatcherFunctions: ( - addFile: (file: ElectronFile) => Promise, - removeFile: (path: string) => Promise, - removeFolder: (folderPath: string) => Promise, - ) => void; + /** + * Interface with the file system watcher running in our Node.js layer. + * + * [Note: Folder vs Directory in the context of FolderWatch-es] + * + * A note on terminology: The word "folder" is used to the top level root + * folder for which a {@link FolderWatch} has been added. This folder is + * also in 1-1 correspondence to be a directory on the user's disk. It can + * have other, nested directories too (which may or may not be getting + * mapped to separate Ente collections), but we'll not refer to these nested + * directories as folders - only the root of the tree, which the user + * dragged/dropped or selected to set up the folder watch, will be referred + * to as a folder when naming things. + */ + watch: { + /** + * Return the list of folder watches, pruning non-existing directories. + * + * The list of folder paths (and auxillary details) is persisted in the + * Node.js layer. 
The implementation of this function goes through the + * list, permanently removes any watches whose on-disk directory is no + * longer present, and returns this pruned list of watches. + */ + get: () => Promise; - addWatchMapping: ( - collectionName: string, - folderPath: string, - uploadStrategy: number, - ) => Promise; + /** + * Add a new folder watch for the given {@link folderPath}. + * + * This adds a new entry in the list of watches (persisting them on + * disk), and also starts immediately observing for file system events + * that happen within {@link folderPath}. + * + * @param collectionMapping Determines how nested directories (if any) + * get mapped to Ente collections. + * + * @returns The updated list of watches. + */ + add: ( + folderPath: string, + collectionMapping: CollectionMapping, + ) => Promise; - removeWatchMapping: (folderPath: string) => Promise; + /** + * Remove the pre-existing watch for the given {@link folderPath}. + * + * Persist this removal, and also stop listening for file system events + * that happen within the {@link folderPath}. + * + * @returns The updated list of watches. + */ + remove: (folderPath: string) => Promise; - getWatchMappings: () => Promise; + /** + * Update the list of synced files for the folder watch associated + * with the given {@link folderPath}. + */ + updateSyncedFiles: ( + syncedFiles: FolderWatch["syncedFiles"], + folderPath: string, + ) => Promise; - updateWatchMappingSyncedFiles: ( - folderPath: string, - files: FolderWatch["syncedFiles"], - ) => Promise; + /** + * Update the list of ignored file paths for the folder watch + * associated with the given {@link folderPath}. + */ + updateIgnoredFiles: ( + ignoredFiles: FolderWatch["ignoredFiles"], + folderPath: string, + ) => Promise; - updateWatchMappingIgnoredFiles: ( - folderPath: string, - files: FolderWatch["ignoredFiles"], - ) => Promise; + /** + * Register the function to invoke when a file is added in one of the + * folders we are watching. 
+ * + * The callback function is passed the path to the file that was added, + * and the folder watch it was associated with. + * + * The path is guaranteed to use POSIX separators ('/'). + */ + onAddFile: (f: (path: string, watch: FolderWatch) => void) => void; - // - FS legacy - isFolder: (dirPath: string) => Promise; + /** + * Register the function to invoke when a file is removed in one of the + * folders we are watching. + * + * The callback function is passed the path to the file that was + * removed, and the folder watch it was associated with. + * + * The path is guaranteed to use POSIX separators ('/'). + */ + onRemoveFile: (f: (path: string, watch: FolderWatch) => void) => void; + + /** + * Register the function to invoke when a directory is removed in one of + * the folders we are watching. + * + * The callback function is passed the path to the directory that was + * removed, and the folder watch it was associated with. + * + * The path is guaranteed to use POSIX separators ('/'). + */ + onRemoveDir: (f: (path: string, watch: FolderWatch) => void) => void; + + /** + * Return the paths of all the files under the given folder. + * + * This function walks the directory tree starting at {@link folderPath} + * and returns a list of the absolute paths of all the files that exist + * therein. It will recursively traverse into nested directories, and + * return the absolute paths of the files there too. + * + * The returned paths are guaranteed to use POSIX separators ('/'). + */ + findFiles: (folderPath: string) => Promise; + + /** + * Stop watching all existing folder watches and remove any callbacks. + * + * This function is meant to be called when the user logs out. It stops + * all existing folder watches and forgets about any "on*" callback + * functions that have been registered. + * + * The persisted state itself gets cleared via {@link clearStores}. 
+ */ + reset: () => Promise; + }; // - Upload - getPendingUploads: () => Promise<{ - files: ElectronFile[]; - collectionName: string; - type: string; - }>; - setToUploadFiles: ( - /** TODO(MR): This is the actual type */ - // type: FILE_PATH_TYPE, - type: PICKED_UPLOAD_TYPE, - filePaths: string[], - ) => Promise; - getElectronFilesFromGoogleZip: ( - filePath: string, - ) => Promise; - setToUploadCollection: (collectionName: string) => Promise; - getDirFiles: (dirPath: string) => Promise; + /** + * Return the file system path that this File object points to. + * + * This method is a bit different from the other methods on the Electron + * object in the sense that there is no actual IPC happening - the + * implementation of this method is completely in the preload script. Thus + * we can pass it an otherwise unserializable File object. + * + * Consequently, it is also _not_ async. + */ + pathForFile: (file: File) => string; + + /** + * Get the list of files that are present in the given zip file. + * + * @param zipPath The path of the zip file on the user's local file system. + * + * @returns A list of (zipPath, entryName) tuples, one for each file in the + * given zip. Directories are traversed recursively, but the directory + * entries themselves will be excluded from the returned list. File entries + * whose file name begins with a dot (i.e. "hidden" files) will also be + * excluded. + * + * To read the contents of the files themselves, see [Note: IPC streams]. + */ + listZipItems: (zipPath: string) => Promise; + + /** + * Return the size in bytes of the file at the given path or of a particular + * entry within a zip file. + */ + pathOrZipItemSize: (pathOrZipItem: string | ZipItem) => Promise; + + /** + * Return any pending uploads that were previously enqueued but haven't yet + * been completed. + * + * Return undefined if there are no such pending uploads. + * + * The state of pending uploads is persisted in the Node.js layer. 
Or app + * start, we read in this data from the Node.js layer via this IPC method. + * The Node.js code returns the persisted data after filtering out any files + * that no longer exist on disk. + */ + pendingUploads: () => Promise; + + /** + * Set the state of pending uploads. + * + * - Typically, this would be called at the start of an upload. + * + * - Thereafter, as each item gets uploaded one by one, we'd call + * {@link markUploadedFiles} or {@link markUploadedZipItems}. + * + * - Finally, once the upload completes (or gets cancelled), we'd call + * {@link clearPendingUploads} to complete the circle. + */ + setPendingUploads: (pendingUploads: PendingUploads) => Promise; + + /** + * Mark the given files (given by their {@link paths}) as having been + * uploaded. + */ + markUploadedFiles: (paths: PendingUploads["filePaths"]) => Promise; + + /** + * Mark the given {@link ZipItem}s as having been uploaded. + */ + markUploadedZipItems: (items: PendingUploads["zipItems"]) => Promise; + + /** + * Clear any pending uploads. + */ + clearPendingUploads: () => Promise; +} + +/** + * Errors that have special semantics on the web side. + * + * [Note: Custom errors across Electron/Renderer boundary] + * + * If we need to identify errors thrown by the main process when invoked from + * the renderer process, we can only use the `message` field because: + * + * > Errors thrown throw `handle` in the main process are not transparent as + * > they are serialized and only the `message` property from the original error + * > is provided to the renderer process. + * > + * > - https://www.electronjs.org/docs/latest/tutorial/ipc + * > + * > Ref: https://github.com/electron/electron/issues/24427 + */ +export const CustomErrorMessage = { + NotAvailable: "This feature in not available on the current OS/arch", +}; + +/** + * Data passed across the IPC bridge when an app update is available. 
+ */ +export interface AppUpdate { + /** `true` if the user automatically update to this (new) version */ + autoUpdatable: boolean; + /** The new version that is available */ + version: string; } /** * A top level folder that was selected by the user for watching. * * The user can set up multiple such watches. Each of these can in turn be - * syncing multiple on disk folders to one or more (dependening on the - * {@link uploadStrategy}) Ente albums. + * syncing multiple on disk folders to one or more Ente collections (depending + * on the value of {@link collectionMapping}). * * This type is passed across the IPC boundary. It is persisted on the Node.js * side. */ export interface FolderWatch { - rootFolderName: string; - uploadStrategy: number; + /** + * Specify if nested files should all be mapped to the same single root + * collection, or if there should be a collection per directory that has + * files. @see {@link CollectionMapping}. + */ + collectionMapping: CollectionMapping; + /** + * The path to the (root) folder we are watching. + */ folderPath: string; + /** + * Files that have already been uploaded. + */ syncedFiles: FolderWatchSyncedFile[]; + /** + * Files (paths) that should be ignored when uploading. + */ ignoredFiles: string[]; } +/** + * The ways in which directories are mapped to collection. + * + * This comes into play when we have nested directories that we are trying to + * upload or watch on the user's local file system. + */ +export type CollectionMapping = + /** All files go into a single collection named after the root directory. */ + | "root" + /** Each file goes to a collection named after its parent directory. */ + | "parent"; + /** * An on-disk file that was synced as part of a folder watch. */ @@ -359,3 +656,58 @@ export interface FolderWatchSyncedFile { uploadedFileID: number; collectionID: number; } + +/** + * A particular file within a zip file. 
+ * + * When the user uploads a zip file, we create a "zip item" for each entry + * within the zip file. Each such entry is a tuple containing the (path to a zip + * file itself, and the name of an entry within it). + * + * The name of the entry is not just the file name, but rather is the full path + * of the file within the zip. That is, each entry name uniquely identifies a + * particular file within the given zip. + * + * When `entryName` is a path within a nested directory, it is guaranteed to use + * the POSIX path separator ("/") since that is the path separator required by + * the ZIP format itself + * + * > 4.4.17.1 The name of the file, with optional relative path. + * > + * > The path stored MUST NOT contain a drive or device letter, or a leading + * > slash. All slashes MUST be forward slashes '/' as opposed to backwards + * > slashes '\' for compatibility with Amiga and UNIX file systems etc. If + * > input came from standard input, there is no file name field. + * > + * > https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT + */ +export type ZipItem = [zipPath: string, entryName: string]; + +/** + * State about pending and in-progress uploads. + * + * When the user starts an upload, we remember the files they'd selected (or + * drag-dropped) so that we can resume if they restart the app in before the + * uploads have been completed. This state is kept on the Electron side, and + * this object is the IPC intermediary. + */ +export interface PendingUploads { + /** + * The collection to which we're uploading, or the root collection. + * + * This is name of the collection (when uploading to a singular collection) + * or the root collection (when uploading to separate * albums) to which we + * these uploads are meant to go to. See {@link CollectionMapping}. + * + * It will not be set if we're just uploading standalone files. + */ + collectionName?: string; + /** + * Paths of regular files that need to be uploaded. 
+ */ + filePaths: string[]; + /** + * {@link ZipItem} (zip path and entry name) that need to be uploaded. + */ + zipItems: ZipItem[]; +} diff --git a/web/packages/next/worker/comlink-worker.ts b/web/packages/next/worker/comlink-worker.ts index a5237fccc5..5929e5361b 100644 --- a/web/packages/next/worker/comlink-worker.ts +++ b/web/packages/next/worker/comlink-worker.ts @@ -12,24 +12,17 @@ export class ComlinkWorker InstanceType> { this.name = name; this.worker = worker; - this.worker.onerror = (ev) => { + worker.onerror = (event) => { log.error( - `Got error event from worker: ${JSON.stringify({ - errorEvent: JSON.stringify(ev), - name: this.name, - })}`, + `Got error event from worker: ${JSON.stringify({ event, name })}`, ); }; - log.debug(() => `Initiated ${this.name}`); - const comlink = wrap(this.worker); + log.debug(() => `Initiated web worker ${name}`); + const comlink = wrap(worker); this.remote = new comlink() as Promise>>; expose(workerBridge, worker); } - public getName() { - return this.name; - } - public terminate() { this.worker.terminate(); log.debug(() => `Terminated ${this.name}`); @@ -43,15 +36,16 @@ export class ComlinkWorker InstanceType> { * `workerBridge` object after importing it from `worker-bridge.ts`. * * Not all workers need access to all these functions, and this can indeed be - * done in a more fine-grained, per-worker, manner if needed. + * done in a more fine-grained, per-worker, manner if needed. For now, since it + * is a motley bunch, we just inject them all. 
*/ const workerBridge = { // Needed: generally (presumably) logToDisk, // Needed by ML worker getAuthToken: () => ensureLocalUser().then((user) => user.token), - convertToJPEG: (inputFileData: Uint8Array, filename: string) => - ensureElectron().convertToJPEG(inputFileData, filename), + convertToJPEG: (imageData: Uint8Array) => + ensureElectron().convertToJPEG(imageData), detectFaces: (input: Float32Array) => ensureElectron().detectFaces(input), faceEmbedding: (input: Float32Array) => ensureElectron().faceEmbedding(input), diff --git a/web/packages/shared/components/Navbar/base.tsx b/web/packages/shared/components/Navbar/base.tsx index 101506cfd0..403dc808ca 100644 --- a/web/packages/shared/components/Navbar/base.tsx +++ b/web/packages/shared/components/Navbar/base.tsx @@ -1,6 +1,9 @@ import { styled } from "@mui/material"; import { FlexWrapper } from "../../components/Container"; -const NavbarBase = styled(FlexWrapper)<{ isMobile: boolean }>` + +const NavbarBase = styled(FlexWrapper, { + shouldForwardProp: (propName) => propName != "isMobile", +})<{ isMobile: boolean }>` min-height: 64px; position: sticky; top: 0; diff --git a/web/packages/shared/crypto/index.ts b/web/packages/shared/crypto/index.ts index 00ac8d32f1..4e20fb92a1 100644 --- a/web/packages/shared/crypto/index.ts +++ b/web/packages/shared/crypto/index.ts @@ -1,6 +1,6 @@ import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { Remote } from "comlink"; -import { DedicatedCryptoWorker } from "./internal/crypto.worker"; +import { type DedicatedCryptoWorker } from "./internal/crypto.worker"; class ComlinkCryptoWorker { private comlinkWorkerInstance: diff --git a/web/packages/shared/crypto/types.ts b/web/packages/shared/crypto/types.ts index 4cf4c56b1f..e591820f08 100644 --- a/web/packages/shared/crypto/types.ts +++ b/web/packages/shared/crypto/types.ts @@ -1,17 +1,3 @@ -import { DataStream } from "@/next/types/file"; - -export interface LocalFileAttributes< - T extends string | Uint8Array | 
DataStream, -> { - encryptedData: T; - decryptionHeader: string; -} - -export interface EncryptionResult { - file: LocalFileAttributes; - key: string; -} - export interface B64EncryptionResult { encryptedData: string; key: string; diff --git a/web/packages/shared/error/index.ts b/web/packages/shared/error/index.ts index 12a87d2dba..d226d62b62 100644 --- a/web/packages/shared/error/index.ts +++ b/web/packages/shared/error/index.ts @@ -22,13 +22,10 @@ export function isApiErrorResponse(object: any): object is ApiErrorResponse { } export const CustomError = { - THUMBNAIL_GENERATION_FAILED: "thumbnail generation failed", VIDEO_PLAYBACK_FAILED: "video playback failed", ETAG_MISSING: "no header/etag present in response body", KEY_MISSING: "encrypted key missing from localStorage", FAILED_TO_LOAD_WEB_WORKER: "failed to load web worker", - CHUNK_MORE_THAN_EXPECTED: "chunks more than expected", - CHUNK_LESS_THAN_EXPECTED: "chunks less than expected", UNSUPPORTED_FILE_FORMAT: "unsupported file format", FILE_TOO_LARGE: "file too large", SUBSCRIPTION_EXPIRED: "subscription expired", @@ -49,9 +46,6 @@ export const CustomError = { SUBSCRIPTION_NEEDED: "subscription not present", NOT_FOUND: "not found ", NO_METADATA: "no metadata", - TOO_LARGE_LIVE_PHOTO_ASSETS: "too large live photo assets", - NOT_A_DATE: "not a date", - NOT_A_LOCATION: "not a location", FILE_ID_NOT_FOUND: "file with id not found", WEAK_DEVICE: "password decryption failed on the device", INCORRECT_PASSWORD: "incorrect password", @@ -60,8 +54,6 @@ export const CustomError = { HIDDEN_COLLECTION_SYNC_FILE_ATTEMPTED: "hidden collection sync file attempted", UNKNOWN_ERROR: "Something went wrong, please try again", - TYPE_DETECTION_FAILED: (fileFormat: string) => - `type detection failed ${fileFormat}`, WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED: "Windows native image processing is not supported", NETWORK_ERROR: "Network Error", @@ -73,9 +65,6 @@ export const CustomError = { AUTH_KEY_NOT_FOUND: "auth key not 
found", EXIF_DATA_NOT_FOUND: "exif data not found", SELECT_FOLDER_ABORTED: "select folder aborted", - NON_MEDIA_FILE: "non media file", - UNSUPPORTED_RAW_FORMAT: "unsupported raw format", - NON_PREVIEWABLE_FILE: "non previewable file", PROCESSING_FAILED: "processing failed", EXPORT_RECORD_JSON_PARSING_FAILED: "export record json parsing failed", TWO_FACTOR_ENABLED: "two factor enabled", @@ -84,8 +73,6 @@ export const CustomError = { ServerError: "server error", FILE_NOT_FOUND: "file not found", UNSUPPORTED_PLATFORM: "Unsupported platform", - MODEL_DOWNLOAD_PENDING: - "Model download pending, skipping clip search request", UPDATE_URL_FILE_ID_MISMATCH: "update url file id mismatch", URL_ALREADY_SET: "url already set", FILE_CONVERSION_FAILED: "file conversion failed", diff --git a/web/packages/shared/hooks/useFileInput.tsx b/web/packages/shared/hooks/useFileInput.tsx index b357d918ee..71f027cefe 100644 --- a/web/packages/shared/hooks/useFileInput.tsx +++ b/web/packages/shared/hooks/useFileInput.tsx @@ -1,10 +1,40 @@ import { useCallback, useRef, useState } from "react"; -export interface FileWithPath extends File { - readonly path?: string; +interface UseFileInputParams { + directory?: boolean; + accept?: string; } -export default function useFileInput({ directory }: { directory?: boolean }) { +/** + * Return three things: + * + * - A function that can be called to trigger the showing of the select file / + * directory dialog. + * + * - The list of properties that should be passed to a dummy `input` element + * that needs to be created to anchor the select file dialog. This input HTML + * element is not going to be visible, but it needs to be part of the DOM for + * the open trigger to have effect. + * + * - The list of files that the user selected. This will be a list even if the + * user selected directories - in that case, it will be the recursive list of + * files within this directory.
+ * + * @param param0 + * + * - If {@link directory} is true, the file open dialog will ask the user to + * select directories. Otherwise it'll ask the user to select files. + * + * - If {@link accept} is specified, it'll restrict the type of files that the + * user can select by setting the "accept" attribute of the underlying HTML + * input element we use to surface the file selector dialog. The value of + * accept can be an extension or a MIME type (See + * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept). + */ +export default function useFileInput({ + directory, + accept, +}: UseFileInputParams) { const [selectedFiles, setSelectedFiles] = useState([]); const inputRef = useRef(); @@ -19,21 +49,34 @@ export default function useFileInput({ directory }: { directory?: boolean }) { event, ) => { if (!!event.target && !!event.target.files) { - const files = [...event.target.files].map((file) => - toFileWithPath(file), - ); - setSelectedFiles(files); + setSelectedFiles([...event.target.files]); } }; + // [Note: webkitRelativePath] + // + // If the webkitdirectory attribute of an HTML element is set then + // the File objects that we get will have a `webkitRelativePath` property + // containing the path of the file relative to the selected directory. + // + // https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/webkitdirectory + // + // These paths use the POSIX path separator ("/"). + // https://stackoverflow.com/questions/62806233/when-using-webkitrelativepath-is-the-path-separator-operating-system-specific + // + const directoryOpts = directory + ? { directory: "", webkitdirectory: "" } + : {}; + const getInputProps = useCallback( () => ({ type: "file", multiple: true, style: { display: "none" }, - ...(directory ? { directory: "", webkitdirectory: "" } : {}), + ...directoryOpts, ref: inputRef, onChange: handleChange, + ...(accept ?
{ accept } : {}), }), [], ); @@ -44,26 +87,3 @@ export default function useFileInput({ directory }: { directory?: boolean }) { selectedFiles: selectedFiles, }; } - -// https://github.com/react-dropzone/file-selector/blob/master/src/file.ts#L88 -export function toFileWithPath(file: File, path?: string): FileWithPath { - if (typeof (file as any).path !== "string") { - // on electron, path is already set to the absolute path - const { webkitRelativePath } = file; - Object.defineProperty(file, "path", { - value: - typeof path === "string" - ? path - : typeof webkitRelativePath === "string" && // If is set, - // the File will have a {webkitRelativePath} property - // https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/webkitdirectory - webkitRelativePath.length > 0 - ? webkitRelativePath - : file.name, - writable: false, - configurable: false, - enumerable: true, - }); - } - return file; -} diff --git a/web/packages/shared/network/cast.ts b/web/packages/shared/network/cast.ts index b240eab32d..a18767baa2 100644 --- a/web/packages/shared/network/cast.ts +++ b/web/packages/shared/network/cast.ts @@ -58,11 +58,14 @@ class CastGateway { return resp.data.publicKey; } - public async registerDevice(code: string, publicKey: string) { - await HTTPService.post(getEndpoint() + "/cast/device-info/", { - deviceCode: `${code}`, - publicKey: publicKey, - }); + public async registerDevice(publicKey: string): Promise { + const resp = await HTTPService.post( + getEndpoint() + "/cast/device-info/", + { + publicKey: publicKey, + }, + ); + return resp.data.deviceCode; } public async publishCastPayload( diff --git a/web/packages/shared/time/index.ts b/web/packages/shared/time/index.ts index d98bc411be..87e1d9648b 100644 --- a/web/packages/shared/time/index.ts +++ b/web/packages/shared/time/index.ts @@ -5,17 +5,6 @@ export interface TimeDelta { years?: number; } -interface DateComponent { - year: T; - month: T; - day: T; - hour: T; - minute: T; - second: T; -} - -const 
currentYear = new Date().getFullYear(); - export function getUnixTimeInMicroSecondsWithDelta(delta: TimeDelta): number { let currentDate = new Date(); if (delta?.hours) { @@ -71,116 +60,3 @@ function _addYears(date: Date, years: number) { result.setFullYear(date.getFullYear() + years); return result; } - -/* -generates data component for date in format YYYYMMDD-HHMMSS - */ -export function parseDateFromFusedDateString(dateTime: string) { - const dateComponent: DateComponent = convertDateComponentToNumber({ - year: dateTime.slice(0, 4), - month: dateTime.slice(4, 6), - day: dateTime.slice(6, 8), - hour: dateTime.slice(9, 11), - minute: dateTime.slice(11, 13), - second: dateTime.slice(13, 15), - }); - return validateAndGetDateFromComponents(dateComponent); -} - -/* sample date format = 2018-08-19 12:34:45 - the date has six symbol separated number values - which we would extract and use to form the date - */ -export function tryToParseDateTime(dateTime: string): Date { - const dateComponent = getDateComponentsFromSymbolJoinedString(dateTime); - if (dateComponent.year?.length === 8 && dateComponent.month?.length === 6) { - // the filename has size 8 consecutive and then 6 consecutive digits - // high possibility that the it is a date in format YYYYMMDD-HHMMSS - const possibleDateTime = dateComponent.year + "-" + dateComponent.month; - return parseDateFromFusedDateString(possibleDateTime); - } - return validateAndGetDateFromComponents( - convertDateComponentToNumber(dateComponent), - ); -} - -function getDateComponentsFromSymbolJoinedString( - dateTime: string, -): DateComponent { - const [year, month, day, hour, minute, second] = - dateTime.match(/\d+/g) ?? 
[]; - - return { year, month, day, hour, minute, second }; -} - -function validateAndGetDateFromComponents( - dateComponent: DateComponent, - options = { minYear: 1990, maxYear: currentYear + 1 }, -) { - let date = getDateFromComponents(dateComponent); - if (hasTimeValues(dateComponent) && !isTimePartValid(date, dateComponent)) { - // if the date has time values but they are not valid - // then we remove the time values and try to validate the date - date = getDateFromComponents(removeTimeValues(dateComponent)); - } - if (!isDatePartValid(date, dateComponent)) { - return null; - } - if ( - date.getFullYear() < options.minYear || - date.getFullYear() > options.maxYear - ) { - return null; - } - return date; -} - -function isTimePartValid(date: Date, dateComponent: DateComponent) { - return ( - date.getHours() === dateComponent.hour && - date.getMinutes() === dateComponent.minute && - date.getSeconds() === dateComponent.second - ); -} - -function isDatePartValid(date: Date, dateComponent: DateComponent) { - return ( - date.getFullYear() === dateComponent.year && - date.getMonth() === dateComponent.month && - date.getDate() === dateComponent.day - ); -} - -function convertDateComponentToNumber( - dateComponent: DateComponent, -): DateComponent { - return { - year: Number(dateComponent.year), - // https://stackoverflow.com/questions/2552483/why-does-the-month-argument-range-from-0-to-11-in-javascripts-date-constructor - month: Number(dateComponent.month) - 1, - day: Number(dateComponent.day), - hour: Number(dateComponent.hour), - minute: Number(dateComponent.minute), - second: Number(dateComponent.second), - }; -} - -function getDateFromComponents(dateComponent: DateComponent) { - const { year, month, day, hour, minute, second } = dateComponent; - if (hasTimeValues(dateComponent)) { - return new Date(year, month, day, hour, minute, second); - } else { - return new Date(year, month, day); - } -} - -function hasTimeValues(dateComponent: DateComponent) { - const { hour, 
minute, second } = dateComponent; - return !isNaN(hour) && !isNaN(minute) && !isNaN(second); -} - -function removeTimeValues( - dateComponent: DateComponent, -): DateComponent { - return { ...dateComponent, hour: 0, minute: 0, second: 0 }; -} diff --git a/web/packages/shared/utils/index.ts b/web/packages/shared/utils/index.ts index c027b6cb62..568ec5cc40 100644 --- a/web/packages/shared/utils/index.ts +++ b/web/packages/shared/utils/index.ts @@ -4,9 +4,8 @@ * This function is a promisified `setTimeout`. It returns a promise that * resolves after {@link ms} milliseconds. */ -export async function sleep(ms: number) { - await new Promise((resolve) => setTimeout(resolve, ms)); -} +export const wait = (ms: number) => + new Promise((resolve) => setTimeout(resolve, ms)); export function downloadAsFile(filename: string, content: string) { const file = new Blob([content], { @@ -49,29 +48,27 @@ export async function retryAsyncFunction( if (attemptNumber === waitTimeBeforeNextTry.length) { throw e; } - await sleep(waitTimeBeforeNextTry[attemptNumber]); + await wait(waitTimeBeforeNextTry[attemptNumber]); } } } -export const promiseWithTimeout = async ( - request: Promise, - timeout: number, -): Promise => { - const timeoutRef = { current: null }; - const rejectOnTimeout = new Promise((_, reject) => { - timeoutRef.current = setTimeout( +/** + * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it + * does not resolve within {@link timeoutMS}, then reject with a timeout error. 
+ */ +export const withTimeout = async (promise: Promise, ms: number) => { + let timeoutId: ReturnType; + const rejectOnTimeout = new Promise((_, reject) => { + timeoutId = setTimeout( () => reject(new Error("Operation timed out")), - timeout, + ms, ); }); - const requestWithTimeOutCancellation = async () => { - const resp = await request; - clearTimeout(timeoutRef.current); - return resp; + const promiseAndCancelTimeout = async () => { + const result = await promise; + clearTimeout(timeoutId); + return result; }; - return await Promise.race([ - requestWithTimeOutCancellation(), - rejectOnTimeout, - ]); + return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); }; diff --git a/web/packages/shared/utils/temp.ts b/web/packages/shared/utils/temp.ts deleted file mode 100644 index 984f4abb05..0000000000 --- a/web/packages/shared/utils/temp.ts +++ /dev/null @@ -1,14 +0,0 @@ -const CHARACTERS = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - -export function generateTempName(length: number, suffix: string) { - let tempName = ""; - - const charactersLength = CHARACTERS.length; - for (let i = 0; i < length; i++) { - tempName += CHARACTERS.charAt( - Math.floor(Math.random() * charactersLength), - ); - } - return `${tempName}-${suffix}`; -} diff --git a/web/packages/utils/array.ts b/web/packages/utils/array.ts new file mode 100644 index 0000000000..660aef6795 --- /dev/null +++ b/web/packages/utils/array.ts @@ -0,0 +1,15 @@ +/** + * Shuffle. + * + * Return a new array containing the shuffled elements of the given array. + * + * The algorithm used is not the most efficient, but is effectively a one-liner + * whilst being reasonably efficient. To each element we assign a random key, + * then we sort by this key. Since the key is random, the sorted array will have + * the original elements in a random order. 
+ */ +export const shuffled = (xs: T[]) => + xs + .map((x) => [Math.random(), x]) + .sort() + .map(([, x]) => x) as T[]; diff --git a/web/packages/utils/ensure.ts b/web/packages/utils/ensure.ts new file mode 100644 index 0000000000..93706bfb61 --- /dev/null +++ b/web/packages/utils/ensure.ts @@ -0,0 +1,17 @@ +/** + * Throw an exception if the given value is `null` or `undefined`. + */ +export const ensure = (v: T | null | undefined): T => { + if (v === null) throw new Error("Required value was null"); + if (v === undefined) throw new Error("Required value was not found"); + return v; +}; + +/** + * Throw an exception if the given value is not a string. + */ +export const ensureString = (v: unknown): string => { + if (typeof v != "string") + throw new Error(`Expected a string, instead found ${String(v)}`); + return v; +}; diff --git a/web/yarn.lock b/web/yarn.lock index 11cc8b8e12..972b14df1c 100644 --- a/web/yarn.lock +++ b/web/yarn.lock @@ -528,7 +528,7 @@ dependencies: eslint-visitor-keys "^3.3.0" -"@eslint-community/regexpp@^4.5.1", "@eslint-community/regexpp@^4.6.1": +"@eslint-community/regexpp@^4.10.0", "@eslint-community/regexpp@^4.6.1": version "4.10.0" resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.10.0.tgz#548f6de556857c8bb73bbee70c35dc82a2e74d63" integrity sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA== @@ -1000,6 +1000,11 @@ "@types/node" "*" base-x "^3.0.6" +"@types/chromecast-caf-receiver@^6.0.14": + version "6.0.14" + resolved "https://registry.yarnpkg.com/@types/chromecast-caf-receiver/-/chromecast-caf-receiver-6.0.14.tgz#e1e781c62c84ee85899fd20d658e258f8f45f5be" + integrity sha512-qvN4uE4MlYCEtniTtjxG4D+KeEXfs/Sgqex9sSZdPVh5rffdifINYzKH3z3QRl+0mk41vD6vYZ8s8ZfW/8iFoQ== + "@types/estree@1.0.5": version "1.0.5" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" @@ -1018,7 +1023,7 @@ "@types/react" "*" 
hoist-non-react-statics "^3.3.0" -"@types/json-schema@^7.0.12": +"@types/json-schema@^7.0.15": version "7.0.15" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== @@ -1134,10 +1139,10 @@ resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.8.tgz#ce5ace04cfeabe7ef87c0091e50752e36707deff" integrity sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A== -"@types/semver@^7.5.0": - version "7.5.7" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.7.tgz#326f5fdda70d13580777bcaa1bc6fa772a5aef0e" - integrity sha512-/wdoPq1QqkSj9/QOeKkFquEuPzQbHTWAMPH/PaUMB+JuR31lXhlWXRZ52IpfDYVlDOUBvX09uBrPwxGT1hjNBg== +"@types/semver@^7.5.8": + version "7.5.8" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.8.tgz#8268a8c57a3e4abd25c165ecd36237db7948a55e" + integrity sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ== "@types/uuid@^9.0.2": version "9.0.8" @@ -1150,21 +1155,21 @@ integrity sha512-Tuk4q7q0DnpzyJDI4aMeghGuFu2iS1QAdKpabn8JfbtfGmVDUgvZv1I7mEjP61Bvnp3ljKCC8BE6YYSTNxmvRQ== "@typescript-eslint/eslint-plugin@^7": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.0.2.tgz#c13a34057be425167cc4a765158c46fdf2fd981d" - integrity sha512-/XtVZJtbaphtdrWjr+CJclaCVGPtOdBpFEnvtNf/jRV0IiEemRrL0qABex/nEt8isYcnFacm3nPHYQwL+Wb7qg== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz#c78e309fe967cb4de05b85cdc876fb95f8e01b6f" + integrity sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg== dependencies: - "@eslint-community/regexpp" "^4.5.1" - "@typescript-eslint/scope-manager" "7.0.2" - 
"@typescript-eslint/type-utils" "7.0.2" - "@typescript-eslint/utils" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@eslint-community/regexpp" "^4.10.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/type-utils" "7.8.0" + "@typescript-eslint/utils" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" graphemer "^1.4.0" - ignore "^5.2.4" + ignore "^5.3.1" natural-compare "^1.4.0" - semver "^7.5.4" - ts-api-utils "^1.0.1" + semver "^7.6.0" + ts-api-utils "^1.3.0" "@typescript-eslint/parser@^5.4.2 || ^6.0.0": version "6.21.0" @@ -1178,14 +1183,14 @@ debug "^4.3.4" "@typescript-eslint/parser@^7": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.0.2.tgz#95c31233d343db1ca1df8df7811b5b87ca7b1a68" - integrity sha512-GdwfDglCxSmU+QTS9vhz2Sop46ebNCXpPPvsByK7hu0rFGRHL+AusKQJ7SoN+LbLh6APFpQwHKmDSwN35Z700Q== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.8.0.tgz#1e1db30c8ab832caffee5f37e677dbcb9357ddc8" + integrity sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ== dependencies: - "@typescript-eslint/scope-manager" "7.0.2" - "@typescript-eslint/types" "7.0.2" - "@typescript-eslint/typescript-estree" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" "@typescript-eslint/scope-manager@6.21.0": @@ -1196,33 +1201,33 @@ "@typescript-eslint/types" "6.21.0" "@typescript-eslint/visitor-keys" "6.21.0" -"@typescript-eslint/scope-manager@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.0.2.tgz#6ec4cc03752758ddd1fdaae6fbd0ed9a2ca4fe63" - integrity sha512-l6sa2jF3h+qgN2qUMjVR3uCNGjWw4ahGfzIYsCtFrQJCjhbrDPdiihYT8FnnqFwsWX+20hK592yX9I2rxKTP4g== 
+"@typescript-eslint/scope-manager@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz#bb19096d11ec6b87fb6640d921df19b813e02047" + integrity sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g== dependencies: - "@typescript-eslint/types" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" -"@typescript-eslint/type-utils@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.0.2.tgz#a7fc0adff0c202562721357e7478207d380a757b" - integrity sha512-IKKDcFsKAYlk8Rs4wiFfEwJTQlHcdn8CLwLaxwd6zb8HNiMcQIFX9sWax2k4Cjj7l7mGS5N1zl7RCHOVwHq2VQ== +"@typescript-eslint/type-utils@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz#9de166f182a6e4d1c5da76e94880e91831e3e26f" + integrity sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A== dependencies: - "@typescript-eslint/typescript-estree" "7.0.2" - "@typescript-eslint/utils" "7.0.2" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/utils" "7.8.0" debug "^4.3.4" - ts-api-utils "^1.0.1" + ts-api-utils "^1.3.0" "@typescript-eslint/types@6.21.0": version "6.21.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.21.0.tgz#205724c5123a8fef7ecd195075fa6e85bac3436d" integrity sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg== -"@typescript-eslint/types@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.0.2.tgz#b6edd108648028194eb213887d8d43ab5750351c" - integrity sha512-ZzcCQHj4JaXFjdOql6adYV4B/oFOFjPOC9XYwCaZFRvqN8Llfvv4gSxrkQkd2u4Ci62i2c6W6gkDwQJDaRc4nA== +"@typescript-eslint/types@7.8.0": + version "7.8.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.8.0.tgz#1fd2577b3ad883b769546e2d1ef379f929a7091d" + integrity sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw== "@typescript-eslint/typescript-estree@6.21.0": version "6.21.0" @@ -1238,32 +1243,32 @@ semver "^7.5.4" ts-api-utils "^1.0.1" -"@typescript-eslint/typescript-estree@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.0.2.tgz#3c6dc8a3b9799f4ef7eca0d224ded01974e4cb39" - integrity sha512-3AMc8khTcELFWcKcPc0xiLviEvvfzATpdPj/DXuOGIdQIIFybf4DMT1vKRbuAEOFMwhWt7NFLXRkbjsvKZQyvw== +"@typescript-eslint/typescript-estree@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz#b028a9226860b66e623c1ee55cc2464b95d2987c" + integrity sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg== dependencies: - "@typescript-eslint/types" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" - minimatch "9.0.3" - semver "^7.5.4" - ts-api-utils "^1.0.1" + minimatch "^9.0.4" + semver "^7.6.0" + ts-api-utils "^1.3.0" -"@typescript-eslint/utils@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.0.2.tgz#8756123054cd934c8ba7db6a6cffbc654b10b5c4" - integrity sha512-PZPIONBIB/X684bhT1XlrkjNZJIEevwkKDsdwfiu1WeqBxYEEdIgVDgm8/bbKHVu+6YOpeRqcfImTdImx/4Bsw== +"@typescript-eslint/utils@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.8.0.tgz#57a79f9c0c0740ead2f622e444cfaeeb9fd047cd" + integrity sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ== dependencies: "@eslint-community/eslint-utils" "^4.4.0" - "@types/json-schema" 
"^7.0.12" - "@types/semver" "^7.5.0" - "@typescript-eslint/scope-manager" "7.0.2" - "@typescript-eslint/types" "7.0.2" - "@typescript-eslint/typescript-estree" "7.0.2" - semver "^7.5.4" + "@types/json-schema" "^7.0.15" + "@types/semver" "^7.5.8" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" + semver "^7.6.0" "@typescript-eslint/visitor-keys@6.21.0": version "6.21.0" @@ -1273,13 +1278,13 @@ "@typescript-eslint/types" "6.21.0" eslint-visitor-keys "^3.4.1" -"@typescript-eslint/visitor-keys@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.0.2.tgz#2899b716053ad7094962beb895d11396fc12afc7" - integrity sha512-8Y+YiBmqPighbm5xA2k4wKTxRzx9EkBu7Rlw+WHqMvRJ3RPz/BMBO9b2ru0LUNmXg120PHUXD5+SWFy2R8DqlQ== +"@typescript-eslint/visitor-keys@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz#7285aab991da8bee411a42edbd5db760d22fdd91" + integrity sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA== dependencies: - "@typescript-eslint/types" "7.0.2" - eslint-visitor-keys "^3.4.1" + "@typescript-eslint/types" "7.8.0" + eslint-visitor-keys "^3.4.3" "@ungap/structured-clone@^1.2.0": version "1.2.0" @@ -2505,7 +2510,7 @@ file-selector@^0.4.0: dependencies: tslib "^2.0.3" -file-type@^16.5.4: +file-type@16.5.4: version "16.5.4" resolved "https://registry.yarnpkg.com/file-type/-/file-type-16.5.4.tgz#474fb4f704bee427681f98dd390058a172a6c2fd" integrity sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw== @@ -2893,7 +2898,7 @@ ieee754@^1.2.1: resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^5.2.0, ignore@^5.2.4: 
+ignore@^5.2.0, ignore@^5.2.4, ignore@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef" integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw== @@ -3252,7 +3257,7 @@ jssha@~3.3.1: object.assign "^4.1.4" object.values "^1.1.6" -jszip@3.10.1: +jszip@^3.10: version "3.10.1" resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.10.1.tgz#34aee70eb18ea1faec2f589208a157d1feb091c2" integrity sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g== @@ -3449,6 +3454,13 @@ minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: dependencies: brace-expansion "^1.1.7" +minimatch@^9.0.4: + version "9.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" + integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== + dependencies: + brace-expansion "^2.0.1" + minimist@^1.2.0, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" @@ -4173,7 +4185,7 @@ semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.5.4: +semver@^7.5.4, semver@^7.6.0: version "7.6.0" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d" integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg== @@ -4565,10 +4577,10 @@ truncate-utf8-bytes@^1.0.0: dependencies: utf8-byte-length "^1.0.1" -ts-api-utils@^1.0.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.2.1.tgz#f716c7e027494629485b21c0df6180f4d08f5e8b" - integrity 
sha512-RIYA36cJn2WiH9Hy77hdF9r7oEwxAtB/TS9/S4Qd90Ap4z5FSiin5zEiTL44OII1Y3IIlEvxwxFUVgrHSZ/UpA== +ts-api-utils@^1.0.1, ts-api-utils@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.3.0.tgz#4b490e27129f1e8e686b45cc4ab63714dc60eea1" + integrity sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ== tsconfig-paths@^3.15.0: version "3.15.0" @@ -4659,9 +4671,9 @@ typed-array-length@^1.0.6: possible-typed-array-names "^1.0.0" typescript@^5: - version "5.3.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37" - integrity sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw== + version "5.4.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611" + integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== unbox-primitive@^1.0.2: version "1.0.2"