[desktop] Fetch face indexes - Part 2/x (#2320)
Continuation of https://github.com/ente-io/ente/pull/2297
@@ -202,7 +202,7 @@ export default function App({ Component, pageProps }: AppProps) {
}
const loadMlSearchState = async () => {
try {
const enabled = await isFaceIndexingEnabled();
const enabled = isFaceIndexingEnabled();
setMlSearchEnabled(enabled);
mlWorkManager.setMlSearchEnabled(enabled);
} catch (e) {

@@ -302,7 +302,7 @@ export default function App({ Component, pageProps }: AppProps) {
const showNavBar = (show: boolean) => setShowNavBar(show);
const updateMlSearchEnabled = async (enabled: boolean) => {
try {
await setIsFaceIndexingEnabled(enabled);
setIsFaceIndexingEnabled(enabled);
setMlSearchEnabled(enabled);
mlWorkManager.setMlSearchEnabled(enabled);
} catch (e) {
@@ -1,8 +1,11 @@
import { WhatsNew } from "@/new/photos/components/WhatsNew";
import { shouldShowWhatsNew } from "@/new/photos/services/changelog";
import { fetchAndSaveFeatureFlagsIfNeeded } from "@/new/photos/services/feature-flags";
import { getLocalFiles } from "@/new/photos/services/files";
import {
getLocalFiles,
getLocalTrashedFiles,
} from "@/new/photos/services/files";
import { EnteFile } from "@/new/photos/types/file";
import { mergeMetadata } from "@/new/photos/utils/file";
import log from "@/next/log";
import { CenteredFlex } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";

@@ -90,13 +93,12 @@ import {
getSectionSummaries,
} from "services/collectionService";
import downloadManager from "services/download";
import { syncCLIPEmbeddings } from "services/embeddingService";
import { syncEntities } from "services/entityService";
import { syncFiles } from "services/fileService";
import locationSearchService from "services/locationSearchService";
import { getLocalTrashedFiles, syncTrash } from "services/trashService";
import { sync } from "services/sync";
import { syncTrash } from "services/trashService";
import uploadManager from "services/upload/uploadManager";
import { isTokenValid, syncMapEnabled } from "services/userService";
import { isTokenValid } from "services/userService";
import { Collection, CollectionSummaries } from "types/collection";
import {
GalleryContextType,

@@ -125,7 +127,6 @@ import {
getSelectedFiles,
getUniqueFiles,
handleFileOps,
mergeMetadata,
sortFiles,
} from "utils/file";
import { isArchivedFile } from "utils/magicMetadata";

@@ -717,19 +718,7 @@ export default function Gallery() {
await syncFiles("normal", normalCollections, setFiles);
await syncFiles("hidden", hiddenCollections, setHiddenFiles);
await syncTrash(collections, setTrashedFiles);
await syncEntities();
await syncMapEnabled();
fetchAndSaveFeatureFlagsIfNeeded();
const electron = globalThis.electron;
if (electron) {
await syncCLIPEmbeddings();
// TODO-ML(MR): Disable fetch until we start storing it in the
// same place as the local ones.
// if (isFaceIndexingEnabled()) await syncFaceEmbeddings();
}
if (clipService.isPlatformSupported()) {
void clipService.scheduleImageEmbeddingExtraction();
}
await sync();
} catch (e) {
switch (e.message) {
case CustomError.SESSION_EXPIRED:
@@ -1,4 +1,5 @@
import { EnteFile } from "@/new/photos/types/file";
import { mergeMetadata } from "@/new/photos/utils/file";
import log from "@/next/log";
import {
CenteredFlex,

@@ -65,12 +66,7 @@ import {
SetFilesDownloadProgressAttributesCreator,
} from "types/gallery";
import { downloadCollectionFiles, isHiddenCollection } from "utils/collection";
import {
downloadSelectedFiles,
getSelectedFiles,
mergeMetadata,
sortFiles,
} from "utils/file";
import { downloadSelectedFiles, getSelectedFiles, sortFiles } from "utils/file";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";

export default function PublicCollectionGallery() {
@@ -1,5 +1,9 @@
import type { EmbeddingModel } from "@/new/photos/services/embedding";
import { getAllLocalFiles } from "@/new/photos/services/files";
import type { FaceIndex } from "@/new/photos/services/face/types";
import {
getAllLocalFiles,
getLocalTrashedFiles,
} from "@/new/photos/services/files";
import { EnteFile } from "@/new/photos/types/file";
import { inWorker } from "@/next/env";
import log from "@/next/log";

@@ -17,8 +21,6 @@ import type {
PutEmbeddingRequest,
} from "types/embedding";
import { getLocalCollections } from "./collectionService";
import type { FaceIndex } from "./face/types";
import { getLocalTrashedFiles } from "./trashService";

type FileML = FaceIndex & {
updatedAt: number;
@@ -3,6 +3,7 @@ import { decodeLivePhoto } from "@/media/live-photo";
import type { Metadata } from "@/media/types/file";
import { getAllLocalFiles } from "@/new/photos/services/files";
import { EnteFile } from "@/new/photos/types/file";
import { mergeMetadata } from "@/new/photos/utils/file";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { wait } from "@/utils/promise";

@@ -29,11 +30,7 @@ import {
getCollectionUserFacingName,
getNonEmptyPersonalCollections,
} from "utils/collection";
import {
getPersonalFiles,
getUpdatedEXIFFileForDownload,
mergeMetadata,
} from "utils/file";
import { getPersonalFiles, getUpdatedEXIFFileForDownload } from "utils/file";
import { safeDirectoryName, safeFileName } from "utils/native-fs";
import { writeStream } from "utils/native-stream";
import { getAllLocalCollections } from "../collectionService";
@@ -2,6 +2,7 @@ import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import { getAllLocalFiles } from "@/new/photos/services/files";
import { EnteFile } from "@/new/photos/types/file";
import { mergeMetadata } from "@/new/photos/utils/file";
import { ensureElectron } from "@/next/electron";
import { nameAndExtension } from "@/next/file";
import log from "@/next/log";

@@ -22,11 +23,7 @@ import {
FileExportNames,
} from "types/export";
import { getNonEmptyPersonalCollections } from "utils/collection";
import {
getIDBasedSortedFiles,
getPersonalFiles,
mergeMetadata,
} from "utils/file";
import { getIDBasedSortedFiles, getPersonalFiles } from "utils/file";
import {
safeDirectoryName,
safeFileName,
@@ -1,6 +1,6 @@
import type { Box } from "@/new/photos/services/face/types";
import { blobCache } from "@/next/blob-cache";
import type { FaceAlignment } from "./f-index";
import type { Box } from "./types";

export const saveFaceCrop = async (
imageBitmap: ImageBitmap,
@@ -1,5 +1,12 @@
import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import type {
Box,
Dimensions,
Face,
Point,
} from "@/new/photos/services/face/types";
import { faceIndexingVersion } from "@/new/photos/services/face/types";
import type { EnteFile } from "@/new/photos/types/file";
import log from "@/next/log";
import { workerBridge } from "@/next/worker/worker-bridge";

@@ -21,7 +28,6 @@ import {
pixelRGBBilinear,
warpAffineFloat32List,
} from "./image";
import type { Box, Dimensions, Face, Point } from "./types";

/**
* Index faces in the given file.

@@ -64,7 +70,7 @@ export const indexFaces = async (
width,
height,
faceEmbedding: {
version: 1,
version: faceIndexingVersion,
client: userAgent,
faces: await indexFacesInBitmap(fileID, imageBitmap),
},
@@ -1,18 +1,21 @@
import {
isBetaUser,
isInternalUser,
} from "@/new/photos/services/feature-flags";
import { getAllLocalFiles } from "@/new/photos/services/files";
import type { EnteFile } from "@/new/photos/types/file";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { ensure } from "@/utils/ensure";
import type { Remote } from "comlink";
import {
faceIndex,
indexableFileIDs,
indexedAndIndexableCounts,
syncWithLocalFiles,
} from "./db";
updateAssumingLocalFiles,
} from "@/new/photos/services/face/db";
import {
isBetaUser,
isInternalUser,
} from "@/new/photos/services/feature-flags";
import {
getAllLocalFiles,
getLocalTrashedFiles,
} from "@/new/photos/services/files";
import type { EnteFile } from "@/new/photos/types/file";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { ensure } from "@/utils/ensure";
import type { Remote } from "comlink";
import type { FaceIndexerWorker } from "./indexer.worker";

/**

@@ -212,30 +215,29 @@ export const canEnableFaceIndexing = async () =>
* on any client. This {@link isFaceIndexingEnabled} property, on the other
* hand, denotes whether or not indexing is enabled on the current client.
*/
export const isFaceIndexingEnabled = async () => {
return localStorage.getItem("faceIndexingEnabled") == "1";
};
export const isFaceIndexingEnabled = () =>
localStorage.getItem("faceIndexingEnabled") == "1";

/**
* Update the (locally stored) value of {@link isFaceIndexingEnabled}.
*/
export const setIsFaceIndexingEnabled = async (enabled: boolean) => {
if (enabled) localStorage.setItem("faceIndexingEnabled", "1");
else localStorage.removeItem("faceIndexingEnabled");
};
export const setIsFaceIndexingEnabled = (enabled: boolean) =>
enabled
? localStorage.setItem("faceIndexingEnabled", "1")
: localStorage.removeItem("faceIndexingEnabled");

/**
* Sync face DB with the local (and potentially indexable) files that we know
* about. Then return the next {@link count} files that still need to be
* indexed.
*
* For more specifics of what a "sync" entails, see {@link syncWithLocalFiles}.
* For specifics of what a "sync" entails, see {@link updateAssumingLocalFiles}.
*
* @param userID Sync only files owned by a {@link userID} with the face DB.
*
* @param count Limit the resulting list of indexable files to {@link count}.
*/
export const syncAndGetFilesToIndex = async (
export const syncWithLocalFilesAndGetFilesToIndex = async (
userID: number,
count: number,
): Promise<EnteFile[]> => {

@@ -246,7 +248,12 @@ export const syncAndGetFilesToIndex = async (
localFiles.filter(isIndexable).map((f) => [f.id, f]),
);

await syncWithLocalFiles([...localFilesByID.keys()]);
const localTrashFileIDs = (await getLocalTrashedFiles()).map((f) => f.id);

await updateAssumingLocalFiles(
[...localFilesByID.keys()],
localTrashFileIDs,
);

const fileIDsToIndex = await indexableFileIDs(count);
return fileIDsToIndex.map((id) => ensure(localFilesByID.get(id)));
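Aside (editorial, not part of the diff): the renamed helper is consumed by the ML work manager further down in this PR. A minimal sketch of that call pattern, with the per-file indexing step left as a caller-supplied callback (hypothetical names):

```ts
import type { EnteFile } from "@/new/photos/types/file";
import { syncWithLocalFilesAndGetFilesToIndex } from "services/face/indexer";

// Hypothetical driver: sync the face DB against local (and trashed) files,
// then hand the next batch of indexable files to a caller-supplied indexer.
const indexNextBatch = async (
    userID: number,
    indexFile: (file: EnteFile) => Promise<void>,
    batchSize = 200, // the PR's MachineLearningService uses batchSize = 200
) => {
    const files = await syncWithLocalFilesAndGetFilesToIndex(userID, batchSize);
    for (const file of files) await indexFile(file);
    // An empty batch means there is nothing left to index right now.
    return files.length;
};
```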
@@ -1,14 +1,14 @@
import type { EnteFile } from "@/new/photos/types/file";
import log from "@/next/log";
import { fileLogID } from "utils/file";
import {
closeFaceDBConnectionsIfNeeded,
markIndexingFailed,
saveFaceIndex,
} from "./db";
} from "@/new/photos/services/face/db";
import type { FaceIndex } from "@/new/photos/services/face/types";
import type { EnteFile } from "@/new/photos/types/file";
import log from "@/next/log";
import { fileLogID } from "utils/file";
import { indexFaces } from "./f-index";
import { putFaceIndex } from "./remote";
import type { FaceIndex } from "./types";

/**
* Index faces in a file, persist the results locally, and put them on
@@ -1,8 +1,8 @@
import type { FaceIndex } from "@/new/photos/services/face/types";
import type { EnteFile } from "@/new/photos/types/file";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { putEmbedding } from "services/embeddingService";
import type { FaceIndex } from "./types";

export const putFaceIndex = async (
enteFile: EnteFile,
@@ -7,6 +7,7 @@ import {
TrashRequest,
} from "@/new/photos/types/file";
import { BulkUpdateMagicMetadataRequest } from "@/new/photos/types/magicMetadata";
import { mergeMetadata } from "@/new/photos/utils/file";
import log from "@/next/log";
import { apiURL } from "@/next/origins";
import ComlinkCryptoWorker from "@ente/shared/crypto";

@@ -16,12 +17,7 @@ import { REQUEST_BATCH_SIZE } from "constants/api";
import { Collection } from "types/collection";
import { SetFiles } from "types/gallery";
import { batch } from "utils/common";
import {
decryptFile,
getLatestVersionFiles,
mergeMetadata,
sortFiles,
} from "utils/file";
import { decryptFile, getLatestVersionFiles, sortFiles } from "utils/file";
import {
getCollectionLastSyncTime,
setCollectionLastSyncTime,
@@ -1,10 +1,11 @@
import { terminateFaceWorker } from "@/new/photos/services/face";
import { clearFaceData } from "@/new/photos/services/face/db";
import { clearFeatureFlagSessionState } from "@/new/photos/services/feature-flags";
import log from "@/next/log";
import { accountLogout } from "@ente/accounts/services/logout";
import { clipService } from "services/clip-service";
import DownloadManager from "./download";
import exportService from "./export";
import { clearFaceData } from "./face/db";
import mlWorkManager from "./face/mlWorkManager";

/**

@@ -41,6 +42,12 @@ export const photosLogout = async () => {
ignoreError("CLIP", e);
}

try {
terminateFaceWorker();
} catch (e) {
ignoreError("face", e);
}

const electron = globalThis.electron;
if (electron) {
try {
@@ -2,7 +2,7 @@ import { EnteFile } from "@/new/photos/types/file";
import log from "@/next/log";
import { CustomError, parseUploadErrorCodes } from "@ente/shared/error";
import PQueue from "p-queue";
import { syncAndGetFilesToIndex } from "services/face/indexer";
import { syncWithLocalFilesAndGetFilesToIndex } from "services/face/indexer";
import { FaceIndexerWorker } from "services/face/indexer.worker";

const batchSize = 200;

@@ -56,7 +56,7 @@ class MachineLearningService {

const syncContext = await this.getSyncContext(token, userID, userAgent);

syncContext.outOfSyncFiles = await syncAndGetFilesToIndex(
syncContext.outOfSyncFiles = await syncWithLocalFilesAndGetFilesToIndex(
userID,
batchSize,
);
@@ -1,4 +1,5 @@
import { EncryptedEnteFile, EnteFile } from "@/new/photos/types/file";
import { mergeMetadata } from "@/new/photos/utils/file";
import log from "@/next/log";
import { apiURL } from "@/next/origins";
import ComlinkCryptoWorker from "@ente/shared/crypto";

@@ -7,7 +8,7 @@ import HTTPService from "@ente/shared/network/HTTPService";
import localForage from "@ente/shared/storage/localForage";
import { Collection, CollectionPublicMagicMetadata } from "types/collection";
import { LocalSavedPublicCollectionFiles } from "types/publicCollection";
import { decryptFile, mergeMetadata, sortFiles } from "utils/file";
import { decryptFile, sortFiles } from "utils/file";

const PUBLIC_COLLECTION_FILES_TABLE = "public-collection-files";
const PUBLIC_COLLECTIONS_TABLE = "public-collections";
@@ -32,9 +32,7 @@ export const getDefaultOptions = async () => {
return [
// TODO-ML(MR): Skip this for now if indexing is disabled (eventually
// the indexing status should not be tied to results).
...((await isFaceIndexingEnabled())
? [await getIndexStatusSuggestion()]
: []),
...(isFaceIndexingEnabled() ? [await getIndexStatusSuggestion()] : []),
...(await convertSuggestionsToOptions(await getAllPeopleSuggestion())),
].filter((t) => !!t);
};
web/apps/photos/src/services/sync.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import { faceWorker } from "@/new/photos/services/face";
import { fetchAndSaveFeatureFlagsIfNeeded } from "@/new/photos/services/feature-flags";
import { clipService } from "services/clip-service";
import { syncCLIPEmbeddings } from "services/embeddingService";
import { syncEntities } from "services/entityService";
import { syncMapEnabled } from "services/userService";
import { isFaceIndexingEnabled } from "./face/indexer";

/**
* Perform a soft "refresh" by making various API calls to fetch state from
* remote, using it to update our local state, and triggering periodic jobs that
* depend on the local state.
*/
export const sync = async () => {
// TODO: This is called after we've synced the local files DBs with remote.
// That code belongs here, but currently that state is persisted in the top
// level gallery React component.

await syncEntities();
await syncMapEnabled();
fetchAndSaveFeatureFlagsIfNeeded();
const electron = globalThis.electron;
if (electron) {
await syncCLIPEmbeddings();
if (isFaceIndexingEnabled()) await (await faceWorker()).sync();
}
if (clipService.isPlatformSupported()) {
void clipService.scheduleImageEmbeddingExtraction();
}
};
@@ -1,4 +1,9 @@
import { EnteFile } from "@/new/photos/types/file";
import {
getLocalTrash,
getTrashedFiles,
TRASH,
} from "@/new/photos/services/files";
import { EncryptedTrashItem, Trash } from "@/new/photos/types/file";
import log from "@/next/log";
import { apiURL } from "@/next/origins";
import HTTPService from "@ente/shared/network/HTTPService";

@@ -6,23 +11,12 @@ import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { Collection } from "types/collection";
import { SetFiles } from "types/gallery";
import { EncryptedTrashItem, Trash } from "types/trash";
import { decryptFile, mergeMetadata, sortTrashFiles } from "utils/file";
import { decryptFile } from "utils/file";
import { getCollection } from "./collectionService";

const TRASH = "file-trash";
const TRASH_TIME = "trash-time";
const DELETED_COLLECTION = "deleted-collection";

async function getLocalTrash() {
const trash = (await localForage.getItem<Trash>(TRASH)) || [];
return trash;
}

export async function getLocalTrashedFiles() {
return getTrashedFiles(await getLocalTrash());
}

export async function getLocalDeletedCollections() {
const trashedCollections: Array<Collection> =
(await localForage.getItem<Collection[]>(DELETED_COLLECTION)) || [];

@@ -136,19 +130,6 @@ export const updateTrash = async (
return currentTrash;
};

export function getTrashedFiles(trash: Trash): EnteFile[] {
return sortTrashFiles(
mergeMetadata(
trash.map((trashedFile) => ({
...trashedFile.file,
updationTime: trashedFile.updatedAt,
deleteBy: trashedFile.deleteBy,
isTrashed: true,
})),
),
);
}

export const emptyTrash = async () => {
try {
const token = getToken();
@@ -16,9 +16,9 @@ import { basename } from "@/next/file";
import log from "@/next/log";
import { CustomErrorMessage } from "@/next/types/ipc";
import { ensure } from "@/utils/ensure";
import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import type { B64EncryptionResult } from "@ente/shared/crypto/types";
import type { B64EncryptionResult } from "@ente/shared/crypto/internal/libsodium";
import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/internal/libsodium";
import { CustomError, handleUploadError } from "@ente/shared/error";
import type { Remote } from "comlink";
import {
@@ -1,16 +0,0 @@
import { EncryptedEnteFile, EnteFile } from "@/new/photos/types/file";

export interface TrashItem extends Omit<EncryptedTrashItem, "file"> {
file: EnteFile;
}

export interface EncryptedTrashItem {
file: EncryptedEnteFile;
isDeleted: boolean;
isRestored: boolean;
deleteBy: number;
createdAt: number;
updatedAt: number;
}

export type Trash = TrashItem[];
@@ -12,6 +12,7 @@ import {
FileWithUpdatedMagicMetadata,
} from "@/new/photos/types/file";
import { VISIBILITY_STATE } from "@/new/photos/types/magicMetadata";
import { mergeMetadata } from "@/new/photos/utils/file";
import { lowercaseExtension } from "@/next/file";
import log from "@/next/log";
import { CustomErrorMessage, type Electron } from "@/next/types/ipc";

@@ -197,20 +198,6 @@ export function sortFiles(files: EnteFile[], sortAsc = false) {
});
}

export function sortTrashFiles(files: EnteFile[]) {
return files.sort((a, b) => {
if (a.deleteBy === b.deleteBy) {
if (a.metadata.creationTime === b.metadata.creationTime) {
return (
b.metadata.modificationTime - a.metadata.modificationTime
);
}
return b.metadata.creationTime - a.metadata.creationTime;
}
return a.deleteBy - b.deleteBy;
});
}

export async function decryptFile(
file: EncryptedEnteFile,
collectionKey: string,

@@ -432,31 +419,6 @@ export function isSharedFile(user: User, file: EnteFile) {
return file.ownerID !== user.id;
}

/**
* [Note: File name for local EnteFile objects]
*
* The title property in a file's metadata is the original file's name. The
* metadata of a file cannot be edited. So if later on the file's name is
* changed, then the edit is stored in the `editedName` property of the public
* metadata of the file.
*
* This function merges these edits onto the file object that we use locally.
* Effectively, post this step, the file's metadata.title can be used in lieu of
* its filename.
*/
export function mergeMetadata(files: EnteFile[]): EnteFile[] {
return files.map((file) => {
if (file.pubMagicMetadata?.data.editedTime) {
file.metadata.creationTime = file.pubMagicMetadata.data.editedTime;
}
if (file.pubMagicMetadata?.data.editedName) {
file.metadata.title = file.pubMagicMetadata.data.editedName;
}

return file;
});
}

export function updateExistingFilePubMetadata(
existingFile: EnteFile,
updatedFile: EnteFile,
@@ -7,7 +7,7 @@ import type {
TwoFactorVerificationResponse,
UserVerificationResponse,
} from "@ente/accounts/types/user";
import type { B64EncryptionResult } from "@ente/shared/crypto/types";
import type { B64EncryptionResult } from "@ente/shared/crypto/internal/libsodium";
import { ApiError, CustomError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getToken } from "@ente/shared/storage/localStorage/helpers";

@@ -20,7 +20,7 @@ import {
generateLoginSubKey,
saveKeyInSessionStore,
} from "@ente/shared/crypto/helpers";
import type { B64EncryptionResult } from "@ente/shared/crypto/types";
import type { B64EncryptionResult } from "@ente/shared/crypto/internal/libsodium";
import { CustomError } from "@ente/shared/error";
import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore";
import {

@@ -18,7 +18,7 @@ import SingleInputForm, {
} from "@ente/shared/components/SingleInputForm";
import { SUPPORT_EMAIL } from "@ente/shared/constants/urls";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import type { B64EncryptionResult } from "@ente/shared/crypto/types";
import type { B64EncryptionResult } from "@ente/shared/crypto/internal/libsodium";
import { ApiError } from "@ente/shared/error";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
import { Link } from "@mui/material";
web/packages/new/common/crypto/ente.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
/**
* @file Higher level functions that use the ontology of Ente's types
*
* These are thin wrappers over the (thin-) wrappers in internal/libsodium.ts.
* The main difference is that they don't name things in terms of the crypto
* algorithms, but rather by the specific Ente tasks we are trying to do.
*/
import * as libsodium from "@ente/shared/crypto/internal/libsodium";

/**
* Decrypt arbitrary metadata associated with a file using its key.
*
* @param encryptedMetadataB64 The Base64 encoded string containing the
* encrypted data.
*
* @param decryptionHeaderB64 The Base64 encoded string containing the
* decryption header produced during encryption.
*
* @param keyB64 The Base64 encoded string containing the encryption key
* (this'll generally be the file's key).
*
* @returns The decrypted utf-8 string.
*/
export const decryptFileMetadata = async (
encryptedMetadataB64: string,
decryptionHeaderB64: string,
keyB64: string,
) => {
const metadataBytes = await libsodium.decryptChaChaOneShot(
await libsodium.fromB64(encryptedMetadataB64),
await libsodium.fromB64(decryptionHeaderB64),
keyB64,
);
const textDecoder = new TextDecoder();
return textDecoder.decode(metadataBytes);
};
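Aside (editorial): a hedged usage sketch of this new helper. The argument names are placeholders; the call shape mirrors how the embedding pull code later in this PR uses it (encrypted payload, its decryption header, and the owning file's key).

```ts
// Import path shown as used from web/packages/new/photos/services (see the
// embedding.ts hunk below); adjust the relative path to the importing module.
import { decryptFileMetadata } from "../../common/crypto/ente";

// Decrypt an encrypted embedding payload into its JSON string form. All three
// inputs are Base64 strings; the key is the key of the file the embedding
// belongs to.
const decryptEmbeddingJSON = async (
    encryptedEmbeddingB64: string,
    decryptionHeaderB64: string,
    fileKeyB64: string,
) =>
    await decryptFileMetadata(
        encryptedEmbeddingB64,
        decryptionHeaderB64,
        fileKeyB64,
    );
```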
@@ -1,9 +1,12 @@
import { getLocalTrashedFiles } from "@/new/photos/services/files";
import { authenticatedRequestHeaders } from "@/next/http";
import log from "@/next/log";
import { apiURL } from "@/next/origins";
import { nullToUndefined } from "@/utils/transform";
// import ComlinkCryptoWorker from "@ente/shared/crypto";
import { z } from "zod";
// import { getAllLocalFiles } from "./files";
import { decryptFileMetadata } from "../../common/crypto/ente";
import { saveFaceIndex } from "./face/db";
import { faceIndexingVersion, type FaceIndex } from "./face/types";
import { getAllLocalFiles } from "./files";

/**
* The embeddings that we (the current client) knows how to handle.

@@ -19,17 +22,19 @@ import { z } from "zod";
*
* [Note: Handling versioning of embeddings]
*
* The embeddings themselves have a version included in them, so it is possible
* The embeddings themselves have a version embedded in them, so it is possible
* for us to make backward compatible updates to the indexing process on newer
* clients.
* clients (there is also a top-level version field, but that is not used).
*
* If we bump the version of the same model (say when indexing on a newer client),
* the assumption will be that the older client will be able to consume the
* response. Say if we improve blur detection, older client should just consume
* the newer version and not try to index the file locally.
* response. e.g. Say if we improve blur detection, older client should just
* consume embeddings with a newer version and not try to index the file again
* locally.
*
* If you get version that is older, client should discard and try to index
* locally (if needed) and also put the newer version it has on remote.
* If we get an embedding with version that is older than the version the client
* supports, then the client should ignore it. This way, the file will get
* reindexed locally and an embedding with a newer version will get put to remote.
*
* In the case where the changes are not backward compatible and can only be
* consumed by clients with the relevant scaffolding, then we change this
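Aside (editorial): the rule described in this note, expressed as a tiny hedged sketch. `shouldSaveRemoteFaceIndex` is a hypothetical name; the PR implements the same comparison inside `saveFaceIndexIfNewer` further down in this file.

```ts
// Only keep a remote face index whose version is strictly newer than the
// version this client's own indexer produces (faceIndexingVersion in the PR);
// anything else is discarded and the file is reindexed locally.
const shouldSaveRemoteFaceIndex = (
    remoteVersion: number,
    localIndexerVersion: number,
): boolean => remoteVersion > localIndexerVersion;

// With faceIndexingVersion = 1:
//   shouldSaveRemoteFaceIndex(2, 1) === true   // newer remote index: save it
//   shouldSaveRemoteFaceIndex(1, 1) === false  // not newer: index locally instead
```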
@@ -62,85 +67,242 @@ const RemoteEmbedding = z.object({
decryptionHeader: z.string(),
/** Last time (epoch ms) this embedding was updated. */
updatedAt: z.number(),
/**
* The version for the embedding. Optional.
*
* See: [Note: Handling versioning of embeddings]
*/
version: z.number().nullish().transform(nullToUndefined),
});

type RemoteEmbedding = z.infer<typeof RemoteEmbedding>;

/**
* Ask remote for what all changes have happened to the face embeddings that it
* knows about since the last time we synced. Then update our local state to
* reflect those changes.
* Fetch new or updated embeddings from remote and save them locally.
*
* It takes no parameters since it saves the last sync time in local storage.
* @param model The {@link EmbeddingModel} for which to pull embeddings. For
* each model, this function maintains the last sync time in local storage so
* subsequent fetches only pull what's new.
*
* @param save A function that is called to save the embedding. The save process
* can be model specific, so this provides us a hook to reuse the surrounding
* pull mechanisms while varying the save itself. This function will be passed
* the decrypted embedding string. If it throws, then we'll log about it but
* otherwise ignore the embedding under consideration.
*
* This function should be called only after we have synced files with remote.
* See: [Note: Ignoring embeddings for unknown files].
*/
export const syncRemoteFaceEmbeddings = async () => {
let sinceTime = faceEmbeddingSyncTime();
// const cryptoWorker = await ComlinkCryptoWorker.getInstance();
// const files = await getAllLocalFiles();
const pullEmbeddings = async (
model: EmbeddingModel,
save: (decryptedEmbedding: string) => Promise<void>,
) => {
// Include files from trash, otherwise they'll get unnecessarily reindexed
// if the user restores them from trash before permanent deletion.
const localFiles = (await getAllLocalFiles()).concat(
await getLocalTrashedFiles(),
);
// [Note: Ignoring embeddings for unknown files]
//
// We need the file to decrypt the embedding. This is easily ensured by
// running the embedding sync after we have synced our local files with
// remote.
//
// Still, it might happen that we come across an embedding for which we
// don't have the corresponding file locally. We can put them in two
// buckets:
//
// 1. Known case: In rare cases we might get a diff entry for an embedding
//    corresponding to a file which has been deleted (but whose embedding
//    is enqueued for deletion). Client should expect such a scenario, but
//    all they have to do is just ignore such embeddings.
//
// 2. Other unknown cases: Even if somehow we end up with an embedding for
//    an existent file which we don't have locally, it is fine because the
//    current client will just regenerate the embedding if the file really
//    exists and gets locally found later. There would be a bit of
//    duplicate work, but that's fine as long as there isn't a systematic
//    scenario where this happens.
const localFilesByID = new Map(localFiles.map((f) => [f.id, f]));

let sinceTime = embeddingSyncTime(model);
// TODO: eslint has fixed this spurious warning, but we're not on the latest
// version yet, so add a disable.
// https://github.com/eslint/eslint/pull/18286
/* eslint-disable no-constant-condition */
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
while (true) {
const remoteEmbeddings = await getEmbeddingsDiff(
"file-ml-clip-face",
sinceTime,
);
const remoteEmbeddings = await getEmbeddingsDiff(model, sinceTime);
if (remoteEmbeddings.length == 0) break;
// const _embeddings = Promise.all(
//     remoteEmbeddings.map(decryptFaceEmbedding),
// );
sinceTime = remoteEmbeddings.reduce(
(max, { updatedAt }) => Math.max(max, updatedAt),
sinceTime,
);
saveFaceEmbeddingSyncTime(sinceTime);
let count = 0;
for (const remoteEmbedding of remoteEmbeddings) {
sinceTime = Math.max(sinceTime, remoteEmbedding.updatedAt);
try {
const file = localFilesByID.get(remoteEmbedding.fileID);
if (!file) continue;
await save(
await decryptFileMetadata(
remoteEmbedding.encryptedEmbedding,
remoteEmbedding.decryptionHeader,
file.key,
),
);
count++;
} catch (e) {
log.warn(`Ignoring unparseable ${model} embedding`, e);
}
}
saveEmbeddingSyncTime(sinceTime, model);
log.info(`Fetched ${count} ${model} embeddings`);
}
};

// const decryptFaceEmbedding = async (remoteEmbedding: RemoteEmbedding) => {
//     const fileKey = fileIdToKeyMap.get(embedding.fileID);
//     if (!fileKey) {
//         throw Error(CustomError.FILE_NOT_FOUND);
//     }
//     const decryptedData = await worker.decryptMetadata(
//         embedding.encryptedEmbedding,
//         embedding.decryptionHeader,
//         fileIdToKeyMap.get(embedding.fileID),
//     );
//     return {
//         ...decryptedData,
//         updatedAt: embedding.updatedAt,
//     } as unknown as FileML;
// };

/**
* The updatedAt of the most recent face {@link RemoteEmbedding} we've retrieved
* and saved from remote, or 0.
* The updatedAt of the most recent {@link RemoteEmbedding} for {@link model}
* we've retrieved from remote.
*
* Returns 0 if there is no such embedding.
*
* This value is persisted to local storage. To update it, use
* {@link saveFaceEmbeddingSyncMarker}.
* {@link saveEmbeddingSyncTime}.
*/
const faceEmbeddingSyncTime = () =>
parseInt(localStorage.getItem("faceEmbeddingSyncTime") ?? "0");
const embeddingSyncTime = (model: EmbeddingModel) =>
parseInt(localStorage.getItem("embeddingSyncTime:" + model) ?? "0");

/** Sibling of {@link faceEmbeddingSyncMarker}. */
const saveFaceEmbeddingSyncTime = (t: number) =>
localStorage.setItem("faceEmbeddingSyncTime", `${t}`);
/** Sibling of {@link embeddingSyncTime}. */
const saveEmbeddingSyncTime = (t: number, model: EmbeddingModel) =>
localStorage.setItem("embeddingSyncTime:" + model, `${t}`);

// const getFaceEmbeddings = async () => {
/**
* Fetch new or updated face embeddings from remote and save them locally.
*
* It takes no parameters since it saves the last sync time in local storage.
*
* This function should be called only after we have synced files with remote.
* See: [Note: Ignoring embeddings for unknown files].
*/
export const pullFaceEmbeddings = () =>
pullEmbeddings("file-ml-clip-face", (jsonString: string) =>
// eslint-disable-next-line @typescript-eslint/prefer-ts-expect-error, @typescript-eslint/ban-ts-comment
// @ts-ignore TODO: There is no error here, but this file is imported by
// one of our packages that doesn't have strict mode enabled yet,
// causing a spurious error to be emitted in that context.
saveFaceIndexIfNewer(FaceIndex.parse(JSON.parse(jsonString))),
);

// }
/**
* Save the given {@link faceIndex} locally if it is newer than the one we have.
*
* This is a variant of {@link saveFaceIndex} that performs version checking as
* described in [Note: Handling versioning of embeddings].
*/
export const saveFaceIndexIfNewer = async (index: FaceIndex) => {
const version = index.faceEmbedding.version;
if (version <= faceIndexingVersion) {
log.info(
`Ignoring remote face index with version ${version} not newer than what our indexer supports (${faceIndexingVersion})`,
);
return;
}
return saveFaceIndex(index);
};
/** The maximum number of items to fetch in a single GET /embeddings/diff */
/**
* Zod schemas for the {@link FaceIndex} types.
*
* [Note: Duplicated between Zod schemas and TS type]
*
* Usually we define a Zod schema, and then infer the corresponding TypeScript
* type for it using `z.infer`. This works great except now the docstrings don't
* show up: The doc strings get added to the Zod schema, but usually the code
* using the parsed data will reference the TypeScript type, and the docstrings
* added to the fields in the Zod schema won't show up.
*
* We usually live with this infelicity, since the alternative is code
* duplication: Define the TypeScript type (putting the docstrings therein)
* _and_ also a corresponding Zod schema. The duplication happens because it is
* not possible to go the other way (TS type => Zod schema).
*
* However, in some cases, when the TypeScript type under consideration is used
* pervasively in our code, having a standalone TypeScript type with attached
* docstrings is worth the code duplication.
*
* Note that this'll just be syntactic duplication - if the two definitions get
* out of sync in the shape of the types they represent, the TypeScript compiler
* will flag it for us.
*/
const FaceIndex = z
.object({
fileID: z.number(),
width: z.number(),
height: z.number(),
faceEmbedding: z
.object({
version: z.number(),
client: z.string(),
faces: z.array(
z
.object({
faceID: z.string(),
detection: z
.object({
box: z
.object({
x: z.number(),
y: z.number(),
width: z.number(),
height: z.number(),
})
.passthrough(),
landmarks: z.array(
z
.object({
x: z.number(),
y: z.number(),
})
.passthrough(),
),
})
.passthrough(),
score: z.number(),
blur: z.number(),
embedding: z.array(z.number()),
})
.passthrough(),
),
})
.passthrough(),
})
// Retain fields we might not (currently) understand.
.passthrough();

/**
* The maximum number of items to fetch in a single GET /embeddings/diff
*
* [Note: Limit of returned items in /diff requests]
*
* The various GET /diff API methods, which tell the client what all has changed
* since a timestamp (provided by the client) take a limit parameter.
*
* These diff API calls return all items whose updated at is greater
* (non-inclusive) than the timestamp we provide. So there is no mechanism for
* pagination of items which have the same exact updated at. Conceptually, it
* may happen that there are more items than the limit we've provided.
*
* The behaviour of this limit is different for file diff and embeddings diff.
*
* - For file diff, the limit is advisory, and remote may return less, equal
*   or more items than the provided limit. The scenario where it returns more
*   is when more files than the limit have the same updated at. Theoretically
*   it would make the diff response unbounded, however in practice file
*   modifications themselves are all batched. Even if the user selects all
*   the files in their library and updates them all in one go in the UI,
*   their client app must use batched API calls to make those updates, and
*   each of those batches would get distinct updated at.
*
* - For embeddings diff, there are no bulk updates and this limit is enforced
*   as a maximum. While theoretically it is possible for an arbitrary number
*   of files to have the same updated at, in practice it is not possible with
*   the current set of APIs where clients PUT individual embeddings (the
*   updated at is a server timestamp). And even if somehow a large number of
*   files get the same updated at and thus get truncated in the response, it
*   won't lead to any data loss, the client which requested that particular
*   truncated diff will just regenerate them.
*/
const diffLimit = 500;

/**
@@ -156,6 +318,8 @@ const diffLimit = 500;
*
* @returns an array of {@link RemoteEmbedding}. The returned array is limited
* to a maximum count of {@link diffLimit}.
*
* > See [Note: Limit of returned items in /diff requests].
*/
const getEmbeddingsDiff = async (
model: EmbeddingModel,
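Aside (editorial): the `save` hook is what makes `pullEmbeddings` reusable across models. As a hedged illustration only (the model identifier and the persistence step below are assumptions, not part of this PR), another embedding type could plug into the same pull loop from within embedding.ts like so:

```ts
// Hypothetical: reuse pullEmbeddings for a different model. "onnx-clip" is
// assumed here to be a valid EmbeddingModel value; the saver is a stand-in.
const saveClipIndex = async (parsed: unknown) => {
    // ...persist the parsed embedding wherever that model keeps its data.
    console.log("would save CLIP embedding", parsed);
};

export const pullClipEmbeddings = () =>
    pullEmbeddings("onnx-clip", async (decryptedJSONString) => {
        await saveClipIndex(JSON.parse(decryptedJSONString));
    });
```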
@@ -170,7 +170,7 @@ export const saveFaceIndex = async (faceIndex: FaceIndex) => {
const tx = db.transaction(["face-index", "file-status"], "readwrite");
const indexStore = tx.objectStore("face-index");
const statusStore = tx.objectStore("file-status");
return Promise.all([
await Promise.all([
indexStore.put(faceIndex),
statusStore.put({
fileID: faceIndex.fileID,

@@ -178,7 +178,7 @@ export const saveFaceIndex = async (faceIndex: FaceIndex) => {
failureCount: 0,
}),
tx.done,
]).then(() => {} /* convert result to void */);
]);
};

/**

@@ -214,35 +214,59 @@ export const addFileEntry = async (fileID: number) => {
};

/**
* Sync entries in the face DB to align with the state of local files outside
* Update entries in the face DB to align with the state of local files outside
* face DB.
*
* @param localFileIDs Local {@link EnteFile}s, keyed by their IDs. These are
* all the files that the client is aware of, filtered to only keep the files
* that the user owns and the formats that can be indexed by our current face
* indexing pipeline.
* @param localFileIDs IDs of all the files that the client is aware of filtered
* to only keep the files that the user owns and the formats that can be indexed
* by our current face indexing pipeline.
*
* This function syncs the state of file entries in face DB to the state of file
* entries stored otherwise by the client locally.
* @param localTrashFilesIDs IDs of all the files in trash.
*
* - Files (identified by their ID) that are present locally but are not yet in
*   face DB get a fresh entry in face DB (and are marked as indexable).
* This function then updates the state of file entries in face DB to be in
* sync with the provided local file IDs.
*
* - Files that are not present locally but still exist in face DB are removed
*   from face DB (including its face index, if any).
* - Files that are present locally but are not yet in face DB get a fresh entry
*   in face DB (and are marked as indexable).
*
* - Files that are not present locally (nor are in trash) but still exist in
*   face DB are removed from face DB (including their face index, if any).
*
* - Files that are not present locally but are in the trash are retained in
*   face DB if their status is "indexed" (otherwise they too are removed). This
*   is to prevent churn (re-indexing) if the user moves some files to trash but
*   then later restores them before they get permanently deleted.
*/
export const syncWithLocalFiles = async (localFileIDs: number[]) => {
export const updateAssumingLocalFiles = async (
localFileIDs: number[],
localTrashFilesIDs: number[],
) => {
const db = await faceDB();
const tx = db.transaction(["face-index", "file-status"], "readwrite");
const fdbFileIDs = await tx.objectStore("file-status").getAllKeys();
const fdbIndexedFileIDs = await tx
.objectStore("file-status")
.getAllKeys(IDBKeyRange.only("indexed"));

const local = new Set(localFileIDs);
const localTrash = new Set(localTrashFilesIDs);
const fdb = new Set(fdbFileIDs);
const fdbIndexed = new Set(fdbIndexedFileIDs);

const newFileIDs = localFileIDs.filter((id) => !fdb.has(id));
const removedFileIDs = fdbFileIDs.filter((id) => !local.has(id));
const removedFileIDs = fdbFileIDs.filter((id) => {
if (local.has(id)) return false; // Still exists.
if (localTrash.has(id)) {
// Exists in trash.
if (fdbIndexed.has(id)) {
// But is already indexed, so let it be.
return false;
}
}
return true; // Remove.
});

return Promise.all(
await Promise.all(
[
newFileIDs.map((id) =>
tx.objectStore("file-status").put({

@@ -257,7 +281,7 @@ export const syncWithLocalFiles = async (localFileIDs: number[]) => {
removedFileIDs.map((id) => tx.objectStore("face-index").delete(id)),
tx.done,
].flat(),
).then(() => {} /* convert result to void */);
);
};

/**
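Aside (editorial): a small worked example of the retention rules documented above, using made-up file IDs.

```ts
// Made-up IDs, purely to illustrate the rules.
const localFileIDs = [1, 2, 3]; // files currently present locally
const localTrashFileIDs = [4, 5]; // files currently in trash
// Suppose the face DB already has entries for 2, 3, 4, 5 and 6, and of these
// only 4 has status "indexed".
//
// await updateAssumingLocalFiles(localFileIDs, localTrashFileIDs) then:
//   - adds a fresh, indexable entry for 1 (present locally, not yet in face DB)
//   - leaves 2 and 3 untouched (still present locally)
//   - retains 4 (in trash, but already indexed)
//   - removes 5 (in trash but not indexed) and 6 (no longer present anywhere)
```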
web/packages/new/photos/services/face/index.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
/**
* @file Main thread interface to {@link FaceWorker}.
*/

import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { FaceWorker } from "./worker";

/** Cached instance of the {@link ComlinkWorker} that wraps our web worker. */
let _comlinkWorker: ComlinkWorker<typeof FaceWorker> | undefined;

/** Lazily created, cached, instance of {@link FaceWorker}. */
export const faceWorker = async () => {
let comlinkWorker = _comlinkWorker;
if (!comlinkWorker) _comlinkWorker = comlinkWorker = createComlinkWorker();
return await comlinkWorker.remote;
};

const createComlinkWorker = () =>
new ComlinkWorker<typeof FaceWorker>(
"face",
new Worker(new URL("worker.ts", import.meta.url)),
);

/**
* Terminate {@link faceWorker} (if any).
*
* This is useful during logout to immediately stop any background face related
* operations that are in-flight for the current user. After the user logs in
* again, a new {@link faceWorker} will be created on demand.
*/
export const terminateFaceWorker = () => {
if (_comlinkWorker) {
_comlinkWorker.terminate();
_comlinkWorker = undefined;
}
};
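Usage, as it appears elsewhere in this PR: the new sync service asks the worker to sync when face indexing is enabled, and photosLogout terminates it. Condensed:

```ts
import { faceWorker, terminateFaceWorker } from "@/new/photos/services/face";

// During the periodic sync (the user-level flag check is done by the caller):
const syncFaces = async () => {
    const worker = await faceWorker(); // lazily creates the web worker
    await worker.sync();
};

// During logout, stop any in-flight face work immediately:
const stopFaceWork = () => terminateFaceWorker();
```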
@@ -1,3 +1,8 @@
/** The face indexing version supported by the current client. */
// TODO: This belongs better to f-index.ts, but that file's in a different
// package currently, move it there once these two files are together again.
export const faceIndexingVersion = 1;

/**
* The faces in a file (and an embedding for each of them).
*
web/packages/new/photos/services/face/worker.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import { pullFaceEmbeddings } from "../embedding";

/**
* Run operations related to face indexing and search in a Web Worker.
*
* This is a normal class that is however exposed (via comlink) as a proxy
* running inside a Web Worker. This way, we do not bother the main thread with
* tasks that might degrade interactivity.
*/
export class FaceWorker {
private isSyncing = false;

/**
* Pull embeddings from remote, and start backfilling if needed.
*/
async sync() {
if (this.isSyncing) return;
this.isSyncing = true;
await pullFaceEmbeddings();
this.isSyncing = false;
}
}
@@ -1,7 +1,8 @@
import { type EnteFile } from "@/new/photos/types/file";
import log from "@/next/log";
import { Events, eventBus } from "@ente/shared/events";
import localForage from "@ente/shared/storage/localForage";
import { type EnteFile, type Trash } from "../types/file";
import { mergeMetadata } from "../utils/file";

const FILES_TABLE = "files";
const HIDDEN_FILES_TABLE = "hidden-files";

@@ -36,3 +37,41 @@ export const setLocalFiles = async (
log.error("Failed to save files", e);
}
};

export const TRASH = "file-trash";

export async function getLocalTrash() {
const trash = (await localForage.getItem<Trash>(TRASH)) ?? [];
return trash;
}

export async function getLocalTrashedFiles() {
return getTrashedFiles(await getLocalTrash());
}

export function getTrashedFiles(trash: Trash): EnteFile[] {
return sortTrashFiles(
mergeMetadata(
trash.map((trashedFile) => ({
...trashedFile.file,
updationTime: trashedFile.updatedAt,
deleteBy: trashedFile.deleteBy,
isTrashed: true,
})),
),
);
}

const sortTrashFiles = (files: EnteFile[]) => {
return files.sort((a, b) => {
if (a.deleteBy === b.deleteBy) {
if (a.metadata.creationTime === b.metadata.creationTime) {
return (
b.metadata.modificationTime - a.metadata.modificationTime
);
}
return b.metadata.creationTime - a.metadata.creationTime;
}
return (a.deleteBy ?? 0) - (b.deleteBy ?? 0);
});
};
@@ -126,3 +126,18 @@ export interface FilePublicMagicMetadataProps {

export type FilePublicMagicMetadata =
MagicMetadataCore<FilePublicMagicMetadataProps>;

export interface TrashItem extends Omit<EncryptedTrashItem, "file"> {
file: EnteFile;
}

export interface EncryptedTrashItem {
file: EncryptedEnteFile;
isDeleted: boolean;
isRestored: boolean;
deleteBy: number;
createdAt: number;
updatedAt: number;
}

export type Trash = TrashItem[];
web/packages/new/photos/utils/file.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import type { EnteFile } from "../types/file";

/**
* [Note: File name for local EnteFile objects]
*
* The title property in a file's metadata is the original file's name. The
* metadata of a file cannot be edited. So if later on the file's name is
* changed, then the edit is stored in the `editedName` property of the public
* metadata of the file.
*
* This function merges these edits onto the file object that we use locally.
* Effectively, post this step, the file's metadata.title can be used in lieu of
* its filename.
*/
export function mergeMetadata(files: EnteFile[]): EnteFile[] {
return files.map((file) => {
// TODO: Until the types reflect reality
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (file.pubMagicMetadata?.data.editedTime) {
file.metadata.creationTime = file.pubMagicMetadata.data.editedTime;
}
// TODO: Until the types reflect reality
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (file.pubMagicMetadata?.data.editedName) {
file.metadata.title = file.pubMagicMetadata.data.editedName;
}

return file;
});
}
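Aside (editorial): a before/after illustration of the merge. The object below is a pared-down stand-in for an EnteFile showing only the fields involved, not the full type.

```ts
// Pared-down stand-in for an EnteFile (illustrative, not the real type).
const file = {
    metadata: { title: "IMG_0001.jpg", creationTime: 1700000000000 },
    pubMagicMetadata: {
        data: { editedName: "beach.jpg", editedTime: 1710000000000 },
    },
};

// After mergeMetadata([file as EnteFile]):
//   file.metadata.title        === "beach.jpg"
//   file.metadata.creationTime === 1710000000000
// i.e. the user's edits are now visible wherever metadata.title is read.
```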
@@ -1,4 +1,5 @@
import { getKV, setKV } from "@/next/kv";
import { inWorker } from "./env";

/**
* Return the origin (scheme, host, port triple) that should be used for making

@@ -36,7 +37,7 @@ export const apiURL = async (path: string) => (await apiOrigin()) + path;
*/
export const customAPIOrigin = async () => {
let origin = await getKV("apiOrigin");
if (!origin) {
if (!origin && !inWorker()) {
// TODO: Migration of apiOrigin from local storage to indexed DB
// Remove me after a bit (27 June 2024).
const legacyOrigin = localStorage.getItem("apiOrigin");
@@ -25,7 +25,7 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {

public terminate() {
this.worker.terminate();
log.debug(() => `Terminated ${this.name}`);
log.debug(() => `Terminated web worker ${this.name}`);
}
}
@@ -1 +0,0 @@
export const ENCRYPTION_CHUNK_SIZE = 4 * 1024 * 1024;

@@ -4,6 +4,7 @@ import type { StateAddress } from "libsodium-wrappers";

const textDecoder = new TextDecoder();
const textEncoder = new TextEncoder();

export class DedicatedCryptoWorker {
async decryptMetadata(
encryptedMetadata: string,
@@ -1,8 +1,211 @@
/**
 * @file A thin-ish layer over the actual libsodium APIs, to make them more
 * palatable to the rest of our Javascript code.
 *
 * All functions are stateless, async, and safe to use in Web Workers.
 *
 * Docs for the JS library: https://github.com/jedisct1/libsodium.js
 */
import { mergeUint8Arrays } from "@/utils/array";
import { CustomError } from "@ente/shared/error";
import sodium, { type StateAddress } from "libsodium-wrappers";
import { ENCRYPTION_CHUNK_SIZE } from "../constants";
import type { B64EncryptionResult } from "../types";

/**
 * Convert a {@link Uint8Array} to a Base64 encoded string.
 *
 * See also {@link toB64URLSafe} and {@link toB64URLSafeNoPadding}.
 */
export const toB64 = async (input: Uint8Array) => {
    await sodium.ready;
    return sodium.to_base64(input, sodium.base64_variants.ORIGINAL);
};

/**
 * Convert a Base64 encoded string to a {@link Uint8Array}.
 *
 * This is the converse of {@link toB64}.
 */
export const fromB64 = async (input: string) => {
    await sodium.ready;
    return sodium.from_base64(input, sodium.base64_variants.ORIGINAL);
};
/**
 * Convert a {@link Uint8Array} to a URL-safe Base64 encoded string.
 *
 * See also {@link toB64} and {@link toB64URLSafeNoPadding}.
 */
export const toB64URLSafe = async (input: Uint8Array) => {
    await sodium.ready;
    return sodium.to_base64(input, sodium.base64_variants.URLSAFE);
};

/**
 * Convert a {@link Uint8Array} to an unpadded URL-safe Base64 encoded string.
 *
 * This differs from {@link toB64URLSafe} in that it does not append any
 * trailing padding character(s) "=" to make the resultant string's length be an
 * integer multiple of 4.
 *
 * - In some contexts, for example when serializing WebAuthn binary for
 *   transmission over the network, this is the required / recommended
 *   approach.
 *
 * - In other cases, for example when trying to pass an arbitrary JSON string
 *   via a URL parameter, this is also convenient so that we do not have to
 *   deal with any ambiguity surrounding the "=" which is also the query
 *   parameter key value separator.
 */
export const toB64URLSafeNoPadding = async (input: Uint8Array) => {
    await sodium.ready;
    return sodium.to_base64(input, sodium.base64_variants.URLSAFE_NO_PADDING);
};

/**
 * Convert an unpadded URL-safe Base64 encoded string to a {@link Uint8Array}.
 *
 * This is the converse of {@link toB64URLSafeNoPadding}, and does not expect
 * its input string's length to be an integer multiple of 4.
 */
export const fromB64URLSafeNoPadding = async (input: string) => {
    await sodium.ready;
    return sodium.from_base64(input, sodium.base64_variants.URLSAFE_NO_PADDING);
};
/**
 * Variant of {@link toB64URLSafeNoPadding} that works with {@link string}
 * inputs. See also its sibling method {@link fromB64URLSafeNoPaddingString}.
 */
export const toB64URLSafeNoPaddingString = async (input: string) => {
    await sodium.ready;
    return toB64URLSafeNoPadding(sodium.from_string(input));
};

/**
 * Variant of {@link fromB64URLSafeNoPadding} that works with {@link string}. See also
 * its sibling method {@link toB64URLSafeNoPaddingString}.
 */
export const fromB64URLSafeNoPaddingString = async (input: string) => {
    await sodium.ready;
    return sodium.to_string(await fromB64URLSafeNoPadding(input));
};
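
A quick round trip through these Base64 helpers (the values are illustrative, worked out by hand, and not taken from the diff):

const bytes = new Uint8Array([1, 2, 3, 255]);
await toB64(bytes);                      // "AQID/w=="
await toB64URLSafe(bytes);               // "AQID_w==" ('+' and '/' replaced by '-' and '_')
await toB64URLSafeNoPadding(bytes);      // "AQID_w"   (same, minus the trailing '=')
await fromB64("AQID/w==");               // Uint8Array [1, 2, 3, 255]
await toB64URLSafeNoPaddingString("hi"); // "aGk" (UTF-8 bytes of "hi", then encoded)
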
export async function fromUTF8(input: string) {
    await sodium.ready;
    return sodium.from_string(input);
}

export async function toUTF8(input: string) {
    await sodium.ready;
    return sodium.to_string(await fromB64(input));
}

export async function toHex(input: string) {
    await sodium.ready;
    return sodium.to_hex(await fromB64(input));
}

export async function fromHex(input: string) {
    await sodium.ready;
    return await toB64(sodium.from_hex(input));
}
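
Easy to misread: toUTF8, toHex, and fromHex all take and return strings, pivoting through Base64. Illustrative round trips (values worked out by hand, not from the diff):

await fromHex("ff");  // "/w=="  (hex -> bytes -> Base64; "/w==" encodes the byte 0xff)
await toHex("/w==");  // "ff"    (Base64 -> bytes -> hex)
await toUTF8("aGk="); // "hi"    (Base64 -> bytes -> UTF-8 string)
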
export async function encryptChaChaOneShot(data: Uint8Array, key: string) {
    await sodium.ready;

    const uintkey: Uint8Array = await fromB64(key);
    const initPushResult =
        sodium.crypto_secretstream_xchacha20poly1305_init_push(uintkey);
    const [pushState, header] = [initPushResult.state, initPushResult.header];

    const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
        pushState,
        data,
        null,
        sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL,
    );
    return {
        key: await toB64(uintkey),
        file: {
            encryptedData: pushResult,
            decryptionHeader: await toB64(header),
        },
    };
}

export const ENCRYPTION_CHUNK_SIZE = 4 * 1024 * 1024;
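
A hedged sketch of calling the one-shot variant; where the Base64 key comes from is outside this diff, so treat `keyB64` and `plaintext` as stand-ins:

// Illustrative only.
const { key, file } = await encryptChaChaOneShot(plaintext, keyB64);
// file.encryptedData is the ciphertext (Uint8Array); file.decryptionHeader
// (Base64) must be stored alongside it, and `key` is the same key, re-encoded.
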
export const encryptChaCha = async (data: Uint8Array) => {
    await sodium.ready;

    const uintkey: Uint8Array =
        sodium.crypto_secretstream_xchacha20poly1305_keygen();

    const initPushResult =
        sodium.crypto_secretstream_xchacha20poly1305_init_push(uintkey);
    const [pushState, header] = [initPushResult.state, initPushResult.header];
    let bytesRead = 0;
    let tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;

    const encryptedChunks = [];

    while (tag !== sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL) {
        let chunkSize = ENCRYPTION_CHUNK_SIZE;
        if (bytesRead + chunkSize >= data.length) {
            chunkSize = data.length - bytesRead;
            tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL;
        }

        const buffer = data.slice(bytesRead, bytesRead + chunkSize);
        bytesRead += chunkSize;
        const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
            pushState,
            buffer,
            null,
            tag,
        );
        encryptedChunks.push(pushResult);
    }
    return {
        key: await toB64(uintkey),
        file: {
            encryptedData: mergeUint8Arrays(encryptedChunks),
            decryptionHeader: await toB64(header),
        },
    };
};
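
Unlike the one-shot helper, encryptChaCha generates its own key and walks the input in ENCRYPTION_CHUNK_SIZE (4 MB) slices before merging the encrypted chunks. Illustrative call (`fileBytes` is a stand-in, not from the diff):

// Illustrative only.
const { key, file } = await encryptChaCha(fileBytes);
// key: Base64 of the freshly generated secretstream key
// file.encryptedData: the merged encrypted chunks (Uint8Array)
// file.decryptionHeader: Base64 header required for decryption
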
export async function initChunkEncryption() {
    await sodium.ready;
    const key = sodium.crypto_secretstream_xchacha20poly1305_keygen();
    const initPushResult =
        sodium.crypto_secretstream_xchacha20poly1305_init_push(key);
    const [pushState, header] = [initPushResult.state, initPushResult.header];
    return {
        key: await toB64(key),
        decryptionHeader: await toB64(header),
        pushState,
    };
}
export async function encryptFileChunk(
    data: Uint8Array,
    pushState: sodium.StateAddress,
    isFinalChunk: boolean,
) {
    await sodium.ready;
    const tag = isFinalChunk
        ? sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL
        : sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;
    const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
        pushState,
        data,
        null,
        tag,
    );

    return pushResult;
}
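
initChunkEncryption and encryptFileChunk form the streaming counterpart, for data that is read piecewise. A hedged sketch of the intended call pattern, assuming `chunks` is a Uint8Array[] produced by some reader not shown in this diff:

// Illustrative only.
const { key, decryptionHeader, pushState } = await initChunkEncryption();
const encryptedChunks: Uint8Array[] = [];
for (let i = 0; i < chunks.length; i++) {
    const isFinal = i === chunks.length - 1;
    encryptedChunks.push(await encryptFileChunk(chunks[i], pushState, isFinal));
}
// Persist `key` and `decryptionHeader` (both Base64) along with the chunks.
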
export async function decryptChaChaOneShot(
    data: Uint8Array,
@@ -87,100 +290,12 @@ export async function decryptFileChunk(
    return { decryptedData: pullResult.message, newTag };
}

export async function encryptChaChaOneShot(data: Uint8Array, key: string) {
    await sodium.ready;

    const uintkey: Uint8Array = await fromB64(key);
    const initPushResult =
        sodium.crypto_secretstream_xchacha20poly1305_init_push(uintkey);
    const [pushState, header] = [initPushResult.state, initPushResult.header];

    const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
        pushState,
        data,
        null,
        sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL,
    );
    return {
        key: await toB64(uintkey),
        file: {
            encryptedData: pushResult,
            decryptionHeader: await toB64(header),
        },
    };
export interface B64EncryptionResult {
    encryptedData: string;
    key: string;
    nonce: string;
}
export const encryptChaCha = async (data: Uint8Array) => {
    await sodium.ready;

    const uintkey: Uint8Array =
        sodium.crypto_secretstream_xchacha20poly1305_keygen();

    const initPushResult =
        sodium.crypto_secretstream_xchacha20poly1305_init_push(uintkey);
    const [pushState, header] = [initPushResult.state, initPushResult.header];
    let bytesRead = 0;
    let tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;

    const encryptedChunks = [];

    while (tag !== sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL) {
        let chunkSize = ENCRYPTION_CHUNK_SIZE;
        if (bytesRead + chunkSize >= data.length) {
            chunkSize = data.length - bytesRead;
            tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL;
        }

        const buffer = data.slice(bytesRead, bytesRead + chunkSize);
        bytesRead += chunkSize;
        const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
            pushState,
            buffer,
            null,
            tag,
        );
        encryptedChunks.push(pushResult);
    }
    return {
        key: await toB64(uintkey),
        file: {
            encryptedData: mergeUint8Arrays(encryptedChunks),
            decryptionHeader: await toB64(header),
        },
    };
};
export async function initChunkEncryption() {
    await sodium.ready;
    const key = sodium.crypto_secretstream_xchacha20poly1305_keygen();
    const initPushResult =
        sodium.crypto_secretstream_xchacha20poly1305_init_push(key);
    const [pushState, header] = [initPushResult.state, initPushResult.header];
    return {
        key: await toB64(key),
        decryptionHeader: await toB64(header),
        pushState,
    };
}
export async function encryptFileChunk(
    data: Uint8Array,
    pushState: sodium.StateAddress,
    isFinalChunk: boolean,
) {
    await sodium.ready;
    const tag = isFinalChunk
        ? sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL
        : sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;
    const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
        pushState,
        data,
        null,
        tag,
    );

    return pushResult;
}
export async function encryptToB64(data: string, key: string) {
    await sodium.ready;
    const encrypted = await encrypt(await fromB64(data), await fromB64(key));
@@ -388,88 +503,3 @@ export async function generateSubKey(
        ),
    );
}

export async function fromB64(input: string) {
    await sodium.ready;
    return sodium.from_base64(input, sodium.base64_variants.ORIGINAL);
}

export async function toB64(input: Uint8Array) {
    await sodium.ready;
    return sodium.to_base64(input, sodium.base64_variants.ORIGINAL);
}

/** Convert a {@link Uint8Array} to a URL safe Base64 encoded string. */
export const toB64URLSafe = async (input: Uint8Array) => {
    await sodium.ready;
    return sodium.to_base64(input, sodium.base64_variants.URLSAFE);
};

/**
 * Convert a {@link Uint8Array} to a URL safe Base64 encoded string.
 *
 * This differs from {@link toB64URLSafe} in that it does not append any
 * trailing padding character(s) "=" to make the resultant string's length be an
 * integer multiple of 4.
 *
 * - In some contexts, for example when serializing WebAuthn binary for
 *   transmission over the network, this is the required / recommended
 *   approach.
 *
 * - In other cases, for example when trying to pass an arbitrary JSON string
 *   via a URL parameter, this is also convenient so that we do not have to
 *   deal with any ambiguity surrounding the "=" which is also the query
 *   parameter key value separator.
 */
export const toB64URLSafeNoPadding = async (input: Uint8Array) => {
    await sodium.ready;
    return sodium.to_base64(input, sodium.base64_variants.URLSAFE_NO_PADDING);
};

/**
 * Convert a Base64 encoded string to a {@link Uint8Array}.
 *
 * This is the converse of {@link toB64URLSafeNoPadding}, and does not expect
 * its input string's length to be a an integer multiple of 4.
 */
export const fromB64URLSafeNoPadding = async (input: string) => {
    await sodium.ready;
    return sodium.from_base64(input, sodium.base64_variants.URLSAFE_NO_PADDING);
};

/**
 * Variant of {@link toB64URLSafeNoPadding} that works with {@link strings}. See also
 * its sibling method {@link fromB64URLSafeNoPaddingString}.
 */
export const toB64URLSafeNoPaddingString = async (input: string) => {
    await sodium.ready;
    return toB64URLSafeNoPadding(sodium.from_string(input));
};

/**
 * Variant of {@link fromB64URLSafeNoPadding} that works with {@link strings}. See also
 * its sibling method {@link toB64URLSafeNoPaddingString}.
 */
export const fromB64URLSafeNoPaddingString = async (input: string) => {
    await sodium.ready;
    return sodium.to_string(await fromB64URLSafeNoPadding(input));
};

export async function fromUTF8(input: string) {
    await sodium.ready;
    return sodium.from_string(input);
}

export async function toUTF8(input: string) {
    await sodium.ready;
    return sodium.to_string(await fromB64(input));
}
export async function toHex(input: string) {
    await sodium.ready;
    return sodium.to_hex(await fromB64(input));
}

export async function fromHex(input: string) {
    await sodium.ready;
    return await toB64(sodium.from_hex(input));
}
@@ -1,5 +0,0 @@
export interface B64EncryptionResult {
    encryptedData: string;
    key: string;
    nonce: string;
}
@@ -21,14 +21,6 @@ export function setJustSignedUp(status: boolean) {
    setData(LS_KEYS.JUST_SIGNED_UP, { status });
}

export function getLivePhotoInfoShownCount() {
    return getData(LS_KEYS.LIVE_PHOTO_INFO_SHOWN_COUNT)?.count ?? 0;
}

export function setLivePhotoInfoShownCount(count: boolean) {
    setData(LS_KEYS.LIVE_PHOTO_INFO_SHOWN_COUNT, { count });
}

export function getLocalMapEnabled(): boolean {
    return getData(LS_KEYS.MAP_ENABLED)?.value ?? false;
}
@@ -2,7 +2,6 @@ import log from "@/next/log";

export enum LS_KEYS {
    USER = "user",
    SESSION = "session",
    KEY_ATTRIBUTES = "keyAttributes",
    ORIGINAL_KEY_ATTRIBUTES = "originalKeyAttributes",
    SUBSCRIPTION = "subscription",
@@ -11,13 +10,10 @@ export enum LS_KEYS {
    JUST_SIGNED_UP = "justSignedUp",
    SHOW_BACK_BUTTON = "showBackButton",
    EXPORT = "export",
    THUMBNAIL_FIX_STATE = "thumbnailFixState",
    LIVE_PHOTO_INFO_SHOWN_COUNT = "livePhotoInfoShownCount",
    // LOGS = "logs",
    USER_DETAILS = "userDetails",
    COLLECTION_SORT_BY = "collectionSortBy",
    THEME = "theme",
    WAIT_TIME = "waitTime",
    // Moved to the new wrapper @/next/local-storage
    // LOCALE = 'locale',
    MAP_ENABLED = "mapEnabled",
@@ -1,5 +1,5 @@
import ComlinkCryptoWorker from "@ente/shared/crypto";
import type { B64EncryptionResult } from "@ente/shared/crypto/types";
import type { B64EncryptionResult } from "@ente/shared/crypto/internal/libsodium";
import { CustomError } from "@ente/shared/error";
import { getKey, SESSION_KEYS } from "@ente/shared/storage/sessionStorage";