[web] New dedup, same as mobile - Almost completed (#4508)

The changes are done, but they haven't been swapped in for the existing
implementation yet, pending another scan.
This commit is contained in:
Manav Rathi
2024-12-26 19:44:27 +05:30
committed by GitHub
20 changed files with 984 additions and 846 deletions

View File

@@ -21,6 +21,7 @@ import type {
CollectionSummary,
CollectionSummaryType,
} from "@/new/photos/services/collection/ui";
import { clearLocalTrash, emptyTrash } from "@/new/photos/services/collections";
import {
isArchivedCollection,
isPinnedCollection,
@@ -50,7 +51,6 @@ import { GalleryContext } from "pages/gallery";
import React, { useCallback, useContext, useRef } from "react";
import { Trans } from "react-i18next";
import * as CollectionAPI from "services/collectionService";
import * as TrashService from "services/trashService";
import { SetFilesDownloadProgressAttributesCreator } from "types/gallery";
import {
changeCollectionOrder,
@@ -229,13 +229,13 @@ const CollectionOptions: React.FC<CollectionOptionsProps> = ({
continue: {
text: t("empty_trash"),
color: "critical",
action: emptyTrash,
action: doEmptyTrash,
},
});
const emptyTrash = wrap(async () => {
await TrashService.emptyTrash();
await TrashService.clearLocalTrash();
const doEmptyTrash = wrap(async () => {
await emptyTrash();
await clearLocalTrash();
setActiveCollectionID(ALL_SECTION);
});

View File

@@ -12,6 +12,7 @@ import {
import { fileLogID, type EnteFile } from "@/media/file";
import { FileType } from "@/media/file-type";
import { isHEICExtension, needsJPEGConversion } from "@/media/formats";
import { moveToTrash } from "@/new/photos/services/collection";
import { extractRawExif, parseExif } from "@/new/photos/services/exif";
import { AppContext } from "@/new/photos/types/context";
import { FlexWrapper } from "@ente/shared/components/Container";
@@ -58,7 +59,6 @@ import {
addToFavorites,
removeFromFavorites,
} from "services/collectionService";
import { trashFiles } from "services/fileService";
import { SetFilesDownloadProgressAttributesCreator } from "types/gallery";
import {
copyFileToClipboard,
@@ -550,7 +550,7 @@ function PhotoViewer(props: PhotoViewerProps) {
try {
showLoadingBar();
try {
await trashFiles([file]);
await moveToTrash([file]);
} finally {
hideLoadingBar();
}

View File

@@ -9,6 +9,7 @@ import { UploaderNameInput } from "@/new/albums/components/UploaderNameInput";
import { CollectionMappingChoice } from "@/new/photos/components/CollectionMappingChoice";
import type { CollectionSelectorAttributes } from "@/new/photos/components/CollectionSelector";
import { downloadAppDialogAttributes } from "@/new/photos/components/utils/download";
import { getLatestCollections } from "@/new/photos/services/collections";
import { exportMetadataDirectoryName } from "@/new/photos/services/export";
import type {
FileAndPath,
@@ -26,7 +27,6 @@ import { t } from "i18next";
import { GalleryContext } from "pages/gallery";
import { useContext, useEffect, useRef, useState } from "react";
import { Trans } from "react-i18next";
import { getLatestCollections } from "services/collectionService";
import {
getPublicCollectionUID,
getPublicCollectionUploaderName,

View File

@@ -2,11 +2,16 @@ import { stashRedirect } from "@/accounts/services/redirect";
import { ActivityIndicator } from "@/base/components/mui/ActivityIndicator";
import { errorDialogAttributes } from "@/base/components/utils/dialog";
import log from "@/base/log";
import { ALL_SECTION } from "@/new/photos/services/collection";
import { getLocalCollections } from "@/new/photos/services/collections";
import { ALL_SECTION, moveToTrash } from "@/new/photos/services/collection";
import {
getAllLatestCollections,
getLocalCollections,
syncTrash,
} from "@/new/photos/services/collections";
import {
createFileCollectionIDs,
getLocalFiles,
syncFiles,
} from "@/new/photos/services/files";
import { useAppContext } from "@/new/photos/types/context";
import { VerticallyCentered } from "@ente/shared/components/Container";
@@ -22,10 +27,7 @@ import PhotoFrame from "components/PhotoFrame";
import { t } from "i18next";
import { default as Router, default as router } from "next/router";
import { createContext, useEffect, useState } from "react";
import { getAllLatestCollections } from "services/collectionService";
import { Duplicate, getDuplicates } from "services/deduplicationService";
import { syncFiles, trashFiles } from "services/fileService";
import { syncTrash } from "services/trashService";
import { SelectedState } from "types/gallery";
import { getSelectedFiles } from "utils/file";
@@ -133,10 +135,10 @@ export default function Deduplicate() {
try {
showLoadingBar();
const selectedFiles = getSelectedFiles(selected, duplicateFiles);
await trashFiles(selectedFiles);
await moveToTrash(selectedFiles);
// trashFiles above does an API request, we still need to update our
// local state.
// moveToTrash above does an API request, we still need to update
// our local state.
//
// Enhancement: This can be done in a more granular manner. Also, it
// is better to funnel these syncs instead of adding these here and

View File

@@ -35,11 +35,16 @@ import {
isHiddenCollection,
} from "@/new/photos/services/collection";
import { areOnlySystemCollections } from "@/new/photos/services/collection/ui";
import { getAllLocalCollections } from "@/new/photos/services/collections";
import {
getAllLatestCollections,
getAllLocalCollections,
syncTrash,
} from "@/new/photos/services/collections";
import {
getLocalFiles,
getLocalTrashedFiles,
sortFiles,
syncFiles,
} from "@/new/photos/services/files";
import {
filterSearchableFiles,
@@ -47,6 +52,7 @@ import {
} from "@/new/photos/services/search";
import type { SearchOption } from "@/new/photos/services/search/types";
import { initSettings } from "@/new/photos/services/settings";
import { preCollectionsAndFilesSync, sync } from "@/new/photos/services/sync";
import {
initUserDetailsOrTriggerSync,
redirectToCustomerPortal,
@@ -113,11 +119,8 @@ import {
constructUserIDToEmailMap,
createAlbum,
createUnCategorizedCollection,
getAllLatestCollections,
} from "services/collectionService";
import { syncFiles } from "services/fileService";
import { preFileInfoSync, sync } from "services/sync";
import { syncTrash } from "services/trashService";
import exportService from "services/export";
import uploadManager from "services/upload/uploadManager";
import { isTokenValid } from "services/userService";
import {
@@ -566,7 +569,7 @@ export default function Gallery() {
throw new Error(CustomError.SESSION_EXPIRED);
}
!silent && showLoadingBar();
await preFileInfoSync();
await preCollectionsAndFilesSync();
const allCollections = await getAllLatestCollections();
const [hiddenCollections, collections] = splitByPredicate(
allCollections,
@@ -577,13 +580,13 @@ export default function Gallery() {
collections,
hiddenCollections,
});
await syncFiles(
const didUpdateNormalFiles = await syncFiles(
"normal",
collections,
(files) => dispatch({ type: "setFiles", files }),
(files) => dispatch({ type: "fetchFiles", files }),
);
await syncFiles(
const didUpdateHiddenFiles = await syncFiles(
"hidden",
hiddenCollections,
(hiddenFiles) =>
@@ -591,6 +594,8 @@ export default function Gallery() {
(hiddenFiles) =>
dispatch({ type: "fetchHiddenFiles", hiddenFiles }),
);
if (didUpdateNormalFiles || didUpdateHiddenFiles)
exportService.onLocalFilesUpdated();
await syncTrash(allCollections, (trashedFiles: EnteFile[]) =>
dispatch({ type: "setTrashedFiles", trashedFiles }),
);

View File

@@ -6,7 +6,6 @@ import {
CollectionMagicMetadata,
CollectionMagicMetadataProps,
CollectionPublicMagicMetadata,
CollectionShareeMagicMetadata,
CollectionType,
CreatePublicAccessTokenRequest,
EncryptedCollection,
@@ -20,7 +19,6 @@ import { ItemVisibility } from "@/media/file-metadata";
import {
addToCollection,
isDefaultHiddenCollection,
isHiddenCollection,
moveToCollection,
} from "@/new/photos/services/collection";
import type { CollectionSummary } from "@/new/photos/services/collection/ui";
@@ -29,7 +27,7 @@ import {
CollectionsSortBy,
} from "@/new/photos/services/collection/ui";
import {
getAllLocalCollections,
getCollectionWithSecrets,
getLocalCollections,
} from "@/new/photos/services/collections";
import {
@@ -41,7 +39,6 @@ import { updateMagicMetadata } from "@/new/photos/services/magic-metadata";
import type { FamilyData } from "@/new/photos/services/user-details";
import { batch } from "@/utils/array";
import HTTPService from "@ente/shared/network/HTTPService";
import localForage from "@ente/shared/storage/localForage";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { getActualKey } from "@ente/shared/user";
@@ -54,262 +51,11 @@ import {
import { UpdateMagicMetadataRequest } from "./fileService";
import { getPublicKey } from "./userService";
const COLLECTION_TABLE = "collections";
const COLLECTION_UPDATION_TIME = "collection-updation-time";
const HIDDEN_COLLECTION_IDS = "hidden-collection-ids";
const UNCATEGORIZED_COLLECTION_NAME = "Uncategorized";
export const HIDDEN_COLLECTION_NAME = ".hidden";
const FAVORITE_COLLECTION_NAME = "Favorites";
export const REQUEST_BATCH_SIZE = 1000;
/** Updation time of {@link collection} as of the last time its files were synced; 0 if never synced. */
export const getCollectionLastSyncTime = async (collection: Collection) =>
    (await localForage.getItem<number>(`${collection.id}-time`)) ?? 0;

/** Record {@link time} as the point up to which {@link collection}'s files have been synced. */
export const setCollectionLastSyncTime = async (
    collection: Collection,
    time: number,
) => await localForage.setItem<number>(`${collection.id}-time`, time);

/** Forget the per-collection sync marker, forcing a full refetch of the collection's files on the next sync. */
export const removeCollectionLastSyncTime = async (collection: Collection) =>
    await localForage.removeItem(`${collection.id}-time`);
/**
 * Decrypt an {@link EncryptedCollection} received from remote into a
 * {@link Collection} with its key, name and magic metadata in the clear.
 *
 * @param collection The encrypted collection received from remote.
 *
 * @param masterKey The user's master key (base64). For collections we own it
 * directly decrypts the collection key; for shared collections it is first
 * used to recover our secret key.
 */
const getCollectionWithSecrets = async (
    collection: EncryptedCollection,
    masterKey: string,
): Promise<Collection> => {
    const cryptoWorker = await sharedCryptoWorker();
    const userID = getData(LS_KEYS.USER).id;
    let collectionKey: string;
    if (collection.owner.id === userID) {
        // Own collection: the collection key is encrypted directly with our
        // master key.
        collectionKey = await cryptoWorker.decryptB64(
            collection.encryptedKey,
            collection.keyDecryptionNonce,
            masterKey,
        );
    } else {
        // Shared collection: the collection key was sealed with our public
        // key, so first recover our secret key using the master key, then
        // open the sealed box.
        const keyAttributes = getData(LS_KEYS.KEY_ATTRIBUTES);
        const secretKey = await cryptoWorker.decryptB64(
            keyAttributes.encryptedSecretKey,
            keyAttributes.secretKeyDecryptionNonce,
            masterKey,
        );
        collectionKey = await cryptoWorker.boxSealOpen(
            collection.encryptedKey,
            keyAttributes.publicKey,
            secretKey,
        );
    }
    // Some collections have their name in the clear (the `name` field);
    // otherwise decrypt `encryptedName` using the collection key.
    const collectionName =
        collection.name ||
        (await cryptoWorker.decryptToUTF8(
            collection.encryptedName,
            collection.nameDecryptionNonce,
            collectionKey,
        ));

    // Each of the three magic metadata variants is optional; decrypt only
    // those that are present. The rest remain undefined in the result.
    let collectionMagicMetadata: CollectionMagicMetadata;
    if (collection.magicMetadata?.data) {
        collectionMagicMetadata = {
            ...collection.magicMetadata,
            data: await cryptoWorker.decryptMetadataJSON({
                encryptedDataB64: collection.magicMetadata.data,
                decryptionHeaderB64: collection.magicMetadata.header,
                keyB64: collectionKey,
            }),
        };
    }
    let collectionPublicMagicMetadata: CollectionPublicMagicMetadata;
    if (collection.pubMagicMetadata?.data) {
        collectionPublicMagicMetadata = {
            ...collection.pubMagicMetadata,
            data: await cryptoWorker.decryptMetadataJSON({
                encryptedDataB64: collection.pubMagicMetadata.data,
                decryptionHeaderB64: collection.pubMagicMetadata.header,
                keyB64: collectionKey,
            }),
        };
    }
    let collectionShareeMagicMetadata: CollectionShareeMagicMetadata;
    if (collection.sharedMagicMetadata?.data) {
        collectionShareeMagicMetadata = {
            ...collection.sharedMagicMetadata,
            data: await cryptoWorker.decryptMetadataJSON({
                encryptedDataB64: collection.sharedMagicMetadata.data,
                decryptionHeaderB64: collection.sharedMagicMetadata.header,
                keyB64: collectionKey,
            }),
        };
    }
    return {
        ...collection,
        name: collectionName,
        key: collectionKey,
        magicMetadata: collectionMagicMetadata,
        pubMagicMetadata: collectionPublicMagicMetadata,
        sharedMagicMetadata: collectionShareeMagicMetadata,
    };
};
/**
 * Fetch all collections that changed on remote since {@link sinceTime},
 * decrypting them using the user's master {@link key}.
 *
 * Deleted collections are returned as-is (there is nothing to decrypt).
 * Collections whose decryption fails are logged and filtered out of the
 * result.
 */
const getCollections = async (
    token: string,
    sinceTime: number,
    key: string,
): Promise<Collection[]> => {
    try {
        const resp = await HTTPService.get(
            await apiURL("/collections/v2"),
            {
                sinceTime,
            },
            { "X-Auth-Token": token },
        );
        const decryptedCollections: Collection[] = await Promise.all(
            resp.data.collections.map(
                async (collection: EncryptedCollection) => {
                    if (collection.isDeleted) {
                        return collection;
                    }
                    try {
                        return await getCollectionWithSecrets(collection, key);
                    } catch (e) {
                        // Don't fail the whole sync for a single bad
                        // collection; log and drop it below.
                        log.error(
                            `decryption failed for collection with ID ${collection.id}`,
                            e,
                        );
                        return collection;
                    }
                },
            ),
        );
        // only allow deleted or collection with key, filtering out collection whose decryption failed
        const collections = decryptedCollections.filter(
            (collection) => collection.isDeleted || collection.key,
        );
        return collections;
    } catch (e) {
        log.error("getCollections failed", e);
        throw e;
    }
};
/** Highest collection updation time seen during the last collections sync; 0 if never synced. */
export const getCollectionUpdationTime = async (): Promise<number> =>
    (await localForage.getItem<number>(COLLECTION_UPDATION_TIME)) ?? 0;

/** IDs of the collections that were hidden as of the last collections sync. */
export const getHiddenCollectionIDs = async (): Promise<number[]> =>
    (await localForage.getItem<number[]>(HIDDEN_COLLECTION_IDS)) ?? [];
/**
 * Return the latest collections, filtered by visibility.
 *
 * @param type When "normal" (the default), only non-hidden collections are
 * returned; when "hidden", only hidden ones.
 */
export const getLatestCollections = async (
    type: "normal" | "hidden" = "normal",
): Promise<Collection[]> => {
    const allCollections = await getAllLatestCollections();
    if (type == "hidden") {
        return allCollections.filter((c) => isHiddenCollection(c));
    }
    return allCollections.filter((c) => !isHiddenCollection(c));
};
export const getAllLatestCollections = async (): Promise<Collection[]> => {
const collections = await syncCollections();
return collections;
};
/**
 * Fetch collections that changed on remote since the last sync, merge them
 * with the locally persisted ones, persist the merged result (along with the
 * bookkeeping needed for the next incremental sync), and return the full
 * latest list of non-deleted collections.
 */
export const syncCollections = async () => {
    const localCollections = await getAllLocalCollections();
    let lastCollectionUpdationTime = await getCollectionUpdationTime();
    const hiddenCollectionIDs = await getHiddenCollectionIDs();
    const token = getToken();
    const key = await getActualKey();
    const updatedCollections =
        (await getCollections(token, lastCollectionUpdationTime, key)) ?? [];
    if (updatedCollections.length === 0) {
        // Nothing changed on remote; local state is already the latest.
        return localCollections;
    }
    const allCollectionsInstances = [
        ...localCollections,
        ...updatedCollections,
    ];
    // For each collection ID keep only the instance with the highest
    // updation time (fresh remote copies win over stale local ones).
    const latestCollectionsInstances = new Map<number, Collection>();
    allCollectionsInstances.forEach((collection) => {
        if (
            !latestCollectionsInstances.has(collection.id) ||
            latestCollectionsInstances.get(collection.id).updationTime <
                collection.updationTime
        ) {
            latestCollectionsInstances.set(collection.id, collection);
        }
    });

    const collections: Collection[] = [];
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    for (const [_, collection] of latestCollectionsInstances) {
        const isDeletedCollection = collection.isDeleted;
        const isNewlyHiddenCollection =
            isHiddenCollection(collection) &&
            !hiddenCollectionIDs.includes(collection.id);
        const isNewlyUnHiddenCollection =
            !isHiddenCollection(collection) &&
            hiddenCollectionIDs.includes(collection.id);
        if (
            isDeletedCollection ||
            isNewlyHiddenCollection ||
            isNewlyUnHiddenCollection
        ) {
            // This collection's files need a full resync: it was either
            // deleted, or it crossed the hidden/normal boundary. Drop its
            // last-sync marker so the next file sync refetches everything.
            // NOTE(review): not awaited — presumably fire-and-forget; confirm.
            removeCollectionLastSyncTime(collection);
        }
        if (isDeletedCollection) {
            continue;
        }
        collections.push(collection);
        lastCollectionUpdationTime = Math.max(
            lastCollectionUpdationTime,
            collection.updationTime,
        );
    }

    const updatedHiddenCollectionIDs = collections
        .filter((collection) => isHiddenCollection(collection))
        .map((collection) => collection.id);

    // Persist the merged collections plus the markers needed for the next
    // incremental sync.
    await localForage.setItem(COLLECTION_TABLE, collections);
    await localForage.setItem(
        COLLECTION_UPDATION_TIME,
        lastCollectionUpdationTime,
    );
    await localForage.setItem(
        HIDDEN_COLLECTION_IDS,
        updatedHiddenCollectionIDs,
    );
    return collections;
};
/**
 * Fetch a single collection from remote by its ID, returning it with its
 * secrets (key, name, metadata) decrypted.
 *
 * NOTE(review): despite the declared return type, this returns `undefined`
 * when there is no auth token — callers should be prepared for that.
 */
export const getCollection = async (
    collectionID: number,
): Promise<Collection> => {
    try {
        const token = getToken();
        if (!token) {
            return;
        }
        const resp = await HTTPService.get(
            await apiURL(`/collections/${collectionID}`),
            null,
            { "X-Auth-Token": token },
        );
        const key = await getActualKey();
        const collectionWithSecrets = await getCollectionWithSecrets(
            resp.data?.collection,
            key,
        );
        return collectionWithSecrets;
    } catch (e) {
        log.error("failed to get collection", e);
        throw e;
    }
};
const REQUEST_BATCH_SIZE = 1000;
export const createAlbum = (albumName: string) => {
return createCollection(albumName, CollectionType.album);

View File

@@ -750,19 +750,19 @@ class ExportService {
const { image, video } =
parseLivePhotoExportName(fileExportName);
await moveToTrash(
await moveToFSTrash(
exportDir,
collectionExportName,
image,
);
await moveToTrash(
await moveToFSTrash(
exportDir,
collectionExportName,
video,
);
} else {
await moveToTrash(
await moveToFSTrash(
exportDir,
collectionExportName,
fileExportName,
@@ -1459,14 +1459,15 @@ const isExportInProgress = (exportStage: ExportStage) =>
exportStage > ExportStage.INIT && exportStage < ExportStage.FINISHED;
/**
* Move {@link fileName} in {@link collectionName} to Trash.
* Move {@link fileName} in {@link collectionName} to the special per-collection
* file system "Trash" folder we created under the export directory.
*
* Also move its associated metadata JSON to Trash.
*
* @param exportDir The root directory on the user's file system where we are
* exporting to.
* */
const moveToTrash = async (
const moveToFSTrash = async (
exportDir: string,
collectionName: string,
fileName: string,

View File

@@ -1,190 +1,13 @@
import { encryptMetadataJSON } from "@/base/crypto";
import log from "@/base/log";
import { apiURL } from "@/base/origins";
import type { Collection } from "@/media/collection";
import type { EncryptedMagicMetadata } from "@/media/file";
import {
EncryptedEnteFile,
type EncryptedMagicMetadata,
EnteFile,
FileWithUpdatedMagicMetadata,
FileWithUpdatedPublicMagicMetadata,
TrashRequest,
} from "@/media/file";
import {
clearCachedThumbnailsIfChanged,
getLatestVersionFiles,
getLocalFiles,
setLocalFiles,
} from "@/new/photos/services/files";
import { batch } from "@/utils/array";
import HTTPService from "@ente/shared/network/HTTPService";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import exportService from "services/export";
import { decryptFile } from "utils/file";
import {
getCollectionLastSyncTime,
REQUEST_BATCH_SIZE,
setCollectionLastSyncTime,
} from "./collectionService";
/**
* Fetch all files of the given {@link type}, belonging to the given
* {@link collections}, from remote and update our local database.
*
* If this is the initial read, or if the count of files we have differs from
* the state of the local database (these two are expected to be the same case),
* then the {@link onResetFiles} callback is invoked to give the caller a chance
* to bring its state up to speed.
*
* In addition to updating the local database, it also calls the provided
* {@link onFetchFiles} callback with the latest decrypted files after each
* batch the new and/or updated files are received from remote.
*/
export const syncFiles = async (
    type: "normal" | "hidden",
    collections: Collection[],
    onResetFiles: (fs: EnteFile[]) => void,
    onFetchFiles: (fs: EnteFile[]) => void,
) => {
    const localFiles = await getLocalFiles(type);
    // Prune files that belong to collections which no longer exist.
    let files = await removeDeletedCollectionFiles(collections, localFiles);
    let didUpdateFiles = false;
    if (files.length !== localFiles.length) {
        await setLocalFiles(type, files);
        onResetFiles(files);
        didUpdateFiles = true;
    }
    for (const collection of collections) {
        if (!getToken()) {
            continue;
        }
        const lastSyncTime = await getCollectionLastSyncTime(collection);
        if (collection.updationTime === lastSyncTime) {
            // Nothing changed in this collection since our last sync of it.
            continue;
        }

        const newFiles = await getFiles(collection, lastSyncTime, onFetchFiles);
        // Evict cached thumbnails that may be stale after the update.
        await clearCachedThumbnailsIfChanged(localFiles, newFiles);
        files = getLatestVersionFiles([...files, ...newFiles]);
        await setLocalFiles(type, files);
        didUpdateFiles = true;
        // NOTE(review): not awaited — presumably fire-and-forget; confirm.
        setCollectionLastSyncTime(collection, collection.updationTime);
    }
    if (didUpdateFiles) exportService.onLocalFilesUpdated();
};
/**
 * Fetch all files in {@link collection} that changed on remote since
 * {@link sinceTime}, decrypting them using the collection's key.
 *
 * The diff is fetched in batches; after each batch, {@link onFetchFiles} is
 * invoked with the cumulative list of files fetched so far. Deleted files are
 * passed through undecrypted (they are part of the diff so that callers can
 * prune them from local state).
 */
export const getFiles = async (
    collection: Collection,
    sinceTime: number,
    onFetchFiles: (fs: EnteFile[]) => void,
): Promise<EnteFile[]> => {
    try {
        let decryptedFiles: EnteFile[] = [];
        let time = sinceTime;
        let resp;
        do {
            const token = getToken();
            if (!token) {
                break;
            }
            resp = await HTTPService.get(
                await apiURL("/collections/v2/diff"),
                {
                    collectionID: collection.id,
                    sinceTime: time,
                },
                {
                    "X-Auth-Token": token,
                },
            );
            const newDecryptedFilesBatch = await Promise.all(
                resp.data.diff.map(async (file: EncryptedEnteFile) => {
                    if (!file.isDeleted) {
                        return await decryptFile(file, collection.key);
                    } else {
                        return file;
                    }
                }) as Promise<EnteFile>[],
            );
            decryptedFiles = [...decryptedFiles, ...newDecryptedFilesBatch];

            onFetchFiles(decryptedFiles);
            if (resp.data.diff.length) {
                // Resume the next batch from the last item we received.
                time = resp.data.diff.slice(-1)[0].updationTime;
            }
        } while (resp.data.hasMore);
        return decryptedFiles;
    } catch (e) {
        log.error("Get files failed", e);
        throw e;
    }
};
/**
 * Return only those of {@link files} that belong to one of the given
 * {@link collections}; files whose collection is absent are dropped.
 */
const removeDeletedCollectionFiles = async (
    collections: Collection[],
    files: EnteFile[],
) => {
    const liveCollectionIDs = new Set<number>(
        collections.map((collection) => collection.id),
    );
    return files.filter((file) => liveCollectionIDs.has(file.collectionID));
};
/**
 * Move the given files to trash on remote.
 *
 * Requests are made in batches of {@link REQUEST_BATCH_SIZE}. Silently
 * returns without doing anything if there is no auth token.
 *
 * Note: this only updates remote; callers are responsible for syncing the
 * local state afterwards.
 */
export const trashFiles = async (filesToTrash: EnteFile[]) => {
    try {
        const token = getToken();
        if (!token) {
            return;
        }
        const batchedFilesToTrash = batch(filesToTrash, REQUEST_BATCH_SIZE);
        for (const batch of batchedFilesToTrash) {
            const trashRequest: TrashRequest = {
                items: batch.map((file) => ({
                    fileID: file.id,
                    collectionID: file.collectionID,
                })),
            };
            await HTTPService.post(
                await apiURL("/files/trash"),
                trashRequest,
                null,
                {
                    "X-Auth-Token": token,
                },
            );
        }
    } catch (e) {
        log.error("trash file failed", e);
        throw e;
    }
};
/**
 * Permanently delete the files with the given IDs from trash on remote.
 *
 * Requests are made in batches of {@link REQUEST_BATCH_SIZE}. Silently
 * returns without doing anything if there is no auth token.
 */
export const deleteFromTrash = async (filesToDelete: number[]) => {
    try {
        const token = getToken();
        if (!token) {
            return;
        }
        const batchedFilesToDelete = batch(filesToDelete, REQUEST_BATCH_SIZE);
        for (const batch of batchedFilesToDelete) {
            await HTTPService.post(
                await apiURL("/trash/delete"),
                { fileIDs: batch },
                null,
                {
                    "X-Auth-Token": token,
                },
            );
        }
    } catch (e) {
        log.error("deleteFromTrash failed", e);
        throw e;
    }
};
export interface UpdateMagicMetadataRequest {
id: number;

View File

@@ -6,12 +6,11 @@ import type {
CollectionPublicMagicMetadata,
} from "@/media/collection";
import type { EncryptedEnteFile, EnteFile } from "@/media/file";
import { mergeMetadata } from "@/media/file";
import { decryptFile, mergeMetadata } from "@/media/file";
import { sortFiles } from "@/new/photos/services/files";
import { CustomError, parseSharingErrorCodes } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import localForage from "@ente/shared/storage/localForage";
import { decryptFile } from "utils/file";
const PUBLIC_COLLECTION_FILES_TABLE = "public-collection-files";
const PUBLIC_COLLECTIONS_TABLE = "public-collections";

View File

@@ -1,36 +0,0 @@
import { isMLSupported, mlStatusSync, mlSync } from "@/new/photos/services/ml";
import { searchDataSync } from "@/new/photos/services/search";
import { syncSettings } from "@/new/photos/services/settings";
/**
 * Part 1 of {@link sync}. See the TODO on {@link sync} for why the sync is
 * split into two parts.
 */
export const preFileInfoSync = async () => {
    // Settings are always synced; ML status only where ML is supported.
    const tasks: Promise<unknown>[] = [syncSettings()];
    if (isMLSupported) tasks.push(mlStatusSync());
    await Promise.all(tasks);
};
/**
* Sync our local state with remote on page load for web and focus for desktop.
*
* This function makes various API calls to fetch state from remote, using it to
* update our local state, and triggering periodic jobs that depend on the local
* state.
*
* This runs on initial page load (on both web and desktop). In addition for
* desktop, it also runs each time the desktop app gains focus.
*
* TODO: This is called after we've synced the local files DBs with remote. That
* code belongs here, but currently that state is persisted in the top level
* gallery React component.
*
* So meanwhile we've split this sync into this method, which is called after
* the file info has been synced (which can take a few minutes for large
* libraries after initial login), and the `preFileInfoSync`, which is called
* before doing the file sync and thus should run immediately after login.
*/
export const sync = async () => {
    await searchDataSync();
    // ML sync might take a very long time for initial indexing, so kick it
    // off without waiting for it to finish.
    void mlSync();
};

View File

@@ -1,156 +0,0 @@
import log from "@/base/log";
import { apiURL } from "@/base/origins";
import type { Collection } from "@/media/collection";
import { EncryptedTrashItem, Trash, type EnteFile } from "@/media/file";
import {
getLocalTrash,
getTrashedFiles,
TRASH,
} from "@/new/photos/services/files";
import HTTPService from "@ente/shared/network/HTTPService";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { decryptFile } from "utils/file";
import { getCollection } from "./collectionService";
const TRASH_TIME = "trash-time";
const DELETED_COLLECTION = "deleted-collection";
/**
 * Read the locally stashed collections that were deleted on remote (kept
 * around so that files in trash belonging to them can still be decrypted).
 */
export async function getLocalDeletedCollections() {
    const stored: Collection[] =
        (await localForage.getItem<Collection[]>(DELETED_COLLECTION)) || [];
    // Defensively drop any undefined entries; if we found some, persist the
    // cleaned-up list so subsequent reads stay consistent.
    const cleaned = stored.filter((collection) => !!collection);
    if (cleaned.length !== stored.length) {
        await localForage.setItem(DELETED_COLLECTION, cleaned);
    }
    return cleaned;
}
/**
 * Prune the locally stashed deleted-collections, keeping only those that are
 * still referenced by at least one item in {@link fileTrash}.
 */
export async function cleanTrashCollections(fileTrash: Trash) {
    const referencedCollectionIDs = new Set<number>(
        fileTrash.map((item) => item.file.collectionID),
    );
    const stashedCollections = await getLocalDeletedCollections();
    const stillNeeded = stashedCollections.filter((collection) =>
        referencedCollectionIDs.has(collection.id),
    );
    await localForage.setItem(DELETED_COLLECTION, stillNeeded);
}
/** Time up to which the local trash has been synced; 0 if never synced. */
async function getLastSyncTime() {
    const storedTime = await localForage.getItem<number>(TRASH_TIME);
    return storedTime ?? 0;
}
/**
 * Sync the local trash with remote.
 *
 * Fetches the trash diff since the last sync, updating local storage and
 * invoking {@link setTrashedFiles} with the latest trashed files, then prunes
 * locally stashed deleted-collections that are no longer referenced.
 *
 * No-op if there is no auth token.
 */
export async function syncTrash(
    collections: Collection[],
    setTrashedFiles: (fs: EnteFile[]) => void,
): Promise<void> {
    const trash = await getLocalTrash();
    // Also include collections that were deleted on remote but whose files
    // are still in trash (we stash them locally so those files can still be
    // decrypted).
    collections = [...collections, ...(await getLocalDeletedCollections())];
    const collectionMap = new Map<number, Collection>(
        collections.map((collection) => [collection.id, collection]),
    );
    if (!getToken()) {
        return;
    }
    const lastSyncTime = await getLastSyncTime();

    const updatedTrash = await updateTrash(
        collectionMap,
        lastSyncTime,
        setTrashedFiles,
        trash,
    );
    // NOTE(review): not awaited — presumably fire-and-forget; confirm.
    cleanTrashCollections(updatedTrash);
}
/**
 * Fetch the trash diff from remote since {@link sinceTime} and merge it into
 * {@link currentTrash}.
 *
 * After each fetched batch, the merged trash (and the new sync time) is
 * persisted to local storage, and {@link setTrashedFiles} is invoked with the
 * current trashed files.
 *
 * @returns the updated trash; on any error, returns {@link currentTrash}
 * unchanged (errors are logged, not rethrown).
 */
export const updateTrash = async (
    collections: Map<number, Collection>,
    sinceTime: number,
    setTrashedFiles: (fs: EnteFile[]) => void,
    currentTrash: Trash,
): Promise<Trash> => {
    try {
        let updatedTrash: Trash = [...currentTrash];
        let time = sinceTime;

        let resp;
        do {
            const token = getToken();
            if (!token) {
                break;
            }
            resp = await HTTPService.get(
                await apiURL("/trash/v2/diff"),
                {
                    sinceTime: time,
                },
                {
                    "X-Auth-Token": token,
                },
            );
            // #Perf: This can be optimized by running the decryption in parallel
            for (const trashItem of resp.data.diff as EncryptedTrashItem[]) {
                const collectionID = trashItem.file.collectionID;
                let collection = collections.get(collectionID);
                if (!collection) {
                    // Not in our local state, so the collection must've been
                    // deleted on remote; fetch it and stash it locally so we
                    // can still decrypt its trashed files.
                    collection = await getCollection(collectionID);
                    collections.set(collectionID, collection);
                    // NOTE(review): setItem is not awaited here — confirm
                    // intentional.
                    localForage.setItem(DELETED_COLLECTION, [
                        ...collections.values(),
                    ]);
                }
                if (!trashItem.isDeleted && !trashItem.isRestored) {
                    // Still in trash: decrypt and add it.
                    const decryptedFile = await decryptFile(
                        trashItem.file,
                        collection.key,
                    );
                    updatedTrash.push({ ...trashItem, file: decryptedFile });
                } else {
                    // Permanently deleted or restored: drop it from our local
                    // trash.
                    updatedTrash = updatedTrash.filter(
                        (item) => item.file.id !== trashItem.file.id,
                    );
                }
            }

            if (resp.data.diff.length) {
                // Resume the next batch after the last item we received.
                time = resp.data.diff.slice(-1)[0].updatedAt;
            }

            setTrashedFiles(getTrashedFiles(updatedTrash));
            await localForage.setItem(TRASH, updatedTrash);
            await localForage.setItem(TRASH_TIME, time);
        } while (resp.data.hasMore);
        return updatedTrash;
    } catch (e) {
        log.error("Get trash files failed", e);
    }
    return currentTrash;
};
/**
 * Empty the user's trash on remote.
 *
 * This only issues the remote request; the locally persisted trash is cleared
 * separately (see {@link clearLocalTrash}). No-op if there is no auth token.
 */
export const emptyTrash = async () => {
    try {
        const token = getToken();
        if (!token) {
            return;
        }
        // Pass the last sync time along with the request — presumably this
        // scopes the emptying to items up to that point; confirm with the
        // API documentation.
        const lastUpdatedAt = await getLastSyncTime();
        await HTTPService.post(
            await apiURL("/trash/empty"),
            { lastUpdatedAt },
            null,
            {
                "X-Auth-Token": token,
            },
        );
    } catch (e) {
        log.error("empty trash failed", e);
        throw e;
    }
};
/** Reset the locally persisted trash to an empty list (remote is untouched). */
export const clearLocalTrash = async () => {
    const emptied: never[] = [];
    await localForage.setItem(TRASH, emptied);
};

View File

@@ -8,7 +8,11 @@ import type { Electron } from "@/base/types/ipc";
import { ComlinkWorker } from "@/base/worker/comlink-worker";
import { shouldDisableCFUploadProxy } from "@/gallery/services/upload";
import type { Collection } from "@/media/collection";
import { EncryptedEnteFile, EnteFile } from "@/media/file";
import {
decryptFile,
type EncryptedEnteFile,
type EnteFile,
} from "@/media/file";
import type { ParsedMetadata } from "@/media/file-metadata";
import { FileType } from "@/media/file-type";
import { potentialFileTypeFromExtension } from "@/media/live-photo";
@@ -28,7 +32,7 @@ import {
getPublicCollectionUID,
} from "services/publicCollectionService";
import watcher from "services/watch";
import { decryptFile, getUserOwnedFiles } from "utils/file";
import { getUserOwnedFiles } from "utils/file";
import {
getMetadataJSONMapKeyForJSON,
tryParseTakeoutMetadataJSON,

View File

@@ -1,4 +1,3 @@
import { sharedCryptoWorker } from "@/base/crypto";
import { joinPath } from "@/base/file-name";
import log from "@/base/log";
import { type Electron } from "@/base/types/ipc";
@@ -7,9 +6,7 @@ import { downloadManager } from "@/gallery/services/download";
import { detectFileTypeInfo } from "@/gallery/utils/detect-type";
import { writeStream } from "@/gallery/utils/native-stream";
import {
EncryptedEnteFile,
EnteFile,
FileMagicMetadata,
FileMagicMetadataProps,
FilePublicMagicMetadata,
FilePublicMagicMetadataProps,
@@ -19,6 +16,7 @@ import {
import { ItemVisibility } from "@/media/file-metadata";
import { FileType } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import { deleteFromTrash, moveToTrash } from "@/new/photos/services/collection";
import {
isArchivedFile,
updateMagicMetadata,
@@ -33,8 +31,6 @@ import {
moveToHiddenCollection,
} from "services/collectionService";
import {
deleteFromTrash,
trashFiles,
updateFileMagicMetadata,
updateFilePublicMagicMetadata,
} from "services/fileService";
@@ -104,66 +100,6 @@ export function getSelectedFiles(
return files.filter((file) => selectedFilesIDs.has(file.id));
}
/**
 * Decrypt an {@link EncryptedEnteFile} fetched from remote using the key of
 * the collection it belongs to, returning an {@link EnteFile} with its file
 * key, metadata and magic metadata in the clear.
 */
export async function decryptFile(
    file: EncryptedEnteFile,
    collectionKey: string,
): Promise<EnteFile> {
    try {
        const worker = await sharedCryptoWorker();
        const {
            encryptedKey,
            keyDecryptionNonce,
            metadata,
            magicMetadata,
            pubMagicMetadata,
            ...restFileProps
        } = file;
        // The file key is encrypted with the collection key.
        const fileKey = await worker.decryptB64(
            encryptedKey,
            keyDecryptionNonce,
            collectionKey,
        );
        const fileMetadata = await worker.decryptMetadataJSON({
            encryptedDataB64: metadata.encryptedData,
            decryptionHeaderB64: metadata.decryptionHeader,
            keyB64: fileKey,
        });
        // Private and public magic metadata are optional; decrypt only when
        // present, otherwise they remain undefined in the result.
        let fileMagicMetadata: FileMagicMetadata;
        let filePubMagicMetadata: FilePublicMagicMetadata;
        if (magicMetadata?.data) {
            fileMagicMetadata = {
                ...file.magicMetadata,
                data: await worker.decryptMetadataJSON({
                    encryptedDataB64: magicMetadata.data,
                    decryptionHeaderB64: magicMetadata.header,
                    keyB64: fileKey,
                }),
            };
        }
        if (pubMagicMetadata?.data) {
            filePubMagicMetadata = {
                ...pubMagicMetadata,
                data: await worker.decryptMetadataJSON({
                    encryptedDataB64: pubMagicMetadata.data,
                    decryptionHeaderB64: pubMagicMetadata.header,
                    keyB64: fileKey,
                }),
            };
        }
        return {
            ...restFileProps,
            key: fileKey,
            // @ts-expect-error TODO: Need to use zod here.
            metadata: fileMetadata,
            magicMetadata: fileMagicMetadata,
            pubMagicMetadata: filePubMagicMetadata,
        };
    } catch (e) {
        log.error("file decryption failed", e);
        throw e;
    }
}
export async function changeFilesVisibility(
files: EnteFile[],
visibility: ItemVisibility,
@@ -544,7 +480,7 @@ export const handleFileOps = async (
case FILE_OPS_TYPE.TRASH:
try {
markTempDeleted(files);
await trashFiles(files);
await moveToTrash(files);
} catch (e) {
clearTempDeleted();
throw e;

View File

@@ -1,3 +1,5 @@
import { sharedCryptoWorker } from "@/base/crypto";
import log from "@/base/log";
import { type Metadata, ItemVisibility } from "./file-metadata";
// TODO: Audit this file.
@@ -182,15 +184,6 @@ export interface EnteFile
key: string;
}
export interface TrashRequest {
items: TrashRequestItems[];
}
export interface TrashRequestItems {
fileID: number;
collectionID: number;
}
export interface FileWithUpdatedMagicMetadata {
file: EnteFile;
updatedMagicMetadata: FileMagicMetadata;
@@ -292,6 +285,76 @@ export const fileLogID = (file: EnteFile) =>
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
`file ${file.metadata.title ?? "-"} (${file.id})`;
/**
 * Decrypt an {@link EncryptedEnteFile} obtained from remote into an in-memory
 * {@link EnteFile}.
 *
 * The file key is first decrypted using the given {@link collectionKey}, and
 * is then used to decrypt the file's metadata and, when present, its magic
 * and public magic metadata.
 *
 * @param file The encrypted file, as received from remote.
 * @param collectionKey The (base64) key of the collection the file belongs to.
 *
 * @returns The decrypted file, with `key` set to the decrypted file key.
 *
 * @throws Rethrows (after logging) any decryption failure.
 */
export async function decryptFile(
    file: EncryptedEnteFile,
    collectionKey: string,
): Promise<EnteFile> {
    try {
        const worker = await sharedCryptoWorker();
        // Separate out the encrypted fields; the remaining properties are
        // carried over to the result as is.
        const {
            encryptedKey,
            keyDecryptionNonce,
            metadata,
            magicMetadata,
            pubMagicMetadata,
            ...restFileProps
        } = file;
        const fileKey = await worker.decryptB64(
            encryptedKey,
            keyDecryptionNonce,
            collectionKey,
        );
        const fileMetadata = await worker.decryptMetadataJSON({
            encryptedDataB64: metadata.encryptedData,
            decryptionHeaderB64: metadata.decryptionHeader,
            keyB64: fileKey,
        });
        // These remain undefined when the file does not have the
        // corresponding (optional) metadata.
        let fileMagicMetadata: FileMagicMetadata;
        let filePubMagicMetadata: FilePublicMagicMetadata;
        /* eslint-disable @typescript-eslint/no-unnecessary-condition */
        if (magicMetadata?.data) {
            fileMagicMetadata = {
                ...file.magicMetadata,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore
                data: await worker.decryptMetadataJSON({
                    encryptedDataB64: magicMetadata.data,
                    decryptionHeaderB64: magicMetadata.header,
                    keyB64: fileKey,
                }),
            };
        }
        /* eslint-disable @typescript-eslint/no-unnecessary-condition */
        if (pubMagicMetadata?.data) {
            filePubMagicMetadata = {
                ...pubMagicMetadata,
                // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                // @ts-ignore
                data: await worker.decryptMetadataJSON({
                    encryptedDataB64: pubMagicMetadata.data,
                    decryptionHeaderB64: pubMagicMetadata.header,
                    keyB64: fileKey,
                }),
            };
        }
        return {
            ...restFileProps,
            key: fileKey,
            // @ts-expect-error TODO: Need to use zod here.
            metadata: fileMetadata,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            magicMetadata: fileMagicMetadata,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            pubMagicMetadata: filePubMagicMetadata,
        };
    } catch (e) {
        log.error("file decryption failed", e);
        throw e;
    }
}
/**
* Update the immutable fields of an (in-memory) {@link EnteFile} with any edits
* that the user has made to their corresponding mutable metadata fields.

View File

@@ -8,7 +8,6 @@ import {
OverflowMenuOption,
} from "@/base/components/OverflowMenu";
import { Ellipsized2LineTypography } from "@/base/components/Typography";
import { errorDialogAttributes } from "@/base/components/utils/dialog";
import { pt } from "@/base/i18n";
import log from "@/base/log";
import { formattedByteSize } from "@/new/photos/utils/units";
@@ -19,9 +18,9 @@ import SortIcon from "@mui/icons-material/Sort";
import {
Box,
Checkbox,
CircularProgress,
Divider,
IconButton,
LinearProgress,
Stack,
styled,
Tooltip,
@@ -58,7 +57,7 @@ import {
import { useAppContext } from "../types/context";
const Page: React.FC = () => {
const { showMiniDialog } = useAppContext();
const { onGenericError } = useAppContext();
const [state, dispatch] = useReducer(dedupReducer, initialDedupState);
@@ -80,18 +79,18 @@ const Page: React.FC = () => {
dispatch({ type: "dedupe" });
void removeSelectedDuplicateGroups(
state.duplicateGroups,
(duplicateGroup: DuplicateGroup) =>
dispatch({ type: "didRemoveDuplicateGroup", duplicateGroup }),
).then((allSuccess) => {
dispatch({ type: "dedupeCompleted" });
if (!allSuccess) {
const msg = pt(
"Some errors occurred when trying to remove duplicates.",
);
showMiniDialog(errorDialogAttributes(msg));
}
});
}, [state.duplicateGroups, showMiniDialog]);
(progress: number) =>
dispatch({ type: "setDedupeProgress", progress }),
)
.then((removedGroupIDs) =>
dispatch({ type: "dedupeCompleted", removedGroupIDs }),
)
.catch((e: unknown) => {
onGenericError(e);
dispatch({ type: "dedupeFailed" });
});
}, [state.duplicateGroups, onGenericError]);
const contents = (() => {
switch (state.status) {
@@ -113,7 +112,7 @@ const Page: React.FC = () => {
}
prunableCount={state.prunableCount}
prunableSize={state.prunableSize}
isDeduping={state.isDeduping}
dedupeProgress={state.dedupeProgress}
onRemoveDuplicates={handleRemoveDuplicates}
/>
);
@@ -144,8 +143,6 @@ type SortOrder = "prunableCount" | "prunableSize";
interface DedupState {
/** Status of the screen, between initial state => analysis */
status: undefined | "analyzing" | "analysisFailed" | "analysisCompleted";
/** `true` if a dedupe is in progress. */
isDeduping: boolean;
/**
* Groups of duplicates.
*
@@ -171,6 +168,11 @@ interface DedupState {
* current selection.
*/
prunableSize: number;
/**
* If a dedupe is in progress, then this will indicate its progress
* percentage (a number between 0 and 100).
*/
dedupeProgress: number | undefined;
}
type DedupAction =
@@ -181,16 +183,17 @@ type DedupAction =
| { type: "toggleSelection"; index: number }
| { type: "deselectAll" }
| { type: "dedupe" }
| { type: "didRemoveDuplicateGroup"; duplicateGroup: DuplicateGroup }
| { type: "dedupeCompleted" };
| { type: "setDedupeProgress"; progress: number }
| { type: "dedupeFailed" }
| { type: "dedupeCompleted"; removedGroupIDs: Set<string> };
const initialDedupState: DedupState = {
status: undefined,
isDeduping: false,
duplicateGroups: [],
sortOrder: "prunableSize",
prunableCount: 0,
prunableSize: 0,
dedupeProgress: undefined,
};
const dedupReducer: React.Reducer<DedupState, DedupAction> = (
@@ -262,11 +265,18 @@ const dedupReducer: React.Reducer<DedupState, DedupAction> = (
}
case "dedupe":
return { ...state, isDeduping: true };
return { ...state, dedupeProgress: 0 };
case "didRemoveDuplicateGroup": {
case "setDedupeProgress": {
return { ...state, dedupeProgress: action.progress };
}
case "dedupeFailed":
return { ...state, dedupeProgress: undefined };
case "dedupeCompleted": {
const duplicateGroups = state.duplicateGroups.filter(
({ id }) => id != action.duplicateGroup.id,
({ id }) => !action.removedGroupIDs.has(id),
);
const { prunableCount, prunableSize } =
deducePrunableCountAndSize(duplicateGroups);
@@ -275,11 +285,9 @@ const dedupReducer: React.Reducer<DedupState, DedupAction> = (
duplicateGroups,
prunableCount,
prunableSize,
dedupeProgress: undefined,
};
}
case "dedupeCompleted":
return { ...state, isDeduping: false };
}
};
@@ -611,41 +619,45 @@ const ItemGrid = styled("div", {
`,
);
interface DeduplicateButtonProps {
/**
* See {@link prunableCount} in {@link DedupState}.
*/
prunableCount: number;
/**
* See {@link prunableSize} in {@link DedupState}.
*/
prunableSize: number;
/**
* `true` if a deduplication is in progress
*/
isDeduping: DedupState["isDeduping"];
type DeduplicateButtonProps = Pick<
DedupState,
"prunableCount" | "prunableSize" | "dedupeProgress"
> & {
/**
* Called when the user presses the button to remove duplicates.
*/
onRemoveDuplicates: () => void;
}
};
const DeduplicateButton: React.FC<DeduplicateButtonProps> = ({
prunableCount,
prunableSize,
isDeduping,
dedupeProgress,
onRemoveDuplicates,
}) => (
<FocusVisibleButton
sx={{ minWidth: "min(100%, 320px)", margin: "auto" }}
disabled={prunableCount == 0 || isDeduping}
disabled={prunableCount == 0 || dedupeProgress !== undefined}
onClick={onRemoveDuplicates}
>
<Stack sx={{ gap: 1, minHeight: "45px", justifyContent: "center" }}>
{isDeduping ? (
<Typography sx={{}}>
<CircularProgress color="primary" size="24px" />
</Typography>
<Stack
sx={{
gap: 1,
// Prevent a layout shift by giving a minHeight that is larger
// than all expected states.
minHeight: "45px",
justifyContent: "center",
flex: 1,
}}
>
{dedupeProgress !== undefined ? (
<LinearProgress
sx={{ borderRadius: "4px" }}
variant={
dedupeProgress === 0 ? "indeterminate" : "determinate"
}
value={dedupeProgress}
/>
) : (
<>
<Typography>

View File

@@ -217,3 +217,48 @@ const encryptWithCollectionKey = async (
};
}),
);
/**
* Make a remote request to move the given {@link files} to trash.
*
* @param files The {@link EnteFile}s to move to trash. The API request needs
* both a file ID and a collection ID, but there should be at most one entry for
* a particular fileID in this array.
*
* Does not modify local state.
*/
export const moveToTrash = async (files: EnteFile[]) => {
    for (const filesChunk of batch(files, requestBatchSize)) {
        // The trash API wants (fileID, collectionID) pairs.
        const items = filesChunk.map(({ id, collectionID }) => ({
            fileID: id,
            collectionID,
        }));
        const res = await fetch(await apiURL("/files/trash"), {
            method: "POST",
            headers: await authenticatedRequestHeaders(),
            body: JSON.stringify({ items }),
        });
        ensureOk(res);
    }
};
/**
* Make a remote request to delete the given {@link fileIDs} from trash.
*
* Does not modify local state.
*/
export const deleteFromTrash = async (fileIDs: number[]) => {
    for (const idsChunk of batch(fileIDs, requestBatchSize)) {
        const res = await fetch(await apiURL("/trash/delete"), {
            method: "POST",
            headers: await authenticatedRequestHeaders(),
            body: JSON.stringify({ fileIDs: idsChunk }),
        });
        ensureOk(res);
    }
};

View File

@@ -1,8 +1,42 @@
import { type Collection } from "@/media/collection";
/* eslint-disable @typescript-eslint/no-unsafe-call */
// TODO: Audit this file
/* eslint-disable @typescript-eslint/no-unnecessary-condition */
/* eslint-disable @typescript-eslint/prefer-nullish-coalescing */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import { sharedCryptoWorker } from "@/base/crypto";
import log from "@/base/log";
import { apiURL } from "@/base/origins";
import {
type Collection,
type CollectionMagicMetadata,
type CollectionPublicMagicMetadata,
type CollectionShareeMagicMetadata,
type EncryptedCollection,
} from "@/media/collection";
import {
decryptFile,
type EncryptedTrashItem,
type EnteFile,
type Trash,
} from "@/media/file";
import {
getLocalTrash,
getTrashedFiles,
TRASH,
} from "@/new/photos/services/files";
import HTTPService from "@ente/shared/network/HTTPService";
import localForage from "@ente/shared/storage/localForage";
import { getData, LS_KEYS } from "@ente/shared/storage/localStorage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { getActualKey } from "@ente/shared/user";
import { isHiddenCollection } from "./collection";
const COLLECTION_TABLE = "collections";
const HIDDEN_COLLECTION_IDS = "hidden-collection-ids";
const COLLECTION_UPDATION_TIME = "collection-updation-time";
export const getLocalCollections = async (
type: "normal" | "hidden" = "normal",
@@ -18,3 +52,411 @@ export const getAllLocalCollections = async (): Promise<Collection[]> => {
(await localForage.getItem(COLLECTION_TABLE)) ?? [];
return collections;
};
// Per-collection last sync time, keyed by `${collection.id}-time` in local
// storage. Used to decide whether a collection's file diff needs refetching.
export const getCollectionLastSyncTime = async (collection: Collection) =>
    (await localForage.getItem<number>(`${collection.id}-time`)) ?? 0;

export const setCollectionLastSyncTime = async (
    collection: Collection,
    time: number,
) => await localForage.setItem<number>(`${collection.id}-time`, time);

export const removeCollectionLastSyncTime = async (collection: Collection) =>
    await localForage.removeItem(`${collection.id}-time`);

// IDs of collections that were hidden as of the last collections sync.
export const getHiddenCollectionIDs = async (): Promise<number[]> =>
    (await localForage.getItem<number[]>(HIDDEN_COLLECTION_IDS)) ?? [];

// Time up to which we have synced the collections diff with remote.
export const getCollectionUpdationTime = async (): Promise<number> =>
    (await localForage.getItem<number>(COLLECTION_UPDATION_TIME)) ?? 0;
/**
 * Sync collections with remote, then return those of the given {@link type}.
 */
export const getLatestCollections = async (
    type: "normal" | "hidden" = "normal",
): Promise<Collection[]> => {
    const all = await getAllLatestCollections();
    if (type == "normal") return all.filter((c) => !isHiddenCollection(c));
    return all.filter((c) => isHiddenCollection(c));
};
export const getAllLatestCollections = async (): Promise<Collection[]> => {
const collections = await syncCollections();
return collections;
};
/**
 * Pull the latest collection changes from remote, merge them with the locally
 * persisted collections, and write the merged result (plus the updated sync
 * time and hidden collection IDs) back to local storage.
 *
 * @returns All the (non-deleted) collections after the merge.
 */
export const syncCollections = async () => {
    const localCollections = await getAllLocalCollections();
    let lastCollectionUpdationTime = await getCollectionUpdationTime();
    const hiddenCollectionIDs = await getHiddenCollectionIDs();
    const token = getToken();
    const key = await getActualKey();
    const updatedCollections =
        (await getCollections(token, lastCollectionUpdationTime, key)) ?? [];
    // Nothing changed on remote; local state is already up to date.
    if (updatedCollections.length === 0) {
        return localCollections;
    }
    // Merge local and remote instances, keeping the instance with the larger
    // updationTime for each collection ID.
    const allCollectionsInstances = [
        ...localCollections,
        ...updatedCollections,
    ];
    const latestCollectionsInstances = new Map<number, Collection>();
    allCollectionsInstances.forEach((collection) => {
        if (
            !latestCollectionsInstances.has(collection.id) ||
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            latestCollectionsInstances.get(collection.id).updationTime <
                collection.updationTime
        ) {
            latestCollectionsInstances.set(collection.id, collection);
        }
    });

    const collections: Collection[] = [];
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    for (const [_, collection] of latestCollectionsInstances) {
        const isDeletedCollection = collection.isDeleted;
        const isNewlyHiddenCollection =
            isHiddenCollection(collection) &&
            !hiddenCollectionIDs.includes(collection.id);
        const isNewlyUnHiddenCollection =
            !isHiddenCollection(collection) &&
            hiddenCollectionIDs.includes(collection.id);
        // Deleted collections, and collections whose hidden status flipped
        // (in either direction), need a full resync of their files, so drop
        // their last sync time.
        if (
            isDeletedCollection ||
            isNewlyHiddenCollection ||
            isNewlyUnHiddenCollection
        ) {
            await removeCollectionLastSyncTime(collection);
        }
        if (isDeletedCollection) {
            continue;
        }
        collections.push(collection);
        lastCollectionUpdationTime = Math.max(
            lastCollectionUpdationTime,
            collection.updationTime,
        );
    }
    // Persist the merged collections, the new high-water mark, and the
    // recomputed set of hidden collection IDs.
    const updatedHiddenCollectionIDs = collections
        .filter((collection) => isHiddenCollection(collection))
        .map((collection) => collection.id);

    await localForage.setItem(COLLECTION_TABLE, collections);
    await localForage.setItem(
        COLLECTION_UPDATION_TIME,
        lastCollectionUpdationTime,
    );
    await localForage.setItem(
        HIDDEN_COLLECTION_IDS,
        updatedHiddenCollectionIDs,
    );
    return collections;
};
/**
 * Fetch collections that changed on remote since {@link sinceTime}.
 *
 * @param token Auth token for the API request.
 * @param sinceTime Fetch only collections updated after this time.
 * @param key The user's master key, used to derive the collection keys.
 *
 * @returns The fetched collections. Deleted ones are returned as received;
 * the rest are decrypted. Collections whose decryption failed are logged and
 * filtered out of the result.
 */
const getCollections = async (
    token: string,
    sinceTime: number,
    key: string,
): Promise<Collection[]> => {
    try {
        const resp = await HTTPService.get(
            await apiURL("/collections/v2"),
            {
                sinceTime,
            },
            { "X-Auth-Token": token },
        );
        const decryptedCollections: Collection[] = await Promise.all(
            resp.data.collections.map(
                async (collection: EncryptedCollection) => {
                    // Deleted collections have nothing to decrypt.
                    if (collection.isDeleted) {
                        return collection;
                    }
                    try {
                        return await getCollectionWithSecrets(collection, key);
                    } catch (e) {
                        // Keep going; the failed entry is filtered out below.
                        log.error(
                            `decryption failed for collection with ID ${collection.id}`,
                            e,
                        );
                        return collection;
                    }
                },
            ),
        );
        // only allow deleted or collection with key, filtering out collection whose decryption failed
        const collections = decryptedCollections.filter(
            (collection) => collection.isDeleted || collection.key,
        );
        return collections;
    } catch (e) {
        log.error("getCollections failed", e);
        throw e;
    }
};
/**
 * Decrypt an {@link EncryptedCollection} into a {@link Collection}.
 *
 * The collection key is recovered using the user's {@link masterKey}: for
 * collections the user owns it is decrypted directly, while for shared
 * collections it is unsealed with the user's keypair. The recovered key is
 * then used to decrypt the collection's name and its various (optional)
 * magic metadata fields.
 */
export const getCollectionWithSecrets = async (
    collection: EncryptedCollection,
    masterKey: string,
): Promise<Collection> => {
    const cryptoWorker = await sharedCryptoWorker();
    const userID = getData(LS_KEYS.USER).id;
    let collectionKey: string;
    if (collection.owner.id === userID) {
        // Own collection: the collection key is encrypted with our master
        // key.
        collectionKey = await cryptoWorker.decryptB64(
            collection.encryptedKey,
            collection.keyDecryptionNonce,
            masterKey,
        );
    } else {
        // Shared collection: the collection key is sealed with our public
        // key, so first recover our secret key, then unseal.
        const keyAttributes = getData(LS_KEYS.KEY_ATTRIBUTES);
        const secretKey = await cryptoWorker.decryptB64(
            keyAttributes.encryptedSecretKey,
            keyAttributes.secretKeyDecryptionNonce,
            masterKey,
        );
        collectionKey = await cryptoWorker.boxSealOpen(
            collection.encryptedKey,
            keyAttributes.publicKey,
            secretKey,
        );
    }
    // Some collections carry a plaintext name; otherwise decrypt it.
    // (Presumably the plaintext variant is legacy — confirm.)
    const collectionName =
        collection.name ||
        (await cryptoWorker.decryptToUTF8(
            collection.encryptedName,
            collection.nameDecryptionNonce,
            collectionKey,
        ));
    // The following remain undefined when the corresponding (optional)
    // metadata is not present on the collection.
    let collectionMagicMetadata: CollectionMagicMetadata;
    if (collection.magicMetadata?.data) {
        collectionMagicMetadata = {
            ...collection.magicMetadata,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            data: await cryptoWorker.decryptMetadataJSON({
                encryptedDataB64: collection.magicMetadata.data,
                decryptionHeaderB64: collection.magicMetadata.header,
                keyB64: collectionKey,
            }),
        };
    }
    let collectionPublicMagicMetadata: CollectionPublicMagicMetadata;
    if (collection.pubMagicMetadata?.data) {
        collectionPublicMagicMetadata = {
            ...collection.pubMagicMetadata,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            data: await cryptoWorker.decryptMetadataJSON({
                encryptedDataB64: collection.pubMagicMetadata.data,
                decryptionHeaderB64: collection.pubMagicMetadata.header,
                keyB64: collectionKey,
            }),
        };
    }
    let collectionShareeMagicMetadata: CollectionShareeMagicMetadata;
    if (collection.sharedMagicMetadata?.data) {
        collectionShareeMagicMetadata = {
            ...collection.sharedMagicMetadata,
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            data: await cryptoWorker.decryptMetadataJSON({
                encryptedDataB64: collection.sharedMagicMetadata.data,
                decryptionHeaderB64: collection.sharedMagicMetadata.header,
                keyB64: collectionKey,
            }),
        };
    }
    return {
        ...collection,
        name: collectionName,
        key: collectionKey,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        magicMetadata: collectionMagicMetadata,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        pubMagicMetadata: collectionPublicMagicMetadata,
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        sharedMagicMetadata: collectionShareeMagicMetadata,
    };
};
/**
 * Fetch a single collection from remote by its ID and decrypt it.
 *
 * Returns `undefined` (via a ts-ignore'd early return) when there is no auth
 * token; rethrows (after logging) any fetch or decryption failure.
 */
export const getCollection = async (
    collectionID: number,
): Promise<Collection> => {
    try {
        const token = getToken();
        if (!token) {
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            return;
        }
        const resp = await HTTPService.get(
            await apiURL(`/collections/${collectionID}`),
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            null,
            { "X-Auth-Token": token },
        );
        const key = await getActualKey();
        const collectionWithSecrets = await getCollectionWithSecrets(
            resp.data?.collection,
            key,
        );
        return collectionWithSecrets;
    } catch (e) {
        log.error("failed to get collection", e);
        throw e;
    }
};
const TRASH_TIME = "trash-time";
const DELETED_COLLECTION = "deleted-collection";
/**
 * Return the locally cached deleted collections, pruning (and persisting the
 * pruned list) any nullish entries that may have crept into local storage.
 */
export async function getLocalDeletedCollections() {
    const stored: Collection[] =
        (await localForage.getItem<Collection[]>(DELETED_COLLECTION)) || [];
    const valid = stored.filter((c) => !!c);
    const hadInvalidEntries = valid.length !== stored.length;
    if (hadInvalidEntries) {
        await localForage.setItem(DELETED_COLLECTION, valid);
    }
    return valid;
}
export async function cleanTrashCollections(fileTrash: Trash) {
const trashedCollections = await getLocalDeletedCollections();
const neededTrashCollections = new Set<number>(
fileTrash.map((item) => item.file.collectionID),
);
const filterCollections = trashedCollections.filter((item) =>
neededTrashCollections.has(item.id),
);
await localForage.setItem(DELETED_COLLECTION, filterCollections);
}
/** Time up to which we have synced the trash diff with remote (0 if never). */
async function getLastTrashSyncTime() {
    const t = await localForage.getItem<number>(TRASH_TIME);
    return t ?? 0;
}
/**
 * Sync the local trash with remote.
 *
 * Fetches the trash diff since the last sync, merges it into the locally
 * persisted trash (notifying the caller of the updated trashed files via
 * {@link setTrashedFiles}), and then prunes locally cached deleted
 * collections that are no longer referenced by any trash item.
 *
 * No-op if there is no auth token.
 */
export async function syncTrash(
    collections: Collection[],
    setTrashedFiles: (fs: EnteFile[]) => void,
): Promise<void> {
    const trash = await getLocalTrash();
    // Trash items may reference collections that have themselves been
    // deleted, so also consider the locally cached deleted collections.
    collections = [...collections, ...(await getLocalDeletedCollections())];
    const collectionMap = new Map<number, Collection>(
        collections.map((collection) => [collection.id, collection]),
    );
    if (!getToken()) {
        return;
    }
    const lastSyncTime = await getLastTrashSyncTime();

    const updatedTrash = await updateTrash(
        collectionMap,
        lastSyncTime,
        setTrashedFiles,
        trash,
    );
    await cleanTrashCollections(updatedTrash);
}
/**
 * Fetch the trash diff from remote since {@link sinceTime} and merge it into
 * {@link currentTrash}.
 *
 * The merged trash and the latest diff time are persisted locally after each
 * batch, and the caller is notified via {@link setTrashedFiles}. Collections
 * missing from the given {@link collections} map are fetched on demand and
 * cached locally as deleted collections.
 *
 * @returns The updated trash, or {@link currentTrash} unchanged if the fetch
 * failed (the error is logged, not rethrown — this is deliberately
 * best-effort).
 */
export const updateTrash = async (
    collections: Map<number, Collection>,
    sinceTime: number,
    setTrashedFiles: (fs: EnteFile[]) => void,
    currentTrash: Trash,
): Promise<Trash> => {
    try {
        let updatedTrash: Trash = [...currentTrash];
        let time = sinceTime;

        let resp;
        do {
            const token = getToken();
            if (!token) {
                break;
            }
            resp = await HTTPService.get(
                await apiURL("/trash/v2/diff"),
                {
                    sinceTime: time,
                },
                {
                    "X-Auth-Token": token,
                },
            );
            // #Perf: This can be optimized by running the decryption in parallel
            for (const trashItem of resp.data.diff as EncryptedTrashItem[]) {
                const collectionID = trashItem.file.collectionID;
                let collection = collections.get(collectionID);
                if (!collection) {
                    // Unknown collection (likely deleted); fetch it on demand
                    // and persist the expanded cache.
                    collection = await getCollection(collectionID);
                    collections.set(collectionID, collection);
                    await localForage.setItem(DELETED_COLLECTION, [
                        ...collections.values(),
                    ]);
                }
                if (!trashItem.isDeleted && !trashItem.isRestored) {
                    // Still in trash: decrypt and add (or refresh) the entry.
                    const decryptedFile = await decryptFile(
                        trashItem.file,
                        collection.key,
                    );
                    updatedTrash.push({ ...trashItem, file: decryptedFile });
                } else {
                    // Permanently deleted or restored: drop it from the trash.
                    updatedTrash = updatedTrash.filter(
                        (item) => item.file.id !== trashItem.file.id,
                    );
                }
            }

            if (resp.data.diff.length) {
                // Advance the cursor to the last item's updatedAt.
                time = resp.data.diff.slice(-1)[0].updatedAt;
            }
            // Persist progress after each batch so an interruption doesn't
            // lose work.
            setTrashedFiles(getTrashedFiles(updatedTrash));
            await localForage.setItem(TRASH, updatedTrash);
            await localForage.setItem(TRASH_TIME, time);
        } while (resp.data.hasMore);
        return updatedTrash;
    } catch (e) {
        log.error("Get trash files failed", e);
    }
    return currentTrash;
};
/**
 * Make a remote request to empty the user's trash.
 *
 * The request carries the last local trash sync time — presumably so that the
 * server only deletes items up to that point (TODO: confirm against the API).
 *
 * Does not modify local state. No-op when there is no auth token; rethrows
 * (after logging) any request failure.
 */
export const emptyTrash = async () => {
    try {
        const token = getToken();
        if (!token) {
            return;
        }
        const lastUpdatedAt = await getLastTrashSyncTime();

        await HTTPService.post(
            await apiURL("/trash/empty"),
            { lastUpdatedAt },
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            null,
            {
                "X-Auth-Token": token,
            },
        );
    } catch (e) {
        log.error("empty trash failed", e);
        throw e;
    }
};
/**
 * Clear the locally persisted trash items.
 *
 * Only modifies local state; does not make any remote requests.
 */
export const clearLocalTrash = async () => {
    await localForage.setItem(TRASH, []);
};

View File

@@ -1,14 +1,17 @@
import { assertionFailed } from "@/base/assert";
import { newID } from "@/base/id";
import { ensureLocalUser } from "@/base/local-user";
import log from "@/base/log";
import type { EnteFile } from "@/media/file";
import { metadataHash } from "@/media/file-metadata";
import { wait } from "@/utils/promise";
import { getPublicMagicMetadataSync } from "@ente/shared/file-metadata";
import { createCollectionNameByID } from "./collection";
import {
addToCollection,
createCollectionNameByID,
moveToTrash,
} from "./collection";
import { getLocalCollections } from "./collections";
import { getLocalFiles } from "./files";
import { syncFilesAndCollections } from "./sync";
/**
* A group of duplicates as shown in the UI.
@@ -27,16 +30,24 @@ export interface DuplicateGroup {
*/
items: {
/**
* The underlying collection file.
* The underlying file to delete.
*
* This is one of the files from amongst {@link collectionFiles},
* arbitrarily picked to stand in for the entire set of files in the UI.
*/
file: EnteFile;
/**
* The IDs of the collections to which this file belongs.
* All the collection files for the underlying file.
*
* This includes {@link file} too.
*/
collectionIDs: number[];
collectionFiles: EnteFile[];
/**
* The name of the collection (or of one of them, arbitrarily picked) to
* which this file belongs.
* The name of the collection to which {@link file} belongs.
*
* Like {@link file} itself, this is an arbitrary pick. Logically, none
* of the collections to which the file belongs are given more
* preference than the other.
*/
collectionName: string;
}[];
@@ -113,9 +124,9 @@ export const deduceDuplicates = async () => {
);
// Group the filtered collection files by their hashes, keeping only one
// entry per file ID, but also retaining all the collections IDs to which
// that file belongs.
const collectionIDsByFileID = new Map<number, number[]>();
// entry per file ID. We also retain all the collections files for a
// particular file ID.
const collectionFilesByFileID = new Map<number, EnteFile[]>();
const filesByHash = new Map<string, EnteFile[]>();
for (const file of filteredCollectionFiles) {
const hash = metadataHash(file.metadata);
@@ -125,15 +136,15 @@ export const deduceDuplicates = async () => {
continue;
}
const collectionIDs = collectionIDsByFileID.get(file.id);
if (!collectionIDs) {
const collectionFiles = collectionFilesByFileID.get(file.id);
if (!collectionFiles) {
// This is the first collection file we're seeing for a particular
// file ID, so also create an entry in the filesByHash map.
filesByHash.set(hash, [...(filesByHash.get(hash) ?? []), file]);
}
collectionIDsByFileID.set(file.id, [
...(collectionIDs ?? []),
file.collectionID,
collectionFilesByFileID.set(file.id, [
...(collectionFiles ?? []),
file,
]);
}
@@ -172,15 +183,15 @@ export const deduceDuplicates = async () => {
const collectionName = collectionNameByID.get(
file.collectionID,
);
const collectionIDs = collectionIDsByFileID.get(file.id);
const collectionFiles = collectionFilesByFileID.get(file.id);
// Ignore duplicates for which we do not have a collection. This
// shouldn't really happen though, so retain an assert.
if (!collectionName || !collectionIDs) {
if (!collectionName || !collectionFiles) {
assertionFailed();
return undefined;
}
return { file, collectionIDs, collectionName };
return { file, collectionFiles, collectionName };
})
.filter((item) => !!item);
if (items.length < 2) continue;
@@ -213,70 +224,106 @@ export const deduceDuplicates = async () => {
*
* This function will only process entries for which isSelected is `true`.
*
* @param onRemoveDuplicateGroup A function that is called each time a duplicate
* group is successfully removed. The duplicate group that was removed is passed
* as an argument to it.
* @param onProgress A function that is called with an estimated progress
* percentage of the operation (a number between 0 and 100).
*
* @returns true if all selected duplicate groups were successfully removed, and
* false if there were any errors.
* @returns A set containing the IDs of the duplicate groups that were removed.
*/
export const removeSelectedDuplicateGroups = async (
duplicateGroups: DuplicateGroup[],
onRemoveDuplicateGroup: (g: DuplicateGroup) => void,
onProgress: (progress: number) => void,
) => {
const selectedDuplicateGroups = duplicateGroups.filter((g) => g.isSelected);
let allSuccess = true;
// See: "Pruning duplicates" under [Note: Deduplication logic]. A tl;dr; is
//
// 1. For each selected duplicate group, determine the file to retain.
// 2. Add these to the user owned collections the other files exist in.
// 3. Delete the other files.
//
const filesToAdd = new Map<number, EnteFile[]>();
const filesToTrash: EnteFile[] = [];
for (const duplicateGroup of selectedDuplicateGroups) {
try {
await removeDuplicateGroup(duplicateGroup);
onRemoveDuplicateGroup(duplicateGroup);
} catch (e) {
log.warn("Failed to remove duplicate group", e);
allSuccess = false;
const retainedItem = duplicateGroupItemToRetain(duplicateGroup);
// Find the existing collection IDs to which this item already belongs.
const existingCollectionIDs = new Set(
retainedItem.collectionFiles.map((cf) => cf.collectionID),
);
// For each item, find all the collections to which any of the files
// (except the file we're retaining) belongs.
const collectionIDs = new Set<number>();
for (const item of duplicateGroup.items) {
// Skip the item we're retaining.
if (item.file.id == retainedItem.file.id) continue;
// Determine the collections to which any of the item's files belong.
for (const { collectionID } of item.collectionFiles) {
if (!existingCollectionIDs.has(collectionID))
collectionIDs.add(collectionID);
}
// Move the item's file to trash.
filesToTrash.push(item.file);
}
// Add the file we're retaining to these (uniqued) collections.
for (const collectionID of collectionIDs) {
filesToAdd.set(collectionID, [
...(filesToAdd.get(collectionID) ?? []),
retainedItem.file,
]);
}
}
return allSuccess;
let np = 0;
const ntotal = filesToAdd.size + filesToTrash.length ? 1 : 0 + /* sync */ 1;
const tickProgress = () => onProgress((np++ / ntotal) * 100);
// Process the adds.
const collections = await getLocalCollections("normal");
const collectionsByID = new Map(collections.map((c) => [c.id, c]));
for (const [collectionID, collectionFiles] of filesToAdd.entries()) {
await addToCollection(
collectionsByID.get(collectionID)!,
collectionFiles,
);
tickProgress();
}
// Process the removes.
if (filesToTrash.length) {
await moveToTrash(filesToTrash);
tickProgress();
}
await syncFilesAndCollections();
tickProgress();
return new Set(selectedDuplicateGroups.map((g) => g.id));
};
/**
* Retain only file from amongst these duplicates whilst keeping the existing
* collection entries intact.
*
* See: "Pruning duplicates" under [Note: Deduplication logic]. To summarize:
* 1. Find the file to retain.
* 2. Add it to the user owned collections the other files exist in.
* 3. Delete the other files.
*/
const removeDuplicateGroup = async (duplicateGroup: DuplicateGroup) => {
const fileToRetain = duplicateGroupFileToRetain(duplicateGroup);
console.log({ fileToRetain });
// const collections;
// TODO: Remove me after testing the UI
await wait(1000);
};
/**
* Find the most eligible file from amongst the duplicates to retain.
* Find the most eligible item from amongst the duplicates to retain.
*
* Give preference to files which have a caption or edited name or edited time,
* otherwise pick arbitrarily.
*/
const duplicateGroupFileToRetain = (duplicateGroup: DuplicateGroup) => {
const filesWithCaption: EnteFile[] = [];
const filesWithOtherEdits: EnteFile[] = [];
for (const { file } of duplicateGroup.items) {
const pubMM = getPublicMagicMetadataSync(file);
const duplicateGroupItemToRetain = (duplicateGroup: DuplicateGroup) => {
const itemsWithCaption: DuplicateGroup["items"] = [];
const itemsWithOtherEdits: DuplicateGroup["items"] = [];
for (const item of duplicateGroup.items) {
const pubMM = getPublicMagicMetadataSync(item.file);
if (!pubMM) continue;
if (pubMM.caption) filesWithCaption.push(file);
if (pubMM.caption) itemsWithCaption.push(item);
if (pubMM.editedName ?? pubMM.editedTime)
filesWithOtherEdits.push(file);
itemsWithOtherEdits.push(item);
}
// Duplicate group items should not be empty, so we'll get something always.
return (
filesWithCaption[0] ??
filesWithOtherEdits[0] ??
duplicateGroup.items[0]!.file
itemsWithCaption[0] ??
itemsWithOtherEdits[0] ??
duplicateGroup.items[0]!
);
};

View File

@@ -1,7 +1,22 @@
import { blobCache } from "@/base/blob-cache";
import { mergeMetadata, type EnteFile, type Trash } from "@/media/file";
import log from "@/base/log";
import { apiURL } from "@/base/origins";
import type { Collection } from "@/media/collection";
import {
decryptFile,
mergeMetadata,
type EncryptedEnteFile,
type EnteFile,
type Trash,
} from "@/media/file";
import { metadataHash } from "@/media/file-metadata";
import HTTPService from "@ente/shared/network/HTTPService";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import {
getCollectionLastSyncTime,
setCollectionLastSyncTime,
} from "./collections";
const FILES_TABLE = "files";
const HIDDEN_FILES_TABLE = "hidden-files";
@@ -37,6 +52,118 @@ export const setLocalFiles = async (
await localForage.setItem(tableName, files);
};
/**
* Fetch all files of the given {@link type}, belonging to the given
* {@link collections}, from remote and update our local database.
*
* If this is the initial read, or if the count of files we have differs from
* the state of the local database (these two are expected to be the same case),
* then the {@link onResetFiles} callback is invoked to give the caller a chance
* to bring its state up to speed.
*
* In addition to updating the local database, it also calls the provided
* {@link onFetchFiles} callback with the latest decrypted files after each
* batch the new and/or updated files are received from remote.
*
* @returns true if one or more files were updated locally, false otherwise.
*/
export const syncFiles = async (
    type: "normal" | "hidden",
    collections: Collection[],
    onResetFiles: (fs: EnteFile[]) => void,
    onFetchFiles: (fs: EnteFile[]) => void,
) => {
    const localFiles = await getLocalFiles(type);
    // Drop local files belonging to collections that no longer exist.
    let files = removeDeletedCollectionFiles(collections, localFiles);
    let didUpdateFiles = false;
    // If pruning changed the count, persist immediately and give the caller a
    // chance to reset its state.
    if (files.length !== localFiles.length) {
        await setLocalFiles(type, files);
        onResetFiles(files);
        didUpdateFiles = true;
    }
    for (const collection of collections) {
        if (!getToken()) {
            continue;
        }
        const lastSyncTime = await getCollectionLastSyncTime(collection);
        // Skip collections that haven't changed since the last sync.
        if (collection.updationTime === lastSyncTime) {
            continue;
        }

        const newFiles = await getFiles(collection, lastSyncTime, onFetchFiles);
        await clearCachedThumbnailsIfChanged(localFiles, newFiles);
        // Keep only the latest version of each file.
        files = getLatestVersionFiles([...files, ...newFiles]);
        await setLocalFiles(type, files);
        didUpdateFiles = true;
        await setCollectionLastSyncTime(collection, collection.updationTime);
    }
    return didUpdateFiles;
};
/**
 * Fetch all entries in the given {@link collection} that were created or
 * updated after {@link sinceTime}, paginating through remote's diff endpoint.
 *
 * Entries marked as deleted are passed through as is; all other entries are
 * decrypted using the collection's key.
 *
 * @param onFetchFiles Invoked after each fetched batch with the cumulative
 * list of files received (and decrypted) so far.
 *
 * @returns The cumulative list of files. If no auth token is available, the
 * partial list accumulated up to that point is returned silently.
 *
 * @throws Rethrows (after logging) any error raised by the network request or
 * decryption.
 */
export const getFiles = async (
    collection: Collection,
    sinceTime: number,
    onFetchFiles: (fs: EnteFile[]) => void,
): Promise<EnteFile[]> => {
    try {
        const decryptedFiles: EnteFile[] = [];
        let time = sinceTime;
        let resp;
        do {
            const token = getToken();
            if (!token) {
                break;
            }
            resp = await HTTPService.get(
                await apiURL("/collections/v2/diff"),
                {
                    collectionID: collection.id,
                    sinceTime: time,
                },
                {
                    "X-Auth-Token": token,
                },
            );
            const newDecryptedFilesBatch = await Promise.all(
                // eslint-disable-next-line @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
                resp.data.diff.map(async (file: EncryptedEnteFile) => {
                    if (!file.isDeleted) {
                        return await decryptFile(file, collection.key);
                    } else {
                        return file;
                    }
                }) as Promise<EnteFile>[],
            );
            // Append in place instead of respreading the entire accumulated
            // array on every batch, keeping accumulation linear overall.
            decryptedFiles.push(...newDecryptedFilesBatch);
            // Hand the callback a copy so that subsequent batches appended to
            // our accumulator don't mutate what the callback received.
            onFetchFiles([...decryptedFiles]);
            // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
            if (resp.data.diff.length) {
                // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
                time = resp.data.diff.slice(-1)[0].updationTime;
            }
            // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
        } while (resp.data.hasMore);
        return decryptedFiles;
    } catch (e) {
        log.error("Get files failed", e);
        throw e;
    }
};
/**
 * Return the subset of {@link files} that belong to one of the given
 * {@link collections}.
 *
 * Files whose {@link collectionID} does not match any collection in
 * {@link collections} (i.e. files of collections that no longer exist) are
 * dropped. The input arrays are not modified.
 */
const removeDeletedCollectionFiles = (
    collections: Collection[],
    files: EnteFile[],
) => {
    const syncedCollectionIDs = new Set(collections.map(({ id }) => id));
    return files.filter((file) => syncedCollectionIDs.has(file.collectionID));
};
/**
* Sort the given list of {@link EnteFile}s in place.
*

View File

@@ -0,0 +1,78 @@
/* eslint-disable @typescript-eslint/no-empty-function */
import { isHiddenCollection } from "@/new/photos/services/collection";
import {
getAllLatestCollections,
syncTrash,
} from "@/new/photos/services/collections";
import { syncFiles } from "@/new/photos/services/files";
import { isMLSupported, mlStatusSync, mlSync } from "@/new/photos/services/ml";
import { searchDataSync } from "@/new/photos/services/search";
import { syncSettings } from "@/new/photos/services/settings";
import { splitByPredicate } from "@/utils/array";
/**
 * Part 1 of {@link sync}. See the TODO on {@link sync} for why this is split.
 */
export const preCollectionsAndFilesSync = async () => {
    const tasks: Promise<unknown>[] = [syncSettings()];
    if (isMLSupported) tasks.push(mlStatusSync());
    await Promise.all(tasks);
};
/**
 * Sync our local state with remote on page load for web and focus for desktop.
 *
 * This function makes various API calls to fetch state from remote, using it to
 * update our local state, and triggering periodic jobs that depend on the local
 * state.
 *
 * This runs on initial page load (on both web and desktop). In addition for
 * desktop, it also runs each time the desktop app gains focus.
 *
 * TODO: This is called after we've synced the local files DBs with remote. That
 * code belongs here, but currently that state is persisted in the top level
 * gallery React component.
 *
 * So meanwhile we've split this sync into this method, which is called after
 * the file info has been synced (which can take a few minutes for large
 * libraries after initial login), and {@link preCollectionsAndFilesSync},
 * which is called before doing the file sync and thus should run immediately
 * after login.
 */
export const sync = async () => {
    await searchDataSync();
    // ML sync might take a very long time for initial indexing, so don't wait
    // for it to finish.
    void mlSync();
};
/**
 * Sync our local file and collection state with remote.
 *
 * This is a subset of {@link sync}, independently exposed for times when we
 * only want to sync collections and files (e.g. we just made some API request
 * that modified collections or files, and so now want our local state to
 * match remote again).
 *
 * The gallery component currently uses its own broken-down, bespoke version
 * of this when it syncs, since it also maintains local state variables that
 * need updating via the various callbacks that this version ignores.
 */
export const syncFilesAndCollections = async () => {
    const noop = () => {};
    const allCollections = await getAllLatestCollections();
    const [hiddenCollections, normalCollections] = splitByPredicate(
        allCollections,
        isHiddenCollection,
    );
    await syncFiles("normal", normalCollections, noop, noop);
    await syncFiles("hidden", hiddenCollections, noop, noop);
    await syncTrash(allCollections, noop);
};