From 1fcc425779d8a1bf83f94f8dddbf45cb41fc8ae8 Mon Sep 17 00:00:00 2001
From: Manav Rathi
Date: Tue, 27 Aug 2024 18:31:07 +0530
Subject: [PATCH] Integrate

---
 web/packages/new/photos/services/ml/index.ts  | 14 +++++++-------
 web/packages/new/photos/services/ml/worker.ts | 16 ++++++++--------
 2 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/web/packages/new/photos/services/ml/index.ts b/web/packages/new/photos/services/ml/index.ts
index 192f544dd9..6ad354a25a 100644
--- a/web/packages/new/photos/services/ml/index.ts
+++ b/web/packages/new/photos/services/ml/index.ts
@@ -16,6 +16,7 @@ import { ensure } from "@/utils/ensure";
 import { throttled, wait } from "@/utils/promise";
 import { proxy, transfer } from "comlink";
 import { isInternalUser } from "../feature-flags";
+import { getAllLocalFiles } from "../files";
 import { getRemoteFlag, updateRemoteFlag } from "../remote-store";
 import type { SearchPerson } from "../search/types";
 import type { UploadItem } from "../upload/types";
@@ -29,7 +30,6 @@ import {
 } from "./db";
 import { MLWorker } from "./worker";
 import type { CLIPMatches } from "./worker-types";
-import { getAllLocalFiles } from "../files";
 
 /**
  * Internal state of the ML subsystem.
@@ -353,12 +353,10 @@ export const wipCluster = async () => {
     _wip_searchPersons = undefined;
 
     const { clusters, cgroups } = await clusterFaces(await faceIndexes());
-    const clusterByID = new Map(
-        clusters.map((cluster) => [cluster.id, cluster]),
-    );
+    const clusterByID = new Map(clusters.map((c) => [c.id, c]));
 
     const localFiles = await getAllLocalFiles();
-    const localFilesByID = new Map(localFiles.map((f) => [f.id, f]));
+    const localFileByID = new Map(localFiles.map((f) => [f.id, f]));
 
     const result: SearchPerson[] = [];
     for (const cgroup of cgroups) {
@@ -373,7 +371,7 @@ export const wipCluster = async () => {
         }
         cgroup.clusterIDs;
         const avatarFaceFileID = fileIDFromFaceID(avatarFaceID);
-        const avatarFaceFile = localFilesByID.get(avatarFaceFileID ?? 0);
+        const avatarFaceFile = localFileByID.get(avatarFaceFileID ?? 0);
         if (!avatarFaceFileID || !avatarFaceFile) {
             assertionFailed(`Face ID ${avatarFaceID} without local file`);
             continue;
@@ -392,7 +390,9 @@ export const wipCluster = async () => {
         });
     }
 
-    const searchPersons = result.sort((a, b) => b.files.length - a.files.length);
+    const searchPersons = result.sort(
+        (a, b) => b.files.length - a.files.length,
+    );
 
     _wip_isClustering = false;
     _wip_searchPersons = searchPersons;
diff --git a/web/packages/new/photos/services/ml/worker.ts b/web/packages/new/photos/services/ml/worker.ts
index fdcb1ac595..f21f58d85a 100644
--- a/web/packages/new/photos/services/ml/worker.ts
+++ b/web/packages/new/photos/services/ml/worker.ts
@@ -246,14 +246,14 @@ export class MLWorker {
     private async backfillQ() {
         const userID = ensure(await getKVN("userID"));
         // Find files that our local DB thinks need syncing.
-        const filesByID = await syncWithLocalFilesAndGetFilesToIndex(
+        const fileByID = await syncWithLocalFilesAndGetFilesToIndex(
             userID,
             200,
         );
-        if (!filesByID.size) return [];
+        if (!fileByID.size) return [];
 
         // Fetch their existing ML data (if any).
-        const mlDataByID = await fetchMLData(filesByID);
+        const mlDataByID = await fetchMLData(fileByID);
 
         // If the number of files for which remote gave us data is more than 50%
         // of what we asked of it, assume we are "fetching", not "indexing".
@@ -263,10 +263,10 @@ export class MLWorker {
         if (this.state != "indexing" && this.state != "fetching")
             assertionFailed(`Unexpected state ${this.state}`);
         this.state =
-            mlDataByID.size * 2 > filesByID.size ? "fetching" : "indexing";
+            mlDataByID.size * 2 > fileByID.size ? "fetching" : "indexing";
 
         // Return files after annotating them with their existing ML data.
-        return Array.from(filesByID, ([id, file]) => ({
+        return Array.from(fileByID, ([id, file]) => ({
             enteFile: file,
             uploadItem: undefined,
             remoteMLData: mlDataByID.get(id),
@@ -364,20 +364,20 @@ const syncWithLocalFilesAndGetFilesToIndex = async (
     const isIndexable = (f: EnteFile) => f.ownerID == userID;
 
     const localFiles = await getAllLocalFiles();
-    const localFilesByID = new Map(
+    const localFileByID = new Map(
         localFiles.filter(isIndexable).map((f) => [f.id, f]),
     );
 
     const localTrashFileIDs = (await getLocalTrashedFiles()).map((f) => f.id);
 
     await updateAssumingLocalFiles(
-        Array.from(localFilesByID.keys()),
+        Array.from(localFileByID.keys()),
         localTrashFileIDs,
     );
 
     const fileIDsToIndex = await indexableFileIDs(count);
 
     return new Map(
-        fileIDsToIndex.map((id) => [id, ensure(localFilesByID.get(id))]),
+        fileIDsToIndex.map((id) => [id, ensure(localFileByID.get(id))]),
     );
 };
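
Note (reviewer sketch, not part of the patch): the renames above standardize on a
singular "<thing>ByID" naming for Maps keyed by id (localFilesByID -> localFileByID,
filesByID -> fileByID). Below is a minimal, self-contained TypeScript illustration of
that construction pattern; the Item type and sample data are hypothetical and not from
the Ente codebase.

    interface Item {
        id: number;
        name: string;
    }

    const items: Item[] = [
        { id: 1, name: "alpha" },
        { id: 2, name: "beta" },
    ];

    // Build a Map keyed by each item's id; the singular name reflects that each
    // entry maps one id to one item (mirrors `localFileByID` / `clusterByID`).
    const itemByID = new Map(items.map((item) => [item.id, item]));

    console.log(itemByID.get(1)?.name); // "alpha"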