Integrate
@@ -16,6 +16,7 @@ import { ensure } from "@/utils/ensure";
 import { throttled, wait } from "@/utils/promise";
 import { proxy, transfer } from "comlink";
 import { isInternalUser } from "../feature-flags";
+import { getAllLocalFiles } from "../files";
 import { getRemoteFlag, updateRemoteFlag } from "../remote-store";
 import type { SearchPerson } from "../search/types";
 import type { UploadItem } from "../upload/types";
@@ -29,7 +30,6 @@ import {
 } from "./db";
 import { MLWorker } from "./worker";
 import type { CLIPMatches } from "./worker-types";
-import { getAllLocalFiles } from "../files";
 
 /**
  * Internal state of the ML subsystem.
@@ -353,12 +353,10 @@ export const wipCluster = async () => {
     _wip_searchPersons = undefined;
 
     const { clusters, cgroups } = await clusterFaces(await faceIndexes());
-    const clusterByID = new Map(
-        clusters.map((cluster) => [cluster.id, cluster]),
-    );
+    const clusterByID = new Map(clusters.map((c) => [c.id, c]));
 
     const localFiles = await getAllLocalFiles();
-    const localFilesByID = new Map(localFiles.map((f) => [f.id, f]));
+    const localFileByID = new Map(localFiles.map((f) => [f.id, f]));
 
     const result: SearchPerson[] = [];
     for (const cgroup of cgroups) {
@@ -373,7 +371,7 @@ export const wipCluster = async () => {
         }
         cgroup.clusterIDs;
         const avatarFaceFileID = fileIDFromFaceID(avatarFaceID);
-        const avatarFaceFile = localFilesByID.get(avatarFaceFileID ?? 0);
+        const avatarFaceFile = localFileByID.get(avatarFaceFileID ?? 0);
         if (!avatarFaceFileID || !avatarFaceFile) {
             assertionFailed(`Face ID ${avatarFaceID} without local file`);
             continue;
@@ -392,7 +390,9 @@ export const wipCluster = async () => {
         });
     }
 
-    const searchPersons = result.sort((a, b) => b.files.length - a.files.length);
+    const searchPersons = result.sort(
+        (a, b) => b.files.length - a.files.length,
+    );
 
     _wip_isClustering = false;
     _wip_searchPersons = searchPersons;
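The changes in wipCluster are mechanical: the clusterByID construction is collapsed onto one line, and localFilesByID is renamed to localFileByID so that, as with clusterByID, a Map keyed by ID reads as "one item by ID". A minimal sketch of the pattern, using a hypothetical Item type rather than anything from this codebase:

// Illustration only: Item is a made-up type, not part of the ML code.
interface Item {
    id: number;
    label: string;
}

const items: Item[] = [
    { id: 1, label: "alpha" },
    { id: 2, label: "beta" },
];

// Build the lookup once, then resolve IDs in O(1) instead of scanning the array.
const itemByID = new Map(items.map((item) => [item.id, item]));

console.log(itemByID.get(2)?.label); // "beta"
console.log(itemByID.get(9)); // undefined, so callers must handle the miss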
@@ -246,14 +246,14 @@ export class MLWorker {
     private async backfillQ() {
         const userID = ensure(await getKVN("userID"));
         // Find files that our local DB thinks need syncing.
-        const filesByID = await syncWithLocalFilesAndGetFilesToIndex(
+        const fileByID = await syncWithLocalFilesAndGetFilesToIndex(
             userID,
             200,
         );
-        if (!filesByID.size) return [];
+        if (!fileByID.size) return [];
 
         // Fetch their existing ML data (if any).
-        const mlDataByID = await fetchMLData(filesByID);
+        const mlDataByID = await fetchMLData(fileByID);
 
         // If the number of files for which remote gave us data is more than 50%
         // of what we asked of it, assume we are "fetching", not "indexing".
@@ -263,10 +263,10 @@ export class MLWorker {
         if (this.state != "indexing" && this.state != "fetching")
             assertionFailed(`Unexpected state ${this.state}`);
         this.state =
-            mlDataByID.size * 2 > filesByID.size ? "fetching" : "indexing";
+            mlDataByID.size * 2 > fileByID.size ? "fetching" : "indexing";
 
         // Return files after annotating them with their existing ML data.
-        return Array.from(filesByID, ([id, file]) => ({
+        return Array.from(fileByID, ([id, file]) => ({
             enteFile: file,
             uploadItem: undefined,
             remoteMLData: mlDataByID.get(id),
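The fetching-vs-indexing heuristic can be made concrete with numbers: if the batch contained 200 files and remote already had ML data for 150 of them, then 150 * 2 = 300 > 200, so the worker presents itself as "fetching"; if remote only knew about 40 of them, then 40 * 2 = 80 <= 200 and it stays "indexing". A rough standalone sketch of that decision (deriveState is a hypothetical name, not a method on MLWorker):

// Hypothetical helper mirroring the ternary above; illustration only.
type WorkerState = "fetching" | "indexing";

const deriveState = (remoteHits: number, batchSize: number): WorkerState =>
    remoteHits * 2 > batchSize ? "fetching" : "indexing";

console.log(deriveState(150, 200)); // "fetching" (300 > 200)
console.log(deriveState(40, 200)); // "indexing"  (80 <= 200)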
@@ -364,20 +364,20 @@ const syncWithLocalFilesAndGetFilesToIndex = async (
     const isIndexable = (f: EnteFile) => f.ownerID == userID;
 
     const localFiles = await getAllLocalFiles();
-    const localFilesByID = new Map(
+    const localFileByID = new Map(
         localFiles.filter(isIndexable).map((f) => [f.id, f]),
     );
 
     const localTrashFileIDs = (await getLocalTrashedFiles()).map((f) => f.id);
 
     await updateAssumingLocalFiles(
-        Array.from(localFilesByID.keys()),
+        Array.from(localFileByID.keys()),
         localTrashFileIDs,
     );
 
     const fileIDsToIndex = await indexableFileIDs(count);
     return new Map(
-        fileIDsToIndex.map((id) => [id, ensure(localFilesByID.get(id))]),
+        fileIDsToIndex.map((id) => [id, ensure(localFileByID.get(id))]),
     );
 };
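The last hunk is the same rename inside syncWithLocalFilesAndGetFilesToIndex, whose return value is again a Map from file ID to file, built by resolving each ID reported by indexableFileIDs against localFileByID. A small self-contained sketch of that final resolution step, with a hypothetical ensure standing in for the helper imported from "@/utils/ensure" and strings standing in for EnteFile values:

// Illustration only: a stand-in ensure that fails loudly on a missing value.
const ensure = <T>(v: T | undefined): T => {
    if (v === undefined) throw new Error("Required value was undefined");
    return v;
};

const localFileByID = new Map<number, string>([
    [1, "IMG_0001"],
    [2, "IMG_0002"],
    [3, "IMG_0003"],
]);
const fileIDsToIndex = [3, 1];

// Resolve each ID to its file; an ID with no local counterpart throws instead
// of silently producing an undefined entry.
const toIndex = new Map(
    fileIDsToIndex.map((id) => [id, ensure(localFileByID.get(id))]),
);
console.log([...toIndex.keys()]); // [3, 1]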