diff --git a/web/apps/photos/src/services/machineLearning/machineLearningService.ts b/web/apps/photos/src/services/machineLearning/machineLearningService.ts
index ea180d6fdf..4ac17dbb8c 100644
--- a/web/apps/photos/src/services/machineLearning/machineLearningService.ts
+++ b/web/apps/photos/src/services/machineLearning/machineLearningService.ts
@@ -19,7 +19,6 @@ import mlIDbStorage, {
 import {
     BlurDetectionMethod,
     BlurDetectionService,
-    ClipEmbedding,
     ClusteringMethod,
     ClusteringService,
     Face,
@@ -761,12 +760,10 @@ class ServerFileMl {
     public height?: number;
     public width?: number;
     public faceEmbedding: ServerFaceEmbeddings;
-    public clipEmbedding?: ClipEmbedding;
 
     public constructor(
         fileID: number,
         faceEmbedding: ServerFaceEmbeddings,
-        clipEmbedding?: ClipEmbedding,
         height?: number,
         width?: number,
     ) {
@@ -774,7 +771,6 @@ class ServerFileMl {
         this.height = height;
         this.width = width;
         this.faceEmbedding = faceEmbedding;
-        this.clipEmbedding = clipEmbedding;
     }
 }
 
@@ -895,7 +891,6 @@ function LocalFileMlDataToServerFileMl(
     return new ServerFileMl(
         localFileMlData.fileId,
         faceEmbeddings,
-        undefined,
         imageDimensions.height,
         imageDimensions.width,
     );
diff --git a/web/apps/photos/src/services/ml/types.ts b/web/apps/photos/src/services/ml/types.ts
index 2ee42b36a3..422cf9d4aa 100644
--- a/web/apps/photos/src/services/ml/types.ts
+++ b/web/apps/photos/src/services/ml/types.ts
@@ -329,8 +329,3 @@ export interface MachineLearningWorker {
 
     close(): void;
 }
-
-export interface ClipEmbedding {
-    embedding: Float32Array;
-    model: "ggml-clip" | "onnx-clip";
-}
diff --git a/web/apps/photos/src/types/embedding.tsx b/web/apps/photos/src/types/embedding.tsx
index d4719986bc..161244c159 100644
--- a/web/apps/photos/src/types/embedding.tsx
+++ b/web/apps/photos/src/types/embedding.tsx
@@ -1,9 +1,9 @@
 /**
- * The embeddings models that we support.
+ * The embeddings that we (the current client) knows how to handle.
  *
  * This is an exhaustive set of values we pass when PUT-ting encrypted
  * embeddings on the server. However, we should be prepared to receive an
- * {@link EncryptedEmbedding} with a model value distinct from one of these.
+ * {@link EncryptedEmbedding} with a model value different from these.
  */
 export type EmbeddingModel = "onnx-clip" | "file-ml-clip-face";
 
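Note (not part of the diff above): a minimal sketch of what ServerFileMl and its call site look like after the clipEmbedding removal. The placeholder ServerFaceEmbeddings type and the example values are assumptions for illustration only, not the real definitions in machineLearningService.ts.

// Sketch only: mirrors the post-change ServerFileMl shape shown in the diff.
// The type and values below are placeholders, not the repo's actual code.
type ServerFaceEmbeddings = { faces: unknown[]; version: number }; // placeholder

class ServerFileMl {
    public fileID: number;
    public height?: number;
    public width?: number;
    public faceEmbedding: ServerFaceEmbeddings;

    public constructor(
        fileID: number,
        faceEmbedding: ServerFaceEmbeddings,
        height?: number,
        width?: number,
    ) {
        this.fileID = fileID;
        this.height = height;
        this.width = width;
        this.faceEmbedding = faceEmbedding;
    }
}

// Call sites drop the `undefined` that previously stood in for the removed
// clipEmbedding parameter; the dimensions now follow the face embeddings directly.
const example = new ServerFileMl(
    42, // placeholder file ID
    { faces: [], version: 1 }, // placeholder face embeddings
    1080,
    1920,
);
console.log(example.fileID, example.width);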