regen lower

Manav Rathi
2024-07-04 13:13:44 +05:30
parent eb7611a6d1
commit 1ab844da36
3 changed files with 81 additions and 46 deletions

View File

@@ -0,0 +1,47 @@
import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import { ensure } from "@/utils/ensure";
import type { EnteFile } from "../../types/file";
import { getRenderableImage } from "../../utils/file";
import DownloadManager from "../download";

/**
 * Return a "renderable" image blob, using {@link file} if present otherwise
 * downloading the source image corresponding to {@link enteFile} from remote.
 *
 * For videos their thumbnail is used.
 */
export const renderableImageBlob = async (
    enteFile: EnteFile,
    file?: File | undefined,
) => {
    const fileType = enteFile.metadata.fileType;
    if (fileType == FILE_TYPE.VIDEO) {
        const thumbnailData = await DownloadManager.getThumbnail(enteFile);
        return new Blob([ensure(thumbnailData)]);
    } else {
        return ensure(
            file
                ? await getRenderableImage(enteFile.metadata.title, file)
                : await fetchRenderableBlob(enteFile),
        );
    }
};

const fetchRenderableBlob = async (enteFile: EnteFile) => {
    const fileStream = await DownloadManager.getFile(enteFile);
    const fileBlob = await new Response(fileStream).blob();
    const fileType = enteFile.metadata.fileType;
    if (fileType == FILE_TYPE.IMAGE) {
        return getRenderableImage(enteFile.metadata.title, fileBlob);
    } else if (fileType == FILE_TYPE.LIVE_PHOTO) {
        const { imageFileName, imageData } = await decodeLivePhoto(
            enteFile.metadata.title,
            fileBlob,
        );
        return getRenderableImage(imageFileName, new Blob([imageData]));
    } else {
        // A layer above us should've already filtered these out.
        throw new Error(`Cannot index unsupported file type ${fileType}`);
    }
};
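
For reference, a minimal sketch (not part of this commit) of how the new renderableImageBlob helper can be consumed downstream, mirroring the pattern used by regenerateFaceCrops in the next file: the blob is decoded into an ImageBitmap, processed, and closed when done. The withRenderableBitmap and processBitmap names are hypothetical.

// Hedged usage sketch, assuming only the exports shown in this commit.
// `processBitmap` is a hypothetical callback for downstream work.
import type { EnteFile } from "../../types/file";
import { renderableImageBlob } from "./blob";

const withRenderableBitmap = async (
    enteFile: EnteFile,
    processBitmap: (bitmap: ImageBitmap) => Promise<void>,
) => {
    // Decode the renderable blob into an ImageBitmap.
    const imageBitmap = await renderableImageBlob(enteFile).then(
        createImageBitmap,
    );
    try {
        await processBitmap(imageBitmap);
    } finally {
        // Always release the bitmap's backing memory.
        imageBitmap.close();
    }
};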

View File

@@ -1,8 +1,41 @@
import { blobCache } from "@/next/blob-cache";
import { ensure } from "@/utils/ensure";
import type { EnteFile } from "../../types/file";
import { renderableImageBlob } from "./blob";
import { type Box, type FaceIndex } from "./face";
import { clamp } from "./image";

/**
 * Regenerate and locally save face crops for faces in the given file.
 *
 * Face crops (the rectangular regions of the original image where a particular
 * face was detected) are not stored on remote and are generated on demand. On
 * the client where the indexing occurred, they get generated during the face
 * indexing pipeline itself. But we need to regenerate them locally if the user
 * views that item on any other client.
 *
 * @param enteFile The {@link EnteFile} whose face crops we want to generate.
 *
 * @param faceIndex The {@link FaceIndex} containing information about the faces
 * detected in the given image.
 *
 * The generated face crops are saved in a local cache and can subsequently be
 * retrieved from the {@link BlobCache} named "face-crops".
 */
export const regenerateFaceCrops = async (
    enteFile: EnteFile,
    faceIndex: FaceIndex,
) => {
    const imageBitmap =
        await renderableImageBlob(enteFile).then(createImageBitmap);
    try {
        await saveFaceCrops(imageBitmap, faceIndex);
    } finally {
        imageBitmap.close();
    }
};

/**
 * Extract and locally save the face crops (the rectangle of the original image
 * that contain the detected face) for each of the faces detected in an image.
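
The doc comment above notes that the regenerated crops land in the local BlobCache named "face-crops". A hedged sketch of the retrieval side follows; it assumes (not shown in this diff) that blobCache(name) resolves to a cache with an async get(key) returning Blob | undefined, and that crops are keyed by face ID.

// Hedged sketch of reading back a regenerated crop. Assumptions: the
// blobCache("face-crops") cache exposes get(key): Promise<Blob | undefined>,
// and each crop is stored under its face ID.
import { blobCache } from "@/next/blob-cache";

const cachedFaceCrop = async (faceID: string): Promise<Blob | undefined> => {
    const cache = await blobCache("face-crops");
    return await cache.get(faceID);
};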

View File

@@ -7,12 +7,7 @@
//
/* eslint-disable @typescript-eslint/no-non-null-assertion */
import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import DownloadManager from "@/new/photos/services/download";
import type { EnteFile } from "@/new/photos/types/file";
import { getRenderableImage } from "@/new/photos/utils/file";
import log from "@/next/log";
import { workerBridge } from "@/next/worker/worker-bridge";
import { ensure } from "@/utils/ensure";
@@ -25,6 +20,7 @@ import {
    translate,
    type Matrix as TransformationMatrix,
} from "transformation-matrix";
import { renderableImageBlob } from "./blob";
import { saveFaceCrops } from "./crop";
import {
    clamp,
@@ -259,47 +255,6 @@ export const indexFaces = async (
    }
};
/**
 * Return a "renderable" image blob, using {@link file} if present otherwise
 * downloading the source image corresponding to {@link enteFile} from remote.
 *
 * For videos their thumbnail is used.
 */
const renderableImageBlob = async (
    enteFile: EnteFile,
    file: File | undefined,
) => {
    const fileType = enteFile.metadata.fileType;
    if (fileType == FILE_TYPE.VIDEO) {
        const thumbnailData = await DownloadManager.getThumbnail(enteFile);
        return new Blob([ensure(thumbnailData)]);
    } else {
        return ensure(
            file
                ? await getRenderableImage(enteFile.metadata.title, file)
                : await fetchRenderableBlob(enteFile),
        );
    }
};

const fetchRenderableBlob = async (enteFile: EnteFile) => {
    const fileStream = await DownloadManager.getFile(enteFile);
    const fileBlob = await new Response(fileStream).blob();
    const fileType = enteFile.metadata.fileType;
    if (fileType == FILE_TYPE.IMAGE) {
        return getRenderableImage(enteFile.metadata.title, fileBlob);
    } else if (fileType == FILE_TYPE.LIVE_PHOTO) {
        const { imageFileName, imageData } = await decodeLivePhoto(
            enteFile.metadata.title,
            fileBlob,
        );
        return getRenderableImage(imageFileName, new Blob([imageData]));
    } else {
        // A layer above us should've already filtered these out.
        throw new Error(`Cannot index unsupported file type ${fileType}`);
    }
};

const indexFacesInBitmap = async (
    fileID: number,
    imageBitmap: ImageBitmap,