[web] Handle video durations - WIP (#5910)

The change is complete, but currently only enabled for internal users.
Manav Rathi
2025-05-14 19:57:39 +05:30
committed by GitHub
15 changed files with 425 additions and 123 deletions

View File

@@ -32,7 +32,7 @@ import {
openLogDirectory,
selectDirectory,
} from "./services/dir";
import { ffmpegExec } from "./services/ffmpeg";
import { ffmpegDetermineVideoDuration, ffmpegExec } from "./services/ffmpeg";
import {
fsExists,
fsFindFiles,
@@ -182,10 +182,10 @@ export const attachIPCHandlers = () => {
"generateImageThumbnail",
(
_,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
maxDimension: number,
maxSize: number,
) => generateImageThumbnail(dataOrPathOrZipItem, maxDimension, maxSize),
) => generateImageThumbnail(pathOrZipItem, maxDimension, maxSize),
);
ipcMain.handle(
@@ -193,9 +193,15 @@ export const attachIPCHandlers = () => {
(
_,
command: FFmpegCommand,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
outputFileExtension: string,
) => ffmpegExec(command, dataOrPathOrZipItem, outputFileExtension),
) => ffmpegExec(command, pathOrZipItem, outputFileExtension),
);
ipcMain.handle(
"ffmpegDetermineVideoDuration",
(_, pathOrZipItem: string | ZipItem) =>
ffmpegDetermineVideoDuration(pathOrZipItem),
);
// - Upload

View File

@@ -45,6 +45,8 @@ export interface FFmpegUtilityProcess {
outputPathPrefix: string,
outputUploadURL: string,
) => Promise<FFmpegGenerateHLSPlaylistAndSegmentsResult | undefined>;
ffmpegDetermineVideoDuration: (inputFilePath: string) => Promise<number>;
}
log.debugString("Started ffmpeg utility process");
@@ -57,6 +59,7 @@ process.parentPort.once("message", (e) => {
ffmpegExec,
ffmpegConvertToMP4,
ffmpegGenerateHLSPlaylistAndSegments,
ffmpegDetermineVideoDuration,
} satisfies FFmpegUtilityProcess,
messagePortMainEndpoint(e.ports[0]!),
);
@@ -548,6 +551,7 @@ interface VideoCharacteristics {
isBT709: boolean;
bitrate: number | undefined;
}
/**
* Heuristically determine information about the video at the given
* {@link inputFilePath}:
@@ -821,3 +825,59 @@ const uploadVideoSegments = async (
}
}
};
/**
* A regex that matches the first line of the form
*
* Duration: 00:00:03.13, start: 0.000000, bitrate: 16088 kb/s
*
* The part after "Duration: " up to the first character that is neither a
* digit nor a colon forms the first capture group, while the dot and the
* digits after it form the optional second capture group.
*/
const videoDurationLineRegex = /\s\sDuration: ([0-9:]+)(.[0-9]+)?/;
/**
* Determine the duration of the video at the given {@link inputFilePath}.
*
* While the detection works for all known cases, it is still heuristic because
* it uses ffmpeg output instead of ffprobe (which we don't have access to).
* See: [Note: Parsing CLI output might break on ffmpeg updates].
*/
export const ffmpegDetermineVideoDuration = async (inputFilePath: string) => {
const videoInfo = await pseudoFFProbeVideo(inputFilePath);
const matches = videoDurationLineRegex.exec(videoInfo);
const fail = () => {
throw new Error(`Cannot parse video duration '${matches?.at(0)}'`);
};
// The HH:mm:ss.
const ints = (matches?.at(1) ?? "")
.split(":")
.map((s) => parseInt(s, 10) || 0);
let [h, m, s] = [0, 0, 0];
switch (ints.length) {
case 1:
s = ints[0]!;
break;
case 2:
m = ints[0]!;
s = ints[1]!;
break;
case 3:
h = ints[0]!;
m = ints[1]!;
s = ints[2]!;
break;
default:
fail();
}
// Optional subseconds.
const ss = parseFloat(`0${matches?.at(2) ?? ""}`);
// Follow the same round-up behaviour that the web side uses.
const duration = Math.ceil(h * 3600 + m * 60 + s + ss);
if (!duration) fail();
return duration;
};
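
To make the parsing above concrete, here is a small standalone sketch (not part of the commit) that runs the same regex against the sample line from the doc comment; the commented values are what the code above would compute:

// Illustration only: videoDurationLineRegex applied to the documented
// sample line from ffmpeg's log output.
const sampleLine =
    "  Duration: 00:00:03.13, start: 0.000000, bitrate: 16088 kb/s";
const match = /\s\sDuration: ([0-9:]+)(.[0-9]+)?/.exec(sampleLine);
console.log(match?.at(1)); // "00:00:03" -> h = 0, m = 0, s = 3
console.log(match?.at(2)); // ".13"      -> subseconds 0.13
console.log(Math.ceil(0 * 3600 + 0 * 60 + 3 + 0.13)); // 4 (rounded up)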

View File

@@ -8,7 +8,7 @@ import fs from "node:fs/promises";
import type { FFmpegCommand, ZipItem } from "../../types/ipc";
import {
deleteTempFileIgnoringErrors,
makeFileForDataOrStreamOrPathOrZipItem,
makeFileForStreamOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
import type { FFmpegUtilityProcess } from "./ffmpeg-worker";
@@ -29,27 +29,49 @@ export const ffmpegUtilityProcess = () =>
*/
export const ffmpegExec = async (
command: FFmpegCommand,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
outputFileExtension: string,
): Promise<Uint8Array> => {
): Promise<Uint8Array> =>
withInputFile(pathOrZipItem, async (worker, inputFilePath) => {
const outputFilePath = await makeTempFilePath(outputFileExtension);
try {
await worker.ffmpegExec(command, inputFilePath, outputFilePath);
return await fs.readFile(outputFilePath);
} finally {
await deleteTempFileIgnoringErrors(outputFilePath);
}
});
export const withInputFile = async <T>(
pathOrZipItem: string | ZipItem,
f: (worker: FFmpegUtilityProcess, inputFilePath: string) => Promise<T>,
): Promise<T> => {
const worker = await ffmpegUtilityProcess();
const {
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
writeToTemporaryFile: writeToTemporaryInputFile,
} = await makeFileForDataOrStreamOrPathOrZipItem(dataOrPathOrZipItem);
} = await makeFileForStreamOrPathOrZipItem(pathOrZipItem);
const outputFilePath = await makeTempFilePath(outputFileExtension);
try {
await writeToTemporaryInputFile();
await worker.ffmpegExec(command, inputFilePath, outputFilePath);
return await fs.readFile(outputFilePath);
return await f(worker, inputFilePath);
} finally {
if (isInputFileTemporary)
await deleteTempFileIgnoringErrors(inputFilePath);
await deleteTempFileIgnoringErrors(outputFilePath);
}
};
/**
* Implement the IPC "ffmpegDetermineVideoDuration" contract, writing the input
* to a temporary file if needed, and then forwarding the call to the
* {@link ffmpegDetermineVideoDuration} implementation running in the utility
* process.
*/
export const ffmpegDetermineVideoDuration = async (
pathOrZipItem: string | ZipItem,
): Promise<number> =>
withInputFile(pathOrZipItem, async (worker, inputFilePath) =>
worker.ffmpegDetermineVideoDuration(inputFilePath),
);
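
A quick usage sketch (not part of the commit; the paths are hypothetical) of the wrapper above, passing a (zip path, entry name) tuple as the input:

// Illustration only (hypothetical paths): determining the duration of a
// video stored inside a zip by passing a ZipItem tuple. A plain string
// path to a standalone file on disk would work the same way.
const exampleUsage = async () => {
    const duration = await ffmpegDetermineVideoDuration([
        "/path/to/export.zip", // hypothetical zip file on disk
        "videos/clip.mp4", // hypothetical entry within that zip
    ]);
    console.log(`video duration: ${duration} s`);
};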

View File

@@ -6,7 +6,7 @@ import { type ZipItem } from "../../types/ipc";
import { execAsync, isDev } from "../utils/electron";
import {
deleteTempFileIgnoringErrors,
makeFileForDataOrStreamOrPathOrZipItem,
makeFileForStreamOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
@@ -61,7 +61,7 @@ const vipsPath = () =>
);
export const generateImageThumbnail = async (
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
maxDimension: number,
maxSize: number,
): Promise<Uint8Array> => {
@@ -69,7 +69,7 @@ export const generateImageThumbnail = async (
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
writeToTemporaryFile: writeToTemporaryInputFile,
} = await makeFileForDataOrStreamOrPathOrZipItem(dataOrPathOrZipItem);
} = await makeFileForStreamOrPathOrZipItem(pathOrZipItem);
const outputFilePath = await makeTempFilePath("jpeg");

View File

@@ -14,7 +14,7 @@ import { writeStream } from "./utils/stream";
import {
deleteTempFile,
deleteTempFileIgnoringErrors,
makeFileForDataOrStreamOrPathOrZipItem,
makeFileForStreamOrPathOrZipItem,
makeTempFilePath,
} from "./utils/temp";
@@ -292,7 +292,7 @@ const handleGenerateHLSWrite = async (
const objectUploadURL = params.get("objectUploadURL");
if (!objectUploadURL) throw new Error("Missing objectUploadURL");
let inputItem: Parameters<typeof makeFileForDataOrStreamOrPathOrZipItem>[0];
let inputItem: Parameters<typeof makeFileForStreamOrPathOrZipItem>[0];
const path = params.get("path");
if (path) {
inputItem = path;
@@ -314,7 +314,7 @@ const handleGenerateHLSWrite = async (
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
writeToTemporaryFile: writeToTemporaryInputFile,
} = await makeFileForDataOrStreamOrPathOrZipItem(inputItem);
} = await makeFileForStreamOrPathOrZipItem(inputItem);
const outputFilePathPrefix = await makeTempFilePath();
let result: FFmpegGenerateHLSPlaylistAndSegmentsResult | undefined;

View File

@@ -80,8 +80,8 @@ export const deleteTempFileIgnoringErrors = async (tempFilePath: string) => {
}
};
/** The result of {@link makeFileForDataOrStreamOrPathOrZipItem}. */
interface FileForDataOrPathOrZipItem {
/** The result of {@link makeFileForStreamOrPathOrZipItem}. */
interface FileForStreamOrPathOrZipItem {
/**
* The path to the file (possibly temporary).
*/
@@ -107,13 +107,13 @@ interface FileForDataOrPathOrZipItem {
* that needs to be deleted after processing, and a function to write the given
* {@link item} into that temporary file if needed.
*
* @param item The contents of the file (bytes), or a {@link ReadableStream}
* with the contents of the file, or the path to an existing file, or a (path to
* a zip file, name of an entry within that zip file) tuple.
* @param item A {@link ReadableStream} with the contents of the file, or the
* path to an existing file, or a (path to a zip file, name of an entry within
* that zip file) tuple.
*/
export const makeFileForDataOrStreamOrPathOrZipItem = async (
item: Uint8Array | ReadableStream | string | ZipItem,
): Promise<FileForDataOrPathOrZipItem> => {
export const makeFileForStreamOrPathOrZipItem = async (
item: ReadableStream | string | ZipItem,
): Promise<FileForStreamOrPathOrZipItem> => {
let path: string;
let isFileTemporary: boolean;
let writeToTemporaryFile = async () => {
@@ -126,9 +126,7 @@ export const makeFileForDataOrStreamOrPathOrZipItem = async (
} else {
path = await makeTempFilePath();
isFileTemporary = true;
if (item instanceof Uint8Array) {
writeToTemporaryFile = () => fs.writeFile(path, item);
} else if (item instanceof ReadableStream) {
if (item instanceof ReadableStream) {
writeToTemporaryFile = () => writeStream(path, item);
} else {
writeToTemporaryFile = async () => {

View File

@@ -193,29 +193,32 @@ const convertToJPEG = (imageData: Uint8Array) =>
ipcRenderer.invoke("convertToJPEG", imageData);
const generateImageThumbnail = (
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
maxDimension: number,
maxSize: number,
) =>
ipcRenderer.invoke(
"generateImageThumbnail",
dataOrPathOrZipItem,
pathOrZipItem,
maxDimension,
maxSize,
);
const ffmpegExec = (
command: FFmpegCommand,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
outputFileExtension: string,
) =>
ipcRenderer.invoke(
"ffmpegExec",
command,
dataOrPathOrZipItem,
pathOrZipItem,
outputFileExtension,
);
const ffmpegDetermineVideoDuration = (pathOrZipItem: string | ZipItem) =>
ipcRenderer.invoke("ffmpegDetermineVideoDuration", pathOrZipItem);
// - Utility processes
const triggerCreateUtilityProcess = (type: UtilityProcessType) => {
@@ -392,6 +395,7 @@ contextBridge.exposeInMainWorld("electron", {
convertToJPEG,
generateImageThumbnail,
ffmpegExec,
ffmpegDetermineVideoDuration,
// - ML

View File

@@ -9,6 +9,7 @@ import { isSameDay } from "ente-base/date";
import { formattedDateRelative } from "ente-base/i18n-date";
import { downloadManager } from "ente-gallery/services/download";
import { EnteFile, enteFileDeletionDate } from "ente-media/file";
import { fileDurationString } from "ente-media/file-metadata";
import { FileType } from "ente-media/file-type";
import {
GAP_BTW_TILES,
@@ -1198,15 +1199,13 @@ const FileThumbnail: React.FC<FileThumbnailProps> = ({
) : (
<LoadingThumbnail />
)}
{file.metadata.fileType === FileType.livePhoto ? (
{file.metadata.fileType == FileType.livePhoto ? (
<FileTypeIndicatorOverlay>
<AlbumOutlinedIcon />
<AlbumOutlinedIcon fontSize="small" />
</FileTypeIndicatorOverlay>
) : (
file.metadata.fileType === FileType.video && (
<FileTypeIndicatorOverlay>
<PlayCircleOutlineOutlinedIcon />
</FileTypeIndicatorOverlay>
file.metadata.fileType == FileType.video && (
<VideoDurationOverlay duration={fileDurationString(file)} />
)
)}
{selected && <SelectedOverlay />}
@@ -1400,3 +1399,19 @@ const SelectedOverlay = styled(Overlay)(
border-radius: 4px;
`,
);
interface VideoDurationOverlayProps {
duration: string | undefined;
}
const VideoDurationOverlay: React.FC<VideoDurationOverlayProps> = ({
duration,
}) => (
<FileTypeIndicatorOverlay>
{duration ? (
<Typography variant="mini">{duration}</Typography>
) : (
<PlayCircleOutlineOutlinedIcon fontSize="small" />
)}
</FileTypeIndicatorOverlay>
);

View File

@@ -316,10 +316,10 @@ export interface Electron {
* The behaviour is OS dependent. On macOS we use the `sips` utility, while
* on Linux and Windows we use a `vips` bundled with our desktop app.
*
* @param dataOrPathOrZipItem The file whose thumbnail we want to generate.
* It can be provided as raw image data (the contents of the image file), or
* the path to the image file, or a tuple containing the path of the zip
* file along with the name of an entry in it.
* @param pathOrZipItem The file whose thumbnail we want to generate. It can
* be provided as the path to the image file, or a tuple containing the path
* of the zip file along with the name of an entry in it.
*
* @param maxDimension The maximum width or height of the generated
* thumbnail.
@@ -329,14 +329,13 @@ export interface Electron {
* @returns JPEG data of the generated thumbnail.
*/
generateImageThumbnail: (
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
maxDimension: number,
maxSize: number,
) => Promise<Uint8Array>;
/**
* Execute a FFmpeg {@link command} on the given
* {@link dataOrPathOrZipItem}.
* Execute a FFmpeg {@link command} on the given {@link pathOrZipItem}.
*
* This executes the command using a FFmpeg executable we bundle with our
* desktop app. We also have a Wasm FFmpeg implementation that we use when
@@ -349,11 +348,11 @@ export interface Electron {
* (respectively {@link inputPathPlaceholder},
* {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}).
*
* @param dataOrPathOrZipItem The bytes of the input file, or the path to
* the input file on the user's local disk, or the path to a zip file on the
* user's disk and the name of an entry in it. In all three cases, the data
* gets serialized to a temporary file, and then that path gets substituted
* in the FFmpeg {@link command} in lieu of {@link inputPathPlaceholder}.
* @param pathOrZipItem The path to the input file on the user's local disk,
* or the path to a zip file on the user's disk and the name of an entry in
* it. In the second case, the data gets serialized to a temporary file, and
* then that temporary path (or, in the first case, the original path) gets
* substituted in the FFmpeg {@link command} in lieu of
* {@link inputPathPlaceholder}.
*
* @param outputFileExtension The extension (without the dot, e.g. "jpeg")
* to use for the output file that we ask FFmpeg to create in
@@ -366,10 +365,28 @@ export interface Electron {
*/
ffmpegExec: (
command: FFmpegCommand,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
pathOrZipItem: string | ZipItem,
outputFileExtension: string,
) => Promise<Uint8Array>;
/**
* Determine the duration (in seconds) of the video present at
* {@link pathOrZipItem} using ffmpeg.
*
* This is a bespoke variant of {@link ffmpegExec} for the sole purpose of
* retrieving the video duration.
*
* @param pathOrZipItem The input file whose duration we want to determine.
* For more details, see the documentation of the {@link ffmpegExec}
* parameter with the same name.
*
* @returns The duration (in seconds) of the video referred to by
* {@link pathOrZipItem}.
*/
ffmpegDetermineVideoDuration: (
pathOrZipItem: string | ZipItem,
) => Promise<number>;
// - Utility process
/**

View File

@@ -2,7 +2,7 @@ import { ensureElectron } from "ente-base/electron";
import log from "ente-base/log";
import type { Electron } from "ente-base/types/ipc";
import {
toDataOrPathOrZipEntry,
toPathOrZipEntry,
type FileSystemUploadItem,
type UploadItem,
} from "ente-gallery/services/upload";
@@ -20,7 +20,7 @@ import {
inputPathPlaceholder,
outputPathPlaceholder,
} from "./constants";
import { ffmpegExecWeb } from "./web";
import { determineVideoDurationWeb, ffmpegExecWeb } from "./web";
/**
* Generate a thumbnail for the given video using a Wasm FFmpeg running in a web
@@ -74,7 +74,7 @@ export const generateVideoThumbnailNative = async (
_generateVideoThumbnail((seekTime: number) =>
electron.ffmpegExec(
makeGenThumbnailCommand(seekTime),
toDataOrPathOrZipEntry(fsUploadItem),
toPathOrZipEntry(fsUploadItem),
"jpeg",
),
);
@@ -116,18 +116,17 @@ const _makeGenThumbnailCommand = (seekTime: number, forHDR: boolean) => [
];
/**
* Extract metadata from the given video
* Extract metadata from the given video.
*
* When we're running in the context of our desktop app _and_ we're passed a
* file path , this uses the native FFmpeg bundled with our desktop app.
* Otherwise it uses a Wasm build of FFmpeg running in a web worker.
* When we're running in the context of our desktop app _and_ we're passed an
* upload item that resolves to a path of the user's file system, this uses the
* native FFmpeg bundled with our desktop app. Otherwise it uses a Wasm build of
* FFmpeg running in a web worker.
*
* This function is called during upload, when we need to extract the metadata
* of videos that the user is uploading.
* This function is called during upload, when we need to extract the
* "ffmetadata" of videos that the user is uploading.
*
* @param uploadItem A {@link File}, or the absolute path to a file on the
* user's local file system. A path can only be provided when we're running in
* the context of our desktop app.
* @param uploadItem The video item being uploaded.
*/
export const extractVideoMetadata = async (
uploadItem: UploadItem,
@@ -138,7 +137,7 @@ export const extractVideoMetadata = async (
? await ffmpegExecWeb(command, uploadItem, "txt")
: await ensureElectron().ffmpegExec(
command,
toDataOrPathOrZipEntry(uploadItem),
toPathOrZipEntry(uploadItem),
"txt",
),
);
@@ -260,6 +259,26 @@ const parseFFMetadataDate = (s: string | undefined) => {
return d;
};
/**
* Extract the duration (in seconds) from the given video.
*
* This is a sibling of {@link extractVideoMetadata}, except it tries to
* determine the duration of the video. The duration is not part of the
* "ffmetadata", and is instead a property of the video itself.
*
* @param uploadItem The video item being uploaded.
*
* @returns The duration of the video in seconds (a floating point number).
*/
export const determineVideoDuration = async (
uploadItem: UploadItem,
): Promise<number> =>
uploadItem instanceof File
? determineVideoDurationWeb(uploadItem)
: ensureElectron().ffmpegDetermineVideoDuration(
toPathOrZipEntry(uploadItem),
);
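
A hedged sketch (not part of the commit) of how a call site might wrap this, mirroring the tryDetermineVideoDuration helper added to the upload service later in this commit; failures are swallowed so that a parsing issue never blocks the upload itself:

// Illustration only: log and swallow errors so that an inability to
// determine the duration never causes the surrounding upload to fail.
const durationOrUndefined = async (uploadItem: UploadItem) => {
    try {
        return await determineVideoDuration(uploadItem);
    } catch (e) {
        log.error("Failed to determine video duration", e);
        return undefined;
    }
};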
/**
* Convert a video from a format that is not supported in the browser to MP4.
*

View File

@@ -15,7 +15,7 @@ import {
let _ffmpeg: Promise<FFmpeg> | undefined;
/** Queue of in-flight requests. */
const _ffmpegTaskQueue = new PromiseQueue<Uint8Array>();
const _ffmpegTaskQueue = new PromiseQueue<Uint8Array | number>();
/**
* Return the shared {@link FFmpeg} instance, lazily creating and loading it if
@@ -45,7 +45,7 @@ const createFFmpeg = async () => {
*
* @param command The FFmpeg command to execute.
*
* @param blob The input data on which to run the command, provided as a blob.
* @param blob The input blob on which to run the command.
*
* @param outputFileExtension The extension of the (temporary) output file which
* will be generated by the command.
@@ -66,7 +66,26 @@ export const ffmpegExecWeb = async (
// So serialize them using a promise queue.
return _ffmpegTaskQueue.add(() =>
ffmpegExec(ffmpeg, command, outputFileExtension, blob),
);
) as Promise<Uint8Array>;
};
/**
* Determine the duration of the given video blob.
*
* This is a specialized variant of {@link ffmpegExecWeb} that uses the same
* queue but internally uses ffprobe to try and determine the video's duration.
*
* @param blob The video whose duration we want to determine, provided as a
* blob.
*
* @returns The duration (in seconds) of the {@link blob} (if it indeed is a
* video).
*/
export const determineVideoDurationWeb = async (
blob: Blob,
): Promise<number> => {
const ffmpeg = await ffmpegLazy();
return _ffmpegTaskQueue.add(() =>
ffprobeExecVideoDuration(ffmpeg, blob),
) as Promise<number>;
};
const ffmpegExec = async (
@@ -75,53 +94,78 @@ const ffmpegExec = async (
outputFileExtension: string,
blob: Blob,
) => {
const mountDir = "/mount";
const inputFileName = newID("in_");
const inputPath = joinPath(mountDir, inputFileName);
const outputSuffix = outputFileExtension ? "." + outputFileExtension : "";
const outputPath = newID("out_") + outputSuffix;
const inputFile = new File([blob], inputFileName);
// Exit status of the ffmpeg.exec invocation.
// `0` if no error, `!= 0` if timeout (1) or error.
let status: number | undefined;
try {
const startTime = Date.now();
return withInputMount(ffmpeg, blob, async (inputPath) => {
try {
const startTime = Date.now();
let resolvedCommand: string[];
if (Array.isArray(command)) {
resolvedCommand = command;
} else {
const isHDR = await isHDRVideo(ffmpeg, inputPath);
resolvedCommand = isHDR ? command.hdr : command.default;
}
const cmd = substitutePlaceholders(
resolvedCommand,
inputPath,
outputPath,
);
status = await ffmpeg.exec(cmd);
if (status !== 0) {
log.info(
`[wasm] ffmpeg command failed with exit code ${status}: ${cmd.join(" ")}`,
);
throw new Error(
`ffmpeg command failed with exit code ${status}`,
);
}
const result = await ffmpeg.readFile(outputPath);
if (typeof result == "string")
throw new Error("Expected binary data");
const ms = Date.now() - startTime;
log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`);
return result;
} finally {
try {
await ffmpeg.deleteFile(outputPath);
} catch (e) {
// Output file might not even exist if the command did not succeed,
// so only log on success.
if (status === 0) {
log.error(`Failed to remove output ${outputPath}`, e);
}
}
}
});
};
const withInputMount = async <T>(
ffmpeg: FFmpeg,
blob: Blob,
f: (inputPath: string) => Promise<T>,
): Promise<T> => {
const mountDir = "/mount";
const inputFileName = newID("in_");
const inputPath = joinPath(mountDir, inputFileName);
const inputFile = new File([blob], inputFileName);
try {
await ffmpeg.createDir(mountDir);
await ffmpeg.mount(FFFSType.WORKERFS, { files: [inputFile] }, mountDir);
let resolvedCommand: string[];
if (Array.isArray(command)) {
resolvedCommand = command;
} else {
const isHDR = await isHDRVideo(ffmpeg, inputPath);
resolvedCommand = isHDR ? command.hdr : command.default;
}
const cmd = substitutePlaceholders(
resolvedCommand,
inputPath,
outputPath,
);
status = await ffmpeg.exec(cmd);
if (status !== 0) {
log.info(
`[wasm] ffmpeg command failed with exit code ${status}: ${cmd.join(" ")}`,
);
throw new Error(`ffmpeg command failed with exit code ${status}`);
}
const result = await ffmpeg.readFile(outputPath);
if (typeof result == "string") throw new Error("Expected binary data");
const ms = Date.now() - startTime;
log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`);
return result;
return await f(inputPath);
} finally {
try {
await ffmpeg.unmount(mountDir);
@@ -133,15 +177,6 @@ const ffmpegExec = async (
} catch (e) {
log.error(`Failed to delete mount directory ${mountDir}`, e);
}
try {
await ffmpeg.deleteFile(outputPath);
} catch (e) {
// Output file might not even exist if the command did not succeed,
// so only log on success.
if (status === 0) {
log.error(`Failed to remove output ${outputPath}`, e);
}
}
}
};
@@ -164,7 +199,7 @@ const substitutePlaceholders = (
})
.filter((s) => s !== undefined);
const isHDRVideoFFProbeOutput = z.object({
const FFProbeOutputIsHDR = z.object({
streams: z.array(z.object({ color_transfer: z.string().optional() })),
});
@@ -181,8 +216,9 @@ const isHDRVideoFFProbeOutput = z.object({
* `false` to make this function safe to invoke without breaking the happy path.
*/
const isHDRVideo = async (ffmpeg: FFmpeg, inputFilePath: string) => {
let jsonString: string | undefined;
try {
const jsonString = await ffprobeOutput(
jsonString = await ffprobeOutput(
ffmpeg,
[
["-i", inputFilePath],
@@ -201,7 +237,7 @@ const isHDRVideo = async (ffmpeg: FFmpeg, inputFilePath: string) => {
"output.json",
);
const output = isHDRVideoFFProbeOutput.parse(JSON.parse(jsonString));
const output = FFProbeOutputIsHDR.parse(JSON.parse(jsonString));
switch (output.streams[0]?.color_transfer) {
case "smpte2084":
case "arib-std-b67":
@@ -210,7 +246,8 @@ const isHDRVideo = async (ffmpeg: FFmpeg, inputFilePath: string) => {
return false;
}
} catch (e) {
log.warn(`Could not detect HDR status of ${inputFilePath}`, e);
log.warn("Could not detect HDR status", e);
if (jsonString) log.debug(() => ["ffprobe-output", jsonString]);
return false;
}
};
@@ -258,3 +295,51 @@ const ffprobeOutput = async (
}
}
};
const FFProbeOutputDuration = z.object({
format: z.object({ duration: z.string() }),
});
const ffprobeExecVideoDuration = async (ffmpeg: FFmpeg, blob: Blob) =>
withInputMount(ffmpeg, blob, async (inputPath) => {
// Determine the video duration from the container, bypassing the issues
// with stream selection.
//
// ffprobe -v error -show_entries format=duration -of
// default=noprint_wrappers=1:nokey=1 input.mp4
//
// Source:
// https://trac.ffmpeg.org/wiki/FFprobeTips#Formatcontainerduration
//
// Reference: https://ffmpeg.org/ffprobe.html
//
// Since we cannot grab the stdout easily, the command has been modified
// to output to a file instead. However, in doing so the command seems to
// have become flaky: for certain videos it outputs extra lines and not
// just the duration. So we also switch to the JSON output for more
// robust behaviour, and parse the duration from it.
const jsonString = await ffprobeOutput(
ffmpeg,
[
["-i", inputPath],
["-v", "error"],
["-show_entries", "format=duration"],
["-of", "json"],
["-o", "output.json"],
].flat(),
"output.json",
);
const durationString = FFProbeOutputDuration.parse(
JSON.parse(jsonString),
).format.duration;
const duration = parseFloat(durationString);
if (isNaN(duration)) {
const msg = "Could not parse video duration";
log.warn(msg, durationString);
throw new Error(msg);
}
return duration;
});
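
For reference, a minimal standalone sketch (not part of the commit) of just the parsing step, assuming ffprobe emitted JSON of the shape the schema above expects; the sample value is illustrative:

import { z } from "zod";

// Illustrative sample of the JSON ffprobe is assumed to write for
// `-show_entries format=duration -of json`.
const sampleJSON = `{ "format": { "duration": "3.133000" } }`;

const FFProbeOutputDuration = z.object({
    format: z.object({ duration: z.string() }),
});

const durationString =
    FFProbeOutputDuration.parse(JSON.parse(sampleJSON)).format.duration;
const duration = parseFloat(durationString);
if (isNaN(duration)) throw new Error("Could not parse video duration");
console.log(duration); // 3.133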

View File

@@ -283,7 +283,7 @@ export const fileSystemUploadItemIfUnchanged = async (
* context of our desktop app, return a value that can be passed to
* {@link Electron} functions over IPC.
*/
export const toDataOrPathOrZipEntry = (fsUploadItem: FileSystemUploadItem) =>
export const toPathOrZipEntry = (fsUploadItem: FileSystemUploadItem) =>
typeof fsUploadItem == "string" || Array.isArray(fsUploadItem)
? fsUploadItem
: fsUploadItem.path;

View File

@@ -2,7 +2,7 @@ import log from "ente-base/log";
import { type Electron } from "ente-base/types/ipc";
import * as ffmpeg from "ente-gallery/services/ffmpeg";
import {
toDataOrPathOrZipEntry,
toPathOrZipEntry,
type FileSystemUploadItem,
} from "ente-gallery/services/upload";
import { FileType, type FileTypeInfo } from "ente-media/file-type";
@@ -196,7 +196,7 @@ export const generateThumbnailNative = async (
): Promise<Uint8Array> =>
fileTypeInfo.fileType === FileType.image
? await electron.generateImageThumbnail(
toDataOrPathOrZipEntry(fsUploadItem),
toPathOrZipEntry(fsUploadItem),
maxThumbnailDimension,
maxThumbnailSize,
)

View File

@@ -9,7 +9,10 @@ import { basename, nameAndExtension } from "ente-base/file-name";
import type { PublicAlbumsCredentials } from "ente-base/http";
import log from "ente-base/log";
import { extractExif } from "ente-gallery/services/exif";
import { extractVideoMetadata } from "ente-gallery/services/ffmpeg";
import {
determineVideoDuration,
extractVideoMetadata,
} from "ente-gallery/services/ffmpeg";
import {
getNonEmptyMagicMetadataProps,
updateMagicMetadata,
@@ -37,6 +40,7 @@ import {
import { FileType, type FileTypeInfo } from "ente-media/file-type";
import { encodeLivePhoto } from "ente-media/live-photo";
import { addToCollection } from "ente-new/photos/services/collection";
import { settingsSnapshot } from "ente-new/photos/services/settings";
import { CustomError, handleUploadError } from "ente-shared/error";
import { mergeUint8Arrays } from "ente-utils/array";
import { ensureInteger, ensureNumber } from "ente-utils/ensure";
@@ -1043,6 +1047,18 @@ const extractImageOrVideoMetadata = async (
tryParseEpochMicrosecondsFromFileName(fileName) ?? modificationTime;
}
// Video duration
let duration: number | undefined;
if (
fileType == FileType.video &&
// TODO(HLS):
settingsSnapshot().isInternalUser
) {
duration = await tryDetermineVideoDuration(uploadItem);
// TODO(HLS):
log.debug(() => ["extracted duration", duration]);
}
// To avoid introducing malformed data into the metadata fields (which the
// other clients might not expect and handle), we have extra "ensure" checks
// here that act as a safety valve if somehow the TypeScript type is lying.
@@ -1060,6 +1076,10 @@ const extractImageOrVideoMetadata = async (
hash,
};
if (duration) {
metadata.duration = ensureInteger(Math.ceil(duration));
}
const location = parsedMetadataJSON?.location ?? parsedMetadata?.location;
if (location) {
metadata.latitude = ensureNumber(location.latitude);
@@ -1119,6 +1139,16 @@ const tryExtractVideoMetadata = async (uploadItem: UploadItem) => {
}
};
const tryDetermineVideoDuration = async (uploadItem: UploadItem) => {
try {
return await determineVideoDuration(uploadItem);
} catch (e) {
const fileName = uploadItemFileName(uploadItem);
log.error(`Failed to extract video duration for ${fileName}`, e);
return undefined;
}
};
const computeHash = async (uploadItem: UploadItem, worker: CryptoWorker) => {
const { stream, chunkCount } = await readUploadItem(uploadItem);
const hashState = await worker.initChunkHashing();

View File

@@ -140,6 +140,14 @@ export interface Metadata {
* older clients.
*/
videoHash?: string;
/**
* The duration (in integral seconds) of the video.
*
* Only present for videos (`fileType == FileType.video`). For compatibility
* with other clients, this must be an integer number of seconds, without any
* sub-second fraction.
*/
duration?: number;
hasStaticThumbnail?: boolean;
localID?: number;
version?: number;
@@ -759,6 +767,44 @@ export const fileLocation = (file: EnteFile): Location | undefined => {
return { latitude, longitude };
};
/**
* Return the duration of the video as a formatted "HH:mm:ss" string (when
* present) for the given {@link EnteFile}.
*
* Only files with type `FileType.video` are expected to have a duration.
*
* @returns The duration of the video as a string of the form "HH:mm:ss". The
* underlying duration present in the file's metadata is guaranteed to be
* integral, so there will never be a subsecond component.
*
* - If the hour component is all zeroes, it will be omitted.
*
* - Leading zeros in the minutes component will be trimmed off if an hour
* component is not present. If minutes is all zeros, then "0" will be used.
*
* - For example, an underlying duration of 595 seconds will result in the
* formatted string "9:55", while an underlying duration of 9 seconds will be
* returned as the string "0:09".
*
* - A zero duration will be treated as undefined.
*/
export const fileDurationString = (file: EnteFile): string | undefined => {
const d = file.metadata.duration;
if (!d) return undefined;
const s = d % 60;
const m = Math.floor(d / 60) % 60;
const h = Math.floor(d / 3600);
const ss = s > 9 ? `${s}` : `0${s}`;
if (h) {
const mm = m > 9 ? `${m}` : `0${m}`;
return `${h}:${mm}:${ss}`;
} else {
return `${m}:${ss}`;
}
};
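
A minimal sketch (not part of the commit) mirroring the formatting logic above on plain numbers, with the results the function would produce for a few illustrative durations:

// Mirrors fileDurationString's formatting, but takes the integral seconds
// directly instead of an EnteFile (illustration only).
const formatDuration = (d: number): string | undefined => {
    if (!d) return undefined;
    const s = d % 60;
    const m = Math.floor(d / 60) % 60;
    const h = Math.floor(d / 3600);
    const ss = s > 9 ? `${s}` : `0${s}`;
    if (h) return `${h}:${m > 9 ? `${m}` : `0${m}`}:${ss}`;
    return `${m}:${ss}`;
};

console.log(formatDuration(9)); // "0:09"
console.log(formatDuration(595)); // "9:55"
console.log(formatDuration(3725)); // "1:02:05"
console.log(formatDuration(0)); // undefined (zero treated as absent)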
/**
* Return the caption, aka "description", (if any) attached to the given
* {@link EnteFile}.