Merge branch 'main' into remote_db
@@ -132,6 +132,10 @@
         "Binance US"
       ]
     },
+    {
+      "title": "Bitkub",
+      "slug": "bitkub"
+    },
     {
       "title": "Bitfinex"
     },
@@ -183,6 +187,9 @@
       "title": "Bluesky",
       "slug": "blue_sky"
     },
+    {
+      "title": "bonify"
+    },
     {
       "title": "Booking",
       "altNames": [
@@ -208,6 +215,13 @@
     {
       "title": "Bugzilla"
     },
+    {
+      "title": "Bundesagentur für Arbeit",
+      "slug": "bundesagentur_fur_arbeit",
+      "altNames": [
+        "Agentur für Arbeit"
+      ]
+    },
     {
       "title": "ButterflyMX",
       "slug": "butterflymx"
@@ -518,12 +532,19 @@
       "slug": "id_me"
     },
     {
-      "title": "Infomaniak"
+      "title": "ImmoScout24",
+      "slug": "immo_scout_24",
+      "altNames": [
+        "ImmobilienScout24"
+      ]
+    },
+    {
+      "title": "Impact.com",
+      "slug": "impact"
+    },
+    {
+      "title": "Infomaniak"
     },
     {
       "title": "ING"
     },
auth/assets/custom-icons/icons/bitkub.svg (new file)
@@ -0,0 +1 @@
+<svg viewBox="0 0 245.73 156" xmlns="http://www.w3.org/2000/svg"><g fill="#4cba64"><path d="m167.87 0a23.32 23.32 0 0 0 0 33l44.89 44.9-45 45-22.89-22.9a23.34 23.34 0 0 0 -33 0l55.86 55.87 78-78z"/><circle cx="167.87" cy="78" r="16"/><path d="m77.87 156a23.34 23.34 0 0 0 0-33l-44.87-44.9 45-45 22.87 22.9a23.34 23.34 0 0 0 33 0l-55.87-55.87-78 78z"/><circle cx="77.87" cy="78" r="16"/></g></svg>
auth/assets/custom-icons/icons/bonify.svg (new file)
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg id="logosandtypes_com" data-name="logosandtypes com" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 150 150">
+  <defs>
+    <style>
+      .cls-1 {
+        fill: #101010;
+      }
+
+      .cls-2 {
+        fill: none;
+      }
+
+      .cls-3 {
+        fill: url(#linear-gradient);
+      }
+    </style>
+    <linearGradient id="linear-gradient" x1="186.97" y1="96.04" x2="45.7" y2="96.04" gradientTransform="translate(0 150.11) scale(1 -1)" gradientUnits="userSpaceOnUse">
+      <stop offset="0" stop-color="#165cc3"/>
+      <stop offset="1" stop-color="#3ddabb"/>
+    </linearGradient>
+  </defs>
+  <g id="Layer_3" data-name="Layer 3">
+    <g id="Layer_2" data-name="Layer 2">
+      <path id="Layer_3-2" data-name="Layer 3-2" class="cls-2" d="M0,0H150V150H0V0Z"/>
+    </g>
+  </g>
+  <path class="cls-1" d="M111.63,75.01c.06,.86,.08,1.72,.08,2.59,0,20.52-16.62,37.16-37.14,37.16-20.52,0-37.16-16.62-37.16-37.14,0-20.52,16.62-37.16,37.14-37.16,0,0,.02,0,.02,0,1.61,0,3.22,.1,4.82,.32l12.7-17.11C62.3,14,30.31,30.3,20.63,60.09c-9.68,29.79,6.62,61.78,36.41,71.47,29.79,9.68,61.78-6.62,71.47-36.41,4.29-13.2,3.59-27.52-1.97-40.24l-14.9,20.11Z"/>
+  <polygon class="cls-3" points="120.26 4.82 74.49 66.53 62.93 53.99 45.67 69.89 76.4 103.32 149.5 4.82 120.26 4.82"/>
+</svg>
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Created with Inkscape (http://www.inkscape.org/) by Marsupilami -->
+<svg xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" version="1.1" width="768" height="768" viewBox="-4.3240767 -4.3240767 152.8084434 152.7840434" id="svg7076">
+  <defs id="defs7078"/>
+  <path d="M 0,72.07202 C 0,32.27318 32.2935,0 72.08013,0 c 39.78662,0 72.08017,32.27318 72.08017,72.07202 0,39.80291 -32.29355,72.06387 -72.08017,72.06387 -17.63317,0 -33.75958,-6.32434 -46.30232,-16.82687 11.769,-19.46163 46.13944,-77.28864 46.13944,-77.28864 l 17.0223,28.5022 c 0,0 -8.95912,0.0448 -17.06303,0 -8.14464,-0.0448 -10.46588,1.7063 -14.00878,7.11027 -2.9321,4.4877 -9.85505,16.21193 -10.01793,16.42776 -0.81448,1.29093 -0.3258,2.54114 1.58818,2.54114 l 55.18001,0 28.01759,0 c 1.66968,0 2.64704,-1.16875 1.58822,-2.6226 L 73.34255,2.43932 c -0.81447,-1.37236 -2.11759,-1.25021 -2.85061,0 L 8.4704,105.97411 C 3.09495,95.87068 0,84.32969 0,72.07202" id="path8406" style="fill:#ec1c23;fill-rule:nonzero;stroke:none"/>
+</svg>
+<!-- version: 20110311, original size: 144.16029 144.13589, border: 3% -->
auth/assets/custom-icons/icons/immo_scout_24.svg (new file)
File diff suppressed because one or more lines are too long
@@ -13,6 +13,7 @@ import type { BrowserWindow } from "electron";
 import { ipcMain } from "electron/main";
 import type {
     CollectionMapping,
+    FFmpegCommand,
     FolderWatch,
     PendingUploads,
     ZipItem,
@@ -187,7 +188,7 @@ export const attachIPCHandlers = () => {
         "ffmpegExec",
         (
             _,
-            command: string[],
+            command: FFmpegCommand,
             dataOrPathOrZipItem: Uint8Array | string | ZipItem,
             outputFileExtension: string,
         ) => ffmpegExec(command, dataOrPathOrZipItem, outputFileExtension),
@@ -1,8 +1,8 @@
 import pathToFfmpeg from "ffmpeg-static";
 import { randomBytes } from "node:crypto";
 import fs from "node:fs/promises";
-import path from "node:path";
-import type { ZipItem } from "../../types/ipc";
+import path, { basename } from "node:path";
+import type { FFmpegCommand, ZipItem } from "../../types/ipc";
 import log from "../log";
 import { execAsync } from "../utils/electron";
 import {
@@ -44,7 +44,7 @@ const outputPathPlaceholder = "OUTPUT";
  * But I'm not sure if our code is supposed to be able to use it, and how.
  */
 export const ffmpegExec = async (
-    command: string[],
+    command: FFmpegCommand,
     dataOrPathOrZipItem: Uint8Array | string | ZipItem,
     outputFileExtension: string,
 ): Promise<Uint8Array> => {
@@ -58,8 +58,17 @@ export const ffmpegExec = async (
     try {
         await writeToTemporaryInputFile();

+        let resolvedCommand: string[];
+        if (Array.isArray(command)) {
+            resolvedCommand = command;
+        } else {
+            const isHDR = await isHDRVideo(inputFilePath);
+            log.debug(() => [basename(inputFilePath), { isHDR }]);
+            resolvedCommand = isHDR ? command.hdr : command.default;
+        }
+
         const cmd = substitutePlaceholders(
-            command,
+            resolvedCommand,
             inputFilePath,
             outputFilePath,
         );
@@ -143,8 +152,19 @@ export interface FFmpegGenerateHLSPlaylistAndSegmentsResult {
  * A bespoke variant of {@link ffmpegExec} for generation of HLS playlists for
  * videos.
  *
+ * Overview of the cases:
+ *
+ * H.264, <= 10 MB - Skip
+ * H.264, <= 4000 kb/s bitrate - Don't re-encode video stream
+ * BT.709, <= 2000 kb/s bitrate - Don't apply the scale+fps filter
+ * !BT.709 - Apply tonemap (zscale+tonemap+zscale)
+ *
+ * Example invocation:
+ *
+ * ffmpeg -i in.mov -vf 'scale=-2:720,fps=30,zscale=transfer=linear,tonemap=tonemap=hable:desat=0,zscale=primaries=709:transfer=709:matrix=709,format=yuv420p' -c:v libx264 -c:a aac -f hls -hls_key_info_file out.m3u8.info -hls_list_size 0 -hls_flags single_file out.m3u8
+ *
  * See: [Note: Preview variant of videos]
  *
  * @param inputFilePath The path to a file on the user's local file system. This
  * is the video we want to generate a streamable HLS playlist for.
  *
@@ -155,11 +175,58 @@ export interface FFmpegGenerateHLSPlaylistAndSegmentsResult {
  * @returns The paths to two files on the user's local file system - one
  * containing the generated HLS playlist, and the other containing the
  * transcoded and encrypted video segments that the HLS playlist refers to.
+ *
+ * If the video is such that it doesn't require stream generation, then this
+ * function returns `undefined`.
  */
 export const ffmpegGenerateHLSPlaylistAndSegments = async (
     inputFilePath: string,
     outputPathPrefix: string,
-): Promise<FFmpegGenerateHLSPlaylistAndSegmentsResult> => {
+): Promise<FFmpegGenerateHLSPlaylistAndSegmentsResult | undefined> => {
+    const { isH264, isBT709, bitrate } =
+        await detectVideoCharacteristics(inputFilePath);
+
+    log.debug(() => [basename(inputFilePath), { isH264, isBT709, bitrate }]);
+
+    // If the video is smaller than 10 MB, and already H.264 (the codec we are
+    // going to use for the conversion), then a streaming variant is not much
+    // use. Skip such cases.
+    //
+    // ---
+    //
+    // [Note: HEVC/H.265 issues]
+    //
+    // We've observed two issues out in the wild with HEVC videos:
+    //
+    // 1. On Linux, HEVC video streams don't play. However, since the audio
+    //    stream plays, the browser tells us that the "video" itself is
+    //    playable, but the user sees a blank screen with only audio.
+    //
+    // 2. HEVC + HDR videos taken on an iPhone have a rotation (`Side data:
+    //    displaymatrix` in the ffmpeg output) that Chrome (and thus Electron)
+    //    doesn't take into account, so these play upside down.
+    //
+    // Not fully related to this case, but mentioning here as to why both the
+    // size and codec need to be checked before skipping stream generation.
+    if (isH264) {
+        const inputVideoSize = await fs
+            .stat(inputFilePath)
+            .then((st) => st.size);
+        if (inputVideoSize <= 10 * 1024 * 1024 /* 10 MB */) {
+            return undefined;
+        }
+    }
+
+    // If the video is already H.264 with a bitrate less than 4000 kbps, then we
+    // do not need to reencode the video stream (by _far_ the costliest part of
+    // the HLS stream generation).
+    const reencodeVideo = !(isH264 && bitrate && bitrate <= 4000 * 1000);
+
+    // If the bitrate is not too high, then we don't need to rescale the video
+    // when generating the video stream. This is not a performance optimization,
+    // but more for avoiding making the video size smaller unnecessarily.
+    const rescaleVideo = !(bitrate && bitrate <= 2000 * 1000);
+
     // [Note: Tonemapping HDR to HD]
     //
     // BT.709 ("HD") is a standard that describes things like how color is
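Read together, the thresholds in this hunk collapse into a small decision table. A minimal sketch of that logic as a standalone function (illustrative only; planHLSGeneration is a hypothetical helper, not part of this change, and the interface mirrors the VideoCharacteristics type added later in this diff):

    interface VideoCharacteristics {
        isH264: boolean;
        isBT709: boolean;
        bitrate: number | undefined; // bits per second, when detectable
    }

    // Mirrors the decisions ffmpegGenerateHLSPlaylistAndSegments makes above.
    const planHLSGeneration = (
        c: VideoCharacteristics,
        fileSizeBytes: number,
    ) => {
        // Small and already H.264: a streaming variant adds little, skip.
        if (c.isH264 && fileSizeBytes <= 10 * 1024 * 1024) return undefined;
        return {
            // Re-encoding is by far the costliest step; skip it for H.264
            // inputs at or under 4000 kb/s.
            reencodeVideo: !(c.isH264 && c.bitrate && c.bitrate <= 4000 * 1000),
            // Inputs at or under 2000 kb/s don't need the scale+fps filter.
            rescaleVideo: !(c.bitrate && c.bitrate <= 2000 * 1000),
            // Anything not already BT.709 gets the tonemap filter chain.
            tonemap: !c.isBT709,
        };
    };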
@@ -193,9 +260,12 @@ export const ffmpegGenerateHLSPlaylistAndSegments = async (
     // brightness drop. So we conditionally apply this filter chain only if the
     // colorspace is not already BT.709.
     //
+    // See also: [Note: Alternative FFmpeg command for HDR videos], although
+    // that uses an allow-list based check (while here we use a deny-list).
+    //
     // Reference:
     // - https://trac.ffmpeg.org/wiki/colorspace
-    const isBT709 = await detectIsBT709(inputFilePath);
+    const tonemap = !isBT709;

     // We want the generated playlist to refer to the chunks as "output.ts".
     //
@@ -235,8 +305,8 @@ export const ffmpegGenerateHLSPlaylistAndSegments = async (

     // Overview:
     //
-    // - H.264 video HD 720p 30fps.
-    // - AAC audio 128kbps.
+    // - Video H.264 HD 720p 30fps.
+    // - Audio AAC 128kbps.
     // - Encrypted HLS playlist with a single file containing all the chunks.
     //
     // Reference:
@@ -251,65 +321,83 @@ export const ffmpegGenerateHLSPlaylistAndSegments = async (
         "-i",
         inputFilePath,
         // The remaining options apply to the next output file (`playlistPath`).
         //
         // ---
         //
-        // `-vf` creates a filter graph for the video stream. This is a string
-        // of the form `filter1=key=value:key=value,filter2=key=value`, that is,
-        // a comma separated list of filters chained together.
-        [
-            "-vf",
-            [
-                // Scales the video to maximum 720p height, keeping aspect
-                // ratio, and keeping the calculated dimension divisible by 2
-                // (some of the other operations require an even pixel count).
-                "scale=-2:720",
-                // Convert the video to a constant 30 fps, duplicating or
-                // dropping frames as necessary.
-                "fps=30",
-                // If the video is not in the HD color space (bt709), convert
-                // it. Before conversion, tone map colors so that they work the
-                // same across the change in the dynamic range.
-                //
-                // 1. The tonemap filter only works on linear light, so we first
-                //    use zscale with transfer=linear to linearize the input.
-                //
-                // 2. Then we use the tonemap, with the hable option that is
-                //    best for preserving details. desat=0 turns off the default
-                //    desaturation.
-                //
-                // 3. Use zscale again to "convert to BT.709" by asking it to
-                //    set all three of color primaries, transfer
-                //    characteristics and colorspace matrix to 709 (Note: the
-                //    constants specified in the tonemap filter help do not
-                //    include the "bt" prefix)
-                //
-                // See: https://ffmpeg.org/ffmpeg-filters.html#tonemap-1
-                //
-                // See: [Note: Tonemapping HDR to HD]
-                isBT709
-                    ? []
-                    : [
-                          "zscale=transfer=linear",
-                          "tonemap=tonemap=hable:desat=0",
-                          "zscale=primaries=709:transfer=709:matrix=709",
-                      ],
-                // Output using the most widely supported pixel format: 8-bit
-                // YUV planar color space with 4:2:0 chroma subsampling.
-                "format=yuv420p",
-            ]
-                .flat()
-                .join(","),
-        ],
-        // Video codec H.264
-        //
-        // - `-c:v libx264` converts the video stream to use the H.264 codec.
-        //
-        // - We don't supply a bitrate, instead it uses the default CRF ("23")
-        //   as recommended in the ffmpeg trac.
-        //
-        // - We don't supply a preset, it'll use the default ("medium")
-        ["-c:v", "libx264"],
+        reencodeVideo
+            ? [
+                  // `-vf` creates a filter graph for the video stream. It is a
+                  // comma separated list of filters chained together, e.g.
+                  // `filter1=key=value:key=value,filter2=key=value`.
+                  "-vf",
+                  [
+                      // Do the rescaling to even number of pixels always if the
+                      // tonemapping is going to be applied subsequently,
+                      // otherwise the tonemapping will fail with "image
+                      // dimensions must be divisible by subsampling factor".
+                      //
+                      // While we add the extra condition here for completeness,
+                      // it won't usually matter since a non-BT.709 video is
+                      // likely using a new codec, and as such would've a high
+                      // enough bitrate to require rescaling anyways.
+                      rescaleVideo || tonemap
+                          ? [
+                                // Scales the video to maximum 720p height,
+                                // keeping aspect ratio and the calculated
+                                // dimension divisible by 2 (some of the other
+                                // operations require an even pixel count).
+                                "scale=-2:720",
+                                // Convert the video to a constant 30 fps,
+                                // duplicating or dropping frames as necessary.
+                                "fps=30",
+                            ]
+                          : [],
+                      // Convert the colorspace if the video is not in the HD
+                      // color space (bt709). Before conversion, tone map colors
+                      // so that they work the same across the change in the
+                      // dynamic range.
+                      //
+                      // 1. The tonemap filter only works on linear light, so we
+                      //    first use zscale with transfer=linear to linearize
+                      //    the input.
+                      //
+                      // 2. Then we use the tonemap, with the hable option that
+                      //    is best for preserving details. desat=0 turns off
+                      //    the default desaturation.
+                      //
+                      // 3. Use zscale again to "convert to BT.709" by asking it
+                      //    to set all three of color primaries, transfer
+                      //    characteristics and colorspace matrix to 709 (Note:
+                      //    the constants specified in the tonemap filter help
+                      //    do not include the "bt" prefix)
+                      //
+                      // See: https://ffmpeg.org/ffmpeg-filters.html#tonemap-1
+                      //
+                      // See: [Note: Tonemapping HDR to HD]
+                      tonemap
+                          ? [
+                                "zscale=transfer=linear",
+                                "tonemap=tonemap=hable:desat=0",
+                                "zscale=primaries=709:transfer=709:matrix=709",
+                            ]
+                          : [],
+                      // Output using the well supported pixel format: 8-bit YUV
+                      // planar color space with 4:2:0 chroma subsampling.
+                      "format=yuv420p",
+                  ]
+                      .flat()
+                      .join(","),
+              ]
+            : [],
+        reencodeVideo
+            ? // Video codec H.264
+              //
+              // - `-c:v libx264` converts the video stream to the H.264 codec.
+              //
+              // - We don't supply a bitrate, instead it uses the default CRF
+              //   ("23") as recommended in the ffmpeg trac.
+              //
+              // - We don't supply a preset, it'll use the default ("medium").
+              ["-c:v", "libx264"]
+            : // Keep the video stream unchanged
+              ["-c:v", "copy"],
         // Audio codec AAC
         //
         // - `-c:a aac` converts the audio stream to use the AAC codec
@@ -377,6 +465,10 @@ export const ffmpegGenerateHLSPlaylistAndSegments = async (
  * Stream #0:0: Video: h264 (High 10) ([27][0][0][0] / 0x001B), yuv420p10le(tv, bt2020nc/bt2020/arib-std-b67), 1920x1080, 30 fps, 30 tbr, 90k tbn
  *
  * The part after Video: is the first capture group.
+ *
+ * Another example:
+ *
+ * Stream #0:1[0x2](und): Video: h264 (Constrained Baseline) (avc1 / 0x31637661), yuv420p(progressive), 480x270 [SAR 1:1 DAR 16:9], 539 kb/s, 29.97 fps, 29.97 tbr, 30k tbn (default)
  */
 const videoStreamLineRegex = /Stream #.+: Video:(.+)\n/;
@@ -384,23 +476,84 @@ const videoStreamLineRegex = /Stream #.+: Video:(.+)\n/;
 const videoStreamLinesRegex = /Stream #.+: Video:(.+)\n/g;

 /**
- * A regex that matches <digits>x<digits> pair preceded by a space and followed
- * by a trailing comma. See {@link videoStreamLineRegex} for the context in
- * which it is used.
+ * A regex that matches "<digits> kb/s" preceded by a space. See
+ * {@link videoStreamLineRegex} for the context in which it is used.
  */
-const videoDimensionsRegex = / (\d+)x(\d+),/;
+const videoBitrateRegex = / ([1-9]\d*) kb\/s/;

 /**
- * Heuristically determine if the given video uses the BT.709 colorspace.
+ * A regex that matches <digits>x<digits> pair preceded by a space. See
+ * {@link videoStreamLineRegex} for the context in which it is used.
  *
- * This function tries to determine the input colorspace by scanning the ffmpeg
- * info output for the video stream line, and checking if it contains the string
- * "bt709". See: [Note: Parsing CLI output might break on ffmpeg updates].
+ * We constrain the digit sequence not to begin with 0 to exclude hexadecimal
+ * representations of various constants that ffmpeg prints on this line (e.g.
+ * "avc1 / 0x31637661").
  */
-const detectIsBT709 = async (inputFilePath: string) => {
+const videoDimensionsRegex = / ([1-9]\d*)x([1-9]\d*)/;
+
+interface VideoCharacteristics {
+    isH264: boolean;
+    isBT709: boolean;
+    bitrate: number | undefined;
+}
+
+/**
+ * Heuristically determine information about the video at the given
+ * {@link inputFilePath}:
+ *
+ * - If it is encoded using the H.264 codec.
+ * - If it uses the BT.709 colorspace.
+ * - Its bitrate.
+ *
+ * The defaults are tailored for the cases in which these conditions are used,
+ * so that even if we get the detection wrong we'll only end up encoding videos
+ * that could've possibly been skipped as an optimization.
+ *
+ * [Note: Parsing CLI output might break on ffmpeg updates]
+ *
+ * This function tries to determine these bits of information about the given
+ * video by scanning the ffmpeg info output for the video stream line, and
+ * doing various string matches and regex extractions.
+ *
+ * Needless to say, while this works currently, this is liable to break in the
+ * future. So if something stops working after updating ffmpeg, look here!
+ *
+ * Ideally, we'd have done this using `ffprobe`, but we don't have the ffprobe
+ * binary at hand, so we make do by grepping the log output of ffmpeg.
+ *
+ * For reference,
+ *
+ * - codec and colorspace are printed by the `avcodec_string` function in the
+ *   ffmpeg source:
+ *   https://github.com/FFmpeg/FFmpeg/blob/master/libavcodec/avcodec.c
+ *
+ * - bitrate is printed by the `dump_stream_format` function in `dump.c`.
+ */
+const detectVideoCharacteristics = async (inputFilePath: string) => {
     const videoInfo = await pseudoFFProbeVideo(inputFilePath);
-    const videoStreamLine = videoStreamLineRegex.exec(videoInfo)?.at(1);
-    return !!videoStreamLine?.includes("bt709");
+    const videoStreamLine = videoStreamLineRegex.exec(videoInfo)?.at(1)?.trim();
+
+    // Since the checks are heuristic, start with defaults that would cause the
+    // codec conversion to happen, even if it is unnecessary.
+    const res: VideoCharacteristics = {
+        isH264: false,
+        isBT709: false,
+        bitrate: undefined,
+    };
+    if (!videoStreamLine) return res;
+
+    res.isH264 = videoStreamLine.startsWith("h264 ");
+    res.isBT709 = videoStreamLine.includes("bt709");
+    // The regex matches "\d kb/s", but there can be other units for the
+    // bitrate. However, (a) "kb/s" is the most common for videos out in the
+    // wild, and (b) even if we guess wrong we'll just do "-c:v libx264" instead
+    // of "-c:v copy", so only unnecessary processing but no change in output.
+    const brs = videoBitrateRegex.exec(videoStreamLine)?.at(0);
+    if (brs) {
+        // Convert the parsed kb/s value to bits per second.
+        const br = parseInt(brs, 10);
+        if (br) res.bitrate = br * 1000;
+    }
+
+    return res;
 };

 /**
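As a worked example, applying these checks to the second sample stream line quoted in the regex documentation above (an editorial illustration; the values follow from the string and regex matches in detectVideoCharacteristics):

    // Trimmed capture group from the sample line:
    //   "h264 (Constrained Baseline) (avc1 / 0x31637661), yuv420p(progressive),
    //    480x270 [SAR 1:1 DAR 16:9], 539 kb/s, 29.97 fps, ..."
    //
    // - starts with "h264 "                      => isH264: true
    // - does not contain "bt709"                 => isBT709: false
    // - " 539 kb/s" matches videoBitrateRegex    => bitrate: 539000 (bits/s)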
@@ -438,8 +591,8 @@ const detectVideoDimensions = (conversionStderr: string) => {
     if (videoStreamLine) {
         const [, ws, hs] = videoDimensionsRegex.exec(videoStreamLine) ?? [];
         if (ws && hs) {
-            const w = parseInt(ws);
-            const h = parseInt(hs);
+            const w = parseInt(ws, 10);
+            const h = parseInt(hs, 10);
             if (w && h) {
                 return { width: w, height: h };
             }
@@ -451,13 +604,52 @@ const detectVideoDimensions = (conversionStderr: string) => {
 };

 /**
- * We don't have the ffprobe binary at hand, so we make do by grepping the log
- * output of ffmpeg.
+ * Heuristically detect if the file at the given path is an HDR video.
  *
- * > [Note: Parsing CLI output might break on ffmpeg updates]
- * >
- * > Needless to say, while this works currently, this is liable to break in the
- * > future. So if something stops working after updating ffmpeg, look here!
+ * This is similar to {@link detectVideoCharacteristics}, and see that
+ * function's documentation for all the caveats. However, this function uses an
+ * allow-list instead, and considers any file with color transfer "smpte2084" or
+ * "arib-std-b67" to be HDR. While this is in some sense a more exact check, it
+ * comes with different caveats:
+ *
+ * - These particular constants are not guaranteed to be correct; these are just
+ *   what I saw on the internet as being used / recommended for detecting HDR.
+ *
+ * - Since we don't have ffprobe, we're not checking the color space value
+ *   itself but a substring of the stream line in the ffmpeg stderr output.
+ *
+ * In particular, we use this more exact check for places where we have less
+ * leeway. e.g. when generating thumbnails, if we apply the tonemapping to any
+ * non-BT.709 file (as the HLS stream generation does), we start getting the
+ * "code 3074: no path between colorspaces" error during the JPEG conversion
+ * (this is not a problem in the H.264 conversion).
+ *
+ * - See: [Note: Alternative FFmpeg command for HDR videos]
+ * - See: [Note: Tonemapping HDR to HD]
+ *
+ * @param inputFilePath The path to a video file on the user's machine.
+ *
+ * @returns `true` if this file is likely an HDR video. Exceptions are treated as
+ * `false` to make this function safe to invoke without breaking the happy path.
+ */
+const isHDRVideo = async (inputFilePath: string) => {
+    try {
+        const videoInfo = await pseudoFFProbeVideo(inputFilePath);
+        const vs = videoStreamLineRegex.exec(videoInfo)?.at(1);
+        if (!vs) return false;
+        return vs.includes("smpte2084") || vs.includes("arib-std-b67");
+    } catch (e) {
+        log.warn(`Could not detect HDR status of ${inputFilePath}`, e);
+        return false;
+    }
+};
+
+/**
+ * Return the stderr of ffmpeg in an attempt to gain information about the video
+ * at the given {@link inputFilePath}.
+ *
+ * We don't have the ffprobe binary at hand, which is why we need to use this
+ * alternative. See: [Note: Parsing CLI output might break on ffmpeg updates]
+ *
  * @returns the stderr of ffmpeg after running it on the input file. The exact
  * command we run is:
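For a concrete case, the first sample stream line quoted earlier in this file shows the allow-list firing. A minimal sketch of the same substring test (illustrative only, not part of this change):

    // Captured stream line for an HEVC/HDR-style iPhone video (from the
    // videoStreamLineRegex doc comment above):
    const sampleStreamLine =
        "h264 (High 10) ([27][0][0][0] / 0x001B), yuv420p10le(tv, bt2020nc/bt2020/arib-std-b67), 1920x1080, 30 fps, 30 tbr, 90k tbn";

    // The same check isHDRVideo applies to the captured stream line:
    const isHDR =
        sampleStreamLine.includes("smpte2084") ||
        sampleStreamLine.includes("arib-std-b67"); // => true (HLG transfer)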
@@ -277,11 +277,15 @@ const handleVideoDone = async (token: string) => {
  * See: [Note: Convert to MP4] for the general architecture of commands that do
  * renderer <-> main I/O using streams.
  *
- * The difference here is that the conversion generates two streams - one for
- * the HLS playlist itself, and one for the file containing the encrypted and
- * transcoded video chunks. The video stream we write to the objectUploadURL
+ * The difference here is that the conversion generates two streams^ - one
+ * for the HLS playlist itself, and one for the file containing the encrypted
+ * and transcoded video chunks. The video stream we write to the objectUploadURL
  * (provided via {@link params}), and then we return a JSON object containing
  * the token for the playlist, and other metadata for use by the renderer.
+ *
+ * ^ if the video doesn't require a stream to be generated (e.g. it is very
+ *   small and already uses a compatible codec) then an HTTP 204 is returned and
+ *   no stream is generated.
  */
 const handleGenerateHLSWrite = async (
     request: Request,
@@ -313,7 +317,7 @@ const handleGenerateHLSWrite = async (
     } = await makeFileForDataOrStreamOrPathOrZipItem(inputItem);

     const outputFilePathPrefix = await makeTempFilePath();
-    let result: FFmpegGenerateHLSPlaylistAndSegmentsResult;
+    let result: FFmpegGenerateHLSPlaylistAndSegmentsResult | undefined;
     try {
         await writeToTemporaryInputFile();
@@ -322,6 +326,11 @@ const handleGenerateHLSWrite = async (
             outputFilePathPrefix,
         );

+        if (!result) {
+            // This video doesn't require stream generation.
+            return new Response(null, { status: 204 });
+        }
+
         const { playlistPath, videoPath } = result;
         try {
             await uploadVideoSegments(videoPath, objectUploadURL);
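On the receiving side, a caller of this stream handler would branch on the status code. A minimal sketch, assuming hypothetical names (hlsGenerationURL and videoBody stand in for the custom-protocol endpoint and payload; neither is part of this diff):

    const res = await fetch(hlsGenerationURL, { method: "POST", body: videoBody });
    if (res.status === 204) {
        // The video was small and already H.264; no HLS stream was generated.
        return undefined;
    }
    // Otherwise the response is the JSON object with the playlist token and
    // the other metadata mentioned in the doc comment above.
    return await res.json();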
@@ -66,6 +66,7 @@ import type { IpcRendererEvent } from "electron";
 import type {
     AppUpdate,
     CollectionMapping,
+    FFmpegCommand,
     FolderWatch,
     PendingUploads,
     ZipItem,
@@ -201,7 +202,7 @@ const generateImageThumbnail = (
     );

 const ffmpegExec = (
-    command: string[],
+    command: FFmpegCommand,
     dataOrPathOrZipItem: Uint8Array | string | ZipItem,
     outputFileExtension: string,
 ) =>
@@ -32,3 +32,5 @@ export interface PendingUploads {
     filePaths: string[];
     zipItems: ZipItem[];
 }
+
+export type FFmpegCommand = string[] | { default: string[]; hdr: string[] };
@@ -421,6 +421,13 @@ class Configuration {
     return _preferences.getString(endPointKey) ?? endpoint;
   }

+  // isEnteProduction checks if the current endpoint is the default production
+  // endpoint. This is used to determine if the app is in production mode or
+  // not. The default production endpoint is set in the environment variable
+  bool isEnteProduction() {
+    return getHttpEndpoint() == kDefaultProductionEndpoint;
+  }
+
   Future<void> setHttpEndpoint(String endpoint) async {
     await _preferences.setString(endPointKey, endpoint);
     Bus.instance.fire(EndpointUpdatedEvent());
@@ -22,6 +22,7 @@ import "package:photos/services/search_service.dart";
 import "package:photos/ui/home/memories/full_screen_memory.dart";
 import "package:photos/utils/navigation_util.dart";
 import "package:shared_preferences/shared_preferences.dart";
+import "package:synchronized/synchronized.dart";

 class MemoriesCacheService {
   static const _lastMemoriesCacheUpdateTimeKey = "lastMemoriesCacheUpdateTime";
@@ -39,7 +40,8 @@ class MemoriesCacheService {

   List<SmartMemory>? _cachedMemories;
   bool _shouldUpdate = false;
   bool _isUpdateInProgress = false;
+
+  final _memoriesUpdateLock = Lock();

   MemoriesCacheService(this._prefs) {
     _logger.fine("MemoriesCacheService constructor");
@@ -160,74 +162,67 @@ class MemoriesCacheService {
       return;
     }
     _checkIfTimeToUpdateCache();
-    try {
-      if ((!_shouldUpdate && !forced) || _isUpdateInProgress) {
-        _logger.info(
-          "No update needed (shouldUpdate: $_shouldUpdate, forced: $forced, isUpdateInProgress $_isUpdateInProgress)",
-        );
-        if (_isUpdateInProgress) {
-          int waitingTime = 0;
-          while (_isUpdateInProgress && waitingTime < 60) {
-            await Future.delayed(const Duration(seconds: 1));
-            waitingTime++;
-          }
-        }
-        return;
-      }
-      _logger.info(
-        "Updating memories cache (shouldUpdate: $_shouldUpdate, forced: $forced, isUpdateInProgress $_isUpdateInProgress)",
-      );
-      _isUpdateInProgress = true;
-      final EnteWatch? w = kDebugMode ? EnteWatch("MemoriesCacheService") : null;
-      w?.start();
-      final oldCache = await _readCacheFromDisk();
-      w?.log("gotten old cache");
-      final MemoriesCache newCache = _processOldCache(oldCache);
-      w?.log("processed old cache");
-      // calculate memories for this period and for the next period
-      final now = DateTime.now();
-      final next = now.add(kMemoriesUpdateFrequency);
-      final nowResult = await smartMemoriesService.calcMemories(now, newCache);
-      if (nowResult.isEmpty) {
-        _cachedMemories = [];
-        _isUpdateInProgress = false;
-        _logger.warning(
-          "No memories found for now, not updating cache and returning early",
-        );
-        return;
-      }
-      final nextResult = await smartMemoriesService.calcMemories(next, newCache);
-      w?.log("calculated new memories");
-      for (final nowMemory in nowResult.memories) {
-        newCache.toShowMemories.add(ToShowMemory.fromSmartMemory(nowMemory, now));
-      }
-      for (final nextMemory in nextResult.memories) {
-        newCache.toShowMemories.add(ToShowMemory.fromSmartMemory(nextMemory, next));
-      }
-      newCache.baseLocations.addAll(nowResult.baseLocations);
-      w?.log("added memories to cache");
-      final file = File(await _getCachePath());
-      if (!file.existsSync()) {
-        file.createSync(recursive: true);
-      }
-      _cachedMemories =
-          nowResult.memories.where((memory) => memory.shouldShowNow()).toList();
-      locationService.baseLocations = nowResult.baseLocations;
-      await file.writeAsBytes(
-        MemoriesCache.encodeToJsonString(newCache).codeUnits,
-      );
-      w?.log("cacheWritten");
-      await _cacheUpdated();
-      w?.logAndReset('_cacheUpdated method done');
-    } catch (e, s) {
-      _logger.info("Error updating memories cache", e, s);
-    } finally {
-      _isUpdateInProgress = false;
-    }
+    return _memoriesUpdateLock.synchronized(() async {
+      if ((!_shouldUpdate && !forced)) {
+        _logger.info(
+          "No update needed (shouldUpdate: $_shouldUpdate, forced: $forced)",
+        );
+        return;
+      }
+      _logger.info(
+        "Updating memories cache (shouldUpdate: $_shouldUpdate, forced: $forced)",
+      );
+      _isUpdateInProgress = true;
+      try {
+        final EnteWatch? w =
+            kDebugMode ? EnteWatch("MemoriesCacheService") : null;
+        w?.start();
+        final oldCache = await _readCacheFromDisk();
+        w?.log("gotten old cache");
+        final MemoriesCache newCache = _processOldCache(oldCache);
+        w?.log("processed old cache");
+        // calculate memories for this period and for the next period
+        final now = DateTime.now();
+        final next = now.add(kMemoriesUpdateFrequency);
+        final nowResult =
+            await smartMemoriesService.calcMemories(now, newCache);
+        if (nowResult.isEmpty) {
+          _cachedMemories = [];
+          _logger.warning(
+            "No memories found for now, not updating cache and returning early",
+          );
+          return;
+        }
+        final nextResult =
+            await smartMemoriesService.calcMemories(next, newCache);
+        w?.log("calculated new memories");
+        for (final nowMemory in nowResult.memories) {
+          newCache.toShowMemories
+              .add(ToShowMemory.fromSmartMemory(nowMemory, now));
+        }
+        for (final nextMemory in nextResult.memories) {
+          newCache.toShowMemories
+              .add(ToShowMemory.fromSmartMemory(nextMemory, next));
+        }
+        newCache.baseLocations.addAll(nowResult.baseLocations);
+        w?.log("added memories to cache");
+        final file = File(await _getCachePath());
+        if (!file.existsSync()) {
+          file.createSync(recursive: true);
+        }
+        _cachedMemories = nowResult.memories
+            .where((memory) => memory.shouldShowNow())
+            .toList();
+        locationService.baseLocations = nowResult.baseLocations;
+        await file.writeAsBytes(
+          MemoriesCache.encodeToJsonString(newCache).codeUnits,
+        );
+        w?.log("cacheWritten");
+        await _cacheUpdated();
+        w?.logAndReset('_cacheUpdated method done');
+      } catch (e, s) {
+        _logger.info("Error updating memories cache", e, s);
+      } finally {
+        _isUpdateInProgress = false;
+      }
+    });
   }

   /// WARNING: Use for testing only, TODO: lau: remove later
@@ -355,6 +355,8 @@ class PreviewVideoStore {
         collectionID: enteFile.collectionID ?? 0,
       );
+      _removeFromLocks(enteFile).ignore();
+      Directory(prefix).delete(recursive: true).ignore();

       Bus.instance.fire(PreviewUpdatedEvent(_items));
     }
   } finally {
@@ -1,6 +1,5 @@
 import 'package:flutter/material.dart';
 import "package:photos/core/configuration.dart";
-import "package:photos/core/constants.dart";
 import "package:photos/generated/l10n.dart";

 class DeveloperSettingsWidget extends StatelessWidget {
@@ -8,8 +7,8 @@ class DeveloperSettingsWidget extends StatelessWidget {

   @override
   Widget build(BuildContext context) {
-    final endpoint = Configuration.instance.getHttpEndpoint();
-    if (endpoint != kDefaultProductionEndpoint) {
+    if (!Configuration.instance.isEnteProduction()) {
+      final endpoint = Configuration.instance.getHttpEndpoint();
       final endpointURI = Uri.parse(endpoint);
       return Padding(
         padding: const EdgeInsets.only(bottom: 20),
lib/ui/settings/pending_sync/path_info_storage_viewer.dart (new file)
@@ -0,0 +1,129 @@
+import 'dart:io';
+
+import 'package:flutter/foundation.dart';
+import 'package:flutter/material.dart';
+import 'package:flutter/services.dart';
+import 'package:logging/logging.dart';
+import 'package:photos/theme/ente_theme.dart';
+import 'package:photos/ui/components/captioned_text_widget.dart';
+import 'package:photos/ui/components/menu_item_widget/menu_item_widget.dart';
+import 'package:photos/utils/standalone/data.dart';
+import 'package:photos/utils/standalone/directory_content.dart';
+
+class PathInfoStorageItem {
+  final String path;
+  final String title;
+  final bool allowCacheClear;
+  final String match;
+
+  PathInfoStorageItem.name(
+    this.path,
+    this.title,
+    this.match, {
+    this.allowCacheClear = false,
+  });
+}
+
+class PathInfoStorageViewer extends StatefulWidget {
+  final PathInfoStorageItem item;
+  final bool removeTopRadius;
+  final bool removeBottomRadius;
+  final bool enableDoubleTapClear;
+
+  const PathInfoStorageViewer(
+    this.item, {
+    this.removeTopRadius = false,
+    this.removeBottomRadius = false,
+    this.enableDoubleTapClear = false,
+    super.key,
+  });
+
+  @override
+  State<PathInfoStorageViewer> createState() => _PathInfoStorageViewerState();
+}
+
+class _PathInfoStorageViewerState extends State<PathInfoStorageViewer> {
+  final Logger _logger = Logger((_PathInfoStorageViewerState).toString());
+
+  @override
+  void initState() {
+    super.initState();
+  }
+
+  void _safeRefresh() async {
+    if (mounted) {
+      setState(() => {});
+    }
+  }
+
+  @override
+  Widget build(BuildContext context) {
+    return FutureBuilder<DirectoryStat>(
+      future: getDirectoryStat(
+        Directory(widget.item.path),
+        prefix: widget.item.match,
+      ),
+      builder: (context, snapshot) {
+        if (snapshot.hasData) {
+          return _buildMenuItemWidget(snapshot.data, null);
+        } else if (snapshot.hasError) {
+          _logger.severe(
+            "Failed to get state for ${widget.item.title}",
+            snapshot.error,
+          );
+          return _buildMenuItemWidget(null, snapshot.error);
+        } else {
+          return _buildMenuItemWidget(null, null);
+        }
+      },
+    );
+  }
+
+  Widget _buildMenuItemWidget(DirectoryStat? stat, Object? err) {
+    return MenuItemWidget(
+      key: UniqueKey(),
+      alignCaptionedTextToLeft: true,
+      captionedTextWidget: CaptionedTextWidget(
+        title: widget.item.title,
+        subTitle: stat != null ? '${stat.fileCount}' : null,
+        subTitleColor: getEnteColorScheme(context).textFaint,
+      ),
+      trailingWidget: stat != null
+          ? Padding(
+              padding: const EdgeInsets.only(left: 12.0),
+              child: Text(
+                formatBytes(stat.size),
+                style: getEnteTextTheme(context)
+                    .small
+                    .copyWith(color: getEnteColorScheme(context).textFaint),
+              ),
+            )
+          : SizedBox.fromSize(
+              size: const Size.square(14),
+              child: CircularProgressIndicator(
+                strokeWidth: 2,
+                color: getEnteColorScheme(context).strokeMuted,
+              ),
+            ),
+      trailingIcon: err != null ? Icons.error_outline_outlined : null,
+      trailingIconIsMuted: err != null,
+      singleBorderRadius: 8,
+      menuItemColor: getEnteColorScheme(context).fillFaint,
+      isBottomBorderRadiusRemoved: widget.removeBottomRadius,
+      isTopBorderRadiusRemoved: widget.removeTopRadius,
+      showOnlyLoadingState: true,
+      onTap: () async {
+        if (kDebugMode) {
+          await Clipboard.setData(ClipboardData(text: widget.item.path));
+          debugPrint(widget.item.path);
+        }
+      },
+      onDoubleTap: () async {
+        if (widget.item.allowCacheClear && widget.enableDoubleTapClear) {
+          await deleteDirectoryContents(widget.item.path);
+          _safeRefresh();
+        }
+      },
+    );
+  }
+}
lib/ui/settings/pending_sync/pending_sync_info_screen.dart (new file)
@@ -0,0 +1,167 @@
+import "package:flutter/material.dart";
+import "package:photos/core/configuration.dart";
+import "package:photos/generated/l10n.dart";
+import "package:photos/service_locator.dart";
+import "package:photos/ui/components/buttons/icon_button_widget.dart";
+import "package:photos/ui/components/menu_section_title.dart";
+import "package:photos/ui/components/title_bar_title_widget.dart";
+import "package:photos/ui/components/title_bar_widget.dart";
+import "package:photos/ui/settings/pending_sync/path_info_storage_viewer.dart";
+
+// Preview Video related items -> pv
+// final String tempDir = Configuration.instance.getTempDirectory();
+// final String prefix = "${tempDir}_${enteFile.uploadedFileID}_${newID("pv")}";
+//
+// Recovery Key -> ente-recovery-key.txt
+// Configuration.instance.getTempDirectory() + "ente-recovery-key.txt",
+//
+// Encrypted files (upload), decrypted files (download) -> .encrypted & .decrypted
+// final String tempDir = Configuration.instance.getTempDirectory();
+// final String encryptedFilePath = "$tempDir${file.uploadedFileID}.encrypted";
+// final String decryptedFilePath = "$tempDir${file.uploadedFileID}.decrypted";
+//
+// Live photo compressed version -> .elp
+// final livePhotoPath = tempPath + uniqueId + "_${file.generatedID}.elp";
+//
+// Explicit uploads -> _file.encrypted & _thumb.encrypted
+// final encryptedFilePath = multipartEntryExists
+//     ? '$tempDirectory$existingMultipartEncFileName'
+//     : '$tempDirectory$uploadTempFilePrefix${uniqueID}_file.encrypted';
+// final encryptedThumbnailPath =
+//     '$tempDirectory$uploadTempFilePrefix${uniqueID}_thumb.encrypted';
+
+class PendingSyncInfoScreen extends StatefulWidget {
+  const PendingSyncInfoScreen({super.key});
+
+  @override
+  State<PendingSyncInfoScreen> createState() => _PendingSyncInfoScreenState();
+}
+
+class _PendingSyncInfoScreenState extends State<PendingSyncInfoScreen> {
+  final List<PathInfoStorageItem> paths = [];
+  late bool internalUser;
+  final int _refreshCounterKey = 0;
+
+  @override
+  void initState() {
+    super.initState();
+    internalUser = flagService.internalUser;
+    addPath();
+  }
+
+  void addPath() async {
+    final String tempDownload = Configuration.instance.getTempDirectory();
+    paths.addAll([
+      PathInfoStorageItem.name(
+        tempDownload,
+        "Encrypted Upload (File)",
+        "_file.encrypted",
+        allowCacheClear: false,
+      ),
+      PathInfoStorageItem.name(
+        tempDownload,
+        "Encrypted Upload (Thumb)",
+        "_thumb.encrypted",
+        allowCacheClear: false,
+      ),
+      PathInfoStorageItem.name(
+        tempDownload,
+        "Live photo",
+        ".elp",
+        allowCacheClear: false,
+      ),
+      PathInfoStorageItem.name(
+        tempDownload,
+        "Encrypted Data",
+        ".encrypted",
+        allowCacheClear: false,
+      ),
+      PathInfoStorageItem.name(
+        tempDownload,
+        "Decrypted Data",
+        ".decrypted",
+        allowCacheClear: false,
+      ),
+      PathInfoStorageItem.name(
+        tempDownload,
+        "Video Preview",
+        "pv",
+        allowCacheClear: false,
+      ),
+    ]);
+    if (mounted) {
+      setState(() => {});
+    }
+  }
+
+  @override
+  Widget build(BuildContext context) {
+    return Scaffold(
+      body: CustomScrollView(
+        primary: false,
+        slivers: <Widget>[
+          TitleBarWidget(
+            flexibleSpaceTitle: const TitleBarTitleWidget(title: "App Temp"),
+            actionIcons: [
+              IconButtonWidget(
+                icon: Icons.close_outlined,
+                iconButtonType: IconButtonType.secondary,
+                onTap: () {
+                  Navigator.pop(context);
+                  if (Navigator.canPop(context)) {
+                    Navigator.pop(context);
+                  }
+                  if (Navigator.canPop(context)) {
+                    Navigator.pop(context);
+                  }
+                },
+              ),
+            ],
+          ),
+          SliverList(
+            delegate: SliverChildBuilderDelegate(
+              (context, index) {
+                return Padding(
+                  padding: const EdgeInsets.symmetric(horizontal: 16),
+                  child: Column(
+                    mainAxisSize: MainAxisSize.min,
+                    children: [
+                      Column(
+                        children: [
+                          MenuSectionTitle(
+                            title: S.of(context).cachedData,
+                          ),
+                          ListView.builder(
+                            shrinkWrap: true,
+                            padding: const EdgeInsets.all(0),
+                            physics: const ScrollPhysics(),
+                            // to disable GridView's scrolling
+                            itemBuilder: (context, index) {
+                              final path = paths[index];
+                              return PathInfoStorageViewer(
+                                path,
+                                removeTopRadius: index > 0,
+                                removeBottomRadius: index < paths.length - 1,
+                                enableDoubleTapClear: internalUser,
+                                key: ValueKey("$index-$_refreshCounterKey"),
+                              );
+                            },
+                            itemCount: paths.length,
+                          ),
+                          const SizedBox(
+                            height: 24,
+                          ),
+                        ],
+                      ),
+                    ],
+                  ),
+                );
+              },
+              childCount: 1,
+            ),
+          ),
+        ],
+      ),
+    );
+  }
+}
@@ -86,8 +86,10 @@ class SettingsPage extends StatelessWidget {

     contents.add(const SizedBox(height: 8));
     if (hasLoggedIn) {
+      // show banner in debug mode and ente production
       final showStorageBonusBanner =
-          storageBonusService.shouldShowStorageBonus();
+          storageBonusService.shouldShowStorageBonus() &&
+              (kDebugMode || Configuration.instance.isEnteProduction());
       contents.addAll([
         const StorageCardWidget(),
         (showStorageBonusBanner)
@@ -15,7 +15,6 @@ import 'package:modal_bottom_sheet/modal_bottom_sheet.dart';
 import "package:move_to_background/move_to_background.dart";
 import "package:package_info_plus/package_info_plus.dart";
 import 'package:photos/core/configuration.dart';
-import "package:photos/core/constants.dart";
 import 'package:photos/core/event_bus.dart';
 import 'package:photos/ente_theme_data.dart';
 import 'package:photos/events/account_configured_event.dart';
@@ -255,8 +254,7 @@ class _HomeWidgetState extends State<HomeWidget> {
     if (Platform.isAndroid &&
         !localSettings.hasConfiguredInAppLinkPermissions() &&
         RemoteSyncService.instance.isFirstRemoteSyncDone() &&
-        Configuration.instance.getHttpEndpoint() ==
-            kDefaultProductionEndpoint) {
+        Configuration.instance.isEnteProduction()) {
       PackageInfo.fromPlatform().then((packageInfo) {
         final packageName = packageInfo.packageName;
         if (packageName == 'io.ente.photos.independent' ||
@@ -4,9 +4,11 @@ import 'package:flutter/foundation.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter/services.dart';
 import 'package:logging/logging.dart';
+import "package:photos/generated/l10n.dart";
 import 'package:photos/theme/ente_theme.dart';
 import 'package:photos/ui/components/captioned_text_widget.dart';
 import 'package:photos/ui/components/menu_item_widget/menu_item_widget.dart';
+import "package:photos/ui/settings/pending_sync/pending_sync_info_screen.dart";
 import 'package:photos/utils/standalone/data.dart';
 import 'package:photos/utils/standalone/directory_content.dart';
@@ -119,6 +121,15 @@ class _PathStorageViewerState extends State<PathStorageViewer> {
           _safeRefresh();
         }
       },
+      onLongPress: () async {
+        if (widget.item.title == S.of(context).pendingSync) {
+          await Navigator.of(context).push(
+            MaterialPageRoute(
+              builder: (context) => const PendingSyncInfoScreen(),
+            ),
+          );
+        }
+      },
     );
   }
 }
@@ -176,7 +176,7 @@ class _FileSelectionActionsWidgetState
       SelectionActionButton(
         icon: Icons.remove_circle_outline,
         labelText: S.of(context).notPersonLabel(widget.person!.data.name),
-        onTap: anyUploadedFiles ? _onNotpersonClicked : null,
+        onTap: _onNotpersonClicked,
       ),
     );
     if (ownedFilesCount == 1) {
@@ -195,7 +195,7 @@ class _FileSelectionActionsWidgetState
       SelectionActionButton(
         labelText: S.of(context).notThisPerson,
         icon: Icons.remove_circle_outline,
-        onTap: anyUploadedFiles ? _onRemoveFromClusterClicked : null,
+        onTap: _onRemoveFromClusterClicked,
       ),
     );
   }
@@ -81,7 +81,10 @@ void _buildPrettyString(
   }
 }

-Future<DirectoryStat> getDirectoryStat(Directory directory) async {
+Future<DirectoryStat> getDirectoryStat(
+  Directory directory, {
+  String? prefix,
+}) async {
   int size = 0;
   final List<DirectoryStat> subDirectories = [];
   final Map<String, int> fileNameToSize = {};
@@ -89,6 +92,10 @@ Future<DirectoryStat> getDirectoryStat(Directory directory) async {
   if (await directory.exists()) {
     final List<FileSystemEntity> entities = directory.listSync();
     for (FileSystemEntity entity in entities) {
+      if (prefix != null && !entity.path.contains(prefix)) {
+        continue;
+      }
+
       if (entity is File) {
         final int fileSize = await File(entity.path).length();
         size += fileSize;
@@ -359,7 +359,7 @@ export interface Electron {
      * (specified as {@link outputPathPlaceholder} in {@link command}).
      */
     ffmpegExec: (
-        command: string[],
+        command: FFmpegCommand,
         dataOrPathOrZipItem: Uint8Array | string | ZipItem,
         outputFileExtension: string,
     ) => Promise<Uint8Array>;
@@ -740,3 +740,15 @@ export interface PendingUploads {
      */
     zipItems: ZipItem[];
 }
+
+/**
+ * A command that we can ask FFmpeg to run for us.
+ *
+ * [Note: Alternative FFmpeg command for HDR videos]
+ *
+ * Usually, this is a single command (specified as an array of strings
+ * containing the "words" of the command). However, we can also provide two
+ * alternative commands - one to run when the input is (heuristically) detected
+ * as an HDR video, and one otherwise.
+ */
+export type FFmpegCommand = string[] | { default: string[]; hdr: string[] };
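A minimal sketch of how the two shapes of this union are meant to be consumed, mirroring the resolution logic added in the desktop and wasm ffmpegExec implementations elsewhere in this diff (illustrative only; resolveCommand is a hypothetical helper):

    const resolveCommand = (
        command: FFmpegCommand,
        isHDR: boolean, // from a heuristic detection of the input video
    ): string[] =>
        Array.isArray(command) ? command : isHDR ? command.hdr : command.default;

Callers that don't care about HDR keep passing a plain string[] unchanged; only call sites that want an HDR-specific variant (currently thumbnail generation) supply the { default, hdr } pair.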
@@ -15,6 +15,7 @@ import {
|
||||
parseMetadataDate,
|
||||
type ParsedMetadata,
|
||||
} from "ente-media/file-metadata";
|
||||
import { settingsSnapshot } from "ente-new/photos/services/settings";
|
||||
import {
|
||||
ffmpegPathPlaceholder,
|
||||
inputPathPlaceholder,
|
||||
@@ -37,7 +38,14 @@ import { ffmpegExecWeb } from "./web";
  */
 export const generateVideoThumbnailWeb = async (blob: Blob) =>
     _generateVideoThumbnail((seekTime: number) =>
-        ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg"),
+        ffmpegExecWeb(
+            // TODO(HLS): Enable for all
+            settingsSnapshot().isInternalUser
+                ? makeGenThumbnailCommand(seekTime)
+                : _makeGenThumbnailCommand(seekTime, false),
+            blob,
+            "jpeg",
+        ),
     );

 const _generateVideoThumbnail = async (
@@ -79,16 +87,39 @@ export const generateVideoThumbnailNative = async (
     ),
 );

-const makeGenThumbnailCommand = (seekTime: number) => [
+const makeGenThumbnailCommand = (seekTime: number) => ({
+    default: _makeGenThumbnailCommand(seekTime, false),
+    hdr: _makeGenThumbnailCommand(seekTime, true),
+});
+
+const _makeGenThumbnailCommand = (seekTime: number, forHDR: boolean) => [
     ffmpegPathPlaceholder,
     "-i",
     inputPathPlaceholder,
     // Seek to seekTime in the video.
     "-ss",
     `00:00:0${seekTime}`,
     // Take the first frame
     "-vframes",
     "1",
     // Apply a filter to this frame
     "-vf",
-    "scale=-1:720",
+    [
+        // Scale it to a maximum height of 720 keeping aspect ratio, ensuring
+        // that the dimensions are even (subsequent filters require this).
+        "scale=-2:720",
+        forHDR
+            ? // Apply a tonemap to ensure that thumbnails of HDR videos do
+              // not look washed out. See: [Note: Tonemapping HDR to HD].
+              [
+                  "zscale=transfer=linear",
+                  "tonemap=tonemap=hable:desat=0",
+                  "zscale=primaries=709:transfer=709:matrix=709",
+              ]
+            : [],
+    ]
+        .flat()
+        .join(","),
     outputPathPlaceholder,
 ];
@@ -1,7 +1,9 @@
 import { FFmpeg } from "@ffmpeg/ffmpeg";
 import { newID } from "ente-base/id";
 import log from "ente-base/log";
+import type { FFmpegCommand } from "ente-base/types/ipc";
 import { PromiseQueue } from "ente-utils/promise";
+import z from "zod";
 import {
     ffmpegPathPlaceholder,
     inputPathPlaceholder,
@@ -51,7 +53,7 @@ const createFFmpeg = async () => {
  * {@link command} on {@link blob}.
  */
 export const ffmpegExecWeb = async (
-    command: string[],
+    command: FFmpegCommand,
     blob: Blob,
     outputFileExtension: string,
 ): Promise<Uint8Array> => {
@@ -68,7 +70,7 @@ export const ffmpegExecWeb = async (

 const ffmpegExec = async (
     ffmpeg: FFmpeg,
-    command: string[],
+    command: FFmpegCommand,
     outputFileExtension: string,
     blob: Blob,
 ) => {
@@ -76,8 +78,6 @@ const ffmpegExec = async (
     const outputSuffix = outputFileExtension ? "." + outputFileExtension : "";
     const outputPath = newID("out_") + outputSuffix;

-    const cmd = substitutePlaceholders(command, inputPath, outputPath);
-
     const inputData = new Uint8Array(await blob.arrayBuffer());

     // Exit status of the ffmpeg.exec invocation.
@@ -89,6 +89,21 @@ const ffmpegExec = async (

     await ffmpeg.writeFile(inputPath, inputData);

+    let resolvedCommand: string[];
+    if (Array.isArray(command)) {
+        resolvedCommand = command;
+    } else {
+        const isHDR = await isHDRVideo(ffmpeg, inputPath);
+        log.debug(() => `[wasm] input file is ${isHDR ? "" : "not "}HDR`);
+        resolvedCommand = isHDR ? command.hdr : command.default;
+    }
+
+    const cmd = substitutePlaceholders(
+        resolvedCommand,
+        inputPath,
+        outputPath,
+    );
+
     status = await ffmpeg.exec(cmd);
     if (status !== 0) {
         log.info(
@@ -139,3 +154,98 @@ const substitutePlaceholders = (
|
||||
}
|
||||
})
|
||||
.filter((s) => s !== undefined);
|
||||
+
+const isHDRVideoFFProbeOutput = z.object({
+    streams: z.array(z.object({ color_transfer: z.string().optional() })),
+});
+
+/**
+ * A variant of the {@link isHDRVideo} function in the desktop app source (see
+ * `ffmpeg.ts`), except here we have access to ffprobe and can use that instead
+ * of parsing the ffmpeg stderr.
+ *
+ * See: [Note: Alternative FFmpeg command for HDR videos]
+ *
+ * @param inputFilePath The path to a video file on the FFmpeg FS.
+ *
+ * @returns `true` if this file is likely an HDR video. Exceptions are treated
+ * as `false` to make this function safe to invoke without breaking the happy
+ * path.
+ */
+const isHDRVideo = async (ffmpeg: FFmpeg, inputFilePath: string) => {
+    try {
+        const jsonString = await ffprobeOutput(
+            ffmpeg,
+            [
+                ["-i", inputFilePath],
+                // Show information about streams.
+                "-show_streams",
+                // Select the first video stream. This is not necessarily
+                // correct for a multi-stream file, because ffmpeg's automatic
+                // mapping will use the highest resolution stream, but short of
+                // reinventing ffmpeg's stream resolution mechanism, it is a
+                // reasonable assumption for our current, heuristic check.
+                ["-select_streams", "v:0"],
+                // Output JSON.
+                ["-of", "json"],
+                ["-o", "output.json"],
+            ].flat(),
+            "output.json",
+        );
+
+        const output = isHDRVideoFFProbeOutput.parse(JSON.parse(jsonString));
+        switch (output.streams[0]?.color_transfer) {
+            case "smpte2084":
+            case "arib-std-b67":
+                return true;
+            default:
+                return false;
+        }
+    } catch (e) {
+        log.warn(`Could not detect HDR status of ${inputFilePath}`, e);
+        return false;
+    }
+};
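To make the color_transfer heuristic concrete, here is a small illustrative check (sample data, not from the commit). HDR10 content typically reports the PQ transfer function as "smpte2084", and HLG content reports "arib-std-b67"; everything else is treated as SDR:

// Example only: a trimmed ffprobe -show_streams output for an HDR10 video.
const sampleJSON = `{ "streams": [{ "color_transfer": "smpte2084" }] }`;
const sample = isHDRVideoFFProbeOutput.parse(JSON.parse(sampleJSON));
console.assert(sample.streams[0]?.color_transfer == "smpte2084"); // HDR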
+
+/**
+ * Return the textual output produced by invoking the given ffprobe
+ * {@link cmd} verbatim. The command is expected to write to a file named
+ * {@link outputPath} in the FFmpeg FS.
+ *
+ * The file generated at {@link outputPath} is removed in all cases.
+ */
+const ffprobeOutput = async (
+    ffmpeg: FFmpeg,
+    cmd: string[],
+    outputPath: string,
+) => {
+    // Exit status of the ffmpeg.ffprobe invocation.
+    // `0` if no error, `!= 0` if timeout (1) or error.
+    let status: number | undefined;
+
+    try {
+        status = await ffmpeg.ffprobe(cmd);
+        // Currently, ffprobe incorrectly returns status -1 on success.
+        // https://github.com/ffmpegwasm/ffmpeg.wasm/issues/817
+        if (status !== 0 && status !== -1) {
+            log.info(
+                `[wasm] ffprobe command failed with exit code ${status}: ${cmd.join(" ")}`,
+            );
+            throw new Error(`ffprobe command failed with exit code ${status}`);
+        }
+
+        const result = await ffmpeg.readFile(outputPath, "utf8");
+        if (typeof result !== "string") throw new Error("Expected text data");
+
+        return result;
+    } finally {
+        try {
+            await ffmpeg.deleteFile(outputPath);
+        } catch (e) {
+            // The output file might not even exist if the command did not
+            // succeed, so only log the deletion failure on success.
+            if (status === 0 || status === -1) {
+                log.error(`Failed to remove output ${outputPath}`, e);
+            }
+        }
+    }
+};
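Since ffprobeOutput takes the full argument list, it can serve other probes too. A minimal sketch of a hypothetical second caller (assumed, not part of this commit) that reads container-level metadata instead of stream metadata:

// Hypothetical usage: fetch container format info as JSON for a file on the
// FFmpeg FS. "-show_format" is the container-level analogue of "-show_streams".
const formatJSON = await ffprobeOutput(
    ffmpeg,
    [["-i", inputPath], "-show_format", ["-of", "json"], ["-o", "format.json"]].flat(),
    "format.json",
);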

@@ -1,6 +1,6 @@
import log from "ente-base/log";
import { customAPIOrigin } from "ente-base/origins";
-import type { ZipItem } from "ente-base/types/ipc";
+import type { Electron, ZipItem } from "ente-base/types/ipc";
import { nullToUndefined } from "ente-utils/transform";
import { z } from "zod";

@@ -32,27 +32,31 @@ export const resetUploadState = () => {
/**
 * An item to upload is one of the following:
 *
- * 1. A file drag-and-dropped or selected by the user when we are running in the
- *    web browser. These is the {@link File} case.
+ * 1. [web] A file drag-and-dropped or selected by the user when we are running
+ *    in the web browser. This is the {@link File} case.
 *
- * 2. A file drag-and-dropped or selected by the user when we are running in the
- *    context of our desktop app. In such cases, we also have the absolute path
- *    of the file in the user's local file system. This is the
+ * 2. [desktop] A file drag-and-dropped or selected by the user when we are
+ *    running in the context of our desktop app. In such cases, we also have the
+ *    absolute path of the file in the user's local file system. This is the
 *    {@link FileAndPath} case.
 *
- * 3. A file path programmatically requested by the desktop app. For example, we
- *    might be resuming a previously interrupted upload after an app restart
- *    (thus we no longer have access to the {@link File} from case 2). Or we
- *    could be uploading a file this is in one of the folders the user has asked
- *    us to watch for changes. This is the `string` case.
+ * 3. [desktop] A file path programmatically requested by the desktop app. For
+ *    example, we might be resuming a previously interrupted upload after an app
+ *    restart (thus we no longer have access to the {@link File} from case 2).
+ *    Or we could be uploading a file that is in one of the folders the user has
+ *    asked us to watch for changes. This is the `string` case.
 *
- * 4. A file within a zip file on the user's local file system. This too is only
- *    possible when we are running in the context of our desktop app. The user
- *    might have drag-and-dropped or selected a zip file, or it might be a zip
- *    file that they'd previously selected but we now are resuming an
- *    interrupted upload of. Either ways, what we have is a tuple containing the
- *    (path to zip file, and the name of an entry within that zip file). This is
- *    the {@link ZipItem} case.
+ * 4. [desktop] A file within a zip file on the user's local file system. This
+ *    too is only possible when we are running in the context of our desktop
+ *    app. The user might have drag-and-dropped or selected a zip file, or it
+ *    might be a zip file that they'd previously selected but we now are
+ *    resuming an interrupted upload of. Either way, what we have is a tuple
+ *    containing the (path to zip file, and the name of an entry within that zip
+ *    file). This is the {@link ZipItem} case.
 *
+ * Only case 1 is possible when we're running in the web app.
+ *
+ * Only cases 2, 3, and 4 are possible when we're running in the desktop app.
+ *
 * Also see: [Note: Reading a UploadItem].
 */
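Reading the four cases together, the union itself (declared just below this comment in the source, outside the visible hunk) is presumably along these lines:

// Assumed sketch; the actual declaration is not part of the visible diff.
export type UploadItem = File | FileAndPath | string | ZipItem;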
@@ -75,6 +79,30 @@ export interface FileAndPath {
 */
export type DesktopUploadItem = Exclude<UploadItem, File>;

+/**
+ * Assert that the given {@link UploadItem} is, in fact, a {@link DesktopUploadItem}.
+ *
+ * @param electron A witness to the fact that we're running in the context of
+ * the desktop app. The electron instance is not actually used by this function.
+ *
+ * @param uploadItem The upload item we obtained from an arbitrary place in the app.
+ *
+ * @returns The same {@link uploadItem}, but after excluding the cases that can
+ * only happen when running in the web app.
+ */
+export const toDesktopUploadItem = (
+    electron: Electron,
+    uploadItem: UploadItem,
+): DesktopUploadItem => {
+    if (uploadItem instanceof File) {
+        log.info(`Invalid upload item (electron: ${!!electron})`, uploadItem);
+        throw new Error(
+            "Found a File upload item even though we're running in the desktop app",
+        );
+    }
+    return uploadItem;
+};
+
/**
 * For each of the cases of {@link UploadItem} that apply when we're running in
 * the context of our desktop app, return a value that can be passed to

@@ -21,7 +21,11 @@ import {
    getFilePreviewDataUploadURL,
    putVideoData,
} from "./file-data";
-import type { UploadItem } from "./upload";
+import {
+    toDesktopUploadItem,
+    type DesktopUploadItem,
+    type UploadItem,
+} from "./upload";

interface VideoProcessingQueueItem {
    /**
@@ -30,12 +34,13 @@ interface VideoProcessingQueueItem {
     */
    file: EnteFile;
    /**
-     * The {@link UploadItem} if available for the newly uploaded {@link file}.
+     * The {@link DesktopUploadItem} if available for the newly uploaded
+     * {@link file}.
     *
     * If present, this serves as an optimization allowing us to directly read
     * the file off the user's filesystem.
     */
-    uploadItem: UploadItem | undefined;
+    uploadItem: DesktopUploadItem | undefined;
}

/**
@@ -325,13 +330,15 @@ export const processVideoNewUpload = (
    // TODO(HLS):
    if (!isVideoProcessingEnabled()) return;
    if (file.metadata.fileType !== FileType.video) return;

    const electron = globalThis.electron;
    if (!electron) {
        // Processing very large videos with the current ffmpeg Wasm
        // implementation can cause the app to crash, esp. on mobile devices
        // (e.g. https://github.com/ffmpegwasm/ffmpeg.wasm/issues/851).
        //
        // So the video processing only happens in the desktop app, which uses
-        // the much more efficient native ffmpeg integration.
+        // the much more efficient native FFmpeg integration.
        return;
    }

@@ -344,7 +351,10 @@ export const processVideoNewUpload = (
    }

    // Enqueue the item.
-    _state.videoProcessingQueue.push({ file, uploadItem });
+    _state.videoProcessingQueue.push({
+        file,
+        uploadItem: toDesktopUploadItem(electron, uploadItem),
+    });

    // Tickle the processor if it isn't already running.
    _state.queueProcessor ??= processQueue(electron);
@@ -420,12 +430,18 @@ const processQueueItem = async (

    log.info(`Generate HLS for ${fileLogID(file)} | start`);

-    const { playlistToken, dimensions, videoSize } = await initiateGenerateHLS(
+    const res = await initiateGenerateHLS(
        electron,
        sourceVideo!,
        objectUploadURL,
    );

+    if (!res) {
+        log.info(`Generate HLS for ${fileLogID(file)} | not-required`);
+        return;
+    }
+
+    const { playlistToken, dimensions, videoSize } = res;
    try {
        const playlist = await readVideoStream(electron, playlistToken).then(
            (res) => res.text(),

@@ -8,7 +8,7 @@

import type { Electron, ElectronMLWorker, ZipItem } from "ente-base/types/ipc";
import { z } from "zod";
-import type { UploadItem } from "../services/upload";
+import type { DesktopUploadItem } from "../services/upload";

/**
 * Stream the given file or zip entry from the user's local file system.
@@ -188,15 +188,19 @@ export type GenerateHLSResult = z.infer<typeof GenerateHLSResult>;
 *
 * @returns a token that can be used to retrieve the generated HLS playlist, and
 * metadata about the generated video (its byte size and dimensions). See {@link
- * GenerateHLSResult.
+ * GenerateHLSResult}.
 *
+ * If the video doesn't require a separately generated stream (e.g. it is a
+ * small video that already uses a compatible codec), then this function will
+ * return `undefined`.
+ *
 * See: [Note: Preview variant of videos].
 */
export const initiateGenerateHLS = async (
    _: Electron,
-    video: UploadItem | ReadableStream,
+    video: DesktopUploadItem | ReadableStream,
    objectUploadURL: string,
-): Promise<GenerateHLSResult> => {
+): Promise<GenerateHLSResult | undefined> => {
    const params = new URLSearchParams({ op: "generate-hls", objectUploadURL });

    let body: ReadableStream | null;
@@ -213,11 +217,6 @@ export const initiateGenerateHLS = async (
        const [zipPath, entryName] = video;
        params.set("zipPath", zipPath);
        params.set("entryName", entryName);
-    } else if (video instanceof File) {
-        // A drag and dropped file, but without a path. This is a browser
-        // specific case which shouldn't happen when we're running in the
-        // desktop app. Bail.
-        throw new Error("Unexpected file without path");
    } else {
        // A File with a path. Use the path.
        params.set("path", video.path);
@@ -238,6 +237,8 @@ export const initiateGenerateHLS = async (
    if (!res.ok)
        throw new Error(`Failed to write stream to ${url}: HTTP ${res.status}`);

+    if (res.status == 204) return undefined;
+
    return GenerateHLSResult.parse(await res.json());
};
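As a usage note, the calling convention that this return type change creates (mirrored by processQueueItem earlier in this commit) looks like:

// Sketch of the caller's side: undefined means no separate stream is needed.
const res = await initiateGenerateHLS(electron, video, objectUploadURL);
if (!res) return; // small video with an already compatible codec
const { playlistToken, dimensions, videoSize } = res;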