diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx index 8e60875ff8..ff795aca78 100644 --- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx @@ -46,7 +46,6 @@ import { detectFileTypeInfo } from "services/detect-type"; import downloadManager from "services/download"; import uploadManager from "services/upload/uploadManager"; import { EnteFile } from "types/file"; -import { FileWithCollection } from "types/upload"; import { getEditorCloseConfirmationMessage } from "utils/ui"; import ColoursMenu from "./ColoursMenu"; import CropMenu, { cropRegionOfCanvas, getCropRegionArgs } from "./CropMenu"; @@ -507,15 +506,15 @@ const ImageEditorOverlay = (props: IProps) => { ); const editedFile = await getEditedFile(); - const file: FileWithCollection = { - file: editedFile, - collectionID: props.file.collectionID, + const file = { + fileOrPath: editedFile, localID: 1, + collectionID: props.file.collectionID, }; uploadManager.prepareForNewUpload(); uploadManager.showUploadProgressDialog(); - uploadManager.queueFilesForUpload([file], [collection]); + uploadManager.uploadFiles([file], [collection]); setFileURL(null); props.onClose(); props.closePhotoViewer(); diff --git a/web/apps/photos/src/components/Upload/UploadProgress/index.tsx b/web/apps/photos/src/components/Upload/UploadProgress/index.tsx index 8f16ef2d98..1acffd561e 100644 --- a/web/apps/photos/src/components/Upload/UploadProgress/index.tsx +++ b/web/apps/photos/src/components/Upload/UploadProgress/index.tsx @@ -1,18 +1,16 @@ -import { useContext, useEffect, useState } from "react"; -import { UploadProgressDialog } from "./dialog"; -import { MinimizedUploadProgress } from "./minimized"; - -import { t } from "i18next"; - import { UPLOAD_STAGES } from "constants/upload"; import UploadProgressContext from "contexts/uploadProgress"; +import { t } from "i18next"; import { AppContext } from "pages/_app"; -import { +import { useContext, useEffect, useState } from "react"; +import type { InProgressUpload, SegregatedFinishedUploads, UploadCounter, UploadFileNames, -} from "types/upload/ui"; +} from "services/upload/uploadManager"; +import { UploadProgressDialog } from "./dialog"; +import { MinimizedUploadProgress } from "./minimized"; interface Props { open: boolean; diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx index 8fe263a0a3..5cd157afac 100644 --- a/web/apps/photos/src/components/Upload/Uploader.tsx +++ b/web/apps/photos/src/components/Upload/Uploader.tsx @@ -1,4 +1,3 @@ -import { ensureElectron } from "@/next/electron"; import log from "@/next/log"; import { ElectronFile } from "@/next/types/file"; import type { CollectionMapping, Electron } from "@/next/types/ipc"; @@ -19,6 +18,13 @@ import { getPublicCollectionUploaderName, savePublicCollectionUploaderName, } from "services/publicCollectionService"; +import type { + FileWithCollection, + InProgressUpload, + SegregatedFinishedUploads, + UploadCounter, + UploadFileNames, +} from "services/upload/uploadManager"; import uploadManager, { setToUploadCollection, } from "services/upload/uploadManager"; @@ -33,13 +39,6 @@ import { SetLoading, UploadTypeSelectorIntent, } from "types/gallery"; -import { FileWithCollection, type FileWithCollection2 } from "types/upload"; -import { - InProgressUpload, - SegregatedFinishedUploads, - 
UploadCounter, - UploadFileNames, -} from "types/upload/ui"; import { getOrCreateAlbum } from "utils/collection"; import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; import { @@ -48,9 +47,9 @@ import { } from "utils/ui"; import { DEFAULT_IMPORT_SUGGESTION, - filterOutSystemFiles, getImportSuggestion, groupFilesBasedOnParentFolder, + pruneHiddenFiles, type ImportSuggestion, } from "utils/upload"; import { SetCollectionNamerAttributes } from "../Collections/CollectionNamer"; @@ -114,29 +113,41 @@ export default function Uploader(props: Props) { const [importSuggestion, setImportSuggestion] = useState( DEFAULT_IMPORT_SUGGESTION, ); + + /** + * {@link File}s that the user drag-dropped or selected for uploads. This is + * the only type of selection that is possible when we're running in the + * browser. + */ + const [webFiles, setWebFiles] = useState([]); /** * Paths of file to upload that we've received over the IPC bridge from the * code running in the Node.js layer of our desktop app. */ - const [desktopFilePaths, setDesktopFilePaths] = useState< - string[] | undefined - >(); - const [electronFiles, setElectronFiles] = useState(null); - const [webFiles, setWebFiles] = useState([]); + const [desktopFilePaths, setDesktopFilePaths] = useState([]); + /** + * TODO(MR): When? + */ + const [electronFiles, setElectronFiles] = useState([]); + + /** + * Consolidated and cleaned list obtained from {@link webFiles} and + * {@link desktopFilePaths}. + */ + const fileOrPathsToUpload = useRef<(File | string)[]>([]); - const toUploadFiles = useRef< - File[] | ElectronFile[] | string[] | undefined | null - >(null); /** * If true, then the next upload we'll be processing was initiated by our * desktop app. */ const isPendingDesktopUpload = useRef(false); + /** * If set, this will be the name of the collection that our desktop app * wishes for us to upload into. */ const pendingDesktopUploadCollectionName = useRef(""); + // This is set when the user choses a type to upload from the upload type selector dialog const pickedUploadType = useRef(null); const zipPaths = useRef(null); @@ -184,22 +195,7 @@ export default function Uploader(props: Props) { setUploadProgressView(true); } - if (isElectron()) { - ensureElectron() - .pendingUploads() - .then((pending) => { - if (pending) { - log.info("Resuming pending desktop upload", pending); - resumeDesktopUpload( - pending.type == "files" - ? PICKED_UPLOAD_TYPE.FILES - : PICKED_UPLOAD_TYPE.ZIPS, - pending.files, - pending.collectionName, - ); - } - }); - + if (electron) { const upload = (collectionName: string, filePaths: string[]) => { isPendingDesktopUpload.current = true; pendingDesktopUploadCollectionName.current = collectionName; @@ -216,6 +212,19 @@ export default function Uploader(props: Props) { }; watcher.init(upload, requestSyncWithRemote); + + electron.pendingUploads().then((pending) => { + if (pending) { + log.info("Resuming pending desktop upload", pending); + resumeDesktopUpload( + pending.type == "files" + ? 
PICKED_UPLOAD_TYPE.FILES + : PICKED_UPLOAD_TYPE.ZIPS, + pending.files, + pending.collectionName, + ); + } + }); } }, [ publicCollectionGalleryContext.accessedThroughSharedURL, @@ -300,25 +309,25 @@ export default function Uploader(props: Props) { useEffect(() => { if ( - desktopFilePaths?.length > 0 || - electronFiles?.length > 0 || - webFiles?.length > 0 || + desktopFilePaths.length > 0 || + electronFiles.length > 0 || + webFiles.length > 0 || appContext.sharedFiles?.length > 0 ) { log.info( `upload request type: ${ - desktopFilePaths?.length > 0 + desktopFilePaths.length > 0 ? "desktopFilePaths" - : electronFiles?.length > 0 + : electronFiles.length > 0 ? "electronFiles" - : webFiles?.length > 0 + : webFiles.length > 0 ? "webFiles" : "sharedFiles" } count ${ - desktopFilePaths?.length ?? - electronFiles?.length ?? - webFiles?.length ?? - appContext?.sharedFiles.length + desktopFilePaths.length + + electronFiles.length + + webFiles.length + + (appContext.sharedFiles?.length ?? 0) }`, ); if (uploadManager.isUploadRunning()) { @@ -340,30 +349,34 @@ export default function Uploader(props: Props) { props.setLoading(true); if (webFiles?.length > 0) { // File selection by drag and drop or selection of file. - toUploadFiles.current = webFiles; + fileOrPathsToUpload.current = webFiles; setWebFiles([]); } else if (appContext.sharedFiles?.length > 0) { - toUploadFiles.current = appContext.sharedFiles; + fileOrPathsToUpload.current = appContext.sharedFiles; appContext.resetSharedFiles(); } else if (electronFiles?.length > 0) { // File selection from desktop app - deprecated - toUploadFiles.current = electronFiles; + log.warn("Using deprecated code path for ElectronFiles"); + fileOrPathsToUpload.current = electronFiles.map((f) => f.path); setElectronFiles([]); } else if (desktopFilePaths && desktopFilePaths.length > 0) { // File selection from our desktop app - toUploadFiles.current = desktopFilePaths; + fileOrPathsToUpload.current = desktopFilePaths; setDesktopFilePaths(undefined); } - toUploadFiles.current = filterOutSystemFiles(toUploadFiles.current); - if (toUploadFiles.current.length === 0) { + fileOrPathsToUpload.current = pruneHiddenFiles( + fileOrPathsToUpload.current, + ); + if (fileOrPathsToUpload.current.length === 0) { props.setLoading(false); return; } const importSuggestion = getImportSuggestion( pickedUploadType.current, - toUploadFiles.current.map((file) => + fileOrPathsToUpload.current.map((file) => + /** TODO(MR): Is path valid for Web files? */ typeof file == "string" ? 
file : file["path"], ), ); @@ -406,75 +419,73 @@ export default function Uploader(props: Props) { ) => { try { log.info( - `upload file to an existing collection name:${collection.name}, collectionID:${collection.id}`, + `Uploading files existing collection id ${collection.id} (${collection.name})`, ); await preCollectionCreationAction(); - const filesWithCollectionToUpload: FileWithCollection[] = - toUploadFiles.current.map((file, index) => ({ - file, + const filesWithCollectionToUpload = fileOrPathsToUpload.current.map( + (fileOrPath, index) => ({ + fileOrPath, localID: index, collectionID: collection.id, - })); + }), + ); await waitInQueueAndUploadFiles( filesWithCollectionToUpload, [collection], uploaderName, ); } catch (e) { - log.error("Failed to upload files to existing collections", e); + log.error("Failed to upload files to existing collection", e); } }; const uploadFilesToNewCollections = async ( - strategy: CollectionMapping, + mapping: CollectionMapping, collectionName?: string, ) => { try { log.info( - `upload file to an new collections strategy:${strategy} ,collectionName:${collectionName}`, + `Uploading files to collection using ${mapping} mapping (${collectionName ?? ""})`, ); await preCollectionCreationAction(); - let filesWithCollectionToUpload: FileWithCollection2[] = []; + let filesWithCollectionToUpload: FileWithCollection[] = []; const collections: Collection[] = []; - let collectionNameToFilesMap = new Map< + let collectionNameToFileOrPaths = new Map< string, - File[] | ElectronFile[] | string[] + (File | string)[] >(); - if (strategy == "root") { - collectionNameToFilesMap.set( + if (mapping == "root") { + collectionNameToFileOrPaths.set( collectionName, - toUploadFiles.current, + fileOrPathsToUpload.current, ); } else { - collectionNameToFilesMap = groupFilesBasedOnParentFolder( - toUploadFiles.current, + collectionNameToFileOrPaths = groupFilesBasedOnParentFolder( + fileOrPathsToUpload.current, ); } - log.info( - `upload collections - [${[...collectionNameToFilesMap.keys()]}]`, - ); try { - const existingCollection = await getLatestCollections(); + const existingCollections = await getLatestCollections(); let index = 0; for (const [ collectionName, - files, - ] of collectionNameToFilesMap) { + fileOrPaths, + ] of collectionNameToFileOrPaths) { const collection = await getOrCreateAlbum( collectionName, - existingCollection, + existingCollections, ); collections.push(collection); props.setCollections([ - ...existingCollection, + ...existingCollections, ...collections, ]); filesWithCollectionToUpload = [ ...filesWithCollectionToUpload, - ...files.map((file) => ({ + ...fileOrPaths.map((fileOrPath) => ({ localID: index++, collectionID: collection.id, - file, + fileOrPath, })), ]; } @@ -483,18 +494,16 @@ export default function Uploader(props: Props) { log.error("Failed to create album", e); appContext.setDialogMessage({ title: t("ERROR"), - close: { variant: "critical" }, content: t("CREATE_ALBUM_FAILED"), }); throw e; } await waitInQueueAndUploadFiles( - /* TODO(MR): ElectronFile changes */ - filesWithCollectionToUpload as FileWithCollection[], + filesWithCollectionToUpload, collections, ); - toUploadFiles.current = null; + fileOrPathsToUpload.current = null; } catch (e) { log.error("Failed to upload files to new collections", e); } @@ -536,7 +545,6 @@ export default function Uploader(props: Props) { uploaderName?: string, ) => { try { - log.info("uploadFiles called"); preUploadAction(); if ( electron && @@ -554,24 +562,24 @@ export default function Uploader(props: 
Props) { await electron.setPendingUploadFiles( "files", filesWithCollectionToUploadIn.map( - ({ file }) => (file as ElectronFile).path, + // TODO(MR): ElectronFile + ({ fileOrPath }) => + typeof fileOrPath == "string" + ? fileOrPath + : (fileOrPath as any as ElectronFile).path, ), ); } - const shouldCloseUploadProgress = - await uploadManager.queueFilesForUpload( - filesWithCollectionToUploadIn, - collections, - uploaderName, - ); - if (shouldCloseUploadProgress) { - closeUploadProgress(); - } + const wereFilesProcessed = await uploadManager.uploadFiles( + filesWithCollectionToUploadIn, + collections, + uploaderName, + ); + if (!wereFilesProcessed) closeUploadProgress(); if (isElectron()) { if (watcher.isUploadRunning()) { await watcher.allFileUploadsDone( - /* TODO(MR): ElectronFile changes */ - filesWithCollectionToUploadIn as FileWithCollection2[], + filesWithCollectionToUploadIn, collections, ); } else if (watcher.isSyncPaused()) { @@ -580,7 +588,7 @@ export default function Uploader(props: Props) { } } } catch (e) { - log.error("failed to upload files", e); + log.error("Failed to upload files", e); showUserFacingError(e.message); closeUploadProgress(); } finally { @@ -590,19 +598,14 @@ export default function Uploader(props: Props) { const retryFailed = async () => { try { - log.info("user retrying failed upload"); - const filesWithCollections = + log.info("Retrying failed uploads"); + const { files, collections } = uploadManager.getFailedFilesWithCollections(); const uploaderName = uploadManager.getUploaderName(); await preUploadAction(); - await uploadManager.queueFilesForUpload( - /* TODO(MR): ElectronFile changes */ - filesWithCollections.files as FileWithCollection[], - filesWithCollections.collections, - uploaderName, - ); + await uploadManager.uploadFiles(files, collections, uploaderName); } catch (e) { - log.error("retry failed files failed", e); + log.error("Retrying failed uploads failed", e); showUserFacingError(e.message); closeUploadProgress(); } finally { @@ -663,9 +666,6 @@ export default function Uploader(props: Props) { ) => { try { if (accessedThroughSharedURL) { - log.info( - `uploading files to public collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`, - ); const uploaderName = await getPublicCollectionUploaderName( getPublicCollectionUID( publicCollectionGalleryContext.token, @@ -675,33 +675,30 @@ export default function Uploader(props: Props) { showUserNameInputDialog(); return; } + if (isPendingDesktopUpload.current) { isPendingDesktopUpload.current = false; if (pendingDesktopUploadCollectionName.current) { - log.info( - `upload pending files to collection - ${pendingDesktopUploadCollectionName.current}`, - ); uploadFilesToNewCollections( "root", pendingDesktopUploadCollectionName.current, ); pendingDesktopUploadCollectionName.current = null; } else { - log.info( - `pending upload - strategy - "multiple collections" `, - ); uploadFilesToNewCollections("parent"); } return; } + if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) { - log.info("uploading zip files"); uploadFilesToNewCollections("parent"); return; } + if (isFirstUpload && !importSuggestion.rootFolderName) { importSuggestion.rootFolderName = FIRST_ALBUM_NAME; } + if (isDragAndDrop.current) { isDragAndDrop.current = false; if ( @@ -712,14 +709,15 @@ export default function Uploader(props: Props) { return; } } + let showNextModal = () => {}; if (importSuggestion.hasNestedFolders) { - log.info(`nested folders detected`); showNextModal = () => 
setChoiceModalView(true); } else { showNextModal = () => showCollectionCreateModal(importSuggestion.rootFolderName); } + props.setCollectionSelectorAttributes({ callback: uploadFilesToExistingCollection, onCancel: handleCollectionSelectorCancel, @@ -727,7 +725,8 @@ export default function Uploader(props: Props) { intent: CollectionSelectorIntent.upload, }); } catch (e) { - log.error("handleCollectionCreationAndUpload failed", e); + // TODO(MR): Why? + log.warn("Ignoring error in handleCollectionCreationAndUpload", e); } }; @@ -861,7 +860,7 @@ export default function Uploader(props: Props) { open={userNameInputDialogView} onClose={handleUserNameInputDialogClose} onNameSubmit={handlePublicUpload} - toUploadFilesCount={toUploadFiles.current?.length} + toUploadFilesCount={fileOrPathsToUpload.current?.length} uploaderName={uploaderNameRef.current} /> diff --git a/web/apps/photos/src/constants/upload.ts b/web/apps/photos/src/constants/upload.ts index faa8e586fb..2ff01810fc 100644 --- a/web/apps/photos/src/constants/upload.ts +++ b/web/apps/photos/src/constants/upload.ts @@ -1,5 +1,5 @@ import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants"; -import { Location } from "types/upload"; +import { Location } from "types/metadata"; // this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part. export const MULTIPART_PART_SIZE = 20 * 1024 * 1024; diff --git a/web/apps/photos/src/contexts/uploadProgress.tsx b/web/apps/photos/src/contexts/uploadProgress.tsx index fe5f733b86..b25df7d65b 100644 --- a/web/apps/photos/src/contexts/uploadProgress.tsx +++ b/web/apps/photos/src/contexts/uploadProgress.tsx @@ -1,11 +1,11 @@ import { UPLOAD_STAGES } from "constants/upload"; import { createContext } from "react"; -import { +import type { InProgressUpload, SegregatedFinishedUploads, UploadCounter, UploadFileNames, -} from "types/upload/ui"; +} from "services/upload/uploadManager"; interface UploadProgressContextType { open: boolean; diff --git a/web/apps/photos/src/services/detect-type.ts b/web/apps/photos/src/services/detect-type.ts index bbabf00249..6fd2fd70d2 100644 --- a/web/apps/photos/src/services/detect-type.ts +++ b/web/apps/photos/src/services/detect-type.ts @@ -7,7 +7,6 @@ import { import { lowercaseExtension } from "@/next/file"; import { CustomError } from "@ente/shared/error"; import FileType from "file-type"; -import { getUint8ArrayView } from "./readerService"; /** * Read the file's initial contents or use the file's name to detect its type. 
@@ -89,7 +88,7 @@ export const detectFileTypeInfoFromChunk = async ( const readInitialChunkOfFile = async (file: File) => { const chunkSizeForTypeDetection = 4100; const chunk = file.slice(0, chunkSizeForTypeDetection); - return await getUint8ArrayView(chunk); + return new Uint8Array(await chunk.arrayBuffer()); }; const detectFileTypeFromBuffer = async (buffer: Uint8Array) => { diff --git a/web/apps/photos/src/services/exif.ts b/web/apps/photos/src/services/exif.ts index 56c20a1c23..584d79f880 100644 --- a/web/apps/photos/src/services/exif.ts +++ b/web/apps/photos/src/services/exif.ts @@ -4,7 +4,7 @@ import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time" import { NULL_LOCATION } from "constants/upload"; import exifr from "exifr"; import piexif from "piexifjs"; -import { Location, type ParsedExtractedMetadata } from "types/upload"; +import type { Location, ParsedExtractedMetadata } from "types/metadata"; type ParsedEXIFData = Record & Partial<{ @@ -311,7 +311,7 @@ export function parseEXIFLocation( })}`, e, ); - return NULL_LOCATION; + return { ...NULL_LOCATION }; } } @@ -328,7 +328,7 @@ function convertDMSToDD( export function getEXIFLocation(exifData: ParsedEXIFData): Location { if (!exifData || (!exifData.latitude && exifData.latitude !== 0)) { - return NULL_LOCATION; + return { ...NULL_LOCATION }; } return { latitude: exifData.latitude, longitude: exifData.longitude }; } diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts index a89f9cd3d7..6fc2404e2c 100644 --- a/web/apps/photos/src/services/ffmpeg.ts +++ b/web/apps/photos/src/services/ffmpeg.ts @@ -9,8 +9,8 @@ import { outputPathPlaceholder, } from "constants/ffmpeg"; import { NULL_LOCATION } from "constants/upload"; -import { ParsedExtractedMetadata } from "types/upload"; -import { type DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; +import type { ParsedExtractedMetadata } from "types/metadata"; +import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; /** * Generate a thumbnail for the given video using a wasm FFmpeg running in a web @@ -179,7 +179,7 @@ function parseFFmpegExtractedMetadata(encodedMetadata: Uint8Array) { } function parseAppleISOLocation(isoLocation: string) { - let location = NULL_LOCATION; + let location = { ...NULL_LOCATION }; if (isoLocation) { const [latitude, longitude] = isoLocation .match(/(\+|-)\d+\.*\d+/g) diff --git a/web/apps/photos/src/services/locationSearchService.ts b/web/apps/photos/src/services/locationSearchService.ts index 2aa2b6bacd..354c87a712 100644 --- a/web/apps/photos/src/services/locationSearchService.ts +++ b/web/apps/photos/src/services/locationSearchService.ts @@ -1,6 +1,6 @@ import log from "@/next/log"; import { LocationTagData } from "types/entity"; -import { Location } from "types/upload"; +import { Location } from "types/metadata"; export interface City { city: string; diff --git a/web/apps/photos/src/services/readerService.ts b/web/apps/photos/src/services/readerService.ts index e30710d5ad..a1195b35d1 100644 --- a/web/apps/photos/src/services/readerService.ts +++ b/web/apps/photos/src/services/readerService.ts @@ -1,21 +1,5 @@ -import { convertBytesToHumanReadable } from "@/next/file"; -import log from "@/next/log"; import { ElectronFile } from "@/next/types/file"; -export async function getUint8ArrayView( - file: Blob | ElectronFile, -): Promise { - try { - return new Uint8Array(await file.arrayBuffer()); - } catch (e) { - log.error( - `Failed to read file blob of size 
${convertBytesToHumanReadable(file.size)}`, - e, - ); - throw e; - } -} - export function getFileStream(file: File, chunkSize: number) { const fileChunkReader = fileChunkReaderMaker(file, chunkSize); @@ -36,6 +20,16 @@ export function getFileStream(file: File, chunkSize: number) { }; } +async function* fileChunkReaderMaker(file: File, chunkSize: number) { + let offset = 0; + while (offset < file.size) { + const chunk = file.slice(offset, chunkSize + offset); + yield new Uint8Array(await chunk.arrayBuffer()); + offset += chunkSize; + } + return null; +} + export async function getElectronFileStream( file: ElectronFile, chunkSize: number, @@ -46,14 +40,3 @@ export async function getElectronFileStream( chunkCount, }; } - -async function* fileChunkReaderMaker(file: File, chunkSize: number) { - let offset = 0; - while (offset < file.size) { - const blob = file.slice(offset, chunkSize + offset); - const fileChunk = await getUint8ArrayView(blob); - yield fileChunk; - offset += chunkSize; - } - return null; -} diff --git a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts index f7d87c51c9..12228b822f 100644 --- a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts @@ -3,8 +3,8 @@ import { CustomError, handleUploadError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint } from "@ente/shared/network/api"; import { EnteFile } from "types/file"; -import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload"; import { retryHTTPCall } from "utils/upload/uploadRetrier"; +import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; const ENDPOINT = getEndpoint(); diff --git a/web/apps/photos/src/services/upload/takeout.ts b/web/apps/photos/src/services/upload/takeout.ts index de768fa202..5cd16130ef 100644 --- a/web/apps/photos/src/services/upload/takeout.ts +++ b/web/apps/photos/src/services/upload/takeout.ts @@ -3,9 +3,8 @@ import { ensureElectron } from "@/next/electron"; import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; -import type { ElectronFile } from "@/next/types/file"; import { NULL_LOCATION } from "constants/upload"; -import { type Location } from "types/upload"; +import type { Location } from "types/metadata"; export interface ParsedMetadataJSON { creationTime: number; @@ -74,23 +73,15 @@ function getFileOriginalName(fileName: string) { return originalName; } -/** Try to parse the contents of a metadata JSON file in a Google Takeout. */ +/** Try to parse the contents of a metadata JSON file from a Google Takeout. */ export const tryParseTakeoutMetadataJSON = async ( - receivedFile: File | ElectronFile | string, + fileOrPath: File | string, ): Promise => { try { - let text: string; - if (typeof receivedFile == "string") { - text = await ensureElectron().fs.readTextFile(receivedFile); - } else { - if (!(receivedFile instanceof File)) { - receivedFile = new File( - [await receivedFile.blob()], - receivedFile.name, - ); - } - text = await receivedFile.text(); - } + const text = + fileOrPath instanceof File + ? 
await fileOrPath.text() + : await ensureElectron().fs.readTextFile(fileOrPath); return parseMetadataJSONText(text); } catch (e) { @@ -133,7 +124,7 @@ const parseMetadataJSONText = (text: string) => { parsedMetadataJSON.modificationTime = metadataJSON["modificationTime"]["timestamp"] * 1000000; } - let locationData: Location = NULL_LOCATION; + let locationData: Location = { ...NULL_LOCATION }; if ( metadataJSON["geoData"] && (metadataJSON["geoData"]["latitude"] !== 0.0 || diff --git a/web/apps/photos/src/services/upload/uploadHttpClient.ts b/web/apps/photos/src/services/upload/uploadHttpClient.ts index 7ba35dc0dd..5757a841ad 100644 --- a/web/apps/photos/src/services/upload/uploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/uploadHttpClient.ts @@ -4,8 +4,8 @@ import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; import { EnteFile } from "types/file"; -import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload"; import { retryHTTPCall } from "utils/upload/uploadRetrier"; +import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; const ENDPOINT = getEndpoint(); const UPLOAD_ENDPOINT = getUploadEndpoint(); diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts index e0d8a925ee..665cd76c87 100644 --- a/web/apps/photos/src/services/upload/uploadManager.ts +++ b/web/apps/photos/src/services/upload/uploadManager.ts @@ -4,6 +4,7 @@ import { ensureElectron } from "@/next/electron"; import { lowercaseExtension, nameAndExtension } from "@/next/file"; import log from "@/next/log"; import { ElectronFile } from "@/next/types/file"; +import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { ensure } from "@/utils/ensure"; import { getDedicatedCryptoWorker } from "@ente/shared/crypto"; @@ -28,19 +29,6 @@ import watcher from "services/watch"; import { Collection } from "types/collection"; import { EncryptedEnteFile, EnteFile } from "types/file"; import { SetFiles } from "types/gallery"; -import { - FileWithCollection, - PublicUploadProps, - type FileWithCollection2, - type LivePhotoAssets2, -} from "types/upload"; -import { - FinishedUploads, - InProgressUpload, - InProgressUploads, - ProgressUpdater, - SegregatedFinishedUploads, -} from "types/upload/ui"; import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file"; import { getLocalFiles } from "../fileService"; import { @@ -48,16 +36,72 @@ import { tryParseTakeoutMetadataJSON, type ParsedMetadataJSON, } from "./takeout"; -import UploadService, { - assetName, - fopSize, - getFileName, - uploader, -} from "./uploadService"; +import UploadService, { fopFileName, fopSize, uploader } from "./uploadService"; + +export type FileID = number; + +export type PercentageUploaded = number; +/* localID => fileName */ +export type UploadFileNames = Map; + +export interface UploadCounter { + finished: number; + total: number; +} + +export interface InProgressUpload { + localFileID: FileID; + progress: PercentageUploaded; +} + +export interface FinishedUpload { + localFileID: FileID; + result: UPLOAD_RESULT; +} + +export type InProgressUploads = Map; + +export type FinishedUploads = Map; + +export type SegregatedFinishedUploads = Map; + +export interface ProgressUpdater { + setPercentComplete: React.Dispatch>; + setUploadCounter: 
React.Dispatch>; + setUploadStage: React.Dispatch>; + setInProgressUploads: React.Dispatch< + React.SetStateAction + >; + setFinishedUploads: React.Dispatch< + React.SetStateAction + >; + setUploadFilenames: React.Dispatch>; + setHasLivePhotos: React.Dispatch>; + setUploadProgressView: React.Dispatch>; +} /** The number of uploads to process in parallel. */ const maxConcurrentUploads = 4; +export interface FileWithCollection { + localID: number; + collectionID: number; + isLivePhoto?: boolean; + fileOrPath?: File | string; + livePhotoAssets?: LivePhotoAssets; +} + +export interface LivePhotoAssets { + image: File | string; + video: File | string; +} + +export interface PublicUploadProps { + token: string; + passwordToken: string; + accessedThroughSharedURL: boolean; +} + interface UploadCancelStatus { value: boolean; } @@ -112,7 +156,7 @@ class UIService { convertInProgressUploadsToList(this.inProgressUploads), ); this.progressUpdater.setFinishedUploads( - segregatedFinishedUploadsToList(this.finishedUploads), + groupByResult(this.finishedUploads), ); } @@ -143,7 +187,8 @@ class UIService { this.progressUpdater.setUploadStage(stage); } - setFilenames(filenames: Map) { + setFiles(files: { localID: number; fileName: string }[]) { + const filenames = new Map(files.map((f) => [f.localID, f.fileName])); this.filenames = filenames; this.progressUpdater.setUploadFilenames(filenames); } @@ -170,15 +215,7 @@ class UIService { } hasFilesInResultList() { - const finishedUploadsList = segregatedFinishedUploadsToList( - this.finishedUploads, - ); - for (const x of finishedUploadsList.values()) { - if (x.length > 0) { - return true; - } - } - return false; + return this.finishedUploads.size > 0; } private updateProgressBarUI() { @@ -210,9 +247,7 @@ class UIService { setInProgressUploads( convertInProgressUploadsToList(this.inProgressUploads), ); - setFinishedUploads( - segregatedFinishedUploadsToList(this.finishedUploads), - ); + setFinishedUploads(groupByResult(this.finishedUploads)); } trackUploadProgress( @@ -271,25 +306,23 @@ function convertInProgressUploadsToList(inProgressUploads) { ); } -function segregatedFinishedUploadsToList(finishedUploads: FinishedUploads) { - const segregatedFinishedUploads = new Map() as SegregatedFinishedUploads; +const groupByResult = (finishedUploads: FinishedUploads) => { + const groups: SegregatedFinishedUploads = new Map(); for (const [localID, result] of finishedUploads) { - if (!segregatedFinishedUploads.has(result)) { - segregatedFinishedUploads.set(result, []); - } - segregatedFinishedUploads.get(result).push(localID); + if (!groups.has(result)) groups.set(result, []); + groups.get(result).push(localID); } - return segregatedFinishedUploads; -} + return groups; +}; class UploadManager { private cryptoWorkers = new Array< ComlinkWorker >(maxConcurrentUploads); private parsedMetadataJSONMap: Map; - private filesToBeUploaded: FileWithCollection2[]; - private remainingFiles: FileWithCollection2[] = []; - private failedFiles: FileWithCollection2[]; + private filesToBeUploaded: ClusteredFile[]; + private remainingFiles: ClusteredFile[] = []; + private failedFiles: ClusteredFile[]; private existingFiles: EnteFile[]; private setFiles: SetFiles; private collections: Map; @@ -345,32 +378,41 @@ class UploadManager { this.uiService.setUploadProgressView(true); } - public async queueFilesForUpload( + /** + * Upload files + * + * This method waits for all the files to get uploaded (successfully or + * unsucessfully) before returning. 
+ * + * It is an error to call this method when there is already an in-progress + * upload. + * + * @param filesWithCollectionToUploadIn The files to upload, each paired + * with the id of the collection that they should be uploaded into. + * + * @returns `true` if at least one file was processed + */ + public async uploadFiles( filesWithCollectionToUploadIn: FileWithCollection[], collections: Collection[], uploaderName?: string, ) { + if (this.uploadInProgress) + throw new Error("Cannot run multiple uploads at once"); + + log.info(`Uploading ${filesWithCollectionToUploadIn.length} files`); + this.uploadInProgress = true; + this.uploaderName = uploaderName; + try { - if (this.uploadInProgress) - throw new Error("Cannot run multiple uploads at once"); - - log.info(`Uploading ${filesWithCollectionToUploadIn.length} files`); - this.uploadInProgress = true; - this.uploaderName = uploaderName; - await this.updateExistingFilesAndCollections(collections); - const namedFiles: FileWithCollectionIDAndName[] = - filesWithCollectionToUploadIn.map( - makeFileWithCollectionIDAndName, - ); - - this.uiService.setFilenames( - new Map( - namedFiles.map((f) => [f.localID, f.fileName]), - ), + const namedFiles = filesWithCollectionToUploadIn.map( + makeFileWithCollectionIDAndName, ); + this.uiService.setFiles(namedFiles); + const [metadataFiles, mediaFiles] = splitMetadataAndMediaFiles(namedFiles); @@ -378,6 +420,7 @@ class UploadManager { this.uiService.setUploadStage( UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES, ); + await this.parseMetadataJSONFiles(metadataFiles); } @@ -386,23 +429,15 @@ class UploadManager { this.abortIfCancelled(); - this.uiService.setFilenames( - new Map( - clusteredMediaFiles.map((file) => [ - file.localID, - file.fileName, - ]), - ), - ); + // Live photos might've been clustered together, reset the list + // of files to reflect that. 
+ this.uiService.setFiles(clusteredMediaFiles); this.uiService.setHasLivePhoto( mediaFiles.length != clusteredMediaFiles.length, ); - /* TODO(MR): ElectronFile changes */ - await this.uploadMediaFiles( - clusteredMediaFiles as FileWithCollection2[], - ); + await this.uploadMediaFiles(clusteredMediaFiles); } } catch (e) { if (e.message === CustomError.UPLOAD_CANCELLED) { @@ -411,7 +446,7 @@ class UploadManager { await cancelRemainingUploads(); } } else { - log.error("uploading failed with error", e); + log.error("Uploading failed", e); throw e; } } finally { @@ -421,16 +456,8 @@ class UploadManager { } this.uploadInProgress = false; } - try { - if (!this.uiService.hasFilesInResultList()) { - return true; - } else { - return false; - } - } catch (e) { - log.error(" failed to return shouldCloseProgressBar", e); - return false; - } + + return this.uiService.hasFilesInResultList(); } private abortIfCancelled = () => { @@ -455,11 +482,11 @@ class UploadManager { private async parseMetadataJSONFiles(files: FileWithCollectionIDAndName[]) { this.uiService.reset(files.length); - for (const { file, fileName, collectionID } of files) { + for (const { fileOrPath, fileName, collectionID } of files) { this.abortIfCancelled(); log.info(`Parsing metadata JSON ${fileName}`); - const metadataJSON = await tryParseTakeoutMetadataJSON(file); + const metadataJSON = await tryParseTakeoutMetadataJSON(fileOrPath); if (metadataJSON) { this.parsedMetadataJSONMap.set( getMetadataJSONMapKeyForJSON(collectionID, fileName), @@ -470,7 +497,7 @@ class UploadManager { } } - private async uploadMediaFiles(mediaFiles: FileWithCollection2[]) { + private async uploadMediaFiles(mediaFiles: ClusteredFile[]) { this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles]; if (isElectron()) { @@ -502,16 +529,16 @@ class UploadManager { while (this.filesToBeUploaded.length > 0) { this.abortIfCancelled(); - let fileWithCollection = this.filesToBeUploaded.pop(); - const { collectionID } = fileWithCollection; + const clusteredFile = this.filesToBeUploaded.pop(); + const { localID, collectionID } = clusteredFile; const collection = this.collections.get(collectionID); - fileWithCollection = { ...fileWithCollection, collection }; + const uploadableFile = { ...clusteredFile, collection }; - uiService.setFileProgress(fileWithCollection.localID, 0); + uiService.setFileProgress(localID, 0); await wait(0); - const { fileUploadResult, uploadedFile } = await uploader( - fileWithCollection, + const { uploadResult, uploadedFile } = await uploader( + uploadableFile, this.uploaderName, this.existingFiles, this.parsedMetadataJSONMap, @@ -533,46 +560,45 @@ class UploadManager { ); const finalUploadResult = await this.postUploadTask( - fileUploadResult, + uploadableFile, + uploadResult, uploadedFile, - fileWithCollection, ); - this.uiService.moveFileToResultList( - fileWithCollection.localID, - finalUploadResult, - ); + this.uiService.moveFileToResultList(localID, finalUploadResult); this.uiService.increaseFileUploaded(); UploadService.reducePendingUploadCount(); } } private async postUploadTask( - fileUploadResult: UPLOAD_RESULT, - uploadedFile: EncryptedEnteFile | EnteFile | null, - fileWithCollection: FileWithCollection2, + uploadableFile: UploadableFile, + uploadResult: UPLOAD_RESULT, + uploadedFile: EncryptedEnteFile | EnteFile | undefined, ) { + log.info( + `Uploaded ${uploadableFile.fileName} with result ${uploadResult}`, + ); try { let decryptedFile: EnteFile; - log.info(`Upload completed with result: ${fileUploadResult}`); - await 
this.removeFromPendingUploads(fileWithCollection); - switch (fileUploadResult) { + await this.removeFromPendingUploads(uploadableFile); + switch (uploadResult) { case UPLOAD_RESULT.FAILED: case UPLOAD_RESULT.BLOCKED: - this.failedFiles.push(fileWithCollection); + this.failedFiles.push(uploadableFile); break; case UPLOAD_RESULT.ALREADY_UPLOADED: decryptedFile = uploadedFile as EnteFile; break; case UPLOAD_RESULT.ADDED_SYMLINK: decryptedFile = uploadedFile as EnteFile; - fileUploadResult = UPLOAD_RESULT.UPLOADED; + uploadResult = UPLOAD_RESULT.UPLOADED; break; case UPLOAD_RESULT.UPLOADED: case UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL: decryptedFile = await decryptFile( uploadedFile as EncryptedEnteFile, - fileWithCollection.collection.key, + uploadableFile.collection.key, ); break; case UPLOAD_RESULT.UNSUPPORTED: @@ -580,23 +606,21 @@ class UploadManager { // no-op break; default: - throw new Error( - `Invalid Upload Result ${fileUploadResult}`, - ); + throw new Error(`Invalid Upload Result ${uploadResult}`); } if ( [ UPLOAD_RESULT.ADDED_SYMLINK, UPLOAD_RESULT.UPLOADED, UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL, - ].includes(fileUploadResult) + ].includes(uploadResult) ) { try { eventBus.emit(Events.FILE_UPLOADED, { enteFile: decryptedFile, localFile: - fileWithCollection.file ?? - fileWithCollection.livePhotoAssets.image, + uploadableFile.fileOrPath ?? + uploadableFile.livePhotoAssets.image, }); } catch (e) { log.warn("Ignoring error in fileUploaded handlers", e); @@ -604,11 +628,11 @@ class UploadManager { this.updateExistingFiles(decryptedFile); } await this.watchFolderCallback( - fileUploadResult, - fileWithCollection, + uploadResult, + uploadableFile, uploadedFile as EncryptedEnteFile, ); - return fileUploadResult; + return uploadResult; } catch (e) { log.error("failed to do post file upload action", e); return UPLOAD_RESULT.FAILED; @@ -617,7 +641,7 @@ class UploadManager { private async watchFolderCallback( fileUploadResult: UPLOAD_RESULT, - fileWithCollection: FileWithCollection2, + fileWithCollection: ClusteredFile, uploadedFile: EncryptedEnteFile, ) { if (isElectron()) { @@ -660,12 +684,13 @@ class UploadManager { this.setFiles((files) => sortFiles([...files, decryptedFile])); } - private async removeFromPendingUploads(file: FileWithCollection2) { - if (isElectron()) { + private async removeFromPendingUploads({ localID }: ClusteredFile) { + const electron = globalThis.electron; + if (electron) { this.remainingFiles = this.remainingFiles.filter( - (f) => f.localID != file.localID, + (f) => f.localID != localID, ); - await updatePendingUploads(this.remainingFiles); + await updatePendingUploads(electron, this.remainingFiles); } } @@ -686,9 +711,10 @@ export default new UploadManager(); * * - The input is {@link FileWithCollection}. This can either be a new * {@link FileWithCollection}, in which case it'll only have a - * {@link localID}, {@link collectionID} and a {@link file}. Or it could be a - * retry, in which case it'll not have a {@link file} but instead will have - * data from a previous stage, like a snake eating its tail. + * {@link localID}, {@link collectionID} and a {@link fileOrPath}. Or it could + * be a retry, in which case it'll not have a {@link fileOrPath} but instead + * will have data from a previous stage (concretely, it'll just be a + * relabelled {@link ClusteredFile}), like a snake eating its tail. * * - Immediately we convert it to {@link FileWithCollectionIDAndName}. This is * to mostly systematize what we have, and also attach a {@link fileName}. 
@@ -715,17 +741,15 @@ type FileWithCollectionIDAndName = { isLivePhoto?: boolean; /* Valid for non-live photos */ fileOrPath?: File | string; - /** Alias */ - file?: File | string; /* Valid for live photos */ - livePhotoAssets?: LivePhotoAssets2; + livePhotoAssets?: LivePhotoAssets; }; const makeFileWithCollectionIDAndName = ( f: FileWithCollection, ): FileWithCollectionIDAndName => { + const fileOrPath = f.fileOrPath; /* TODO(MR): ElectronFile */ - const fileOrPath = (f.fileOrPath ?? f.file) as File | string; if (!(fileOrPath instanceof File || typeof fileOrPath == "string")) throw new Error(`Unexpected file ${f}`); @@ -734,25 +758,37 @@ const makeFileWithCollectionIDAndName = ( collectionID: ensure(f.collectionID), fileName: ensure( f.isLivePhoto - ? getFileName(f.livePhotoAssets.image) - : getFileName(fileOrPath), + ? fopFileName(f.livePhotoAssets.image) + : fopFileName(fileOrPath), ), isLivePhoto: f.isLivePhoto, - /* TODO(MR): ElectronFile */ - file: fileOrPath, fileOrPath: fileOrPath, - /* TODO(MR): ElectronFile */ - livePhotoAssets: f.livePhotoAssets as LivePhotoAssets2, + livePhotoAssets: f.livePhotoAssets, }; }; +/** + * A file with both parts of a live photo clubbed together. + * + * See: [Note: Intermediate file types during upload]. + */ type ClusteredFile = { localID: number; collectionID: number; fileName: string; isLivePhoto: boolean; - file?: File | string; - livePhotoAssets?: LivePhotoAssets2; + fileOrPath?: File | string; + livePhotoAssets?: LivePhotoAssets; +}; + +/** + * The file that we hand off to the uploader. Essentially {@link ClusteredFile} + * with the {@link collection} attached to it. + * + * See: [Note: Intermediate file types during upload]. + */ +export type UploadableFile = ClusteredFile & { + collection: Collection; }; const splitMetadataAndMediaFiles = ( @@ -762,10 +798,9 @@ const splitMetadataAndMediaFiles = ( media: FileWithCollectionIDAndName[], ] => files.reduce( - ([metadata, media], file) => { - if (lowercaseExtension(file.fileName) == "json") - metadata.push(file); - else media.push(file); + ([metadata, media], f) => { + if (lowercaseExtension(f.fileName) == "json") metadata.push(f); + else media.push(f); return [metadata, media]; }, [[], []], @@ -788,16 +823,19 @@ export const setToUploadCollection = async (collections: Collection[]) => { await ensureElectron().setPendingUploadCollection(collectionName); }; -const updatePendingUploads = async (files: FileWithCollection2[]) => { +const updatePendingUploads = async ( + electron: Electron, + files: ClusteredFile[], +) => { const paths = files .map((file) => file.isLivePhoto ? [file.livePhotoAssets.image, file.livePhotoAssets.video] - : [file.file], + : [file.fileOrPath], ) .flat() .map((f) => getFilePathElectron(f)); - await ensureElectron().setPendingUploadFiles("files", paths); + await electron.setPendingUploadFiles("files", paths); }; /** @@ -837,27 +875,26 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { fileName: f.fileName, fileType: fFileType, collectionID: f.collectionID, - fileOrPath: f.file, + fileOrPath: f.fileOrPath, }; const ga: PotentialLivePhotoAsset = { fileName: g.fileName, fileType: gFileType, collectionID: g.collectionID, - fileOrPath: g.file, + fileOrPath: g.fileOrPath, }; if (await areLivePhotoAssets(fa, ga)) { - const livePhoto = { + const [image, video] = + fFileType == FILE_TYPE.IMAGE ? 
[f, g] : [g, f]; + result.push({ localID: f.localID, collectionID: f.collectionID, + fileName: image.fileName, isLivePhoto: true, livePhotoAssets: { - image: fFileType == FILE_TYPE.IMAGE ? f.file : g.file, - video: fFileType == FILE_TYPE.IMAGE ? g.file : f.file, + image: image.fileOrPath, + video: video.fileOrPath, }, - }; - result.push({ - ...livePhoto, - fileName: assetName(livePhoto), }); index += 2; } else { diff --git a/web/apps/photos/src/services/upload/uploadService.ts b/web/apps/photos/src/services/upload/uploadService.ts index c8e56c77eb..1848a2b1e5 100644 --- a/web/apps/photos/src/services/upload/uploadService.ts +++ b/web/apps/photos/src/services/upload/uploadService.ts @@ -4,11 +4,14 @@ import type { Metadata } from "@/media/types/file"; import { ensureElectron } from "@/next/electron"; import { basename } from "@/next/file"; import log from "@/next/log"; -import { ElectronFile } from "@/next/types/file"; import { CustomErrorMessage } from "@/next/types/ipc"; import { ensure } from "@/utils/ensure"; import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker"; -import { EncryptionResult } from "@ente/shared/crypto/types"; +import { + B64EncryptionResult, + EncryptionResult, + LocalFileAttributes, +} from "@ente/shared/crypto/types"; import { CustomError, handleUploadError } from "@ente/shared/error"; import { isDataStream, type DataStream } from "@ente/shared/utils/data-stream"; import { Remote } from "comlink"; @@ -23,27 +26,20 @@ import { import { addToCollection } from "services/collectionService"; import { parseImageMetadata } from "services/exif"; import * as ffmpeg from "services/ffmpeg"; +import { + PublicUploadProps, + type LivePhotoAssets, +} from "services/upload/uploadManager"; import { EnteFile, + MetadataFileAttributes, + S3FileAttributes, + type EncryptedEnteFile, type FilePublicMagicMetadata, type FilePublicMagicMetadataProps, } from "types/file"; import { EncryptedMagicMetadata } from "types/magicMetadata"; -import { - BackupedFile, - EncryptedFile, - FileInMemory, - FileWithMetadata, - ParsedExtractedMetadata, - ProcessedFile, - PublicUploadProps, - UploadAsset, - UploadFile, - UploadURL, - type FileWithCollection2, - type LivePhotoAssets2, - type UploadAsset2, -} from "types/upload"; +import type { ParsedExtractedMetadata } from "types/metadata"; import { getNonEmptyMagicMetadataProps, updateMagicMetadata, @@ -63,6 +59,7 @@ import { generateThumbnailWeb, } from "./thumbnail"; import UploadHttpClient from "./uploadHttpClient"; +import type { UploadableFile } from "./uploadManager"; /** Upload files to cloud storage */ class UploadService { @@ -146,6 +143,88 @@ const uploadService = new UploadService(); export default uploadService; +/** + * Return the file name for the given {@link fileOrPath}. + * + * @param fileOrPath The {@link File}, or the path to it. Note that it is only + * valid to specify a path if we are running in the context of our desktop app. + */ +export const fopFileName = (fileOrPath: File | string) => + typeof fileOrPath == "string" ? basename(fileOrPath) : fileOrPath.name; + +/** + * Return the size of the given {@link fileOrPath}. + * + * @param fileOrPath The {@link File}, or the path to it. Note that it is only + * valid to specify a path if we are running in the context of our desktop app. + */ +export const fopSize = async (fileOrPath: File | string): Promise => + fileOrPath instanceof File + ? 
fileOrPath.size + : await ensureElectron().fs.size(fileOrPath); + +/* -- Various intermediate type used during upload -- */ + +interface UploadAsset2 { + isLivePhoto?: boolean; + fileOrPath?: File | string; + livePhotoAssets?: LivePhotoAssets; +} + +interface FileInMemory { + filedata: Uint8Array | DataStream; + /** The JPEG data of the generated thumbnail */ + thumbnail: Uint8Array; + /** + * `true` if this is a fallback (all black) thumbnail we're returning since + * thumbnail generation failed for some reason. + */ + hasStaticThumbnail: boolean; +} + +interface FileWithMetadata extends Omit { + metadata: Metadata; + localID: number; + pubMagicMetadata: FilePublicMagicMetadata; +} + +interface EncryptedFile { + file: ProcessedFile; + fileKey: B64EncryptionResult; +} + +interface ProcessedFile { + file: LocalFileAttributes; + thumbnail: LocalFileAttributes; + metadata: LocalFileAttributes; + pubMagicMetadata: EncryptedMagicMetadata; + localID: number; +} + +export interface BackupedFile { + file: S3FileAttributes; + thumbnail: S3FileAttributes; + metadata: MetadataFileAttributes; + pubMagicMetadata: EncryptedMagicMetadata; +} + +export interface UploadFile extends BackupedFile { + collectionID: number; + encryptedKey: string; + keyDecryptionNonce: string; +} + +export interface MultipartUploadURLs { + objectKey: string; + partURLs: string[]; + completeURL: string; +} + +export interface UploadURL { + url: string; + objectKey: string; +} + /** * A function that can be called to obtain a "progressTracker" that then is * directly fed to axios to both cancel the upload if needed, and update the @@ -160,12 +239,19 @@ type MakeProgressTracker = ( ) => unknown; interface UploadResponse { - fileUploadResult: UPLOAD_RESULT; - uploadedFile?: EnteFile; + uploadResult: UPLOAD_RESULT; + uploadedFile?: EncryptedEnteFile | EnteFile; } +/** + * Upload the given {@link UploadableFile} + * + * This is lower layer implementation of the upload. It is invoked by + * {@link UploadManager} after it has assembled all the relevant bits we need to + * go forth and upload. 
+ */ export const uploader = async ( - fileWithCollection: FileWithCollection2, + { collection, localID, fileName, ...uploadAsset }: UploadableFile, uploaderName: string, existingFiles: EnteFile[], parsedMetadataJSONMap: Map, @@ -174,10 +260,7 @@ export const uploader = async ( abortIfCancelled: () => void, makeProgessTracker: MakeProgressTracker, ): Promise => { - const { collection, localID, ...uploadAsset } = fileWithCollection; - const name = assetName(uploadAsset); - log.info(`Uploading ${name}`); - + log.info(`Uploading ${fileName}`); try { /* * We read the file four times: @@ -200,7 +283,7 @@ export const uploader = async ( const maxFileSize = 4 * 1024 * 1024 * 1024; /* 4 GB */ if (fileSize >= maxFileSize) - return { fileUploadResult: UPLOAD_RESULT.TOO_LARGE }; + return { uploadResult: UPLOAD_RESULT.TOO_LARGE }; abortIfCancelled(); @@ -225,7 +308,7 @@ export const uploader = async ( ); if (matchInSameCollection) { return { - fileUploadResult: UPLOAD_RESULT.ALREADY_UPLOADED, + uploadResult: UPLOAD_RESULT.ALREADY_UPLOADED, uploadedFile: matchInSameCollection, }; } else { @@ -234,7 +317,7 @@ export const uploader = async ( symlink.collectionID = collection.id; await addToCollection(collection, [symlink]); return { - fileUploadResult: UPLOAD_RESULT.ADDED_SYMLINK, + uploadResult: UPLOAD_RESULT.ADDED_SYMLINK, uploadedFile: symlink, }; } @@ -287,65 +370,36 @@ export const uploader = async ( }); return { - fileUploadResult: metadata.hasStaticThumbnail + uploadResult: metadata.hasStaticThumbnail ? UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL : UPLOAD_RESULT.UPLOADED, uploadedFile: uploadedFile, }; } catch (e) { if (e.message == CustomError.UPLOAD_CANCELLED) { - log.info(`Upload for ${name} cancelled`); + log.info(`Upload for ${fileName} cancelled`); } else if (e.message == CustomError.UNSUPPORTED_FILE_FORMAT) { - log.info(`Not uploading ${name}: unsupported file format`); + log.info(`Not uploading ${fileName}: unsupported file format`); } else { - log.error(`Upload failed for ${name}`, e); + log.error(`Upload failed for ${fileName}`, e); } const error = handleUploadError(e); switch (error.message) { case CustomError.ETAG_MISSING: - return { fileUploadResult: UPLOAD_RESULT.BLOCKED }; + return { uploadResult: UPLOAD_RESULT.BLOCKED }; case CustomError.UNSUPPORTED_FILE_FORMAT: - return { fileUploadResult: UPLOAD_RESULT.UNSUPPORTED }; + return { uploadResult: UPLOAD_RESULT.UNSUPPORTED }; case CustomError.FILE_TOO_LARGE: return { - fileUploadResult: - UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE, + uploadResult: UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE, }; default: - return { fileUploadResult: UPLOAD_RESULT.FAILED }; + return { uploadResult: UPLOAD_RESULT.FAILED }; } } }; -/** - * Return the size of the given file - * - * @param fileOrPath The {@link File}, or the path to it. Note that it is only - * valid to specify a path if we are running in the context of our desktop app. - */ -export const fopSize = async (fileOrPath: File | string): Promise => - fileOrPath instanceof File - ? fileOrPath.size - : await ensureElectron().fs.size(fileOrPath); - -export const getFileName = (file: File | ElectronFile | string) => - typeof file == "string" ? basename(file) : file.name; - -export const getAssetName = ({ - isLivePhoto, - file, - livePhotoAssets, -}: UploadAsset) => - isLivePhoto ? getFileName(livePhotoAssets.image) : getFileName(file); - -export const assetName = ({ - isLivePhoto, - file, - livePhotoAssets, -}: UploadAsset2) => - isLivePhoto ? 
getFileName(livePhotoAssets.image) : getFileName(file);
-
 /**
  * Read the given file or path into an in-memory representation.
  *
@@ -462,13 +516,13 @@ interface ReadAssetDetailsResult {
 const readAssetDetails = async ({
     isLivePhoto,
     livePhotoAssets,
-    file,
+    fileOrPath,
 }: UploadAsset2): Promise<ReadAssetDetailsResult> =>
     isLivePhoto
         ? readLivePhotoDetails(livePhotoAssets)
-        : readImageOrVideoDetails(file);
+        : readImageOrVideoDetails(fileOrPath);
 
-const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets2) => {
+const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => {
     const img = await readImageOrVideoDetails(image);
     const vid = await readImageOrVideoDetails(video);
 
@@ -507,7 +561,7 @@ const readImageOrVideoDetails = async (fileOrPath: File | string) => {
             await reader.cancel();
             return chunk;
         }
-    }, getFileName(fileOrPath));
+    }, fopFileName(fileOrPath));
 
     return { fileTypeInfo, fileSize, lastModifiedMs };
 };
@@ -533,7 +587,7 @@ interface ExtractAssetMetadataResult {
  * {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum.
  */
 const extractAssetMetadata = async (
-    { isLivePhoto, file, livePhotoAssets }: UploadAsset2,
+    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2,
     fileTypeInfo: FileTypeInfo,
     lastModifiedMs: number,
     collectionID: number,
@@ -550,7 +604,7 @@ const extractAssetMetadata = async (
               worker,
           )
         : await extractImageOrVideoMetadata(
-              file,
+              fileOrPath,
               fileTypeInfo,
               lastModifiedMs,
               collectionID,
@@ -559,7 +613,7 @@ const extractAssetMetadata = async (
           );
 
 const extractLivePhotoMetadata = async (
-    livePhotoAssets: LivePhotoAssets2,
+    livePhotoAssets: LivePhotoAssets,
     fileTypeInfo: FileTypeInfo,
     lastModifiedMs: number,
     collectionID: number,
@@ -585,7 +639,7 @@ const extractLivePhotoMetadata = async (
     return {
         metadata: {
             ...imageMetadata,
-            title: getFileName(livePhotoAssets.image),
+            title: fopFileName(livePhotoAssets.image),
             fileType: FILE_TYPE.LIVE_PHOTO,
             imageHash: imageMetadata.hash,
             videoHash: videoHash,
@@ -603,7 +657,7 @@ const extractImageOrVideoMetadata = async (
     parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
     worker: Remote<DedicatedCryptoWorker>,
 ) => {
-    const fileName = getFileName(fileOrPath);
+    const fileName = fopFileName(fileOrPath);
     const { fileType } = fileTypeInfo;
 
     let extractedMetadata: ParsedExtractedMetadata;
@@ -659,7 +713,7 @@ const extractImageOrVideoMetadata = async (
 };
 
 const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
-    location: NULL_LOCATION,
+    location: { ...NULL_LOCATION },
     creationTime: null,
     width: null,
     height: null,
@@ -774,14 +828,14 @@ const areFilesSameNoHash = (f: Metadata, g: Metadata) => {
 
 const readAsset = async (
     fileTypeInfo: FileTypeInfo,
-    { isLivePhoto, file, livePhotoAssets }: UploadAsset2,
+    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2,
 ) =>
     isLivePhoto
         ? await readLivePhoto(livePhotoAssets, fileTypeInfo)
-        : await readImageOrVideo(file, fileTypeInfo);
+        : await readImageOrVideo(fileOrPath, fileTypeInfo);
 
 const readLivePhoto = async (
-    livePhotoAssets: LivePhotoAssets2,
+    livePhotoAssets: LivePhotoAssets,
     fileTypeInfo: FileTypeInfo,
 ) => {
     const readImage = await readFileOrPath(livePhotoAssets.image);
@@ -813,9 +867,9 @@ const readLivePhoto = async (
 
     return {
         filedata: await encodeLivePhoto({
-            imageFileName: getFileName(livePhotoAssets.image),
+            imageFileName: fopFileName(livePhotoAssets.image),
             imageData: await toData(imageDataOrStream),
-            videoFileName: getFileName(livePhotoAssets.video),
+            videoFileName: fopFileName(livePhotoAssets.video),
             videoData: await toData(readVideo.dataOrStream),
         }),
         thumbnail,
diff --git a/web/apps/photos/src/services/watch.ts b/web/apps/photos/src/services/watch.ts
index f036676b6a..1f60836c50 100644
--- a/web/apps/photos/src/services/watch.ts
+++ b/web/apps/photos/src/services/watch.ts
@@ -14,10 +14,11 @@ import type {
 import { ensureString } from "@/utils/ensure";
 import { UPLOAD_RESULT } from "constants/upload";
 import debounce from "debounce";
-import uploadManager from "services/upload/uploadManager";
+import uploadManager, {
+    type FileWithCollection,
+} from "services/upload/uploadManager";
 import { Collection } from "types/collection";
 import { EncryptedEnteFile } from "types/file";
-import { type FileWithCollection2 } from "types/upload";
 import { groupFilesBasedOnCollectionID } from "utils/file";
 import { isHiddenFile } from "utils/upload";
 import { removeFromCollection } from "./collectionService";
@@ -322,7 +323,7 @@ class FolderWatcher {
      */
    async onFileUpload(
        fileUploadResult: UPLOAD_RESULT,
-        fileWithCollection: FileWithCollection2,
+        fileWithCollection: FileWithCollection,
        file: EncryptedEnteFile,
    ) {
        // The files we get here will have fileWithCollection.file as a string,
@@ -346,7 +347,7 @@ class FolderWatcher {
            );
        } else {
            this.uploadedFileForPath.set(
-                ensureString(fileWithCollection.file),
+                ensureString(fileWithCollection.fileOrPath),
                file,
            );
        }
@@ -364,7 +365,7 @@ class FolderWatcher {
            );
        } else {
            this.unUploadableFilePaths.add(
-                ensureString(fileWithCollection.file),
+                ensureString(fileWithCollection.fileOrPath),
            );
        }
    }
@@ -375,7 +376,7 @@
     *
     * {@link upload} get uploaded.
     */
    async allFileUploadsDone(
-        filesWithCollection: FileWithCollection2[],
+        filesWithCollection: FileWithCollection[],
        collections: Collection[],
    ) {
        const electron = ensureElectron();
@@ -411,7 +412,7 @@
        this.debouncedRunNextEvent();
    }

-    private deduceSyncedAndIgnored(filesWithCollection: FileWithCollection2[]) {
+    private deduceSyncedAndIgnored(filesWithCollection: FileWithCollection[]) {
        const syncedFiles: FolderWatch["syncedFiles"] = [];
        const ignoredFiles: FolderWatch["ignoredFiles"] = [];

@@ -453,7 +454,7 @@ class FolderWatcher {
                    markIgnored(videoPath);
                }
            } else {
-                const path = ensureString(fileWithCollection.file);
+                const path = ensureString(fileWithCollection.fileOrPath);
                const file = this.uploadedFileForPath.get(path);
                if (file) {
                    markSynced(file, path);
diff --git a/web/apps/photos/src/types/entity.ts b/web/apps/photos/src/types/entity.ts
index 9580bf3332..60844ce466 100644
--- a/web/apps/photos/src/types/entity.ts
+++ b/web/apps/photos/src/types/entity.ts
@@ -1,4 +1,4 @@
-import { Location } from "types/upload";
+import { Location } from "types/metadata";

 export enum EntityType {
     LOCATION_TAG = "location",
diff --git a/web/apps/photos/src/types/metadata.ts b/web/apps/photos/src/types/metadata.ts
new file mode 100644
index 0000000000..7994e62479
--- /dev/null
+++ b/web/apps/photos/src/types/metadata.ts
@@ -0,0 +1,11 @@
+export interface Location {
+    latitude: number;
+    longitude: number;
+}
+
+export interface ParsedExtractedMetadata {
+    location: Location;
+    creationTime: number;
+    width: number;
+    height: number;
+}
diff --git a/web/apps/photos/src/types/upload/index.ts b/web/apps/photos/src/types/upload/index.ts
deleted file mode 100644
index 6f81e60a47..0000000000
--- a/web/apps/photos/src/types/upload/index.ts
+++ /dev/null
@@ -1,122 +0,0 @@
-import type { Metadata } from "@/media/types/file";
-import type { ElectronFile } from "@/next/types/file";
-import {
-    B64EncryptionResult,
-    LocalFileAttributes,
-} from "@ente/shared/crypto/types";
-import type { DataStream } from "@ente/shared/utils/data-stream";
-import { Collection } from "types/collection";
-import {
-    FilePublicMagicMetadata,
-    MetadataFileAttributes,
-    S3FileAttributes,
-} from "types/file";
-import { EncryptedMagicMetadata } from "types/magicMetadata";
-
-export interface Location {
-    latitude: number;
-    longitude: number;
-}
-
-export interface MultipartUploadURLs {
-    objectKey: string;
-    partURLs: string[];
-    completeURL: string;
-}
-
-export interface UploadAsset {
-    isLivePhoto?: boolean;
-    file?: File | ElectronFile;
-    fileOrPath?: File | ElectronFile;
-    livePhotoAssets?: LivePhotoAssets;
-}
-
-export interface LivePhotoAssets {
-    image: globalThis.File | ElectronFile;
-    video: globalThis.File | ElectronFile;
-}
-
-export interface FileWithCollection extends UploadAsset {
-    localID: number;
-    collection?: Collection;
-    collectionID?: number;
-}
-
-export interface UploadAsset2 {
-    isLivePhoto?: boolean;
-    file?: File | string;
-    fileOrPath?: File | string;
-    livePhotoAssets?: LivePhotoAssets2;
-}
-
-export interface LivePhotoAssets2 {
-    image: File | string;
-    video: File | string;
-}
-
-export interface FileWithCollection2 extends UploadAsset2 {
-    localID: number;
-    collection?: Collection;
-    collectionID: number;
-}
-
-export interface UploadURL {
-    url: string;
-    objectKey: string;
-}
-
-export interface FileInMemory {
-    filedata: Uint8Array | DataStream;
-    /** The JPEG data of the generated thumbnail */
-    thumbnail: Uint8Array;
-    /**
-     * `true` if this is a fallback (all black) thumbnail we're returning since
-     * thumbnail generation failed for some reason.
-     */
-    hasStaticThumbnail: boolean;
-}
-
-export interface FileWithMetadata
-    extends Omit<FileInMemory, "hasStaticThumbnail"> {
-    metadata: Metadata;
-    localID: number;
-    pubMagicMetadata: FilePublicMagicMetadata;
-}
-
-export interface EncryptedFile {
-    file: ProcessedFile;
-    fileKey: B64EncryptionResult;
-}
-
-export interface ProcessedFile {
-    file: LocalFileAttributes<Uint8Array | DataStream>;
-    thumbnail: LocalFileAttributes<Uint8Array>;
-    metadata: LocalFileAttributes<string>;
-    pubMagicMetadata: EncryptedMagicMetadata;
-    localID: number;
-}
-export interface BackupedFile {
-    file: S3FileAttributes;
-    thumbnail: S3FileAttributes;
-    metadata: MetadataFileAttributes;
-    pubMagicMetadata: EncryptedMagicMetadata;
-}
-
-export interface UploadFile extends BackupedFile {
-    collectionID: number;
-    encryptedKey: string;
-    keyDecryptionNonce: string;
-}
-
-export interface ParsedExtractedMetadata {
-    location: Location;
-    creationTime: number;
-    width: number;
-    height: number;
-}
-
-export interface PublicUploadProps {
-    token: string;
-    passwordToken: string;
-    accessedThroughSharedURL: boolean;
-}
diff --git a/web/apps/photos/src/types/upload/ui.ts b/web/apps/photos/src/types/upload/ui.ts
deleted file mode 100644
index bce381213f..0000000000
--- a/web/apps/photos/src/types/upload/ui.ts
+++ /dev/null
@@ -1,43 +0,0 @@
-import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
-
-export type FileID = number;
-export type FileName = string;
-
-export type PercentageUploaded = number;
-export type UploadFileNames = Map<FileID, FileName>;
-
-export interface UploadCounter {
-    finished: number;
-    total: number;
-}
-
-export interface InProgressUpload {
-    localFileID: FileID;
-    progress: PercentageUploaded;
-}
-
-export interface FinishedUpload {
-    localFileID: FileID;
-    result: UPLOAD_RESULT;
-}
-
-export type InProgressUploads = Map<FileID, PercentageUploaded>;
-
-export type FinishedUploads = Map<FileID, UPLOAD_RESULT>;
-
-export type SegregatedFinishedUploads = Map<UPLOAD_RESULT, FileID[]>;
-
-export interface ProgressUpdater {
-    setPercentComplete: React.Dispatch<React.SetStateAction<number>>;
-    setUploadCounter: React.Dispatch<React.SetStateAction<UploadCounter>>;
-    setUploadStage: React.Dispatch<React.SetStateAction<UPLOAD_STAGES>>;
-    setInProgressUploads: React.Dispatch<
-        React.SetStateAction<InProgressUpload[]>
-    >;
-    setFinishedUploads: React.Dispatch<
-        React.SetStateAction<SegregatedFinishedUploads>
-    >;
-    setUploadFilenames: React.Dispatch<React.SetStateAction<UploadFileNames>>;
-    setHasLivePhotos: React.Dispatch<React.SetStateAction<boolean>>;
-    setUploadProgressView: React.Dispatch<React.SetStateAction<boolean>>;
-}
diff --git a/web/apps/photos/src/utils/upload/index.ts b/web/apps/photos/src/utils/upload/index.ts
index 8923e68941..7f81408d69 100644
--- a/web/apps/photos/src/utils/upload/index.ts
+++ b/web/apps/photos/src/utils/upload/index.ts
@@ -1,9 +1,9 @@
 import type { Metadata } from "@/media/types/file";
 import { basename, dirname } from "@/next/file";
-import { ElectronFile } from "@/next/types/file";
 import { PICKED_UPLOAD_TYPE } from "constants/upload";
 import isElectron from "is-electron";
 import { exportMetadataDirectoryName } from "services/export";
+import { fopFileName } from "services/upload/uploadService";

 export const hasFileHash = (file: Metadata) =>
     file.hash || (file.imageHash && file.videoHash);
@@ -81,16 +81,16 @@ export function getImportSuggestion(
 // [a => [j],
 // b => [e,f,g],
 // c => [h, i]]
-export function groupFilesBasedOnParentFolder(
-    toUploadFiles: File[] | ElectronFile[] | string[],
-) {
-    const collectionNameToFilesMap = new Map<
-        string,
-        File[] | ElectronFile[] | string[]
-    >();
-    for (const file of toUploadFiles) {
+export const groupFilesBasedOnParentFolder = (
+    fileOrPaths: (File | string)[],
+) => {
+    const result = new Map<string, (File | string)[]>();
+    for (const fileOrPath of fileOrPaths) {
        const filePath =
-            typeof file == "string" ? file : (file["path"] as string);
+            /* TODO(MR): ElectronFile */
+            typeof fileOrPath == "string"
+                ? fileOrPath
+                : (fileOrPath["path"] as string);

        let folderPath = filePath.substring(0, filePath.lastIndexOf("/"));
        // If the parent folder of a file is "metadata"
@@ -104,42 +104,21 @@ export function groupFilesBasedOnParentFolder(
        const folderName = folderPath.substring(
            folderPath.lastIndexOf("/") + 1,
        );
-        if (!folderName?.length) {
-            throw Error("folderName can't be null");
-        }
-        if (!collectionNameToFilesMap.has(folderName)) {
-            collectionNameToFilesMap.set(folderName, []);
-        }
-        // TODO: Remove the cast
-        collectionNameToFilesMap.get(folderName).push(file as any);
+        if (!folderName) throw Error("Unexpected empty folder name");
+        if (!result.has(folderName)) result.set(folderName, []);
+        result.get(folderName).push(fileOrPath);
    }
-    return collectionNameToFilesMap;
-}
+    return result;
+};

-export function filterOutSystemFiles(
-    files: File[] | ElectronFile[] | string[] | undefined | null,
-) {
-    if (!files) return files;
+/**
+ * Filter out hidden files from amongst {@link fileOrPaths}.
+ *
+ * Hidden files are those whose names begin with a "." (dot).
+ */

-    if (files[0] instanceof File) {
-        const browserFiles = files as File[];
-        return browserFiles.filter((file) => {
-            return !isSystemFile(file);
-        });
-    } else if (typeof files[0] == "string") {
-        const filePaths = files as string[];
-        return filePaths.filter((path) => !isHiddenFile(path));
-    } else {
-        const electronFiles = files as ElectronFile[];
-        return electronFiles.filter((file) => {
-            return !isSystemFile(file);
-        });
-    }
-}
-
-export function isSystemFile(file: File | ElectronFile) {
-    return file.name.startsWith(".");
-}
+export const pruneHiddenFiles = (fileOrPaths: (File | string)[]) =>
+    fileOrPaths.filter((f) => !fopFileName(f).startsWith("."));

 /**
  * Return true if the file at the given {@link path} is hidden.
diff --git a/web/apps/photos/src/worker/heic-convert.worker.ts b/web/apps/photos/src/worker/heic-convert.worker.ts
index 3ce795e3db..96a1a94684 100644
--- a/web/apps/photos/src/worker/heic-convert.worker.ts
+++ b/web/apps/photos/src/worker/heic-convert.worker.ts
@@ -1,6 +1,5 @@
 import { expose } from "comlink";
 import HeicConvert from "heic-convert";
-import { getUint8ArrayView } from "services/readerService";

 export class DedicatedHEICConvertWorker {
     async heicToJPEG(heicBlob: Blob) {
@@ -16,9 +15,8 @@ expose(DedicatedHEICConvertWorker, self);
  * Both the input and output are blobs.
  */
 export const heicToJPEG = async (heicBlob: Blob): Promise<Blob> => {
-    const filedata = await getUint8ArrayView(heicBlob);
-    const result = await HeicConvert({ buffer: filedata, format: "JPEG" });
-    const convertedFileData = new Uint8Array(result);
-    const convertedFileBlob = new Blob([convertedFileData]);
-    return convertedFileBlob;
+    const buffer = new Uint8Array(await heicBlob.arrayBuffer());
+    const result = await HeicConvert({ buffer, format: "JPEG" });
+    const convertedData = new Uint8Array(result);
+    return new Blob([convertedData]);
 };
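
A quick sketch (not part of the patch) of how the new `(File | string)` helpers are meant to compose. The body of `fopFileName` below is an assumption for illustration — the real helper lives in `services/upload/uploadService.ts` — but it captures the contract that `pruneHiddenFiles` relies on: return the basename of a path string, or the `name` of a browser `File`.

```ts
// Assumed, illustrative stand-in for the `fopFileName` exported by
// services/upload/uploadService.ts: basename for path strings, `name` for Files.
const fopFileName = (fileOrPath: File | string): string =>
    typeof fileOrPath == "string"
        ? fileOrPath.substring(fileOrPath.lastIndexOf("/") + 1)
        : fileOrPath.name;

// Mirrors the `pruneHiddenFiles` added by this patch: drop entries whose
// name begins with a "." (dot), whatever their representation.
const pruneHiddenFiles = (fileOrPaths: (File | string)[]) =>
    fileOrPaths.filter((f) => !fopFileName(f).startsWith("."));

// Usage: dotfiles are removed whether they arrive as browser File objects
// (web drag-and-drop) or as path strings (desktop app / folder watch).
const kept = pruneHiddenFiles([
    new File(["example"], "photo.jpg"),
    new File(["example"], ".DS_Store"),
    "/albums/trip/.thumbs.db",
    "/albums/trip/video.mp4",
]);
console.log(kept.map((f) => fopFileName(f))); // ["photo.jpg", "video.mp4"]
```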