[web] Forward some methods via the new crypto worker interface (#4114)

Manav Rathi committed on 2024-11-20 19:12:42 +05:30 (committed by GitHub)

4 changed files with 155 additions and 70 deletions


@@ -1,7 +1,4 @@
- import {
- streamEncryptionChunkSize,
- type B64EncryptionResult,
- } from "@/base/crypto/libsodium";
+ import { streamEncryptionChunkSize } from "@/base/crypto/libsodium";
import type { BytesOrB64 } from "@/base/crypto/types";
import { type CryptoWorker } from "@/base/crypto/worker";
import { ensureElectron } from "@/base/electron";
@@ -238,11 +235,6 @@ interface FileWithMetadata extends Omit<ThumbnailedFile, "hasStaticThumbnail"> {
pubMagicMetadata: FilePublicMagicMetadata;
}
- interface EncryptedFile {
- file: ProcessedFile;
- fileKey: B64EncryptionResult;
- }
interface EncryptedFileStream {
/**
* A stream of the file's encrypted contents
@@ -260,12 +252,7 @@ interface EncryptedFileStream {
chunkCount: number;
}
- interface EncryptedMetadata {
- encryptedDataB64: string;
- decryptionHeaderB64: string;
- }
- interface ProcessedFile {
+ interface EncryptedFilePieces {
file: {
encryptedData: Uint8Array | EncryptedFileStream;
decryptionHeader: string;
@@ -274,7 +261,10 @@ interface ProcessedFile {
encryptedData: Uint8Array;
decryptionHeader: string;
};
- metadata: EncryptedMetadata;
+ metadata: {
+ encryptedDataB64: string;
+ decryptionHeaderB64: string;
+ };
pubMagicMetadata: EncryptedMagicMetadata;
localID: number;
}
@@ -616,7 +606,7 @@ export const uploader = async (
pubMagicMetadata,
};
- const encryptedFile = await encryptFile(
+ const { encryptedFilePieces, encryptedFileKey } = await encryptFile(
fileWithMetadata,
collection.key,
worker,
@@ -625,7 +615,7 @@ export const uploader = async (
abortIfCancelled();
const backupedFile = await uploadToBucket(
- encryptedFile.file,
+ encryptedFilePieces,
makeProgessTracker,
isCFUploadProxyDisabled,
abortIfCancelled,
@@ -633,8 +623,8 @@ export const uploader = async (
const uploadedFile = await uploadService.uploadFile({
collectionID: collection.id,
- encryptedKey: encryptedFile.fileKey.encryptedData,
- keyDecryptionNonce: encryptedFile.fileKey.nonce,
+ encryptedKey: encryptedFileKey.encryptedData,
+ keyDecryptionNonce: encryptedFileKey.nonce,
...backupedFile,
});
@@ -1346,7 +1336,7 @@ const encryptFile = async (
file: FileWithMetadata,
encryptionKey: string,
worker: CryptoWorker,
- ): Promise<EncryptedFile> => {
+ ) => {
const fileKey = await worker.generateBlobOrStreamKey();
const { fileStreamOrData, thumbnail, metadata, pubMagicMetadata, localID } =
@@ -1383,17 +1373,19 @@ const encryptFile = async (
const encryptedKey = await worker.encryptToB64(fileKey, encryptionKey);
- const result: EncryptedFile = {
- file: {
+ return {
+ encryptedFilePieces: {
file: encryptedFiledata,
thumbnail: encryptedThumbnail,
metadata: encryptedMetadata,
pubMagicMetadata: encryptedPubMagicMetadata,
localID: localID,
},
- fileKey: encryptedKey,
+ encryptedFileKey: {
+ encryptedData: encryptedKey.encryptedData,
+ nonce: encryptedKey.nonce,
+ },
};
- return result;
};
const encryptFileStream = async (
@@ -1427,15 +1419,17 @@ const encryptFileStream = async (
};
const uploadToBucket = async (
- file: ProcessedFile,
+ encryptedFilePieces: EncryptedFilePieces,
makeProgessTracker: MakeProgressTracker,
isCFUploadProxyDisabled: boolean,
abortIfCancelled: () => void,
): Promise<BackupedFile> => {
+ const { localID, file, thumbnail, metadata, pubMagicMetadata } =
+ encryptedFilePieces;
try {
let fileObjectKey: string = null;
- const encryptedData = file.file.encryptedData;
+ const encryptedData = file.encryptedData;
if (
!(encryptedData instanceof Uint8Array) &&
encryptedData.chunkCount >= multipartChunksPerPart
@@ -1443,7 +1437,7 @@ const uploadToBucket = async (
// We have a stream, and it is more than multipartChunksPerPart
// chunks long, so use a multipart upload to upload it.
fileObjectKey = await uploadStreamUsingMultipart(
- file.localID,
+ localID,
encryptedData,
makeProgessTracker,
isCFUploadProxyDisabled,
@@ -1455,7 +1449,7 @@ const uploadToBucket = async (
? encryptedData
: await readEntireStream(encryptedData.stream);
- const progressTracker = makeProgessTracker(file.localID);
+ const progressTracker = makeProgessTracker(localID);
const fileUploadURL = await uploadService.getUploadURL();
if (!isCFUploadProxyDisabled) {
fileObjectKey = await UploadHttpClient.putFileV2(
@@ -1476,31 +1470,31 @@ const uploadToBucket = async (
if (!isCFUploadProxyDisabled) {
thumbnailObjectKey = await UploadHttpClient.putFileV2(
thumbnailUploadURL,
- file.thumbnail.encryptedData,
+ thumbnail.encryptedData,
null,
);
} else {
thumbnailObjectKey = await UploadHttpClient.putFile(
thumbnailUploadURL,
- file.thumbnail.encryptedData,
+ thumbnail.encryptedData,
null,
);
}
const backupedFile: BackupedFile = {
file: {
- decryptionHeader: file.file.decryptionHeader,
+ decryptionHeader: file.decryptionHeader,
objectKey: fileObjectKey,
},
thumbnail: {
- decryptionHeader: file.thumbnail.decryptionHeader,
+ decryptionHeader: thumbnail.decryptionHeader,
objectKey: thumbnailObjectKey,
},
metadata: {
- encryptedData: file.metadata.encryptedDataB64,
- decryptionHeader: file.metadata.decryptionHeaderB64,
+ encryptedData: metadata.encryptedDataB64,
+ decryptionHeader: metadata.decryptionHeaderB64,
},
- pubMagicMetadata: file.pubMagicMetadata,
+ pubMagicMetadata: pubMagicMetadata,
};
return backupedFile;
} catch (e) {

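To make the reshaped return value concrete, here is a minimal sketch, not the actual upload service code, of how the { encryptedFilePieces, encryptedFileKey } pair that encryptFile now produces can be assembled from the helpers this commit forwards; metadata and magic metadata handling are omitted, and the parameter names are illustrative.

import {
    encryptStreamBytes,
    encryptThumbnail,
    sharedCryptoWorker,
} from "@/base/crypto";

// Sketch only: encrypt a file's pieces with a fresh per-file key, and wrap
// that key with the collection key so it can be sent alongside the pieces.
export const encryptFileSketch = async (
    fileData: Uint8Array,
    thumbnailData: Uint8Array,
    collectionKey: string,
    localID: number,
) => {
    const worker = await sharedCryptoWorker();
    const fileKey = await worker.generateBlobOrStreamKey();
    const encryptedKey = await worker.encryptToB64(fileKey, collectionKey);
    return {
        encryptedFilePieces: {
            // Small files can be encrypted in one go; the real code switches
            // to chunked streaming encryption for larger files.
            file: await encryptStreamBytes(fileData, fileKey),
            thumbnail: await encryptThumbnail(thumbnailData, fileKey),
            localID,
        },
        encryptedFileKey: {
            encryptedData: encryptedKey.encryptedData,
            nonce: encryptedKey.nonce,
        },
    };
};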

@@ -46,10 +46,16 @@
* of going through this file.
*/
import { ComlinkWorker } from "@/base/worker/comlink-worker";
+ import { type StateAddress } from "libsodium-wrappers-sumo";
import { assertionFailed } from "../assert";
import { inWorker } from "../env";
import * as ei from "./ente-impl";
- import type { BytesOrB64, EncryptedBlob, EncryptedBox } from "./types";
+ import type {
+ BytesOrB64,
+ EncryptedBlob,
+ EncryptedBox,
+ EncryptedFile,
+ } from "./types";
import type { CryptoWorker } from "./worker";
/**
@@ -147,7 +153,58 @@ export const encryptBlob = (data: BytesOrB64, key: BytesOrB64) =>
export const encryptBlobB64 = (data: BytesOrB64, key: BytesOrB64) =>
inWorker()
? ei._encryptBlobB64(data, key)
- : sharedCryptoWorker().then((w) => w._encryptBlobB64(data, key));
+ : sharedCryptoWorker().then((w) => w.encryptBlobB64(data, key));
/**
* Encrypt the thumbnail for a file.
*
* This is a midway variant of {@link encryptBlob} and {@link encryptBlobB64} that
* returns the decryption header as a base64 string, but leaves the encrypted
* data as bytes.
*
* Use {@link decryptThumbnail} to decrypt the result.
*/
export const encryptThumbnail = (data: BytesOrB64, key: BytesOrB64) =>
inWorker()
? ei._encryptThumbnail(data, key)
: sharedCryptoWorker().then((w) => w.encryptThumbnail(data, key));
/**
* Encrypt the given data using chunked streaming encryption, but process all
* the chunks in one go.
*
* For more details, see {@link encryptStreamBytes} in `libsodium.ts`.
*/
export const encryptStreamBytes = async (data: Uint8Array, key: BytesOrB64) =>
inWorker()
? ei._encryptStreamBytes(data, key)
: sharedCryptoWorker().then((w) => w.encryptStreamBytes(data, key));
/**
* Prepare for chunked streaming encryption using {@link encryptStreamChunk}.
*
* For more details, see {@link initChunkEncryption} in `libsodium.ts`.
*/
export const initChunkEncryption = async (key: BytesOrB64) =>
inWorker()
? ei._initChunkEncryption(key)
: sharedCryptoWorker().then((w) => w.initChunkEncryption(key));
/**
* Encrypt a chunk as part of a chunked streaming encryption.
*
* For more details, see {@link encryptStreamChunk} in `libsodium.ts`.
*/
export const encryptStreamChunk = async (
data: Uint8Array,
state: StateAddress,
isFinalChunk: boolean,
) =>
inWorker()
? ei._encryptStreamChunk(data, state, isFinalChunk)
: sharedCryptoWorker().then((w) =>
w.encryptStreamChunk(data, state, isFinalChunk),
);
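
The chunked helpers added above are meant to be used together: initChunkEncryption yields the push state, and encryptStreamChunk consumes it. A minimal sketch that encrypts an in-memory buffer chunk by chunk, assuming the push state has already been obtained from initChunkEncryption (its exact return shape lives in libsodium.ts and is not spelled out here), and assuming each encryptStreamChunk call resolves to the encrypted bytes of that chunk:

import { encryptStreamChunk } from "@/base/crypto";
import { streamEncryptionChunkSize } from "@/base/crypto/libsodium";
import type { StateAddress } from "libsodium-wrappers-sumo";

// Sketch only: slice `data` into plaintext chunks of streamEncryptionChunkSize
// and push each one through the stream, flagging the final chunk.
export const encryptChunkedSketch = async (
    data: Uint8Array,
    pushState: StateAddress,
) => {
    // Assumption: each call resolves to the encrypted bytes for that chunk.
    const encryptedChunks: Uint8Array[] = [];
    for (let i = 0; i < data.length; i += streamEncryptionChunkSize) {
        const chunk = data.slice(i, i + streamEncryptionChunkSize);
        const isFinalChunk = i + streamEncryptionChunkSize >= data.length;
        encryptedChunks.push(
            await encryptStreamChunk(chunk, pushState, isFinalChunk),
        );
    }
    return encryptedChunks;
};
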
/**
* Encrypt the JSON metadata associated with an Ente object (file, collection or
@@ -221,6 +278,47 @@ export const decryptBlobB64 = (blob: EncryptedBlob, key: BytesOrB64) =>
? ei._decryptBlobB64(blob, key)
: sharedCryptoWorker().then((w) => w.decryptBlobB64(blob, key));
/**
* Decrypt the thumbnail encrypted using {@link encryptThumbnail}.
*/
export const decryptThumbnail = (blob: EncryptedBlob, key: BytesOrB64) =>
inWorker()
? ei._decryptThumbnail(blob, key)
: sharedCryptoWorker().then((w) => w.decryptThumbnail(blob, key));
/**
* Decrypt the result of {@link encryptStreamBytes}.
*/
export const decryptStreamBytes = async (
file: EncryptedFile,
key: BytesOrB64,
) =>
inWorker()
? ei._decryptStreamBytes(file, key)
: sharedCryptoWorker().then((w) => w.decryptStreamBytes(file, key));
/**
* Prepare to decrypt the encrypted result produced using {@link initChunkEncryption} and
* {@link encryptStreamChunk}.
*/
export const initChunkDecryption = async (header: string, key: BytesOrB64) =>
inWorker()
? ei._initChunkDecryption(header, key)
: sharedCryptoWorker().then((w) => w.initChunkDecryption(header, key));
/**
* Decrypt an individual chunk produced by {@link encryptStreamChunk}.
*
* This function is used in tandem with {@link initChunkDecryption}.
*/
export const decryptStreamChunk = async (
data: Uint8Array,
state: StateAddress,
) =>
inWorker()
? ei._decryptStreamChunk(data, state)
: sharedCryptoWorker().then((w) => w.decryptStreamChunk(data, state));
/**
* Decrypt the metadata JSON encrypted using {@link encryptMetadataJSON}.
*

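Each decrypt wrapper above is documented as the counterpart of a specific encrypt wrapper, so a round trip needs no knowledge of the intermediate shapes. A minimal sketch, assuming the key is available as a base64 string:

import {
    decryptStreamBytes,
    decryptThumbnail,
    encryptStreamBytes,
    encryptThumbnail,
} from "@/base/crypto";

// Sketch only: encrypt and immediately decrypt some bytes, relying purely on
// the documented pairings (encryptThumbnail / decryptThumbnail and
// encryptStreamBytes / decryptStreamBytes).
export const roundTripSketch = async (keyB64: string) => {
    const thumbnail = new Uint8Array([1, 2, 3]);
    const contents = new Uint8Array([4, 5, 6]);

    const decryptedThumbnail = await decryptThumbnail(
        await encryptThumbnail(thumbnail, keyB64),
        keyB64,
    );
    const decryptedContents = await decryptStreamBytes(
        await encryptStreamBytes(contents, keyB64),
        keyB64,
    );

    return { decryptedThumbnail, decryptedContents };
};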

@@ -17,7 +17,7 @@ export class CryptoWorker {
generateBlobOrStreamKey = ei._generateBlobOrStreamKey;
encryptBoxB64 = ei._encryptBoxB64;
encryptThumbnail = ei._encryptThumbnail;
- _encryptBlobB64 = ei._encryptBlobB64;
+ encryptBlobB64 = ei._encryptBlobB64;
encryptStreamBytes = ei._encryptStreamBytes;
initChunkEncryption = ei._initChunkEncryption;
encryptStreamChunk = ei._encryptStreamChunk;

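The worker class itself only renames the blob encryption method to its public name, which is what lets the wrapper in @/base/crypto forward to it. A small sketch of the two call paths that now reach the same implementation, again assuming a base64 key string:

import { encryptBlobB64, sharedCryptoWorker } from "@/base/crypto";

// Sketch only: both paths end up in the same underlying implementation; each
// call still uses fresh randomness, so the two results differ byte for byte.
export const encryptBothWaysSketch = async (data: Uint8Array, keyB64: string) => {
    // Via the wrapper, which runs inline when already inside a web worker and
    // otherwise forwards to the shared crypto worker.
    const viaWrapper = await encryptBlobB64(data, keyB64);
    // Via the shared worker directly.
    const worker = await sharedCryptoWorker();
    const viaWorker = await worker.encryptBlobB64(data, keyB64);
    return { viaWrapper, viaWorker };
};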

@@ -3,8 +3,12 @@
import { isDesktop } from "@/base/app";
import { blobCache, type BlobCache } from "@/base/blob-cache";
- import { sharedCryptoWorker } from "@/base/crypto";
- import { type CryptoWorker } from "@/base/crypto/worker";
+ import {
+ decryptStreamBytes,
+ decryptStreamChunk,
+ decryptThumbnail,
+ initChunkDecryption,
+ } from "@/base/crypto";
import log from "@/base/log";
import { customAPIOrigin } from "@/base/origins";
import type { EnteFile, LivePhotoSourceURL, SourceURLs } from "@/media/file";
@@ -45,7 +49,6 @@ class DownloadManagerImpl {
* Only available when we're running in the desktop app.
*/
private fileCache?: BlobCache;
- private cryptoWorker: CryptoWorker | undefined;
private fileObjectURLPromises = new Map<number, Promise<SourceURLs>>();
private fileConversionPromises = new Map<number, Promise<SourceURLs>>();
@@ -78,7 +81,7 @@ class DownloadManagerImpl {
// } catch (e) {
// log.error("Failed to open file cache, will continue without it", e);
// }
- this.cryptoWorker = await sharedCryptoWorker();
this.ready = true;
}
@@ -88,15 +91,11 @@ class DownloadManagerImpl {
"Attempting to use an uninitialized download manager",
);
- return {
- downloadClient: this.downloadClient!,
- cryptoWorker: this.cryptoWorker!,
- };
+ return { downloadClient: this.downloadClient! };
}
logout() {
this.ready = false;
- this.cryptoWorker = undefined;
this.downloadClient = undefined;
this.fileObjectURLPromises.clear();
this.fileConversionPromises.clear();
@@ -115,14 +114,11 @@ class DownloadManagerImpl {
}
private downloadThumb = async (file: EnteFile) => {
- const { downloadClient, cryptoWorker } = this.ensureInitialized();
+ const { downloadClient } = this.ensureInitialized();
const encryptedData = await downloadClient.downloadThumbnail(file);
const decryptionHeader = file.thumbnail.decryptionHeader;
- return cryptoWorker.decryptThumbnail(
- { encryptedData, decryptionHeader },
- file.key,
- );
+ return decryptThumbnail({ encryptedData, decryptionHeader }, file.key);
};
async getThumbnail(file: EnteFile, localOnly = false) {
@@ -270,7 +266,7 @@ class DownloadManagerImpl {
private async downloadFile(
file: EnteFile,
): Promise<ReadableStream<Uint8Array> | null> {
- const { downloadClient, cryptoWorker } = this.ensureInitialized();
+ const { downloadClient } = this.ensureInitialized();
log.info(`download attempted for file id ${file.id}`);
@@ -301,7 +297,7 @@ class DownloadManagerImpl {
}
this.clearDownloadProgress(file.id);
- const decrypted = await cryptoWorker.decryptStreamBytes(
+ const decrypted = await decryptStreamBytes(
{
encryptedData: new Uint8Array(encryptedArrayBuffer),
decryptionHeader: file.file.decryptionHeader,
@@ -331,11 +327,10 @@ class DownloadManagerImpl {
parseInt(res.headers.get("Content-Length") ?? "") || 0;
let downloadedBytes = 0;
- const { pullState, decryptionChunkSize } =
- await cryptoWorker.initChunkDecryption(
- file.file.decryptionHeader,
- file.key,
- );
+ const { pullState, decryptionChunkSize } = await initChunkDecryption(
+ file.file.decryptionHeader,
+ file.key,
+ );
let leftoverBytes = new Uint8Array();
@@ -368,11 +363,10 @@ class DownloadManagerImpl {
// data.length might be a multiple of decryptionChunkSize,
// and we might need multiple iterations to drain it all.
while (data.length >= decryptionChunkSize) {
- const decryptedData =
- await cryptoWorker.decryptStreamChunk(
- data.slice(0, decryptionChunkSize),
- pullState,
- );
+ const decryptedData = await decryptStreamChunk(
+ data.slice(0, decryptionChunkSize),
+ pullState,
+ );
controller.enqueue(decryptedData);
didEnqueue = true;
data = data.slice(decryptionChunkSize);
@@ -382,11 +376,10 @@ class DownloadManagerImpl {
// Send off the remaining bytes without waiting for a
// full chunk, no more bytes are going to come.
if (data.length) {
- const decryptedData =
- await cryptoWorker.decryptStreamChunk(
- data,
- pullState,
- );
+ const decryptedData = await decryptStreamChunk(
+ data,
+ pullState,
+ );
controller.enqueue(decryptedData);
}
// Don't loop again even if we didn't enqueue.
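
The pull handler above drains full chunks as they accumulate and flushes whatever remains once the network stream ends. Stripped of the ReadableStream plumbing, the same pattern looks like this minimal sketch (not the download manager's code), assuming the whole encrypted payload is already in memory:

import { decryptStreamChunk, initChunkDecryption } from "@/base/crypto";

// Sketch only: decrypt a fully buffered chunked-encrypted payload by slicing
// it into decryptionChunkSize pieces and feeding them through the pull state.
export const decryptChunkedBufferSketch = async (
    encryptedData: Uint8Array,
    decryptionHeader: string,
    keyB64: string,
) => {
    const { pullState, decryptionChunkSize } = await initChunkDecryption(
        decryptionHeader,
        keyB64,
    );
    const decryptedParts: Uint8Array[] = [];
    let data = encryptedData;
    // Drain as many full chunks as are present.
    while (data.length >= decryptionChunkSize) {
        decryptedParts.push(
            await decryptStreamChunk(data.slice(0, decryptionChunkSize), pullState),
        );
        data = data.slice(decryptionChunkSize);
    }
    // Any remaining bytes form the final, possibly shorter, chunk.
    if (data.length) {
        decryptedParts.push(await decryptStreamChunk(data, pullState));
    }
    return decryptedParts;
};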