[web] Non-functional crypto layer ontology tweaks (#4111)

This PR reworks the names etc, and how the parameters and names fit in
the ontology of our crypto layers as part of our long term improvements
to the crypto layering, but doesn't change _any_ implementation
specifics.
This commit is contained in:
Manav Rathi
2024-11-20 16:38:13 +05:30
committed by GitHub
10 changed files with 318 additions and 256 deletions

View File

@@ -343,13 +343,13 @@ const downloadFile = async (
);
const cryptoWorker = await sharedCryptoWorker();
const decrypted = await cryptoWorker.decryptFile(
new Uint8Array(await res.arrayBuffer()),
await cryptoWorker.fromB64(
shouldUseThumbnail
const decrypted = await cryptoWorker.decryptStreamBytes(
{
encryptedData: new Uint8Array(await res.arrayBuffer()),
decryptionHeader: shouldUseThumbnail
? file.thumbnail.decryptionHeader
: file.file.decryptionHeader,
),
},
file.key,
);
return new Response(decrypted).blob();

View File

@@ -1,7 +1,8 @@
import {
ENCRYPTION_CHUNK_SIZE,
streamEncryptionChunkSize,
type B64EncryptionResult,
} from "@/base/crypto/libsodium";
import type { BytesOrB64 } from "@/base/crypto/types";
import { type CryptoWorker } from "@/base/crypto/worker";
import { ensureElectron } from "@/base/electron";
import { basename, nameAndExtension } from "@/base/file";
@@ -68,16 +69,17 @@ interface FileStream {
/**
* A stream of the file's contents
*
* This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
* (except the last chunk which can be smaller since a file would rarely
* align exactly to a ENCRYPTION_CHUNK_SIZE multiple).
* This stream is guaranteed to emit data in
* {@link streamEncryptionChunkSize} sized chunks (except the last chunk
* which can be smaller since a file would rarely align exactly to a
* {@link streamEncryptionChunkSize} multiple).
*
* Note: A stream can only be read once!
*/
stream: ReadableStream<Uint8Array>;
/**
* Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
* sized (except the last one).
* Number of chunks {@link stream} will emit, each
* {@link streamEncryptionChunkSize} sized (except the last one).
*/
chunkCount: number;
/**
@@ -95,11 +97,12 @@ interface FileStream {
}
/**
* If the stream we have is more than 5 ENCRYPTION_CHUNK_SIZE chunks, then use
* multipart uploads for it, with each multipart-part containing 5 chunks.
* If the stream we have is more than 5 {@link streamEncryptionChunkSize}
* chunks, then use multipart uploads for it, with each multipart-part
* containing 5 chunks.
*
* ENCRYPTION_CHUNK_SIZE is 4 MB, and the number of chunks in a single upload
* part is 5, so each part is (up to) 20 MB.
* {@link streamEncryptionChunkSize} is 4 MB, and the number of chunks in a
* single upload part is 5, so each part is (up to) 20 MB.
*/
const multipartChunksPerPart = 5;
@@ -244,40 +247,33 @@ interface EncryptedFileStream {
/**
* A stream of the file's encrypted contents
*
* This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
* (except the last chunk which can be smaller since a file would rarely
* align exactly to a ENCRYPTION_CHUNK_SIZE multiple).
* This stream is guaranteed to emit data in
 * {@link streamEncryptionChunkSize} sized chunks (except the last chunk which
* be smaller since a file would rarely align exactly to a
* {@link streamEncryptionChunkSize} multiple).
*/
stream: ReadableStream<Uint8Array>;
/**
* Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
* sized (except the last one).
* Number of chunks {@link stream} will emit, each
* {@link streamEncryptionChunkSize} sized (except the last one).
*/
chunkCount: number;
}
interface LocalFileAttributes<
T extends string | Uint8Array | EncryptedFileStream,
> {
encryptedData: T;
decryptionHeader: string;
}
interface EncryptedMetadata {
encryptedDataB64: string;
decryptionHeaderB64: string;
}
interface EncryptionResult<
T extends string | Uint8Array | EncryptedFileStream,
> {
file: LocalFileAttributes<T>;
key: string;
}
interface ProcessedFile {
file: LocalFileAttributes<Uint8Array | EncryptedFileStream>;
thumbnail: LocalFileAttributes<Uint8Array>;
file: {
encryptedData: Uint8Array | EncryptedFileStream;
decryptionHeader: string;
};
thumbnail: {
encryptedData: Uint8Array;
decryptionHeader: string;
};
metadata: EncryptedMetadata;
pubMagicMetadata: EncryptedMagicMetadata;
localID: number;
@@ -769,11 +765,11 @@ const readUploadItem = async (uploadItem: UploadItem): Promise<FileStream> => {
lastModifiedMs = file.lastModified;
}
const N = ENCRYPTION_CHUNK_SIZE;
const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE);
const N = streamEncryptionChunkSize;
const chunkCount = Math.ceil(fileSize / streamEncryptionChunkSize);
// Pipe the underlying stream through a transformer that emits
// ENCRYPTION_CHUNK_SIZE-ed chunks (except the last one, which can be
// streamEncryptionChunkSize-ed chunks (except the last one, which can be
// smaller).
let pending: Uint8Array | undefined;
const transformer = new TransformStream<Uint8Array, Uint8Array>({
@@ -1351,34 +1347,35 @@ const encryptFile = async (
encryptionKey: string,
worker: CryptoWorker,
): Promise<EncryptedFile> => {
const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
file.fileStreamOrData,
worker,
const fileKey = await worker.generateBlobOrStreamKey();
const { fileStreamOrData, thumbnail, metadata, pubMagicMetadata, localID } =
file;
const encryptedFiledata =
fileStreamOrData instanceof Uint8Array
? await worker.encryptStreamBytes(fileStreamOrData, fileKey)
: await encryptFileStream(fileStreamOrData, fileKey, worker);
const encryptedThumbnail = await worker.encryptThumbnail(
thumbnail,
fileKey,
);
const {
encryptedData: thumbEncryptedData,
decryptionHeader: thumbDecryptionHeader,
} = await worker.encryptThumbnail(file.thumbnail, fileKey);
const encryptedThumbnail = {
encryptedData: thumbEncryptedData,
decryptionHeader: thumbDecryptionHeader,
};
const encryptedMetadata = await worker.encryptMetadataJSON({
jsonValue: file.metadata,
jsonValue: metadata,
keyB64: fileKey,
});
let encryptedPubMagicMetadata: EncryptedMagicMetadata;
if (file.pubMagicMetadata) {
if (pubMagicMetadata) {
const encryptedPubMagicMetadataData = await worker.encryptMetadataJSON({
jsonValue: file.pubMagicMetadata.data,
jsonValue: pubMagicMetadata.data,
keyB64: fileKey,
});
encryptedPubMagicMetadata = {
version: file.pubMagicMetadata.version,
count: file.pubMagicMetadata.count,
version: pubMagicMetadata.version,
count: pubMagicMetadata.count,
data: encryptedPubMagicMetadataData.encryptedDataB64,
header: encryptedPubMagicMetadataData.decryptionHeaderB64,
};
@@ -1392,34 +1389,26 @@ const encryptFile = async (
thumbnail: encryptedThumbnail,
metadata: encryptedMetadata,
pubMagicMetadata: encryptedPubMagicMetadata,
localID: file.localID,
localID: localID,
},
fileKey: encryptedKey,
};
return result;
};
const encryptFiledata = async (
fileStreamOrData: FileStream | Uint8Array,
worker: CryptoWorker,
): Promise<EncryptionResult<Uint8Array | EncryptedFileStream>> =>
fileStreamOrData instanceof Uint8Array
? await worker.encryptFile(fileStreamOrData)
: await encryptFileStream(fileStreamOrData, worker);
const encryptFileStream = async (
fileData: FileStream,
{ stream, chunkCount }: FileStream,
fileKey: BytesOrB64,
worker: CryptoWorker,
) => {
const { stream, chunkCount } = fileData;
const fileStreamReader = stream.getReader();
const { key, decryptionHeader, pushState } =
await worker.initChunkEncryption();
const { decryptionHeader, pushState } =
await worker.initChunkEncryption(fileKey);
const ref = { pullCount: 1 };
const encryptedFileStream = new ReadableStream({
async pull(controller) {
const { value } = await fileStreamReader.read();
const encryptedFileChunk = await worker.encryptFileChunk(
const encryptedFileChunk = await worker.encryptStreamChunk(
value,
pushState,
ref.pullCount === chunkCount,
@@ -1432,11 +1421,8 @@ const encryptFileStream = async (
},
});
return {
key,
file: {
decryptionHeader,
encryptedData: { stream: encryptedFileStream, chunkCount },
},
decryptionHeader,
encryptedData: { stream: encryptedFileStream, chunkCount },
};
};

View File

@@ -1,6 +1,10 @@
/** Careful when adding add other imports! */
import * as libsodium from "./libsodium";
import type { BytesOrB64, EncryptedBlob } from "./types";
import type { BytesOrB64, EncryptedBlob, EncryptedFile } from "./types";
export const _generateBoxKey = libsodium.generateBoxKey;
export const _generateBlobOrStreamKey = libsodium.generateBlobOrStreamKey;
export const _encryptBoxB64 = libsodium.encryptBoxB64;
@@ -8,7 +12,10 @@ export const _encryptBlob = libsodium.encryptBlob;
export const _encryptBlobB64 = libsodium.encryptBlobB64;
export const _encryptThumbnail = async (data: BytesOrB64, key: BytesOrB64) => {
export const _encryptThumbnail = async (
data: BytesOrB64,
key: BytesOrB64,
): Promise<EncryptedFile> => {
const { encryptedData, decryptionHeader } = await _encryptBlob(data, key);
return {
encryptedData,
@@ -16,6 +23,12 @@ export const _encryptThumbnail = async (data: BytesOrB64, key: BytesOrB64) => {
};
};
export const _encryptStreamBytes = libsodium.encryptStreamBytes;
export const _initChunkEncryption = libsodium.initChunkEncryption;
export const _encryptStreamChunk = libsodium.encryptStreamChunk;
export const _encryptMetadataJSON_New = (jsonValue: unknown, key: BytesOrB64) =>
_encryptBlobB64(new TextEncoder().encode(JSON.stringify(jsonValue)), key);
@@ -44,6 +57,12 @@ export const _decryptBlobB64 = libsodium.decryptBlobB64;
export const _decryptThumbnail = _decryptBlob;
export const _decryptStreamBytes = libsodium.decryptStreamBytes;
export const _initChunkDecryption = libsodium.initChunkDecryption;
export const _decryptStreamChunk = libsodium.decryptStreamChunk;
export const _decryptMetadataJSON_New = async (
blob: EncryptedBlob,
key: BytesOrB64,

View File

@@ -18,11 +18,11 @@
* ensure that sodium.ready has been called before accessing libsodium's APIs,
* thus all the functions it exposes are async.
*
* The highest layer is this file, `crypto/index.ts`, and the one that our own
* code should use. These are usually simple compositions of functionality
* exposed by `crypto/libsodium.ts`, the primary difference being that these
* functions try to talk in terms of higher-level Ente specific goal we are
* trying to accomplish instead of the specific underlying crypto algorithms.
* The highest layer is this file, `crypto/index.ts`. These are usually simple
* compositions of functionality exposed by `crypto/libsodium.ts`, the primary
* difference being that these functions try to talk in terms of higher-level
* Ente specific goal we are trying to accomplish instead of the specific
* underlying crypto algorithms.
*
* There is an additional actor in play. Cryptographic operations like
* encryption are CPU intensive and would cause the UI to stutter if used
@@ -49,7 +49,6 @@ import { ComlinkWorker } from "@/base/worker/comlink-worker";
import { assertionFailed } from "../assert";
import { inWorker } from "../env";
import * as ei from "./ente-impl";
import * as libsodium from "./libsodium";
import type { BytesOrB64, EncryptedBlob, EncryptedBox } from "./types";
import type { CryptoWorker } from "./worker";
@@ -89,16 +88,22 @@ const assertInWorker = <T>(x: T): T => {
};
/**
* Return a new randomly generated 256-bit key suitable for use with the *Box
* encryption functions.
* Return a new randomly generated 256-bit key (as a base64 string) suitable for
* use with the *Box encryption functions.
*/
export const generateNewBoxKey = libsodium.generateNewBoxKey;
export const generateBoxKey = () =>
inWorker()
? ei._generateBoxKey()
: sharedCryptoWorker().then((w) => w.generateBoxKey());
/**
* Return a new randomly generated 256-bit key suitable for use with the *Blob
* or *Stream encryption functions.
* Return a new randomly generated 256-bit key (as a base64 string) suitable for
* use with the *Blob or *Stream encryption functions.
*/
export const generateNewBlobOrStreamKey = libsodium.generateNewBlobOrStreamKey;
export const generateBlobOrStreamKey = () =>
inWorker()
? ei._generateBlobOrStreamKey()
: sharedCryptoWorker().then((w) => w.generateBlobOrStreamKey());
/**
* Encrypt the given data, returning a box containing the encrypted data and a
@@ -128,7 +133,7 @@ export const encryptBoxB64 = (data: BytesOrB64, key: BytesOrB64) =>
* Use {@link decryptBlob} to decrypt the result.
*
* > The suffix "Blob" comes from our convention of naming functions that use
* > the secretstream APIs in one-shot mode.
* > the secretstream APIs without breaking the data into chunks.
* >
* > See: [Note: 3 forms of encryption (Box | Blob | Stream)]
*/
@@ -144,18 +149,6 @@ export const encryptBlobB64 = (data: BytesOrB64, key: BytesOrB64) =>
? ei._encryptBlobB64(data, key)
: sharedCryptoWorker().then((w) => w._encryptBlobB64(data, key));
/**
* Encrypt the thumbnail for a file.
*
* This is midway variant of {@link encryptBlob} and {@link encryptBlobB64} that
* returns the decryption header as a base64 string, but leaves the data
* unchanged.
*
* Use {@link decryptThumbnail} to decrypt the result.
*/
export const encryptThumbnail = (data: BytesOrB64, key: BytesOrB64) =>
assertInWorker(ei._encryptThumbnail(data, key));
/**
* Encrypt the JSON metadata associated with an Ente object (file, collection or
* entity) using the object's key.
@@ -228,12 +221,6 @@ export const decryptBlobB64 = (blob: EncryptedBlob, key: BytesOrB64) =>
? ei._decryptBlobB64(blob, key)
: sharedCryptoWorker().then((w) => w.decryptBlobB64(blob, key));
/**
* Decrypt the thumbnail encrypted using {@link encryptThumbnail}.
*/
export const decryptThumbnail = (blob: EncryptedBlob, key: BytesOrB64) =>
assertInWorker(ei._decryptThumbnail(blob, key));
/**
* Decrypt the metadata JSON encrypted using {@link encryptMetadataJSON}.
*

View File

@@ -9,7 +9,6 @@
* To see where this code fits, see [Note: Crypto code hierarchy].
*/
import { mergeUint8Arrays } from "@/utils/array";
import { CustomError } from "@ente/shared/error";
import sodium, { type StateAddress } from "libsodium-wrappers-sumo";
import type {
BytesOrB64,
@@ -18,6 +17,7 @@ import type {
EncryptedBlobBytes,
EncryptedBox,
EncryptedBoxB64,
EncryptedFile,
} from "./types";
/**
@@ -129,23 +129,25 @@ const bytes = async (bob: BytesOrB64) =>
typeof bob == "string" ? fromB64(bob) : bob;
/**
* Generate a key for use with the *Box encryption functions.
* Generate a new key for use with the *Box encryption functions, and return its
* base64 string representation.
*
* This returns a new randomly generated 256-bit key suitable for being used
* with libsodium's secretbox APIs.
*/
export const generateNewBoxKey = async () => {
export const generateBoxKey = async () => {
await sodium.ready;
return toB64(sodium.crypto_secretbox_keygen());
};
/**
* Generate a key for use with the *Blob or *Stream encryption functions.
* Generate a new key for use with the *Blob or *Stream encryption functions,
* and return its base64 string representation.
*
* This returns a new randomly generated 256-bit key suitable for being used
* with libsodium's secretstream APIs.
*/
export const generateNewBlobOrStreamKey = async () => {
export const generateBlobOrStreamKey = async () => {
await sodium.ready;
return toB64(sodium.crypto_secretstream_xchacha20poly1305_keygen());
};
@@ -194,35 +196,32 @@ export const generateNewBlobOrStreamKey = async () => {
* secretstream APIs are more appropriate.
*
* However, in our code we have evolved two different use cases for the 2nd
* option.
*
* Say we have an Ente object, specifically an {@link EnteFile}. This holds the
* encryption keys for encrypting the contents of the file that a user wishes to
* upload. The secretstream APIs are the obvious fit, and indeed that's what we
* use, chunking the file if the contents are bigger than some threshold. But if
* the file is small enough, there is no need to chunk, so we also expose a
* function that does streaming encryption, but in "one-shot" mode.
*
* Later on, say we have to encrypt the public magic metadata associated with
* the {@link EnteFile}. Instead of using the secretbox APIs, we just us the
* same streaming encryption that the rest of the file uses, but since such
* metadata is well below the threshold for chunking, it invariably uses the
* "one-shot" mode.
* option. The data to encrypt might be smaller than our streaming encryption
* chunk size (e.g. the public magic metadata associated with the
* {@link EnteFile}), so we do not chunk it and instead encrypt / decrypt it in
* a single go. In contrast, the actual file that the user wishes to upload may
 * be arbitrarily big, and there we first break it into chunks before using the
* streaming encryption.
*
* Thus, we have three scenarios:
*
* 1. Box: Using secretbox APIs to encrypt some independent blob of data.
*
* 2. Blob: Using secretstream APIs in one-shot mode. This is used to encrypt
* 2. Blob: Using secretstream APIs without chunking. This is used to encrypt
* data associated to an Ente object (file, collection, entity, etc), when
* the data is small-ish (less than a few MBs).
*
* 3. Stream/Chunks: Using secretstream APIs for encrypting chunks. This is
* used to encrypt the actual content of the files associated with an
* EnteFile object.
* EnteFile object. This itself happens in two ways:
*
* 3a. One shot mode - where we do break the data into chunks, but a single
* function processes all the chunks in one go.
*
* 3b. Streaming - where all the chunks are processed one by one.
*
* "Blob" is not a prior term of art in this context, it is just something we
* use to abbreviate "data encrypted using secretstream APIs in one-shot mode".
* use to abbreviate "data encrypted using secretstream APIs without chunking".
*
* The distinction between Box and Blob is also handy since not only does the
* underlying algorithm differ, but also the terminology that libsodium use for
@@ -240,10 +239,15 @@ export const generateNewBlobOrStreamKey = async () => {
*
* 1. Box uses secretbox APIs (Salsa), Blob uses secretstream APIs (ChaCha).
*
* 2. While both are one-shot, Blob should generally be used for data
* associated with an Ente object, and Box for the other cases.
* 2. Blob should generally be used for data associated with an Ente object,
* and Box for the other cases.
*
* 3. Box returns a "nonce", while Blob returns a "header".
*
* The difference between case 2 and 3 (Blob vs Stream) is that while both use
* the same algorithms, in case of Blob the entire data is encrypted / decrypted
* without chunking, whilst the *Stream routines first break it into
* {@link streamEncryptionChunkSize} chunks.
*/
export const encryptBoxB64 = async (
data: BytesOrB64,
@@ -263,7 +267,7 @@ export const encryptBoxB64 = async (
};
/**
* Encrypt the given data using libsodium's secretstream APIs in one-shot mode.
* Encrypt the given data using libsodium's secretstream APIs without chunking.
*
* Use {@link decryptBlob} to decrypt the result.
*
@@ -283,9 +287,9 @@ export const encryptBlob = async (
): Promise<EncryptedBlobBytes> => {
await sodium.ready;
const uintkey = await bytes(key);
const keyBytes = await bytes(key);
const initPushResult =
sodium.crypto_secretstream_xchacha20poly1305_init_push(uintkey);
sodium.crypto_secretstream_xchacha20poly1305_init_push(keyBytes);
const [pushState, header] = [initPushResult.state, initPushResult.header];
const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
@@ -315,16 +319,46 @@ export const encryptBlobB64 = async (
};
};
export const ENCRYPTION_CHUNK_SIZE = 4 * 1024 * 1024;
/**
* The various *Stream encryption functions break up the input into chunks of
* {@link streamEncryptionChunkSize} bytes during encryption (except the last
* chunk which can be smaller since a file would rarely align exactly to a
* {@link streamEncryptionChunkSize} multiple).
*
* The various *Stream decryption functions also assume that each potential
* chunk is {@link streamEncryptionChunkSize} long.
*
 * The value of this constant is 4 MB (and is unlikely to change).
*/
export const streamEncryptionChunkSize = 4 * 1024 * 1024;
export const encryptChaCha = async (data: Uint8Array) => {
/**
* Encrypt the given data using libsodium's secretstream APIs after breaking it
* into {@link streamEncryptionChunkSize} chunks.
*
* Use {@link decryptStreamBytes} to decrypt the result.
*
 * Unlike {@link initChunkEncryption} / {@link encryptStreamChunk}, this function
* processes all the chunks at once in a single call to this function.
*
* @param data The data to encrypt.
*
* @returns The encrypted bytes ({@link Uint8Array}) and the decryption header
* (as a base64 string).
*
* - See: [Note: 3 forms of encryption (Box | Blob | Stream)].
*
* - See: https://doc.libsodium.org/secret-key_cryptography/secretstream
*/
export const encryptStreamBytes = async (
data: Uint8Array,
key: BytesOrB64,
): Promise<EncryptedFile> => {
await sodium.ready;
const uintkey: Uint8Array =
sodium.crypto_secretstream_xchacha20poly1305_keygen();
const keyBytes = await bytes(key);
const initPushResult =
sodium.crypto_secretstream_xchacha20poly1305_init_push(uintkey);
sodium.crypto_secretstream_xchacha20poly1305_init_push(keyBytes);
const [pushState, header] = [initPushResult.state, initPushResult.header];
let bytesRead = 0;
let tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;
@@ -332,7 +366,7 @@ export const encryptChaCha = async (data: Uint8Array) => {
const encryptedChunks = [];
while (tag !== sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL) {
let chunkSize = ENCRYPTION_CHUNK_SIZE;
let chunkSize = streamEncryptionChunkSize;
if (bytesRead + chunkSize >= data.length) {
chunkSize = data.length - bytesRead;
tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL;
@@ -349,45 +383,74 @@ export const encryptChaCha = async (data: Uint8Array) => {
encryptedChunks.push(pushResult);
}
return {
key: await toB64(uintkey),
file: {
encryptedData: mergeUint8Arrays(encryptedChunks),
decryptionHeader: await toB64(header),
},
encryptedData: mergeUint8Arrays(encryptedChunks),
decryptionHeader: await toB64(header),
};
};
export async function initChunkEncryption() {
/**
* Initialize libsodium's secretstream APIs for encrypting
* {@link streamEncryptionChunkSize} chunks. Subsequently, each chunk can be
* encrypted using {@link encryptStreamChunk}.
*
* Use {@link initChunkDecryption} to initialize the decryption routine, and
* {@link decryptStreamChunk} to decrypt the individual chunks.
*
* See also: {@link encryptStreamBytes} which also does chunked encryption but
* encrypts all the chunks in a single call.
*
* @param key The key to use for encryption.
*
* @returns The decryption header (as a base64 string) which should be preserved
* and used during decryption, and an opaque "push state" that should be passed
* to subsequent calls to {@link encryptStreamChunk} along with the chunks's
* contents.
*/
export const initChunkEncryption = async (key: BytesOrB64) => {
await sodium.ready;
const key = sodium.crypto_secretstream_xchacha20poly1305_keygen();
const initPushResult =
sodium.crypto_secretstream_xchacha20poly1305_init_push(key);
const [pushState, header] = [initPushResult.state, initPushResult.header];
const keyBytes = await bytes(key);
const { state, header } =
sodium.crypto_secretstream_xchacha20poly1305_init_push(keyBytes);
return {
key: await toB64(key),
decryptionHeader: await toB64(header),
pushState,
pushState: state,
};
}
};
export async function encryptFileChunk(
/**
* Encrypt an individual chunk using libsodium's secretstream APIs.
*
* This function is not meant to be standalone, but is instead called in tandem
* with {@link initChunkEncryption} for encrypting data after breaking it into
* chunks.
*
* @param data The chunk's data as bytes ({@link Uint8Array}).
*
* @param pushState The state for this instantiation of chunked encryption. This
* should be treated as opaque libsodium state that should be passed to all
* calls to {@link encryptStreamChunk} that are paired with a particular
* {@link initChunkEncryption}.
*
* @param isFinalChunk `true` if this is the last chunk in the sequence.
*
* @returns The encrypted chunk.
*/
export const encryptStreamChunk = async (
data: Uint8Array,
pushState: sodium.StateAddress,
isFinalChunk: boolean,
) {
) => {
await sodium.ready;
const tag = isFinalChunk
? sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL
: sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;
const pushResult = sodium.crypto_secretstream_xchacha20poly1305_push(
return sodium.crypto_secretstream_xchacha20poly1305_push(
pushState,
data,
null,
tag,
);
return pushResult;
}
};
/**
* Decrypt the result of {@link encryptBoxB64} and return the decrypted bytes.
@@ -440,38 +503,34 @@ export const decryptBlobB64 = (
key: BytesOrB64,
): Promise<string> => decryptBlob(blob, key).then(toB64);
/** Decrypt Stream, but merge the results. */
export const decryptChaCha = async (
data: Uint8Array,
header: Uint8Array,
key: string,
/**
* Decrypt the result of {@link encryptStreamBytes}.
*/
export const decryptStreamBytes = async (
{ encryptedData, decryptionHeader }: EncryptedFile,
key: BytesOrB64,
) => {
await sodium.ready;
const pullState = sodium.crypto_secretstream_xchacha20poly1305_init_pull(
header,
await fromB64(key),
await fromB64(decryptionHeader),
await bytes(key),
);
const decryptionChunkSize =
ENCRYPTION_CHUNK_SIZE +
streamEncryptionChunkSize +
sodium.crypto_secretstream_xchacha20poly1305_ABYTES;
let bytesRead = 0;
const decryptedChunks = [];
let tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;
while (tag !== sodium.crypto_secretstream_xchacha20poly1305_TAG_FINAL) {
let chunkSize = decryptionChunkSize;
if (bytesRead + chunkSize > data.length) {
chunkSize = data.length - bytesRead;
if (bytesRead + chunkSize > encryptedData.length) {
chunkSize = encryptedData.length - bytesRead;
}
const buffer = data.slice(bytesRead, bytesRead + chunkSize);
const buffer = encryptedData.slice(bytesRead, bytesRead + chunkSize);
const pullResult = sodium.crypto_secretstream_xchacha20poly1305_pull(
pullState,
buffer,
);
// TODO:
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (!pullResult.message) {
throw new Error(CustomError.PROCESSING_FAILED);
}
decryptedChunks.push(pullResult.message);
tag = pullResult.tag;
bytesRead += chunkSize;
@@ -479,36 +538,55 @@ export const decryptChaCha = async (
return mergeUint8Arrays(decryptedChunks);
};
export async function initChunkDecryption(header: Uint8Array, key: Uint8Array) {
/**
* Prepare to decrypt the result of {@link initChunkEncryption} and
* {@link encryptStreamChunk}.
*
* @param decryptionHeader The header (as a base64 string) that was produced
* during encryption by {@link initChunkEncryption}.
*
* @param key The encryption key.
*
* @returns The pull state, which should be treated as opaque libsodium specific
* state that should be passed along to each subsequent call to
* {@link decryptStreamChunk}, and the size of each (decrypted) chunk that will
* be produced by subsequent calls to {@link decryptStreamChunk}.
*/
export const initChunkDecryption = async (
decryptionHeader: string,
key: BytesOrB64,
) => {
await sodium.ready;
const pullState = sodium.crypto_secretstream_xchacha20poly1305_init_pull(
header,
key,
await fromB64(decryptionHeader),
await bytes(key),
);
const decryptionChunkSize =
ENCRYPTION_CHUNK_SIZE +
streamEncryptionChunkSize +
sodium.crypto_secretstream_xchacha20poly1305_ABYTES;
const tag = sodium.crypto_secretstream_xchacha20poly1305_TAG_MESSAGE;
return { pullState, decryptionChunkSize, tag };
}
return { pullState, decryptionChunkSize };
};
export async function decryptFileChunk(
/**
* Decrypt an individual chunk of the data encrypted using
* {@link initChunkEncryption} and {@link encryptStreamChunk}.
*
* This is meant to be used in tandem with {@link initChunkDecryption}. During
* each invocation, it should be passed the encrypted chunk, and the
* {@link pullState} returned by {@link initChunkDecryption}. It will then
* return the corresponding decrypted chunk's bytes.
*/
export const decryptStreamChunk = async (
data: Uint8Array,
pullState: StateAddress,
) {
) => {
await sodium.ready;
const pullResult = sodium.crypto_secretstream_xchacha20poly1305_pull(
pullState,
data,
);
// TODO:
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (!pullResult.message) {
throw new Error(CustomError.PROCESSING_FAILED);
}
const newTag = pullResult.tag;
return { decryptedData: pullResult.message, newTag };
}
return pullResult.message;
};
export interface B64EncryptionResult {
encryptedData: string;

View File

@@ -42,7 +42,7 @@ export interface EncryptedBoxB64 {
}
/**
* The result of encryption using the secretstream APIs in one-shot mode.
* The result of encryption using the secretstream APIs without chunking.
*
* It contains an encrypted data and a header that should be provided during
* decryption. The header does not need to be secret.
@@ -105,3 +105,27 @@ export interface EncryptedBlobB64 {
*/
decryptionHeader: string;
}
/**
* An intermediate between {@link EncryptedBlobBytes} and
 * {@link EncryptedBlobB64} that has the encrypted data as bytes
* ({@link Uint8Array}s), but the {@link decryptionHeader} as a base64 string.
*
* Such a format is handy for encrypting files, since it can then directly be
* used (the file's encrypted bytes get uploaded separately, whilst the base64
* decryption header becomes part of the corresponding {@link EnteFile}).
*/
export interface EncryptedFile {
/**
* The encrypted data.
*/
encryptedData: Uint8Array;
/**
* A base64 string containing the decryption header.
*
* While the exact contents of the header are libsodium's internal details,
* it effectively contains a random nonce generated by libsodium. It does
* not need to be secret, but it is required to decrypt the data.
*/
decryptionHeader: string;
}

View File

@@ -13,9 +13,14 @@ import * as libsodium from "./libsodium";
* Note: Keep these methods logic free. They are meant to be trivial proxies.
*/
export class CryptoWorker {
generateBoxKey = ei._generateBoxKey;
generateBlobOrStreamKey = ei._generateBlobOrStreamKey;
encryptBoxB64 = ei._encryptBoxB64;
encryptThumbnail = ei._encryptThumbnail;
_encryptBlobB64 = ei._encryptBlobB64;
encryptStreamBytes = ei._encryptStreamBytes;
initChunkEncryption = ei._initChunkEncryption;
encryptStreamChunk = ei._encryptStreamChunk;
encryptMetadataJSON_New = ei._encryptMetadataJSON_New;
encryptMetadataJSON = ei._encryptMetadataJSON;
decryptBox = ei._decryptBox;
@@ -23,39 +28,14 @@ export class CryptoWorker {
decryptBlob = ei._decryptBlob;
decryptBlobB64 = ei._decryptBlobB64;
decryptThumbnail = ei._decryptThumbnail;
decryptStreamBytes = ei._decryptStreamBytes;
initChunkDecryption = ei._initChunkDecryption;
decryptStreamChunk = ei._decryptStreamChunk;
decryptMetadataJSON_New = ei._decryptMetadataJSON_New;
decryptMetadataJSON = ei._decryptMetadataJSON;
// TODO: -- AUDIT BELOW --
async decryptFile(fileData: Uint8Array, header: Uint8Array, key: string) {
return libsodium.decryptChaCha(fileData, header, key);
}
async encryptFile(fileData: Uint8Array) {
return libsodium.encryptChaCha(fileData);
}
async encryptFileChunk(
data: Uint8Array,
pushState: StateAddress,
isFinalChunk: boolean,
) {
return libsodium.encryptFileChunk(data, pushState, isFinalChunk);
}
async initChunkEncryption() {
return libsodium.initChunkEncryption();
}
async initChunkDecryption(header: Uint8Array, key: Uint8Array) {
return libsodium.initChunkDecryption(header, key);
}
async decryptFileChunk(fileData: Uint8Array, pullState: StateAddress) {
return libsodium.decryptFileChunk(fileData, pullState);
}
async initChunkHashing() {
return libsodium.initChunkHashing();
}

View File

@@ -300,25 +300,15 @@ class DownloadManagerImpl {
);
}
this.clearDownloadProgress(file.id);
try {
const decrypted = await cryptoWorker.decryptFile(
new Uint8Array(encryptedArrayBuffer),
await cryptoWorker.fromB64(file.file.decryptionHeader),
file.key,
);
return new Response(decrypted).body;
} catch (e) {
if (
e instanceof Error &&
e.message == CustomError.PROCESSING_FAILED
) {
log.error(
`Failed to process file with fileID:${file.id}, localID: ${file.metadata.localID}, version: ${file.metadata.version}, deviceFolder:${file.metadata.deviceFolder}`,
e,
);
}
throw e;
}
const decrypted = await cryptoWorker.decryptStreamBytes(
{
encryptedData: new Uint8Array(encryptedArrayBuffer),
decryptionHeader: file.file.decryptionHeader,
},
file.key,
);
return new Response(decrypted).body;
}
const cachedBlob = await this.fileCache?.get(cacheKey);
@@ -341,12 +331,11 @@ class DownloadManagerImpl {
parseInt(res.headers.get("Content-Length") ?? "") || 0;
let downloadedBytes = 0;
const decryptionHeader = await cryptoWorker.fromB64(
file.file.decryptionHeader,
);
const fileKey = await cryptoWorker.fromB64(file.key);
const { pullState, decryptionChunkSize } =
await cryptoWorker.initChunkDecryption(decryptionHeader, fileKey);
await cryptoWorker.initChunkDecryption(
file.file.decryptionHeader,
file.key,
);
let leftoverBytes = new Uint8Array();
@@ -379,8 +368,8 @@ class DownloadManagerImpl {
// data.length might be a multiple of decryptionChunkSize,
// and we might need multiple iterations to drain it all.
while (data.length >= decryptionChunkSize) {
const { decryptedData } =
await cryptoWorker.decryptFileChunk(
const decryptedData =
await cryptoWorker.decryptStreamChunk(
data.slice(0, decryptionChunkSize),
pullState,
);
@@ -393,8 +382,8 @@ class DownloadManagerImpl {
// Send off the remaining bytes without waiting for a
// full chunk, no more bytes are going to come.
if (data.length) {
const { decryptedData } =
await cryptoWorker.decryptFileChunk(
const decryptedData =
await cryptoWorker.decryptStreamChunk(
data,
pullState,
);

View File

@@ -2,7 +2,7 @@ import {
decryptBoxB64,
encryptBlobB64,
encryptBoxB64,
generateNewBlobOrStreamKey,
generateBlobOrStreamKey,
} from "@/base/crypto";
import { nullishToEmpty, nullToUndefined } from "@/utils/transform";
import { z } from "zod";
@@ -258,16 +258,16 @@ const getOrCreateEntityKeyB64 = async (
    // As a sanity check, generate the key but immediately encrypt it as if it
// were fetched from remote and then try to decrypt it before doing anything
// with it.
const generated = await generateNewEncryptedEntityKey(masterKey);
const generated = await generateEncryptedEntityKey(masterKey);
const result = decryptEntityKey(generated, masterKey);
await postUserEntityKey(type, generated);
await saveRemoteUserEntityKey(type, generated);
return result;
};
const generateNewEncryptedEntityKey = async (masterKey: Uint8Array) => {
const generateEncryptedEntityKey = async (masterKey: Uint8Array) => {
const { encryptedData, nonce } = await encryptBoxB64(
await generateNewBlobOrStreamKey(),
await generateBlobOrStreamKey(),
masterKey,
);
// Remote calls it the header, but it really is the nonce.

View File

@@ -42,7 +42,6 @@ export const CustomError = {
EXPORT_STOPPED: "export stopped",
EXPORT_FOLDER_DOES_NOT_EXIST: "export folder does not exist",
AUTH_KEY_NOT_FOUND: "auth key not found",
PROCESSING_FAILED: "processing failed",
TWO_FACTOR_ENABLED: "two factor enabled",
};