namespace
@@ -94,6 +94,12 @@ export const _decryptMetadataJSON = async (r: {
         r.keyB64,
     );
 
+export const _chunkHashInit = libsodium.chunkHashInit;
+
+export const _chunkHashUpdate = libsodium.chunkHashUpdate;
+
+export const _chunkHashFinal = libsodium.chunkHashFinal;
+
 export const _generateKeyPair = libsodium.generateKeyPair;
 
 export const _boxSeal = libsodium.boxSeal;
@@ -632,6 +632,12 @@ export async function decryptToUTF8(
     return sodium.to_string(decrypted);
 }
 
+/**
+ * An opaque object meant to be threaded through {@link chunkHashInit},
+ * {@link chunkHashUpdate} and {@link chunkHashFinal}.
+ */
+export type ChunkHashState = sodium.StateAddress;
+
 /**
  * Initialize and return new state that can be used to hash the chunks of data
  * in a streaming manner.
@@ -648,14 +654,13 @@ export async function decryptToUTF8(
  * (along with the data to hash) to {@link chunkHashUpdate}, and the final hash
  * obtained using {@link chunkHashFinal}.
  */
-export async function initChunkHashing() {
+export const chunkHashInit = async (): Promise<ChunkHashState> => {
     await sodium.ready;
-    const hashState = sodium.crypto_generichash_init(
+    return sodium.crypto_generichash_init(
         null,
         sodium.crypto_generichash_BYTES_MAX,
     );
-    return hashState;
-}
+};
 
 /**
  * Update the hash state to incorporate the contents of the provided data chunk.
@@ -666,31 +671,32 @@ export async function initChunkHashing() {
  *
  * @param chunk The data (bytes) to hash.
  */
-export async function hashFileChunk(
-    hashState: sodium.StateAddress,
+export const chunkHashUpdate = async (
+    hashState: ChunkHashState,
     chunk: Uint8Array,
-) {
+) => {
     await sodium.ready;
     sodium.crypto_generichash_update(hashState, chunk);
-}
+};
 
 /**
  * Finalize a hash state and return the hash it represents (as a base64 string).
  *
  * See: [Note: Chunked hashing]
  *
+ * @param hashState A hash state obtained using {@link chunkHashInit} and fed
+ * chunks using {@link chunkHashUpdate}.
+ *
  * @returns The hash of all the chunks (as a base64 string).
  */
-export async function completeChunkHashing(hashState: sodium.StateAddress) {
+export const chunkHashFinal = async (hashState: ChunkHashState) => {
     await sodium.ready;
     const hash = sodium.crypto_generichash_final(
         hashState,
         sodium.crypto_generichash_BYTES_MAX,
     );
-    const hashString = toB64(hash);
-    return hashString;
-}
+    return toB64(hash);
+};
 
 /**
  * Generate a new public/private keypair for use with public-key encryption
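Taken together, the renamed trio forms a small streaming-hash API: initialize a state, feed it chunks, then finalize to a base64 digest. A minimal usage sketch (not part of this commit; `chunks` stands in for any iterable of Uint8Array data):

// Sketch only: hashing a sequence of chunks with the renamed API.
// `chunks` is a hypothetical iterable of Uint8Array values.
const state = await chunkHashInit();
for (const chunk of chunks) {
    await chunkHashUpdate(state, chunk);
}
const hashB64 = await chunkHashFinal(state); // base64-encoded BLAKE2b digest
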
@@ -1,6 +1,5 @@
 import { expose } from "comlink";
 import { logUnhandledErrorsAndRejectionsInWorker } from "ente-base/log-web";
-import type { StateAddress } from "libsodium-wrappers-sumo";
 import * as ei from "./ente-impl";
 import * as libsodium from "./libsodium";
 
@@ -39,6 +38,9 @@ export class CryptoWorker {
     decryptStreamChunk = ei._decryptStreamChunk;
     decryptMetadataJSON_New = ei._decryptMetadataJSON_New;
     decryptMetadataJSON = ei._decryptMetadataJSON;
+    chunkHashInit = ei._chunkHashInit;
+    chunkHashUpdate = ei._chunkHashUpdate;
+    chunkHashFinal = ei._chunkHashFinal;
     generateKeyPair = ei._generateKeyPair;
     boxSeal = ei._boxSeal;
     boxSealOpen = ei._boxSealOpen;
@@ -48,18 +50,6 @@ export class CryptoWorker {
 
     // TODO: -- AUDIT BELOW --
 
-    async initChunkHashing() {
-        return libsodium.initChunkHashing();
-    }
-
-    async hashFileChunk(hashState: StateAddress, chunk: Uint8Array) {
-        return libsodium.hashFileChunk(hashState, chunk);
-    }
-
-    async completeChunkHashing(hashState: StateAddress) {
-        return libsodium.completeChunkHashing(hashState);
-    }
-
     async decryptB64(data: string, nonce: string, key: string) {
         return libsodium.decryptB64(data, nonce, key);
     }
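On the main thread these class fields surface as asynchronously callable methods once the worker is wrapped with comlink. A hedged sketch of that wiring (the worker file path and the location of the exported CryptoWorker type are assumptions for illustration):

import { wrap } from "comlink";
import type { CryptoWorker } from "./worker"; // hypothetical path

// Spawn the worker and wrap it; comlink proxies each field as an async call.
const worker = new Worker(new URL("./worker", import.meta.url), { type: "module" });
const cryptoWorker = wrap<CryptoWorker>(worker);

// The opaque hash state returned by chunkHashInit is threaded back through the
// follow-up calls, exactly as computeHash does in the upload code below.
const state = await cryptoWorker.chunkHashInit();
await cryptoWorker.chunkHashUpdate(state, new Uint8Array([1, 2, 3]));
console.log(await cryptoWorker.chunkHashFinal(state));
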
@@ -1147,20 +1147,33 @@ const tryDetermineVideoDuration = async (uploadItem: UploadItem) => {
     }
 };
 
+/**
+ * Compute the hash of an item we're attempting to upload.
+ *
+ * The hash is retained in the file metadata, and is also used to detect
+ * duplicates during upload.
+ *
+ * This process can take a noticeable amount of time. As an extreme case, for a
+ * 10 GB upload item, this can take 2-3 minutes.
+ *
+ * @param uploadItem The {@link UploadItem} we're attempting to upload.
+ *
+ * @param worker A {@link CryptoWorker} to use for computing the hash.
+ */
 const computeHash = async (uploadItem: UploadItem, worker: CryptoWorker) => {
     const { stream, chunkCount } = await readUploadItem(uploadItem);
-    const hashState = await worker.initChunkHashing();
+    const hashState = await worker.chunkHashInit();
 
     const streamReader = stream.getReader();
     for (let i = 0; i < chunkCount; i++) {
         const { done, value: chunk } = await streamReader.read();
         if (done) throw new Error("Less chunks than expected");
-        await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
+        await worker.chunkHashUpdate(hashState, Uint8Array.from(chunk));
     }
 
     const { done } = await streamReader.read();
     if (!done) throw new Error("More chunks than expected");
-    return await worker.completeChunkHashing(hashState);
+    return await worker.chunkHashFinal(hashState);
 };
 
 /**
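For reference, the chunked flow yields the same digest as a one-shot libsodium call with the same output length (crypto_generichash_BYTES_MAX is 64 bytes, i.e. BLAKE2b-512). A sketch, not part of the commit, assuming the whole payload is available in memory as a Uint8Array named `data`:

// Sketch: one-shot equivalent of the chunked hashing above.
// `data` is a hypothetical Uint8Array holding the entire payload.
await sodium.ready;
const digest = sodium.crypto_generichash(sodium.crypto_generichash_BYTES_MAX, data);
const digestB64 = await toB64(digest); // matches what chunkHashFinal would return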