Remove thresholds since these are now just fs paths

This commit is contained in:
Manav Rathi
2025-04-30 14:37:25 +05:30
parent 81e3c41749
commit 39f63b6339
2 changed files with 8 additions and 28 deletions

View File

@@ -351,14 +351,6 @@ export const processVideoNewUpload = (
return;
}
if (_state.videoProcessingQueue.length > 50) {
// Drop new requests if the queue can't keep up to avoid the app running
// out of memory by keeping hold of too many (potentially huge) video
// blobs. These items will later get processed as part of a backfill.
log.info("Will process new video upload later (backlog too big)");
return;
}
// Enqueue the item.
_state.videoProcessingQueue.push({
file,

View File

@@ -185,26 +185,14 @@ export class MLWorker {
onUpload(file: EnteFile, processableUploadItem: ProcessableUploadItem) {
// Add the recently uploaded file to the live indexing queue.
//
// Limit the queue to some maximum so that we don't keep growing
// indefinitely (and cause memory pressure) if the speed of uploads is
// exceeding the speed of indexing.
//
// In general, we can be sloppy with the items in the live queue (as
// long as we're not systematically ignoring it). This is because the
// live queue is just an optimization: if a file doesn't get indexed via
// the live queue, it'll later get indexed anyway when we backfill.
if (this.liveQ.length < 200) {
// The file is just being uploaded, and so will not have any
// pre-existing ML data on remote.
this.liveQ.push({
file,
processableUploadItem,
remoteMLData: undefined,
});
this.wakeUp();
} else {
log.debug(() => "Ignoring upload item since liveQ is full");
}
// We can unconditionally process it since the file is just being
// uploaded, and so will not have any pre-existing ML data on remote.
this.liveQ.push({
file,
processableUploadItem,
remoteMLData: undefined,
});
this.wakeUp();
}
/**