I’m trying to upload a file by splitting it into chunks and sending them to the server. In the code below I only do the chunking; I’m not sending the chunks anywhere yet. The main problem: with a 2 GB file, if chunkCount is 1000 or more I’m basically DDoSing my own server with that many requests (is that right?), and if chunkCount is under 100 each chunk becomes so large that the client lags badly.
Why don’t I just upload the file normally instead of doing this chunking?
Because with chunks you can pause the upload, and if a chunk fails because of a connection issue you can retry from the failed chunk instead of restarting the whole upload.
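To make the retry idea concrete, here is a rough sketch of the sequential upload loop I have in mind, using a fixed chunk size instead of a fixed chunk count. The /upload endpoint and the form field names are placeholders I made up, not a real API:

// Sketch only: upload a file in fixed-size chunks, one at a time,
// retrying each failed chunk a few times before giving up.
const CHUNK_SIZE = 5 * 1024 * 1024; // 5 MB per chunk -> roughly 400 requests for a 2 GB file
const MAX_RETRIES = 3;

async function uploadFileInChunks(file) {
  const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
  for (let chunkIndex = 0; chunkIndex < chunkCount; chunkIndex++) {
    const start = chunkIndex * CHUNK_SIZE;
    const chunk = file.slice(start, Math.min(start + CHUNK_SIZE, file.size));
    let attempt = 0;
    while (true) {
      try {
        const formData = new FormData();
        formData.append('chunk', chunk);                   // the raw bytes of this chunk
        formData.append('chunkIndex', String(chunkIndex)); // so the server can reassemble in order
        formData.append('chunkCount', String(chunkCount));
        formData.append('fileName', file.name);
        // "/upload" is a placeholder endpoint, not a real API
        const response = await fetch('/upload', { method: 'POST', body: formData });
        if (!response.ok) throw new Error('Server responded with ' + response.status);
        break; // this chunk is done, move on to the next one
      } catch (err) {
        attempt++;
        if (attempt >= MAX_RETRIES) throw err; // give up and surface the error after a few attempts
        console.warn('Chunk ' + chunkIndex + ' failed, retrying (' + attempt + '/' + MAX_RETRIES + ')');
      }
    }
  }
}

// e.g. uploadFileInChunks(document.getElementById('fileInput').files[0]);

Because each chunk is awaited before the next one starts, the server only ever sees one request at a time, and a failed chunk just repeats itself instead of restarting the whole upload. My current code, which only slices the file and logs the chunks as Base64, is below.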
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>File Chunk Processor</title>
    <link
      href="https://cdn.jsdelivr.net/npm/[email protected]/dist/full.min.css"
      rel="stylesheet"
      type="text/css"
    />
    <script src="https://cdn.tailwindcss.com"></script>
  </head>
  <body>
    <span class="loading loading-spinner loading-lg"></span>
    <h1>File Chunk Processor</h1>
    <input type="file" id="fileInput" />
    <button id="processButton" onclick="DoChunk()">Process File</button>
    <script>
      function DoChunk() {
        let chunkCount = 1700;
        let chunkIndex = 0;
        processNextChunk(chunkIndex, chunkCount);
      }

      // Process chunks one after another, with a small delay between them
      function processNextChunk(chunkIndex, chunkCount) {
        if (chunkIndex < chunkCount) {
          processFileChunk('fileInput', chunkIndex, chunkCount, function () {
            setTimeout(() => {
              processNextChunk(chunkIndex + 1, chunkCount);
            }, 100); // Adjust the delay as needed
          });
        }
      }

      function processFileChunk(elementId, chunkIndex, chunkCount, callback) {
        // Get the file input element
        const inputElement = document.getElementById(elementId);
        // Check if the input element and file are available
        if (!inputElement || !inputElement.files || !inputElement.files[0]) {
          console.error('No file selected or element not found');
          return;
        }
        // Get the selected file
        const file = inputElement.files[0];
        // Calculate the size of each chunk
        const chunkSize = Math.ceil(file.size / chunkCount);
        const start = chunkIndex * chunkSize;
        const end = Math.min(start + chunkSize, file.size);
        // Create a Blob for the specific chunk
        const chunk = file.slice(start, end);
        // Create a FileReader to read the chunk
        const reader = new FileReader();
        reader.onload = function (event) {
          // Get the chunk content as a Base64 string
          const base64String = event.target.result.split(',')[1]; // Remove data URL part
          // Output or process the chunk as needed
          console.log(`Chunk ${chunkIndex + 1} of ${chunkCount}:`);
          console.log(base64String);
          if (callback) {
            callback();
          }
        };
        reader.onerror = function (error) {
          console.error('Error reading file chunk:', error);
          if (callback) {
            callback();
          }
        };
        // Read the chunk as a Data URL (Base64 string)
        reader.readAsDataURL(chunk);
      }
    </script>
  </body>
</html>