I have an ASP.NET Core application that accepts file uploads via the API (mp4 files). I want to read data in chunks from the input stream and write them to Azure Blob Storage as they come in.
The files can be really large, and if there is a network error I want to at least have what has been written so far.
If I use the standard BlobClient.UploadAsync(stream), zero bytes are written when the upload fails — even if it fails at 99%, for example.
I’m reading from a local file just as a proof of concept, and I have found that the following works, but I wonder if there is a better way?
<code>
// Build the block-blob client from the account URL plus SAS token.
var blobUriWithSas = new Uri($"{storageAccountUrl}/?{sasToken}");
var blobServiceClient = new BlobServiceClient(blobUriWithSas);
var blobContainerClient = blobServiceClient.GetBlobContainerClient("container");
var blockBlobClient = blobContainerClient.GetBlockBlobClient("file.mp4");

// Azure caps a block blob at 50,000 committed blocks, so the block size
// bounds the maximum blob size. The original 20 KB buffer (2048*10) caps
// the blob at ~0.95 GB; 4 MiB blocks allow blobs up to ~190 GB.
const int BlockSize = 4 * 1024 * 1024;

var blockIds = new List<string>();
int blockId = 0;

using var fileStream = File.OpenRead("file.mp4");
var buffer = new byte[BlockSize];
int bytesRead;
while ((bytesRead = await fileStream.ReadAsync(buffer)) > 0)
{
    // Block ids within one blob must all be base64 strings of equal length;
    // BitConverter.GetBytes(int) is always 4 bytes, so this holds.
    var blockIdBase64 = Convert.ToBase64String(BitConverter.GetBytes(blockId++));
    blockIds.Add(blockIdBase64);

    // StageBlockAsync fully consumes the stream before returning,
    // so the single buffer is safe to reuse on the next iteration.
    using var memoryStream = new MemoryStream(buffer, 0, bytesRead);
    await blockBlobClient.StageBlockAsync(blockIdBase64, memoryStream);

    // Commit periodically so a partial blob survives a late failure.
    // Re-sending the full list is valid: the service's default "Latest"
    // lookup resolves ids against uncommitted blocks first, then committed.
    if (blockIds.Count % 50 == 0)
    {
        await blockBlobClient.CommitBlockListAsync(blockIds);
    }
}

// Final commit makes the complete blob visible.
await blockBlobClient.CommitBlockListAsync(blockIds);
</code>
<code>
// Build the block-blob client from the account URL plus SAS token.
var blobUriWithSas = new Uri($"{storageAccountUrl}/?{sasToken}");
var blobServiceClient = new BlobServiceClient(blobUriWithSas);
var blobContainerClient = blobServiceClient.GetBlobContainerClient("container");
var blockBlobClient = blobContainerClient.GetBlockBlobClient("file.mp4");

// Azure caps a block blob at 50,000 committed blocks, so the block size
// bounds the maximum blob size. The original 20 KB buffer (2048*10) caps
// the blob at ~0.95 GB; 4 MiB blocks allow blobs up to ~190 GB.
const int BlockSize = 4 * 1024 * 1024;

var blockIds = new List<string>();
int blockId = 0;

using var fileStream = File.OpenRead("file.mp4");
var buffer = new byte[BlockSize];
int bytesRead;
while ((bytesRead = await fileStream.ReadAsync(buffer)) > 0)
{
    // Block ids within one blob must all be base64 strings of equal length;
    // BitConverter.GetBytes(int) is always 4 bytes, so this holds.
    var blockIdBase64 = Convert.ToBase64String(BitConverter.GetBytes(blockId++));
    blockIds.Add(blockIdBase64);

    // StageBlockAsync fully consumes the stream before returning,
    // so the single buffer is safe to reuse on the next iteration.
    using var memoryStream = new MemoryStream(buffer, 0, bytesRead);
    await blockBlobClient.StageBlockAsync(blockIdBase64, memoryStream);

    // Commit periodically so a partial blob survives a late failure.
    // Re-sending the full list is valid: the service's default "Latest"
    // lookup resolves ids against uncommitted blocks first, then committed.
    if (blockIds.Count % 50 == 0)
    {
        await blockBlobClient.CommitBlockListAsync(blockIds);
    }
}

// Final commit makes the complete blob visible.
await blockBlobClient.CommitBlockListAsync(blockIds);
</code>
// Build the block-blob client from the account URL plus SAS token.
var blobUriWithSas = new Uri($"{storageAccountUrl}/?{sasToken}");
var blobServiceClient = new BlobServiceClient(blobUriWithSas);
var blobContainerClient = blobServiceClient.GetBlobContainerClient("container");
var blockBlobClient = blobContainerClient.GetBlockBlobClient("file.mp4");

// Azure caps a block blob at 50,000 committed blocks, so the block size
// bounds the maximum blob size. The original 20 KB buffer (2048*10) caps
// the blob at ~0.95 GB; 4 MiB blocks allow blobs up to ~190 GB.
const int BlockSize = 4 * 1024 * 1024;

var blockIds = new List<string>();
int blockId = 0;

using var fileStream = File.OpenRead("file.mp4");
var buffer = new byte[BlockSize];
int bytesRead;
while ((bytesRead = await fileStream.ReadAsync(buffer)) > 0)
{
    // Block ids within one blob must all be base64 strings of equal length;
    // BitConverter.GetBytes(int) is always 4 bytes, so this holds.
    var blockIdBase64 = Convert.ToBase64String(BitConverter.GetBytes(blockId++));
    blockIds.Add(blockIdBase64);

    // StageBlockAsync fully consumes the stream before returning,
    // so the single buffer is safe to reuse on the next iteration.
    using var memoryStream = new MemoryStream(buffer, 0, bytesRead);
    await blockBlobClient.StageBlockAsync(blockIdBase64, memoryStream);

    // Commit periodically so a partial blob survives a late failure.
    // Re-sending the full list is valid: the service's default "Latest"
    // lookup resolves ids against uncommitted blocks first, then committed.
    if (blockIds.Count % 50 == 0)
    {
        await blockBlobClient.CommitBlockListAsync(blockIds);
    }
}

// Final commit makes the complete blob visible.
await blockBlobClient.CommitBlockListAsync(blockIds);