/// <summary>
/// Computes the block signature for the given byte range and uploads it to the blob as a block,
/// unless an identical block was already recorded or we are in test mode.
/// Yes, copying the byte array to here. But given we'll not have many of these tasks going to parallel
/// and each byte array is AT MOST 4M, I think I can live with the memory overhead.
/// </summary>
/// <param name="offset">Byte offset within the blob where this block starts.</param>
/// <param name="bytesRead">Number of valid bytes in <paramref name="bytesToRead"/>.</param>
/// <param name="bytesToRead">Buffer holding the data to upload; may be larger than <paramref name="bytesRead"/>.</param>
/// <param name="blob">Target block blob to upload to.</param>
/// <param name="uploadedBlockList">Shared record of every block handled so far; access is guarded by <c>parallelLock</c>.</param>
/// <param name="testMode">When true, record the block but skip the actual network upload.</param>
/// <returns>A task that completes once the block has been recorded (and uploaded, if applicable).</returns>
private Task WriteBytes(long offset, int bytesRead, byte[] bytesToRead, CloudBlockBlob blob, ConcurrentBag<UploadedBlock> uploadedBlockList, bool testMode)
{
    // Task.Run is preferred over Task.Factory.StartNew for plain background work:
    // StartNew picks up TaskScheduler.Current and does not unwrap async delegates.
    return Task.Run(() =>
    {
        var sig = CommonOps.GenerateBlockSig(bytesToRead, offset, bytesRead, 0);
        var blockId = Convert.ToBase64String(sig.MD5Signature);

        bool isDupe;
        // The lock makes the duplicate check and the add atomic; the ConcurrentBag alone
        // cannot prevent two tasks from both seeing "not a dupe" for the same block id.
        lock (parallelLock)
        {
            isDupe = uploadedBlockList.Any(ub => ub.BlockId == blockId);

            // Store the block id that is associated with this byte range.
            uploadedBlockList.Add(new UploadedBlock()
            {
                BlockId = blockId,
                Offset = offset,
                Sig = sig,
                Size = bytesRead,
                IsNew = true,
                IsDuplicate = isDupe
            });
        }

        if (!testMode && !isDupe)
        {
            // yes, putting into memory stream is probably a waste here.
            // BUGFIX: bound the stream to the valid portion of the buffer. The signature above
            // covers exactly bytesRead bytes; wrapping the whole array would upload stale
            // trailing bytes on a final partial chunk.
            using (var ms = new MemoryStream(bytesToRead, 0, bytesRead))
            {
                var options = new BlobRequestOptions()
                {
                    ServerTimeout = TimeSpan.FromMinutes(90)
                };
                blob.PutBlock(blockId, ms, null, null, options);
            }
        }
    });
}