Example #1
        private object UploadChunk_Worker(object state)
        {
            // Unpack the (filePath, blobName, chunk) tuple queued by UploadChunkList.
            Tuple<string, string, ChunkInfo> rx = (Tuple<string, string, ChunkInfo>)state;
            string    filePath = rx.Item1;
            string    blobName = rx.Item2;
            ChunkInfo chunk    = rx.Item3;

            UploadChunk(filePath, blobName, ref chunk);
            return null;
        }
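All of these examples assume a ChunkInfo type whose definition is not shown. A minimal reconstruction, inferred purely from the members these call sites touch (every name and field type here is an assumption), might look like:

        // Hypothetical reconstruction: only the members these examples actually use.
        public class ChunkInfo
        {
            public long chunkIndex;   // index of the chunk within the file
            public long roffset;      // byte offset of the raw chunk in the source file
            public int  rsize;        // raw (uncompressed) chunk size in bytes

            private readonly List<int> blockList = new List<int>();
            private int    csize;     // compressed size, recorded after upload
            private string blobName;  // blob/object holding this chunk (S3 variant)

            public void AddToBlockList(int blockId)  { blockList.Add(blockId); }
            public void SetCSize(int compressedSize) { csize = compressedSize; }
            public void SetBlobName(string name)     { blobName = name; }
        }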
Example #2
        private void UploadChunkList(ref List<ChunkInfo> chunkList_toUpload, string filePath, string blobName)
        {
            SmartThreadPool threadPool = new SmartThreadPool();

            threadPool.MaxThreads = MaxConcurrentUploadThreads;
            foreach (ChunkInfo chunk in chunkList_toUpload)
            {
                // Local copy: the foreach iteration variable cannot be passed by ref
                // (see the commented-out direct call below).
                ChunkInfo       chunkToUpload = chunk;
                IWorkItemResult wir1          = threadPool.QueueWorkItem(
                    new WorkItemCallback(this.UploadChunk_Worker),
                    new Tuple<string, string, ChunkInfo>(filePath, blobName, chunkToUpload));
                //UploadChunk(filePath, blobName, ref chunkToUpload);
            }
            threadPool.Start();
            threadPool.WaitForIdle(); // block until every queued upload has finished
            threadPool.Shutdown();
        }
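Example #2 leans on the third-party SmartThreadPool library for throttled fan-out. If only the bounded-concurrency behavior matters, a sketch of an equivalent using just the BCL (System.Threading.Tasks), assuming the same UploadChunk signature, could be:

        // Minimal BCL-only alternative: Parallel.ForEach with a bounded degree of parallelism.
        private void UploadChunkList_Parallel(List<ChunkInfo> chunkList_toUpload, string filePath, string blobName)
        {
            ParallelOptions options = new ParallelOptions { MaxDegreeOfParallelism = MaxConcurrentUploadThreads };
            Parallel.ForEach(chunkList_toUpload, options, chunk =>
            {
                ChunkInfo local = chunk;                // local copy so it can be passed by ref
                UploadChunk(filePath, blobName, ref local);
            });
        }

Parallel.ForEach blocks until every iteration has finished, which mirrors the WaitForIdle call in the original.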
Example #3
        private void UploadChunk(string filePath, string blobName, ref ChunkInfo chunk)
        {
            int tid = System.Threading.Thread.CurrentThread.ManagedThreadId;

            if (logger != null)
            {
                logger.Log("Start Synchronizer Read Chunk From File" + tid);
            }
            // The block-ID arithmetic below assumes each chunk spans a whole number of Azure blocks.
            if (StaticChunkSize > MaxAzureBlockSize && StaticChunkSize % MaxAzureBlockSize != 0)
            {
                throw new NotImplementedException("chunk size (current: " + StaticChunkSize + ") must be a multiple of the maximum block size: " + MaxAzureBlockSize);
            }

            // structuredLog("I", "uploading chunk with index: " + chunk.chunkIndex);

            byte[] chunkBuffer = new byte[chunk.rsize];

            long start = DateTime.Now.Ticks;

            using (FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read))
            {
                fs.Seek(chunk.roffset, SeekOrigin.Begin);
                // Stream.Read may return fewer bytes than requested; loop until the chunk is fully read.
                int total = 0, n;
                while (total < chunkBuffer.Length && (n = fs.Read(chunkBuffer, total, chunkBuffer.Length - total)) > 0)
                {
                    total += n;
                }
            }
            long end = DateTime.Now.Ticks;

            // Console.WriteLine("time taken : " + (double)((double)(end - start) / (double)10000000));

            if (logger != null)
            {
                logger.Log("End Synchronizer Read Chunk From File" + tid);
            }

            // Compress, then encrypt, the chunk before uploading.
            if (logger != null)
            {
                logger.Log("Start Synchronizer Compress Chunk");
            }
            byte[] compressedChunkBuffer = Compress(chunkBuffer);// for now
            if (logger != null)
            {
                logger.Log("End Synchronizer Compress Chunk");
            }
            if (logger != null)
            {
                logger.Log("Start Synchronizer Encrypt Chunk");
            }
            byte[] encryptedCompressedChunkBuffer = Encrypt(compressedChunkBuffer);
            if (logger != null)
            {
                logger.Log("End Synchronizer Encrypt Chunk");
            }

            if (logger != null)
            {
                logger.Log("Start Synchronizer Upload Chunk" + tid);
            }

            int blockID;

            // Each chunk occupies a contiguous run of Azure block IDs:
            // a single block when the chunk fits within one Azure block,
            // otherwise StaticChunkSize / MaxAzureBlockSize blocks per chunk.
            if (StaticChunkSize < MaxAzureBlockSize)
            {
                blockID = (int)chunk.chunkIndex;
            }
            else
            {
                blockID = (int)chunk.chunkIndex * (int)(StaticChunkSize / MaxAzureBlockSize);
            }
            int            blockCount = 0;
            CloudBlockBlob blockBlob  = GetBlockBlobReference(blobName);

            // Upload the chunk one Azure block at a time.
            while (blockCount * MaxAzureBlockSize < encryptedCompressedChunkBuffer.Length)
            {
                // Azure block IDs must be Base64-encoded and the same length for every
                // block in a blob, hence the zero-padded, culture-invariant formatting.
                string blockIdBase64 = Convert.ToBase64String(Encoding.ASCII.GetBytes(blockID.ToString(CultureInfo.InvariantCulture).PadLeft(16, '0')));

                // The final block may be shorter than MaxAzureBlockSize.
                int blockSize = Math.Min(MaxAzureBlockSize, encryptedCompressedChunkBuffer.Length - blockCount * MaxAzureBlockSize);

                long startt = DateTime.Now.Ticks;
                using (MemoryStream ms = new MemoryStream(encryptedCompressedChunkBuffer, blockCount * MaxAzureBlockSize, blockSize))
                {
                    blockBlob.PutBlock(blockIdBase64, ms, GetMD5FromStream(ms.ToArray()), GetBlobRequestOptions());
                }
                long endt = DateTime.Now.Ticks;
                //Console.WriteLine("+" + (double)((double)(endt - startt) / (double)10000000));

                chunk.AddToBlockList(blockID);
                chunk.SetCSize(encryptedCompressedChunkBuffer.Length);
                blockID++;
                blockCount++;
            }
            if (logger != null)
            {
                logger.Log("End Synchronizer Upload Chunk" + tid);
            }
        }
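Note that Example #3 only stages blocks with PutBlock; a block blob does not expose its staged blocks until a block list is committed. Assuming the same legacy client library and that the caller has collected every chunk's block IDs, the commit step might look roughly like this (CommitBlob is a hypothetical helper, not part of the original code):

        // Hypothetical commit step, assuming the same client library as Example #3.
        private void CommitBlob(string blobName, IEnumerable<int> allBlockIds)
        {
            CloudBlockBlob blockBlob = GetBlockBlobReference(blobName);
            List<string>   base64Ids = new List<string>();
            foreach (int id in allBlockIds)
            {
                // Must match the ID format used in PutBlock byte for byte.
                base64Ids.Add(Convert.ToBase64String(Encoding.ASCII.GetBytes(id.ToString(CultureInfo.InvariantCulture).PadLeft(16, '0'))));
            }
            blockBlob.PutBlockList(base64Ids); // makes the staged blocks visible as the blob's content
        }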
Example #4
        private void UploadChunk(string filePath, string s3objectName, ref ChunkInfo chunk)
        {
            // structuredLog("I", "uploading chunk with index: " + chunk.chunkIndex);
            // if (logger != null) logger.Log("Thread: " + System.Threading.Thread.CurrentThread.ManagedThreadId);
            if (logger != null)
            {
                logger.Log("Start Synchronizer Read Chunk From File");
            }

            byte[] chunkBuffer = new byte[chunk.rsize];

            using (FileStream fs = new FileStream(filePath, FileMode.Open, FileAccess.Read))
            {
                fs.Seek(chunk.roffset, SeekOrigin.Begin);
                // Stream.Read may return fewer bytes than requested; loop until the chunk is fully read.
                int total = 0, n;
                while (total < chunkBuffer.Length && (n = fs.Read(chunkBuffer, total, chunkBuffer.Length - total)) > 0)
                {
                    total += n;
                }
            }
            if (logger != null)
            {
                logger.Log("End Synchronizer Read Chunk From File");
            }

            // Compress, then encrypt, the chunk before uploading.
            if (logger != null)
            {
                logger.Log("Start Synchronizer Compress Chunk");
            }
            byte[] compressedChunkBuffer = Compress(chunkBuffer);// for now
            if (logger != null)
            {
                logger.Log("End Synchronizer Compress Chunk");
            }
            if (logger != null)
            {
                logger.Log("Start Synchronizer Encrypt Chunk");
            }
            byte[] encryptedCompressedChunkBuffer = Encrypt(compressedChunkBuffer);
            if (logger != null)
            {
                logger.Log("End Synchronizer Encrypt Chunk");
            }

            if (logger != null)
            {
                logger.Log("Start Synchronizer Upload Chunk");
            }
            string chunkObjectName = ChunkObjectNamePrefix + s3objectName + "-" + chunk.chunkIndex;

            if (UploadByteArrayToS3Object(chunkObjectName, encryptedCompressedChunkBuffer))
            {
                chunk.SetBlobName(chunkObjectName);
                chunk.SetCSize(encryptedCompressedChunkBuffer.Length);
            }
            else
            {
                throw new Exception("Chunk upload failed. FileName: " + s3objectName + ". Chunk: " + chunk.ToString());
            }
            if (logger != null)
            {
                logger.Log("End Synchronizer Upload Chunk");
            }
        }
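Example #4 delegates the actual transfer to an UploadByteArrayToS3Object helper that is not shown. A minimal sketch of such a helper against the AWS SDK for .NET (the s3Client field, bucketName field, and the swallow-and-return-false error handling are all assumptions) might be:

        // Hypothetical helper, assuming an AmazonS3Client field named s3Client and a configured bucket.
        private bool UploadByteArrayToS3Object(string objectName, byte[] data)
        {
            try
            {
                PutObjectRequest request = new PutObjectRequest
                {
                    BucketName  = this.bucketName,       // assumed field
                    Key         = objectName,
                    InputStream = new MemoryStream(data)
                };
                s3Client.PutObject(request);
                return true;
            }
            catch (AmazonS3Exception)
            {
                return false;                            // caller treats false as upload failure
            }
        }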