Example #1
0
        /// <summary>
        /// Creates a synchronizer backed by Rackspace Cloud Files and ensures the
        /// target container exists (created in the default region if missing).
        /// </summary>
        /// <param name="ri">Remote credentials: accountName = Rackspace username, accountKey = API key.</param>
        /// <param name="container">Name of the Cloud Files container to sync against.</param>
        /// <param name="syncDirection">Whether this instance uploads or downloads.</param>
        public RackspaceCloudFilesSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection)
        {
            this.disposed      = false;
            this.username      = ri.accountName;
            this.apiKey        = ri.accountKey;
            this.syncDirection = syncDirection;
            this.container     = container;
            try
            {
                var cloudIdentity = new CloudIdentity()
                {
                    APIKey = this.apiKey, Username = this.username
                };
                var         cloudFilesProvider      = new CloudFilesProvider(cloudIdentity);
                ObjectStore createContainerResponse = cloudFilesProvider.CreateContainer(container);// assume default region for now

                // ContainerCreated and ContainerExists are both acceptable outcomes;
                // anything else is only logged — construction still succeeds.
                if (!createContainerResponse.Equals(ObjectStore.ContainerCreated) && !createContainerResponse.Equals(ObjectStore.ContainerExists))
                {
                    Console.WriteLine("Container creation failed! Response: " + createContainerResponse.ToString());
                }
            }
            catch (Exception e)
            {
                // NOTE(review): all exceptions are swallowed after logging, so callers
                // get a constructed-but-possibly-unusable instance — confirm intended.
                Console.WriteLine("Exception in creating container: " + e);
            }
        }
 /// <summary>
 /// Synchronizes files against an Amazon S3 bucket. We re-use the remote info as:
 /// accountName = awsAccessKeyId and accountKey = awsSecretAccessKey.
 /// </summary>
 /// <param name="bucket">Bucket name; spaces are replaced and it is lower-cased, since S3 bucket names may not contain spaces.</param>
 /// <param name="ThreadPoolSize">Maximum concurrent file-sync threads (default 1).</param>
 public AmazonS3Synchronizer(RemoteInfo remoteInfo, string bucket, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize,  int ThreadPoolSize = 1 )
 {
     this.logger = log;
     disposed = false;
     this.syncDirection = syncDirection;

     // Amazon S3 does not like spaces in bucket names. Use invariant lower-casing
     // so the sanitized name cannot vary with the current culture (CA1311).
     // (Also removed a redundant .ToString() on a string and a stray empty statement.)
     bucketName = bucket.Replace(' ', '-').ToLowerInvariant();

     // NOTE(review): the helper receives the raw 'bucket', not the sanitized
     // 'bucketName' — confirm whether that mismatch is intended.
     amazonS3Helper = new AmazonS3Helper(remoteInfo, bucket, compressionType, encryptionType, encryptionKey, initializationVector, logger, ChunkSize, ThreadPoolSize);
     this.MaxConcurrentFileSyncThreads = ThreadPoolSize;
 }
        /// <summary>
        /// Synchronizes files against an Amazon S3 bucket. We re-use the remote info as:
        /// accountName = awsAccessKeyId and accountKey = awsSecretAccessKey.
        /// </summary>
        /// <param name="bucket">Bucket name; spaces are replaced and it is lower-cased, since S3 bucket names may not contain spaces.</param>
        /// <param name="ThreadPoolSize">Maximum concurrent file-sync threads (default 1).</param>
        public AmazonS3Synchronizer(RemoteInfo remoteInfo, string bucket, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize = 1)
        {
            this.logger        = log;
            disposed           = false;
            this.syncDirection = syncDirection;

            // Amazon S3 does not like spaces in bucket names. Use invariant lower-casing
            // so the sanitized name cannot vary with the current culture (CA1311).
            // (Also removed a redundant .ToString() on a string and a stray empty statement.)
            bucketName = bucket.Replace(' ', '-').ToLowerInvariant();

            // NOTE(review): the helper receives the raw 'bucket', not the sanitized
            // 'bucketName' — confirm whether that mismatch is intended.
            amazonS3Helper = new AmazonS3Helper(remoteInfo, bucket, compressionType, encryptionType, encryptionKey, initializationVector, logger, ChunkSize, ThreadPoolSize);
            this.MaxConcurrentFileSyncThreads = ThreadPoolSize;
        }
        //protected SyncOrchestrator orchestrator;

        /// <summary>
        /// Chunk-based Azure synchronizer: captures credentials, container and
        /// direction, and builds the AzureHelper that performs the blob I/O.
        /// </summary>
        /// <param name="ri">accountName / accountKey for the Azure storage account.</param>
        /// <param name="container">Target blob container name.</param>
        /// <param name="syncDirection">Upload or download.</param>
        /// <param name="ChunkSize">Size in bytes of each uploaded chunk.</param>
        /// <param name="ThreadPoolSize">Number of concurrent transfer threads (default 1).</param>
        public AzureChunkSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize = 1)
        {
            logger   = log;
            disposed = false;
            // Setup Store and Provider
            //
            this.accountName    = ri.accountName;
            this.accountKey     = ri.accountKey;
            this.container      = container;
            this.syncDirection  = syncDirection;
            this.azureHelper    = new AzureHelper(this.accountName, this.accountKey, this.container, compressionType, encryptionType, encryptionKey, initializationVector, log, ChunkSize, ThreadPoolSize);
            this.chunkListHash  = null;   // presumably computed later on demand — TODO confirm
            this.ThreadPoolSize = ThreadPoolSize;
        }
 //protected SyncOrchestrator orchestrator;
 /// <summary>
 /// Chunk-based Azure synchronizer: captures credentials, container and
 /// direction, and builds the AzureHelper that performs the blob I/O.
 /// </summary>
 public AzureChunkSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize=1)
 {
     this.logger = log;
     this.disposed = false;

     // Remote store configuration taken from the caller-supplied RemoteInfo.
     this.accountName = ri.accountName;
     this.accountKey = ri.accountKey;
     this.container = container;
     this.syncDirection = syncDirection;
     this.ThreadPoolSize = ThreadPoolSize;

     // No chunk-list hash yet; the helper owns all actual blob transfers.
     this.chunkListHash = null;
     this.azureHelper = new AzureHelper(this.accountName, this.accountKey, this.container, compressionType, encryptionType, encryptionKey, initializationVector, log, ChunkSize, ThreadPoolSize);
 }
Example #6
0
 /// <summary>
 /// Helper that stores file chunks as Azure blobs. Captures the container name,
 /// chunk size, compression/encryption settings and credentials (wrapped in a
 /// RemoteInfo) for later upload/download calls.
 /// </summary>
 /// <param name="ChunkSize">Size in bytes of each uploaded chunk.</param>
 /// <param name="ThreadPoolSize">Maximum concurrent upload threads.</param>
 public AzureHelper(string accountName, string accountKey, string containerName, CompressionType chunkCompression, EncryptionType chunkEncryption, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize)
 {
     this.logger = log;
     //this.azureBlobType = AzureBlobType.BlockBlob;
     //this.mapping = Mapping.FileToBlob;
     this.remoteInfo                 = new RemoteInfo(accountName, accountKey);
     this.containerName              = containerName;
     this.chunkCompressionType       = chunkCompression;
     this.chunkEncryptionType        = chunkEncryption;
     this.encryptionKey              = encryptionKey;
     this.InitializationVector       = initializationVector;
     this.StaticChunkSize            = ChunkSize;
     this.MaxConcurrentUploadThreads = ThreadPoolSize;
 }
Example #7
0
 /// <summary>
 /// Releases managed references held by this instance. Safe to call more than
 /// once; only the first disposing call clears state.
 /// </summary>
 /// <param name="disposing">True when called from Dispose(); false from a finalizer.</param>
 protected virtual void Dispose(bool disposing)
 {
     if (disposed)
     {
         return;
     }

     if (disposing)
     {
         // Drop references so the GC can reclaim them; no unmanaged resources here.
         remoteInfo = null;
         bucketName = null;
         encryptionKey = null;
         InitializationVector = null;
         amazonS3Client = null;
         disposed = true;
     }
 }
         /// <summary>
         /// Helper that stores file chunks as Amazon S3 objects. Captures the
         /// chunking, compression and encryption configuration, builds an S3 client
         /// from the supplied credentials, and ensures the target bucket exists.
         /// </summary>
         /// <param name="remoteInfo">accountName = awsAccessKeyId, accountKey = awsSecretAccessKey.</param>
         /// <param name="ChunkSize">Size in bytes of each uploaded chunk.</param>
         /// <param name="ThreadPoolSize">Maximum concurrent upload threads.</param>
         public AmazonS3Helper(RemoteInfo remoteInfo,  string bucketName, CompressionType chunkCompression, EncryptionType chunkEncryption , byte[] encryptionKey , byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize)
        {
            this.logger = log;
            this.remoteInfo = remoteInfo;
            this.bucketName = bucketName;
            this.chunkCompressionType = chunkCompression;
            this.chunkEncryptionType = chunkEncryption;
            this.encryptionKey = encryptionKey;
            this.InitializationVector = initializationVector;
            amazonS3Client = new AmazonS3Client(remoteInfo.accountName, remoteInfo.accountKey);
            this.StaticChunkSize = ChunkSize;
            this.MaxConcurrentUploadThreads = ThreadPoolSize;

            this.disposed = false;
            // Side effect in the constructor: creates the bucket if it does not exist.
            CreateBucket(this.bucketName);
        }
Example #9
0
        /// <summary>
        /// Helper that stores file chunks as Amazon S3 objects. Captures the
        /// chunking, compression and encryption configuration, builds an S3 client
        /// from the supplied credentials, and ensures the target bucket exists.
        /// </summary>
        public AmazonS3Helper(RemoteInfo remoteInfo, string bucketName, CompressionType chunkCompression, EncryptionType chunkEncryption, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize)
        {
            disposed = false;
            logger = log;

            // Chunking / transform configuration.
            this.remoteInfo = remoteInfo;
            this.bucketName = bucketName;
            chunkCompressionType = chunkCompression;
            chunkEncryptionType = chunkEncryption;
            this.encryptionKey = encryptionKey;
            InitializationVector = initializationVector;
            StaticChunkSize = ChunkSize;
            MaxConcurrentUploadThreads = ThreadPoolSize;

            // RemoteInfo doubles as the AWS credential pair (access key id / secret key).
            amazonS3Client = new AmazonS3Client(remoteInfo.accountName, remoteInfo.accountKey);

            // Side effect: makes sure the bucket exists before any upload happens.
            CreateBucket(this.bucketName);
        }
        /// <summary>
        /// Integration smoke test: uploads a large file as chunks to Azure blob
        /// storage, downloads the chunks covering a byte range, and verifies the
        /// reassembled bytes match the local file. Throws if they differ.
        /// </summary>
        public void AzureChunkWriteRead()
        {
            string accountName = "testdrive";
            // SECURITY(review): hard-coded storage account key checked into source.
            // Rotate this credential and load it from configuration instead.
            string accountKey = "zRTT++dVryOWXJyAM7NM0TuQcu0Y23BgCQfkt7xh2f/Mm+r6c8/XtPTY0xxaF6tPSACJiuACsjotDeNIVyXM8Q==";

            AzureHelper helper = new AzureHelper(accountName, accountKey, "testuploadchunksfile", CompressionType.None, EncryptionType.None, null, null, new Logger(), 4*1024*1024 , 1);

            helper.UploadFileAsChunks("D:\\testfiles\\testhuge.txt");

            int OFFSET_TO_READ = 4492321;
            int BYTES_TO_READ = 11000;

            // Map the requested byte range onto chunk indices and intra-chunk offsets.
            List<ChunkInfo> metadata = helper.GetBlobMetadata("testhuge.txt").Item1;
            Dictionary<int, long> chunkindexandoffsets = helper.GetChunkIndexAndOffsetInChunk(metadata, OFFSET_TO_READ, BYTES_TO_READ);

            byte[] temp = null;

            foreach (int chunkIndex in chunkindexandoffsets.Keys)
            {
                if (temp != null)
                    temp = temp.Concat(helper.DownloadChunk("testhuge.txt", metadata, chunkIndex)).ToArray();
                else
                    temp = helper.DownloadChunk("testhuge.txt", metadata, chunkIndex);
            }

            byte[] test = temp.Skip((int)chunkindexandoffsets.ElementAt(0).Value).Take(BYTES_TO_READ).ToArray();

            byte[] truth = new byte[BYTES_TO_READ];
            using (BinaryReader reader = new BinaryReader(new FileStream("D:\\testfiles\\testhuge.txt", FileMode.Open)))
            {
                reader.BaseStream.Seek(OFFSET_TO_READ, SeekOrigin.Begin);
                // BinaryReader.Read may return fewer bytes than requested; loop until
                // the full range is read (the original ignored the return value).
                int totalRead = 0;
                while (totalRead < BYTES_TO_READ)
                {
                    int read = reader.Read(truth, totalRead, BYTES_TO_READ - totalRead);
                    if (read == 0)
                        throw new EndOfStreamException("File ended before the expected range was read");
                    totalRead += read;
                }
            }

            bool arraysAreEqual = Enumerable.SequenceEqual(test, truth);
            Console.WriteLine(arraysAreEqual);
            if (!arraysAreEqual)
                throw new Exception("local and downloaded bits dont match");
        }
        /// <summary>
        /// Integration smoke test: uploads a file as GZip-compressed chunks to S3,
        /// downloads the chunks covering a byte range, and verifies the reassembled
        /// bytes match the local file. Throws if they differ.
        /// </summary>
        public void AmazonS3ChunkWriteRead()
        {
            // NOTE(review): credentials are blank — configure real keys (from
            // configuration, never source) before running this test.
            string accountName = "";
            string accountKey = "";

            RemoteInfo ri = new RemoteInfo(accountName, accountKey);

            // Re-use 'ri' instead of constructing a second identical RemoteInfo
            // (the original built one, ignored it, and built another inline).
            AmazonS3Helper helper = new AmazonS3Helper(ri, "testupl0", CompressionType.GZip, EncryptionType.None, null, null, new Logger(), 4*1024*1024, 1);

            helper.UploadFileAsChunks("D:\\testfiles\\test.txt");

            int OFFSET_TO_READ = 4492321;
            int BYTES_TO_READ = 11000;

            // Map the requested byte range onto chunk indices and intra-chunk offsets.
            List<ChunkInfo> metadata = helper.GetObjectMetadata("test.txt").Item1;
            Dictionary<int, long> chunkindexandoffsets = helper.GetChunkIndexAndOffsetInChunk(metadata, OFFSET_TO_READ, BYTES_TO_READ);

            byte[] temp = null;

            foreach (int chunkIndex in chunkindexandoffsets.Keys)
            {
                if (temp != null)
                    temp = temp.Concat(helper.DownloadChunk("test.txt", chunkIndex)).ToArray();
                else
                    temp = helper.DownloadChunk("test.txt", chunkIndex);
            }

            byte[] test = temp.Skip((int)chunkindexandoffsets.ElementAt(0).Value).Take(BYTES_TO_READ).ToArray();

            byte[] truth = new byte[BYTES_TO_READ];
            using (BinaryReader reader = new BinaryReader(new FileStream("D:\\testfiles\\test.txt", FileMode.Open)))
            {
                reader.BaseStream.Seek(OFFSET_TO_READ, SeekOrigin.Begin);
                // BinaryReader.Read may return fewer bytes than requested; loop until
                // the full range is read (the original ignored the return value).
                int totalRead = 0;
                while (totalRead < BYTES_TO_READ)
                {
                    int read = reader.Read(truth, totalRead, BYTES_TO_READ - totalRead);
                    if (read == 0)
                        throw new EndOfStreamException("File ended before the expected range was read");
                    totalRead += read;
                }
            }

            bool arraysAreEqual = Enumerable.SequenceEqual(test, truth);
            Console.WriteLine(arraysAreEqual);
            if (!arraysAreEqual)
                throw new Exception("local and downloaded bits dont match");
        }
        /// <summary>
        /// Creates a Rackspace Cloud Files synchronizer and makes sure the target
        /// container exists (default region). Failures are logged, not thrown.
        /// </summary>
        public RackspaceCloudFilesSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection)
        {
            disposed = false;
            username = ri.accountName;
            apiKey = ri.accountKey;
            this.syncDirection = syncDirection;
            this.container = container;

            try
            {
                var identity = new CloudIdentity
                {
                    Username = this.username,
                    APIKey = this.apiKey
                };
                var provider = new CloudFilesProvider(identity);

                // assume default region for now
                ObjectStore response = provider.CreateContainer(container);

                bool ok = response.Equals(ObjectStore.ContainerCreated) || response.Equals(ObjectStore.ContainerExists);
                if (!ok)
                {
                    Console.WriteLine("Container creation failed! Response: " + response.ToString());
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Exception in creating container: " + e);
            }
        }
Example #13
0
        /// <summary>
        /// Integration smoke test: uploads a large file as chunks to Azure blob
        /// storage, downloads the chunks covering a byte range, and verifies the
        /// reassembled bytes match the local file. Throws if they differ.
        /// </summary>
        public void AzureChunkWriteRead()
        {
            string accountName = "msrlot";
            // SECURITY(review): hard-coded storage account key checked into source.
            // Rotate this credential and load it from configuration instead.
            string accountKey = "wC3ou+VLBTu8ryKbsiSMtsIIHIpxGSIAWA0NtK58da2wELQ+USgYQwmVMtyq/p8ILsuZc8TiLeHBjaPI+a3A2Q==";

            AzureHelper helper = new AzureHelper(accountName, accountKey, "testuploadchunksfile", CompressionType.None, EncryptionType.None, null, null, new Logger(), 4*1024*1024 , 1);

            helper.UploadFileAsChunks("D:\\testfiles\\testhuge.txt");

            int OFFSET_TO_READ = 4492321;
            int BYTES_TO_READ = 11000;

            // Map the requested byte range onto chunk indices and intra-chunk offsets.
            List<ChunkInfo> metadata = helper.GetBlobMetadata("testhuge.txt").Item1;
            Dictionary<int, long> chunkindexandoffsets = helper.GetChunkIndexAndOffsetInChunk(metadata, OFFSET_TO_READ, BYTES_TO_READ);

            byte[] temp = null;

            foreach (int chunkIndex in chunkindexandoffsets.Keys)
            {
                if (temp != null)
                    temp = temp.Concat(helper.DownloadChunk("testhuge.txt", metadata, chunkIndex)).ToArray();
                else
                    temp = helper.DownloadChunk("testhuge.txt", metadata, chunkIndex);
            }

            byte[] test = temp.Skip((int)chunkindexandoffsets.ElementAt(0).Value).Take(BYTES_TO_READ).ToArray();

            byte[] truth = new byte[BYTES_TO_READ];
            using (BinaryReader reader = new BinaryReader(new FileStream("D:\\testfiles\\testhuge.txt", FileMode.Open)))
            {
                reader.BaseStream.Seek(OFFSET_TO_READ, SeekOrigin.Begin);
                // BinaryReader.Read may return fewer bytes than requested; loop until
                // the full range is read (the original ignored the return value).
                int totalRead = 0;
                while (totalRead < BYTES_TO_READ)
                {
                    int read = reader.Read(truth, totalRead, BYTES_TO_READ - totalRead);
                    if (read == 0)
                        throw new EndOfStreamException("File ended before the expected range was read");
                    totalRead += read;
                }
            }

            bool arraysAreEqual = Enumerable.SequenceEqual(test, truth);
            Console.WriteLine(arraysAreEqual);
            if (!arraysAreEqual)
                throw new Exception("local and downloaded bits dont match");
        }
        /// <summary>
        /// Synchronizer built on the Sync Framework: attaches to (or creates) the
        /// Azure blob container and configures a SyncOrchestrator whose remote
        /// provider is that container.
        /// </summary>
        /// <param name="ri">accountName / accountKey for the Azure storage account.</param>
        /// <param name="container">Blob container to synchronize with.</param>
        /// <param name="syncDirection">Upload or download.</param>
        public AzureSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection)
        {
            disposed = false;
            string _containerName = container;

            //
            // Setup Store and Provider
            //
            CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(ri.accountName, ri.accountKey), true);
            AzureBlobStore blobStore = new AzureBlobStore(_containerName, storageAccount);
            Console.WriteLine("Successfully created/attached to container {0}.", _containerName);
            AzureBlobSyncProvider azureProvider = new AzureBlobSyncProvider(_containerName, blobStore);
            // Hook per-blob progress reporting into the provider's change pipeline.
            azureProvider.ApplyingChange += new EventHandler<ApplyingBlobEventArgs>(UploadingFile);

            orchestrator = new SyncOrchestrator();
            orchestrator.RemoteProvider = azureProvider;

            // NOTE(review): if syncDirection is neither Upload nor Download, the
            // orchestrator keeps its default direction — confirm that is intended.
            if (syncDirection == SynchronizeDirection.Upload)
                orchestrator.Direction = SyncDirectionOrder.Upload;
            else if (syncDirection == SynchronizeDirection.Download)
                orchestrator.Direction = SyncDirectionOrder.Download;
        }
Example #15
0
 /// <summary>
 /// Factory: builds the chunk-based Azure implementation of ISync.
 /// </summary>
 private ISync CreateAzureSynchronizer(RemoteInfo ri, string container, Logger log, SynchronizeDirection syncDirection, CompressionType compressionType,int ChunkSizeForUpload, int ThreadPoolSize, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector)
 {
     var synchronizer = new AzureChunkSynchronizer(ri, container, syncDirection, compressionType, encryptionType, encryptionKey, initializationVector, log, ChunkSizeForUpload, ThreadPoolSize);
     return synchronizer;
 }
 /// <summary>
 /// Releases managed references held by this instance; idempotent after the
 /// first disposing call.
 /// </summary>
 /// <param name="disposing">True when called from Dispose(); false from a finalizer.</param>
 protected virtual void Dispose(bool disposing)
 {
     if (disposed)
     {
         return;
     }

     if (disposing)
     {
         // Null out managed references; there is nothing unmanaged to release.
         remoteInfo = null;
         bucketName = null;
         encryptionKey = null;
         InitializationVector = null;
         amazonS3Client = null;
         disposed = true;
     }
 }
Example #17
0
 /// <summary>
 /// Factory: builds the chunk-based Amazon S3 implementation of ISync.
 /// </summary>
 private ISync CreateAmazonS3Synchronizer(RemoteInfo ri, string container, Logger log, SynchronizeDirection syncDirection, CompressionType compressionType, int ChunkSizeForUpload, int ThreadPoolSize, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector)
 {
     var synchronizer = new AmazonS3Synchronizer(ri, container, syncDirection, compressionType, encryptionType, encryptionKey, initializationVector, log, ChunkSizeForUpload, ThreadPoolSize);
     return synchronizer;
 }