/// <summary>
/// Builds a Sync Framework orchestrator targeting an Azure blob container.
/// RemoteInfo supplies the storage account name and shared key.
/// </summary>
public AzureSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection)
{
    disposed = false;

    // Attach to (or create) the blob store backing the target container.
    string containerName = container;
    CloudStorageAccount storageAccount =
        new CloudStorageAccount(new StorageCredentialsAccountAndKey(ri.accountName, ri.accountKey), true);
    AzureBlobStore blobStore = new AzureBlobStore(containerName, storageAccount);
    Console.WriteLine("Successfully created/attached to container {0}.", containerName);

    // UploadingFile is invoked for each blob change as it is applied.
    AzureBlobSyncProvider azureProvider = new AzureBlobSyncProvider(containerName, blobStore);
    azureProvider.ApplyingChange += UploadingFile;

    orchestrator = new SyncOrchestrator();
    orchestrator.RemoteProvider = azureProvider;

    if (syncDirection == SynchronizeDirection.Upload)
    {
        orchestrator.Direction = SyncDirectionOrder.Upload;
    }
    else if (syncDirection == SynchronizeDirection.Download)
    {
        orchestrator.Direction = SyncDirectionOrder.Download;
    }
}
/// <summary>
/// Builds a Rackspace Cloud Files synchronizer. RemoteInfo is re-used as:
/// accountName = Rackspace username, accountKey = API key.
/// Container creation is best-effort: failures are written to the console
/// but do not abort construction.
/// </summary>
public RackspaceCloudFilesSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection)
{
    this.disposed = false;
    this.username = ri.accountName;
    this.apiKey = ri.accountKey;
    this.syncDirection = syncDirection;
    this.container = container;

    try
    {
        var identity = new CloudIdentity() { APIKey = this.apiKey, Username = this.username };
        var provider = new CloudFilesProvider(identity);

        // Assume the default region for now.
        ObjectStore createResponse = provider.CreateContainer(container);

        // "ContainerExists" also counts as success (idempotent creation).
        bool created = createResponse.Equals(ObjectStore.ContainerCreated)
                    || createResponse.Equals(ObjectStore.ContainerExists);
        if (!created)
        {
            Console.WriteLine("Container creation failed! Response: " + createResponse.ToString());
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Exception in creating container: " + e);
    }
}
/// <summary>
/// Builds an Amazon S3 synchronizer. RemoteInfo is re-used as:
/// accountName = awsAccessKeyId and accountKey = awsSecretAccessKey.
/// </summary>
public AmazonS3Synchronizer(RemoteInfo remoteInfo, string bucket, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize = 1 )
{
    this.logger = log;
    disposed = false;
    this.syncDirection = syncDirection;
    // Amazon S3 does not like spaces (or uppercase) in bucket names.
    // Fixed: removed the redundant ToString() on an already-string value and
    // the stray empty statement (";;") that followed it.
    bucketName = bucket.Replace(' ', '-').ToLower();
    // NOTE(review): the helper receives the raw 'bucket', not the sanitized
    // 'bucketName' — confirm this is intentional.
    amazonS3Helper = new AmazonS3Helper(remoteInfo, bucket, compressionType, encryptionType, encryptionKey, initializationVector, logger, ChunkSize, ThreadPoolSize);
    this.MaxConcurrentFileSyncThreads = ThreadPoolSize;
}
/// <summary>
/// Builds an Amazon S3 synchronizer. RemoteInfo is re-used as:
/// accountName = awsAccessKeyId and accountKey = awsSecretAccessKey.
/// </summary>
public AmazonS3Synchronizer(RemoteInfo remoteInfo, string bucket, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize = 1)
{
    this.logger = log;
    disposed = false;
    this.syncDirection = syncDirection;
    // Amazon S3 does not like spaces (or uppercase) in bucket names.
    // Fixed: removed the redundant ToString() on an already-string value and
    // the stray empty statement (";;") that followed it.
    bucketName = bucket.Replace(' ', '-').ToLower();
    // NOTE(review): the helper receives the raw 'bucket', not the sanitized
    // 'bucketName' — confirm this is intentional.
    amazonS3Helper = new AmazonS3Helper(remoteInfo, bucket, compressionType, encryptionType, encryptionKey, initializationVector, logger, ChunkSize, ThreadPoolSize);
    this.MaxConcurrentFileSyncThreads = ThreadPoolSize;
}
//protected SyncOrchestrator orchestrator;

/// <summary>
/// Builds a chunk-based Azure synchronizer: records the account credentials
/// and target container, then constructs the AzureHelper that performs the
/// chunked transfers (with optional compression/encryption).
/// </summary>
public AzureChunkSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize = 1)
{
    logger = log;
    disposed = false;

    // Store and provider setup.
    this.accountName = ri.accountName;
    this.accountKey = ri.accountKey;
    this.container = container;
    this.syncDirection = syncDirection;
    this.azureHelper = new AzureHelper(
        this.accountName,
        this.accountKey,
        this.container,
        compressionType,
        encryptionType,
        encryptionKey,
        initializationVector,
        log,
        ChunkSize,
        ThreadPoolSize);
    this.chunkListHash = null;
    this.ThreadPoolSize = ThreadPoolSize;
}
//protected SyncOrchestrator orchestrator;

// Constructs a chunk-based Azure synchronizer: captures the account
// credentials and target container, and builds the AzureHelper that carries
// out the chunked transfer (with optional compression/encryption).
public AzureChunkSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection, CompressionType compressionType, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector, Logger log, int ChunkSize, int ThreadPoolSize=1)
{
    logger = log;
    disposed = false;
    // Setup Store and Provider //
    this.accountName = ri.accountName;
    this.accountKey = ri.accountKey;
    this.container = container;
    this.syncDirection = syncDirection;
    this.azureHelper = new AzureHelper(this.accountName, this.accountKey, this.container, compressionType, encryptionType, encryptionKey, initializationVector, log, ChunkSize, ThreadPoolSize);
    this.chunkListHash = null;
    this.ThreadPoolSize = ThreadPoolSize;
}
/// <summary>
/// Factory entry point: builds an ISync for the given location.
/// Returns null for unsupported synchronizer types.
/// </summary>
public ISync CreateSynchronizer(LocationInfo Li, string container, Logger log, SynchronizeDirection syncDirection = SynchronizeDirection.Upload, CompressionType compressionType = CompressionType.None, int ChunkSizeForUpload = 4*1024*1024, int ThreadPoolSize =1 , EncryptionType encryptionType = EncryptionType.None , byte[] encryptionKey = null, byte[] initializationVector =null)
{
    switch (Li.st)
    {
        case SynchronizerType.Azure:
            return CreateAzureSynchronizer(
                new RemoteInfo(Li.accountName, Li.accountKey), container, log, syncDirection,
                compressionType, ChunkSizeForUpload, ThreadPoolSize,
                encryptionType, encryptionKey, initializationVector);
        case SynchronizerType.AmazonS3:
            return CreateAmazonS3Synchronizer(
                new RemoteInfo(Li.accountName, Li.accountKey), container, log, syncDirection,
                compressionType, ChunkSizeForUpload, ThreadPoolSize,
                encryptionType, encryptionKey, initializationVector);
        default:
            return null;
    }
}
// Constructs a Rackspace Cloud Files synchronizer. RemoteInfo is re-used as:
// accountName = Rackspace username, accountKey = API key. Container creation
// is best-effort: failures are reported to the console, not thrown.
public RackspaceCloudFilesSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection)
{
    this.disposed = false;
    this.username = ri.accountName;
    this.apiKey = ri.accountKey;
    this.syncDirection = syncDirection;
    this.container = container;
    try
    {
        var cloudIdentity = new CloudIdentity() { APIKey = this.apiKey, Username = this.username };
        var cloudFilesProvider = new CloudFilesProvider(cloudIdentity);
        // assume default region for now
        ObjectStore createContainerResponse = cloudFilesProvider.CreateContainer(container);
        // "ContainerExists" also counts as success (idempotent creation).
        if (!createContainerResponse.Equals(ObjectStore.ContainerCreated) && !createContainerResponse.Equals(ObjectStore.ContainerExists))
            Console.WriteLine("Container creation failed! Response: " + createContainerResponse.ToString());
    }
    catch (Exception e)
    {
        // Best-effort: construction continues even if container creation failed.
        Console.WriteLine("Exception in creating container: " + e);
    }
}
// Constructs a Sync Framework orchestrator targeting an Azure blob container.
// RemoteInfo supplies the storage account name and shared key.
public AzureSynchronizer(RemoteInfo ri, string container, SynchronizeDirection syncDirection)
{
    disposed = false;
    string _containerName = container;
    //
    // Setup Store and Provider
    //
    CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(ri.accountName, ri.accountKey), true);
    AzureBlobStore blobStore = new AzureBlobStore(_containerName, storageAccount);
    Console.WriteLine("Successfully created/attached to container {0}.", _containerName);
    AzureBlobSyncProvider azureProvider = new AzureBlobSyncProvider(_containerName, blobStore);
    // UploadingFile is invoked for each blob change as it is applied.
    azureProvider.ApplyingChange += new EventHandler<ApplyingBlobEventArgs>(UploadingFile);
    orchestrator = new SyncOrchestrator();
    orchestrator.RemoteProvider = azureProvider;
    // NOTE(review): any direction other than Upload/Download silently leaves
    // orchestrator.Direction at its default — confirm that is intended.
    if (syncDirection == SynchronizeDirection.Upload)
        orchestrator.Direction = SyncDirectionOrder.Upload;
    else if (syncDirection == SynchronizeDirection.Download)
        orchestrator.Direction = SyncDirectionOrder.Download;
}
/// <summary>
/// Creates the synchronizer for this stream's cloud account, pointing it at
/// the stream's local directory, index file, and data log. Secure streams
/// additionally encrypt with the ACL metadata's AES key/IV.
/// Sets <c>synchronizer</c> to null when the account has no cloud location.
/// </summary>
protected void CreateSync(SynchronizeDirection dir)
{
    // No cloud location configured -> no synchronizer.
    if (account.location == "None")
    {
        synchronizer = null;
        return;
    }

    // The two branches previously duplicated all of this; only the
    // encryption arguments actually differ.
    LocationInfo Li = new LocationInfo(account.accountName, account.accountKey, SyncFactory.GetSynchronizerType(account.location));
    // Container names must be lowercase with '-' instead of '/'.
    string containerName = streamid.ToString().Replace('/', '-').ToLower() + "-" + seq_num;

    if (streamtype == StreamFactory.StreamSecurityType.Secure)
    {
        synchronizer = SyncFactory.Instance.CreateSynchronizer(Li, containerName, logger, dir, streamcompressiontype, this.StreamChunkSizeForUpload, this.StreamThreadPoolSize, EncryptionType.AES, acl_md.encKey, acl_md.IV);
    }
    else
    {
        synchronizer = SyncFactory.Instance.CreateSynchronizer(Li, containerName, logger, dir, streamcompressiontype, this.StreamChunkSizeForUpload, this.StreamThreadPoolSize);
    }

    synchronizer.SetLocalSource(targetDir);
    synchronizer.SetIndexFileName(IndexFileName);
    synchronizer.SetDataFileName(DataLogFileName);
}
/// <summary>
/// Thin factory wrapper: forwards all arguments to the
/// AzureChunkSynchronizer constructor (note the parameter reordering).
/// </summary>
private ISync CreateAzureSynchronizer(RemoteInfo ri, string container, Logger log, SynchronizeDirection syncDirection, CompressionType compressionType,int ChunkSizeForUpload, int ThreadPoolSize, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector)
{
    ISync synchronizer = new AzureChunkSynchronizer(
        ri, container, syncDirection,
        compressionType, encryptionType, encryptionKey, initializationVector,
        log, ChunkSizeForUpload, ThreadPoolSize);
    return synchronizer;
}
/* Sample call for upload:-
   byte[] array = new byte[1024*1024*1024];
   Random random = new Random();
   random.NextBytes(array);
   double timeTaken_Upload = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Upload, "fooContainer", "fooBlob");
   double timeTaken_Download = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Download, "fooContainer", "fooBlob");
*/
/// <summary>
/// Measures the raw time to upload or download a byte buffer against Azure
/// blob storage or Amazon S3. Credentials come from app settings
/// ("AccountName"/"AccountSharedKey"). Container and blob names default to
/// "testingraw" and a fresh GUID respectively.
/// </summary>
/// <returns>Elapsed time of the timed transfer region, in milliseconds.</returns>
/// <exception cref="InvalidDataException">Unknown synchronizer type.</exception>
public static double doRawCloudPerf(byte[] input, SynchronizerType synchronizerType, SynchronizeDirection syncDirection, string exp_directory, Logger logger, string containerName=null, string blobName=null)
{
    // NOTE(review): exp_directory is unused here; kept for signature compatibility.
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    DateTime begin = DateTime.Now, end = DateTime.Now;

    if (synchronizerType == SynchronizerType.Azure)
    {
        #region azure download/upload
        if (containerName == null)
            containerName = "testingraw";
        if (blobName == null)
            blobName = Guid.NewGuid().ToString();

        CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(accountName, accountKey), true);
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference(containerName);

        if (syncDirection == SynchronizeDirection.Upload)
        {
            logger.Log("Start Stream Append");
            container.CreateIfNotExist();
            begin = DateTime.UtcNow; // ---- timed region starts ----
            try
            {
                using (MemoryStream memoryStream = new System.IO.MemoryStream(input))
                {
                    CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                    blockBlob.UploadFromStream(memoryStream);
                }
            }
            catch (Exception e)
            {
                // Fixed: was an empty catch that silently swallowed failures,
                // making failed runs look like very fast transfers. Log like
                // the other copy of this method does.
                Console.WriteLine("exception: " + e);
            }
            end = DateTime.UtcNow; // ---- timed region ends ----
            logger.Log("End Stream Append");
        }

        if (syncDirection == SynchronizeDirection.Download)
        {
            logger.Log("Start Stream Get");
            logger.Log("Start Stream GetAll");
            try
            {
                CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                // NOTE(review): the blob is downloaded twice and only the
                // second download is timed — presumably a warm-up; confirm.
                byte[] blobContents = blockBlob.DownloadByteArray();
                begin = DateTime.UtcNow; // ---- timed region starts ----
                byte[] contents = blockBlob.DownloadByteArray();
            }
            catch (Exception e)
            {
                // Fixed: was an empty catch (see upload path above).
                Console.WriteLine("exception: " + e);
            }
            end = DateTime.UtcNow; // ---- timed region ends ----
            logger.Log("End Stream Get");
            logger.Log("End Stream GetAll");
        }
        #endregion
    }
    else if (synchronizerType == SynchronizerType.AmazonS3)
    {
        #region amazon s3 stuff
        if (containerName == null)
            containerName = "testingraw";
        if (blobName == null)
            blobName = Guid.NewGuid().ToString();

        AmazonS3Client amazonS3Client = new AmazonS3Client(accountName, accountKey);

        if (syncDirection == SynchronizeDirection.Upload)
        {
            // NOTE(review): this scan has no observable effect — PutBucket
            // below runs unconditionally regardless of the match.
            ListBucketsResponse response = amazonS3Client.ListBuckets();
            foreach (S3Bucket bucket in response.Buckets)
            {
                if (bucket.BucketName == containerName)
                {
                    break;
                }
            }
            amazonS3Client.PutBucket(new PutBucketRequest().WithBucketName(containerName));
            begin = DateTime.UtcNow; // ---- timed region starts ----
            MemoryStream ms = new MemoryStream();
            ms.Write(input, 0, input.Length);
            PutObjectRequest request = new PutObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            request.InputStream = ms;
            amazonS3Client.PutObject(request);
            end = DateTime.UtcNow; // ---- timed region ends ----
        }

        if (syncDirection == SynchronizeDirection.Download)
        {
            if (File.Exists(blobName))
                File.Delete(blobName);
            begin = DateTime.UtcNow; // ---- timed region starts ----
            GetObjectRequest request = new GetObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            GetObjectResponse response = amazonS3Client.GetObject(request);
            var localFileStream = File.Create(blobName);
            response.ResponseStream.CopyTo(localFileStream);
            localFileStream.Close();
            end = DateTime.UtcNow; // ---- timed region ends ----
        }
        #endregion
    }
    else
    {
        throw new InvalidDataException("syncronizer type is not valid");
    }

    return (end - begin).TotalMilliseconds; // total time of the timed region in milliseconds
}
/// <summary>
/// Builds an ISync for the given metadata-service account, or null when the
/// account has no cloud location configured.
/// </summary>
private static ISync CreateSyncForAccount(MetaDataService.AccountInfo account, string containerName, Logger log, SynchronizeDirection synchronizeDirection = SynchronizeDirection.Upload)
{
    // Guard: no cloud location means no synchronizer.
    if (account.location == "None")
    {
        return null;
    }

    LocationInfo locationInfo = new LocationInfo(account.accountName, account.accountKey, SyncFactory.GetSynchronizerType(account.location));
    return SyncFactory.Instance.CreateSynchronizer(locationInfo, containerName, log, synchronizeDirection);
}
/// <summary>
/// Thin factory wrapper: forwards all arguments to the
/// AmazonS3Synchronizer constructor (note the parameter reordering).
/// </summary>
private ISync CreateAmazonS3Synchronizer(RemoteInfo ri, string container, Logger log, SynchronizeDirection syncDirection, CompressionType compressionType, int ChunkSizeForUpload, int ThreadPoolSize, EncryptionType encryptionType, byte[] encryptionKey, byte[] initializationVector)
{
    ISync synchronizer = new AmazonS3Synchronizer(
        ri, container, syncDirection,
        compressionType, encryptionType, encryptionKey, initializationVector,
        log, ChunkSizeForUpload, ThreadPoolSize);
    return synchronizer;
}
// Factory entry point: builds an ISync for the given location.
// Returns null for unsupported synchronizer types.
public ISync CreateSynchronizer(LocationInfo Li, string container, Logger log, SynchronizeDirection syncDirection = SynchronizeDirection.Upload, CompressionType compressionType = CompressionType.None, int ChunkSizeForUpload = 4 *1024 *1024, int ThreadPoolSize = 1, EncryptionType encryptionType = EncryptionType.None, byte[] encryptionKey = null, byte[] initializationVector = null)
{
    ISync isync = null;
    switch (Li.st)
    {
    case SynchronizerType.Azure:
        isync = CreateAzureSynchronizer(new RemoteInfo(Li.accountName, Li.accountKey), container, log, syncDirection, compressionType, ChunkSizeForUpload, ThreadPoolSize, encryptionType, encryptionKey, initializationVector);
        break;

    case SynchronizerType.AmazonS3:
        isync = CreateAmazonS3Synchronizer(new RemoteInfo(Li.accountName, Li.accountKey), container, log, syncDirection, compressionType, ChunkSizeForUpload, ThreadPoolSize, encryptionType, encryptionKey, initializationVector);
        break;

    default:
        // Unknown synchronizer type -> caller receives null.
        isync = null;
        break;
    }
    return(isync);
}
/*
 * Sample call for upload:-
 * byte[] array = new byte[1024*1024*1024];
 * Random random = new Random();
 * random.NextBytes(array);
 * double timeTaken_Upload = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Upload, "fooContainer", "fooBlob");
 * double timeTaken_Download = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Download, "fooContainer", "fooBlob");
 *
 * */
// Measures the raw time to upload or download a byte buffer against Azure
// blob storage or Amazon S3. Credentials come from app settings
// ("AccountName"/"AccountSharedKey"); container and blob names default to
// "testingraw" and a fresh GUID. Returns the elapsed milliseconds of the
// timed transfer region; throws InvalidDataException for unknown types.
// NOTE(review): exp_directory is unused here.
public static double doRawCloudPerf(byte[] input, SynchronizerType synchronizerType, SynchronizeDirection syncDirection, string exp_directory, Logger logger, string containerName = null, string blobName = null)
{
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    DateTime begin = DateTime.Now, end = DateTime.Now;
    if (synchronizerType == SynchronizerType.Azure)
    {
        #region azure download/upload
        if (containerName == null)
        {
            containerName = "testingraw";
        }
        if (blobName == null)
        {
            blobName = Guid.NewGuid().ToString();
        }
        CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(accountName, accountKey), true);
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference(containerName);
        if (syncDirection == SynchronizeDirection.Upload)
        {
            logger.Log("Start Stream Append");
            container.CreateIfNotExist();
            begin = DateTime.UtcNow; // ---- timed region starts ----
            try
            {
                using (MemoryStream memoryStream = new System.IO.MemoryStream(input))
                {
                    CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                    blockBlob.UploadFromStream(memoryStream);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("exception: " + e);
            }
            end = DateTime.UtcNow; // ---- timed region ends ----
            logger.Log("End Stream Append");
        }
        if (syncDirection == SynchronizeDirection.Download)
        {
            logger.Log("Start Stream Get");
            logger.Log("Start Stream GetAll");
            try
            {
                CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                // NOTE(review): the blob is downloaded twice and only the
                // second download is timed — presumably a warm-up; confirm.
                byte[] blobContents = blockBlob.DownloadByteArray();
                //if (File.Exists(blobName))
                //    File.Delete(blobName);
                begin = DateTime.UtcNow; // ---- timed region starts ----
                // using (FileStream fs = new FileStream(blobName, FileMode.OpenOrCreate))
                // {
                byte[] contents = blockBlob.DownloadByteArray();
                //     fs.Write(contents, 0, contents.Length);
                // }
            }
            catch (Exception e)
            {
                Console.WriteLine("exception: " + e);
            }
            end = DateTime.UtcNow; // ---- timed region ends ----
            logger.Log("End Stream Get");
            logger.Log("End Stream GetAll");
        }
        #endregion
    }
    else if (synchronizerType == SynchronizerType.AmazonS3)
    {
        #region amazon s3 stuff
        if (containerName == null)
        {
            containerName = "testingraw";
        }
        if (blobName == null)
        {
            blobName = Guid.NewGuid().ToString();
        }
        AmazonS3Client amazonS3Client = new AmazonS3Client(accountName, accountKey);
        if (syncDirection == SynchronizeDirection.Upload)
        {
            // NOTE(review): this scan has no observable effect — PutBucket
            // below runs unconditionally regardless of the match.
            ListBucketsResponse response = amazonS3Client.ListBuckets();
            foreach (S3Bucket bucket in response.Buckets)
            {
                if (bucket.BucketName == containerName)
                {
                    break;
                }
            }
            amazonS3Client.PutBucket(new PutBucketRequest().WithBucketName(containerName));
            begin = DateTime.UtcNow; // ---- timed region starts ----
            MemoryStream ms = new MemoryStream();
            ms.Write(input, 0, input.Length);
            PutObjectRequest request = new PutObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            request.InputStream = ms;
            amazonS3Client.PutObject(request);
            end = DateTime.UtcNow; // ---- timed region ends ----
        }
        if (syncDirection == SynchronizeDirection.Download)
        {
            if (File.Exists(blobName))
            {
                File.Delete(blobName);
            }
            begin = DateTime.UtcNow; // ---- timed region starts ----
            GetObjectRequest request = new GetObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            GetObjectResponse response = amazonS3Client.GetObject(request);
            var localFileStream = File.Create(blobName);
            response.ResponseStream.CopyTo(localFileStream);
            localFileStream.Close();
            end = DateTime.UtcNow; // ---- timed region ends ----
        }
        #endregion
    }
    else
    {
        throw new InvalidDataException("syncronizer type is not valid");
    }
    return((end - begin).TotalMilliseconds);// return total time to upload in milliseconds
}