/// <summary>
/// Creates a new import job that reads RDF data from a blob source into a store.
/// </summary>
/// <param name="jobId">Unique identifier for this job.</param>
/// <param name="worker">The store worker that will receive the imported triples.</param>
/// <param name="importSource">Location of the blob to import from.</param>
/// <param name="statusCallback">Invoked with (jobId, statusMessage) to report progress.</param>
public AzureImportJob(string jobId, Server.StoreWorker worker, BlobImportSource importSource, Action<string, string> statusCallback)
{
    _jobId = jobId;
    _worker = worker;
    _importSource = importSource;
    _statusCallback = statusCallback;
    // Imports always target the default graph.
    _graphUri = Constants.DefaultGraphUri;
}
/// <summary>
/// Returns a stream for writing to the source.
/// </summary>
/// <param name="source">The blob source to write to. When <c>ConnectionString</c> is
/// null or empty the <c>BlobUri</c> is written via HTTP; otherwise it is resolved
/// through the Azure blob storage account.</param>
/// <returns>A writable stream; wrapped in a compressing <see cref="GZipStream"/>
/// when <c>source.IsGZiped</c> is set.</returns>
/// <exception cref="Exception">Thrown when the connection string cannot be parsed.</exception>
public static Stream OpenWrite(this BlobImportSource source)
{
    Stream rawStream;
    if (string.IsNullOrEmpty(source.ConnectionString))
    {
        // No storage account configured: write directly to the URI over HTTP.
        // NOTE(review): the WebClient is intentionally not disposed here because the
        // returned stream depends on it; callers close the stream when done.
        var client = new WebClient();
        rawStream = client.OpenWrite(source.BlobUri);
    }
    else
    {
        CloudStorageAccount storageAccount;
        if (!CloudStorageAccount.TryParse(source.ConnectionString, out storageAccount))
        {
            throw new Exception("Invalid blob storage connection string");
        }
        var blobClient = storageAccount.CreateCloudBlobClient();
        var blob = blobClient.GetBlobReference(source.BlobUri);
        rawStream = blob.OpenWrite();
    }
    // Single point of gzip wrapping (previously duplicated in both branches).
    return source.IsGZiped ? new GZipStream(rawStream, CompressionMode.Compress) : rawStream;
}
/// <summary>
/// Returns a stream for reading from the source.
/// </summary>
/// <param name="source">The blob source to read from. When <c>ConnectionString</c> is
/// null or empty the <c>BlobUri</c> is fetched via HTTP; otherwise it is resolved
/// through the Azure blob storage account.</param>
/// <returns>A readable stream; wrapped in a decompressing <see cref="GZipStream"/>
/// when <c>source.IsGZiped</c> is set.</returns>
/// <exception cref="Exception">Thrown when the BlobUri or ConnectionString is invalid.</exception>
public static Stream OpenRead(this BlobImportSource source)
{
    Stream rawStream;
    if (string.IsNullOrEmpty(source.ConnectionString))
    {
        // No storage account configured: fetch the URI directly over HTTP.
        Uri importUri;
        if (!Uri.TryCreate(source.BlobUri, UriKind.Absolute, out importUri))
        {
            throw new Exception("Invalid BlobUri specified for import.");
        }
        // NOTE(review): the WebClient is intentionally not disposed here because the
        // returned stream depends on it; callers close the stream when done.
        var client = new WebClient();
        rawStream = client.OpenRead(importUri);
    }
    else
    {
        CloudStorageAccount storageAccount;
        if (!CloudStorageAccount.TryParse(source.ConnectionString, out storageAccount))
        {
            throw new Exception("Invalid ConnectionString specified for import.");
        }
        var blobClient = storageAccount.CreateCloudBlobClient();
        var cloudBlob = blobClient.GetBlobReference(source.BlobUri);
        // Implicit upcast to Stream; the previous `as Stream` cast was redundant and
        // inconsistent with OpenWrite's direct cast.
        rawStream = cloudBlob.OpenRead();
    }
    // Single point of gzip wrapping (previously duplicated in both branches).
    return source.IsGZiped ? new GZipStream(rawStream, CompressionMode.Decompress) : rawStream;
}
/// <summary>
/// Creates a new export job that writes store data out to a blob destination.
/// </summary>
/// <param name="jobId">Unique identifier for this job.</param>
/// <param name="worker">The store worker to export data from.</param>
/// <param name="exportSource">Location of the blob to export to. (Note: reuses the
/// <c>BlobImportSource</c> type to describe the destination.)</param>
public AzureExportJob(string jobId, Server.StoreWorker worker, BlobImportSource exportSource)
{
    _jobId = jobId;
    _worker = worker;
    _exportSource = exportSource;
}
/// <summary>
/// Dispatches a dequeued job to the handler for its job type and records the
/// outcome (success or failure) on the job queue.
/// </summary>
/// <param name="jobInfo">The queued job to process.</param>
private void ProcessJob(JobInfo jobInfo)
{
    switch (jobInfo.JobType)
    {
        case JobType.Transaction:
            ProcessTransactionJob(jobInfo);
            break;
        case JobType.Import:
            ProcessImportJob(jobInfo);
            break;
        case JobType.Export:
            ProcessExportJob(jobInfo);
            break;
        case JobType.DeleteStore:
            ProcessDeleteStoreJob(jobInfo);
            break;
        default:
            // TODO: Implement me
            // Placeholder: simulate progress for job types with no real handler yet.
            Thread.Sleep(1000);
            _jobQueue.UpdateStatus(jobInfo.Id, "20% Complete");
            Thread.Sleep(1000);
            _jobQueue.UpdateStatus(jobInfo.Id, "40% Complete");
            Thread.Sleep(1000);
            _jobQueue.UpdateStatus(jobInfo.Id, "60% Complete");
            Thread.Sleep(1000);
            _jobQueue.UpdateStatus(jobInfo.Id, "80% Complete");
            Thread.Sleep(1000);
            _jobQueue.CompleteJob(jobInfo.Id, JobStatus.CompletedOk, "Completed without any processing");
            break;
    }
}

// Runs an update transaction against the job's store.
private void ProcessTransactionJob(JobInfo jobInfo)
{
    try
    {
        Trace.TraceInformation("ProcessJob: Starting processing on Transaction job {0}", jobInfo.Id);
        var storeManager = GetStoreManager();
        var worker = new Server.StoreWorker(jobInfo.StoreId, storeManager);
        // Job data is three sections (presumably preconditions, deletes, inserts —
        // TODO confirm) joined by the transaction separator.
        var transactionData = jobInfo.Data.Split(new string[] { AzureConstants.TransactionSeparator }, StringSplitOptions.None);
        var update = new UpdateTransaction(Guid.Parse(jobInfo.Id), worker, transactionData[0], transactionData[1], transactionData[2]);
        update.Run();
        _jobQueue.CompleteJob(jobInfo.Id, JobStatus.CompletedOk, "Transaction completed successfully");
    }
    catch (Exception ex)
    {
        Trace.TraceError("ProcessJob: Transaction job failed with exception {0}", ex);
        // NOTE(review): this failure path calls CompleteJob(CompletedWithErrors) while
        // the other job types call FailWithException — confirm whether that difference
        // in queue bookkeeping is intentional.
        _jobQueue.CompleteJob(jobInfo.Id, JobStatus.CompletedWithErrors, String.Format("Transaction failed: {0}", ex));
    }
}

// Imports data from a blob source into the job's store.
private void ProcessImportJob(JobInfo jobInfo)
{
    try
    {
        Trace.TraceInformation("ProcessJob: Starting processing on Import job {0}", jobInfo.Id);
        var storeManager = GetStoreManager();
        var worker = new Server.StoreWorker(jobInfo.StoreId, storeManager);
        BlobImportSource importSource;
        if (!TryDeserialize(jobInfo.Data, out importSource))
        {
            // Legacy format: the job data is the raw blob URI rather than a
            // serialized BlobImportSource.
            importSource = new BlobImportSource { BlobUri = jobInfo.Data };
        }
        var import = new AzureImportJob(jobInfo.Id, worker, importSource,
                                        (jobId, statusMessage) => _jobQueue.UpdateStatus(jobId, statusMessage));
        import.Run();
        if (import.Errors)
        {
            Trace.TraceError("ProcessJob: Import job {0} failed with error: {1}. Marking job as CompletedWithErrors", jobInfo.Id, import.ErrorMessage);
            _jobQueue.CompleteJob(jobInfo.Id, JobStatus.CompletedWithErrors, import.ErrorMessage);
        }
        else
        {
            _jobQueue.CompleteJob(jobInfo.Id, JobStatus.CompletedOk, "Import completed successfully");
        }
    }
    catch (Exception ex)
    {
        Trace.TraceError("ProcessJob: Import job {0} failed with exception {1}", jobInfo.Id, ex);
        _jobQueue.FailWithException(jobInfo.Id, "Import failed", ex);
    }
}

// Exports the job's store to a blob destination.
private void ProcessExportJob(JobInfo jobInfo)
{
    try
    {
        Trace.TraceInformation("ProcessJob: Starting processing on Export job {0}", jobInfo.Id);
        var storeManager = GetStoreManager();
        var worker = new Server.StoreWorker(jobInfo.StoreId, storeManager);
        BlobImportSource exportSource;
        if (!TryDeserialize(jobInfo.Data, out exportSource))
        {
            // Legacy format: the job data is the raw blob URI rather than a
            // serialized BlobImportSource.
            exportSource = new BlobImportSource { BlobUri = jobInfo.Data };
        }
        var export = new AzureExportJob(jobInfo.Id, worker, exportSource);
        export.Run();
        _jobQueue.CompleteJob(jobInfo.Id, JobStatus.CompletedOk, "Export completed successfully");
    }
    catch (Exception ex)
    {
        Trace.TraceError("ProcessJob: Export job {0} failed with exception {1}", jobInfo.Id, ex);
        _jobQueue.FailWithException(jobInfo.Id, "Export failed", ex);
    }
}

// Deletes the store named by the job.
private void ProcessDeleteStoreJob(JobInfo jobInfo)
{
    try
    {
        Trace.TraceInformation("ProcessJob: Starting processing on Delete job {0}", jobInfo.Id);
        var storeManager = GetStoreManager();
        storeManager.DeleteStore(jobInfo.StoreId);
        _jobQueue.CompleteJob(jobInfo.Id, JobStatus.CompletedOk, "Store deleted.");
    }
    catch (Exception ex)
    {
        Trace.TraceError("ProcessJob: DeleteStore {0} failed with exception {1}", jobInfo.StoreId, ex);
        _jobQueue.FailWithException(jobInfo.Id, "Delete failed", ex);
    }
}