/// <summary>
/// Mirrors an Azure blob hierarchy (locale/timestamp/speakerId/files) to local disk
/// under <paramref name="rootPath"/>. Locale folders whose name cannot be parsed
/// are skipped (best-effort download).
/// </summary>
/// <remarks>NOTE(review): <paramref name="blobContainerName"/> is unused — kept for signature compatibility; confirm with callers.</remarks>
public void NewDownload(string rootUri, string blobContainerName, string rootPath)
{
    foreach (string localeUri in AzureUtils.ListCurrentDirectories(rootUri))
    {
        string locale;
        try
        {
            locale = localeUri.GetLastNPart('/');
        }
        catch
        {
            // Malformed locale segment — skip this directory rather than abort the run.
            continue;
        }

        foreach (string timeStampUri in AzureUtils.ListCurrentDirectories(localeUri))
        {
            // Normalize the timestamp folder name to yyyyMMdd.
            string timeStamp = timeStampUri.GetLastNPart('/');
            DateTime dt = DateTime.Parse(timeStamp);
            timeStamp = dt.ToString("yyyyMMdd");

            foreach (string speakerIdUri in AzureUtils.ListCurrentDirectories(timeStampUri))
            {
                // BUG FIX: was timeStampUri.GetLastNPart('/'), which used the parent
                // timestamp segment as the speaker id for every speaker folder.
                string speakerId = speakerIdUri.GetLastNPart('/');
                string localFolderPath = Path.Combine(rootPath, locale, timeStamp, speakerId);
                Directory.CreateDirectory(localFolderPath);

                foreach (string azureFilePath in AzureUtils.ListCurrentBlobs(speakerIdUri))
                {
                    string fileName = azureFilePath.GetLastNPart('/');
                    string localFilePath = Path.Combine(localFolderPath, fileName);
                    AzureUtils.DownloadFile(azureFilePath, localFilePath);
                }
            }
        }
    }
}
public static bool Bug11782_LowLevelQueryableManipulation()
{
    // The non-generic IQueryProvider.CreateQuery overload must be rejected:
    // reaching the line after the call without a DryadLinqException is a failure.
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        Expression lambda = Expression.Lambda<Func<int, int>>(
            Expression.Constant(1),
            new[] { Expression.Parameter(typeof(int), "x") });
        var selectCall = Expression.Call(
            typeof(Queryable),
            "Select",
            new Type[] { numbers.ElementType, numbers.ElementType },
            numbers.Expression,
            Expression.Quote(lambda));
        var z = numbers.Provider.CreateQuery(selectCall);

        passed &= false; // the use of non-generic Provider.CreateQuery() should have thrown
    }
    catch (DryadLinqException)
    {
        passed &= true;
    }
    return passed;
}
public static bool AssumeRangePartition()
{
    // AssumeRangePartition followed by a Select, terminated by ToStore;
    // success is the output file existing after the job completes.
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        string outFile = "unittest/output/BasicAPITests_AssumeRangePartition.out";

        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        var query = numbers
                    .AssumeRangePartition(n => n, false)
                    .Select(n => 100 + n)
                    .ToStore(outFile);
        var jobInfo = query.Submit();
        jobInfo.Wait();

        passed &= Utils.FileExists(Config.accountName, Config.storageKey, Config.containerName, outFile);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
/// <summary>
/// Writes <paramref name="source"/> records to the Azure blob addressed by
/// <paramref name="dataSetUri"/>. Compression is not supported and raises
/// a DryadLinqException.
/// </summary>
public override void Ingress <T>(DryadLinqContext context, IEnumerable <T> source, Uri dataSetUri, DryadLinqMetaData metaData, CompressionScheme compressionScheme, bool isTemp = false)
{
    string accountName, storageKey, containerName, blobPath;
    AzureUtils.FromAzureUri(dataSetUri, out accountName, out storageKey, out containerName, out blobPath);

    if (compressionScheme != CompressionScheme.None)
    {
        throw new DryadLinqException("Not implemented: writing to Azure temporary storage with compression enabled");
    }

    var client = new AzureDfsClient(accountName, storageKey, containerName);
    var factory = (DryadLinqFactory<T>)DryadLinqCodeGen.GetFactory(context, typeof(T));

    using (Stream stream = client.GetFileStreamWriterAsync(blobPath).Result)
    {
        var blockStream = new DryadLinqBlockStream(stream);
        DryadLinqRecordWriter<T> recordWriter = factory.MakeWriter(blockStream);
        foreach (T record in source)
        {
            recordWriter.WriteRecordSync(record);
        }
        recordWriter.Close();
    }
}
/// <summary>
/// Enumerates blob names under <paramref name="path"/> that carry the given
/// <paramref name="extension"/> (leading dots ignored; empty matches all).
/// </summary>
public IEnumerable<string> EnumerateFiles(string path, string extension, SearchOption searchOption)
{
    Assert.ArgumentNotNullOrEmpty(path, nameof(path));

    // Robustness fix: a null extension previously threw NullReferenceException on Trim.
    extension = extension?.Trim('.') ?? string.Empty;
    string prefix = AzureUtils.DirectoryPathToPrefix(path);
    IEnumerable<IListBlobItem> items = this.azureProvider.EnumerateBlobs(prefix, searchOption);
    foreach (IListBlobItem item in items)
    {
        if (item is CloudBlockBlob cloudBlockBlob)
        {
            var name = cloudBlockBlob.Name;
            // Ordinal comparison: blob names are machine identifiers, and the
            // culture-sensitive default of StartsWith/EndsWith can mismatch
            // under some locales (CA1310).
            if (string.IsNullOrEmpty(prefix) || name.StartsWith(prefix, StringComparison.Ordinal))
            {
                if (string.IsNullOrEmpty(extension) || name.EndsWith($".{extension}", StringComparison.Ordinal))
                {
                    yield return name;
                }
            }
        }
    }
}
public static bool GroupByReduceWithCustomDecomposableFunction_NonDistributableCombiner()
{
    // GroupBy/Reduce using a custom decomposable aggregate (DecomposableFunc4).
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateGroupByReduceDataSet());
        IQueryable<int> data = sets.Select(s => s.First());

        double[] aggregates = data
                              .Select(v => (double)v)
                              .GroupBy(v => 0, (key, seq) => DecomposableFunc4(seq))
                              .ToArray();
        double[] expected = new[] { Enumerable.Range(1, 200).Sum() / 100.0 };

        // The order of the result elements is not guaranteed, so order them before testing.
        double[] actualSorted = aggregates.OrderBy(v => v).ToArray();
        double[] expectedSorted = expected.OrderBy(v => v).ToArray();
        passed &= actualSorted.SequenceEqual(expectedSorted);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool GroupByReduce_ResultSelector_ComplexNewExpression()
{
    // The result selector builds a nested KeyValuePair via a complex new-expression.
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateGroupByReduceDataSet());
        IQueryable<int> data = sets.Select(s => s.First());

        var aggregates = data
                         .GroupBy(n => 0,
                                  (key, seq) => new KeyValuePair<int, KeyValuePair<double, double>>(
                                      key,
                                      new KeyValuePair<double, double>(seq.Average(), seq.Average())))
                         .ToArray();

        var expected = new KeyValuePair<int, KeyValuePair<double, double>>[]
        {
            new KeyValuePair<int, KeyValuePair<double, double>>(
                0, new KeyValuePair<double, double>(100.5, 100.5)),
        };
        passed &= aggregates.SequenceEqual(expected);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool MaterializeNonToStoreTerminated()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        IQueryable<int> query = numbers.Select(n => 100 + n);

        DryadLinqQueryable.Submit(query); // materialize // throws

        foreach (int value in query)
        {
            //Console.WriteLine(value);
        }

        //@TODO: assert that only one query execution occurred.
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool EnumeratePlainData()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        foreach (int value in numbers) // throws
        {
            //Console.WriteLine(value);
        }
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool template()
{
    // Skeleton test: builds the standard pipeline, then checks for an output file.
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        string outFile = "unittest/output/x.txt";

        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        passed &= Utils.FileExists(Config.accountName, Config.storageKey, Config.containerName, outFile);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool SubmitNonToStoreTerminated()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        var shifted = numbers.Select(n => 100 + n);
        var filtered = shifted.Where(n => true);

        filtered.SubmitAndWait(); // throws here

        var outPT = filtered.ToList();
        foreach (int value in outPT)
        {
            //Console.WriteLine(value);
        }
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool HomomorphicUnaryApply()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        string outFile = "unittest/output/HomomorphicUnaryApply.out";

        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        var applied = numbers.ApplyPerPartition(p => Homomorphic_Unary_Func(p));
        var jobInfo = applied.ToStore<int>(outFile).Submit();
        jobInfo.Wait();

        passed &= Utils.FileExists(Config.accountName, Config.storageKey, Config.containerName, outFile);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool Aggregate_WithCombiner()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        string csv = numbers.Aggregate("", (str, n) => IntToStringCSVAggregator(str, n));

        // string should have numbers 1..12 separated by commas
        passed &= (csv.Length == 27);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
private IEnumerable <CloudBlockBlob> GetAllBlobs()
{
    // Pages through every blob under the configured root, yielding block blobs only.
    string prefix = AzureUtils.DirectoryPathToPrefix(this.physicalRootPath);

    BlobContinuationToken continuation = null;
    do
    {
        BlobResultSegment segment = this.cloudBlobContainer.ListBlobsSegmented(
            prefix,
            true,
            BlobListingDetails.None,
            null,
            continuation,
            null,
            null);
        continuation = segment.ContinuationToken;

        foreach (IListBlobItem item in segment.Results)
        {
            if (item is CloudBlockBlob blockBlob)
            {
                yield return blockBlob;
            }
        }
    }
    while (continuation != null);
}
public static bool BuiltInCountIsDistributable()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        // Built in Count is Distributable as built-in logic knows to use Sum() as the
        // combiner function: Count(a,b,c,d) = Sum(Count(a,b), Count(c,d))
        int[] aggregates = numbers.GroupBy(n => n % 2, (key, seq) => seq.Count()).ToArray();
        int[] expected = new[] { 6, 6 }; // six elements in each full group.

        // The order of the result elements is not guaranteed, so order them before testing.
        int[] actualSorted = aggregates.OrderBy(v => v).ToArray();
        int[] expectedSorted = expected.OrderBy(v => v).ToArray();
        passed &= actualSorted.SequenceEqual(expectedSorted);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool CopyPlainDataViaToStoreMaterialize()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        string outFile = "unittest/output/CopyPlainDataViaToStoreMaterialize.txt";

        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        var stored = numbers.ToStore(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName, outFile),
            true);
        DryadLinqJobInfo jobInfo = DryadLinqQueryable.Submit(stored);
        jobInfo.Wait();

        foreach (int value in stored)
        {
            //Console.WriteLine(value);
        }

        passed &= Utils.FileExists(Config.accountName, Config.storageKey, Config.containerName, outFile);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool GroupByReduce_BitwiseNegationOperator()
{
    // Bitwise negation (~) applied to the group aggregate in the result selector.
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateGroupByReduceDataSet());
        IQueryable<int> numbers = sets.Select(s => s.First());

        var results = numbers
                      .GroupBy(n => n % 2, (k, g) => new KeyValuePair<int, int>(k, ~g.Sum()))
                      .ToArray();

        // Local sort so that keys are in order.
        var resultsSorted = results.OrderBy(pair => pair.Key).ToArray();

        // key0: count = 6, sum = 42
        // key1: count = 6, sum = 36
        passed &= (resultsSorted[0].Key == 0);     // "incorrect results.1"
        passed &= (resultsSorted[0].Value == ~42); // "incorrect results.2"
        passed &= (resultsSorted[1].Key == 1);     // "incorrect results.3"
        passed &= (resultsSorted[1].Value == ~36); // "incorrect results.4"
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool ToStoreGetEnumeratorThrows() // pass
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        string outFile = "unittest/output/ToStoreGetEnumeratorThrows.txt";

        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        IQueryable<int> shifted = numbers.Select(n => 100 + n);
        var stored = shifted.ToStore(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName, outFile), true);
        stored.GetEnumerator();
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool GroupByReduce_BuiltIn_First()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateGroupByReduceDataSet());
        IQueryable<int> data = sets.Select(s => s.First());

        int[] aggregates = data
                           .GroupBy(n => 0, (key, seq) => seq.First())
                           .ToArray();

        // The output of First can be the first item of either partition.
        passed &= aggregates.SequenceEqual(new[] { 1 }) || aggregates.SequenceEqual(new[] { 101 });
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool GetEnumeratorNonToStoreTerminated()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        IQueryable<int> shifted = numbers.Select(n => 100 + n);
        IQueryable<int> filtered = shifted.Where(n => true);

        foreach (int value in filtered) // throws here
        {
            //Console.WriteLine(value);
        }

        //@TODO: perform a sequence-equals test.
        //IQueryable<LineRecord> format = q2.Select(x => new LineRecord(String.Format("{0}", x)));
        //DryadLinqJobInfo output = format.ToStore(AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName,
        //                                                               "unittest/output/test2.txt")).SubmitAndWait();
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool DistributiveSelect_1()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        // this result selector satisfies "DistributiveOverConcat"
        int[] aggregates = numbers.GroupBy(n => n % 2).Select(group => group.Sum()).ToArray();
        int[] expected = new[] { 1 + 3 + 5 + 7 + 9 + 11, 2 + 4 + 6 + 8 + 10 + 12 };

        // The order of the result elements is not guaranteed, so order them before testing.
        int[] actualSorted = aggregates.OrderBy(v => v).ToArray();
        int[] expectedSorted = expected.OrderBy(v => v).ToArray();
        passed &= actualSorted.SequenceEqual(expectedSorted);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool Bug11782_Aggregate()
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        string outFile = "unittest/output/Bug11782_Aggregate.out";

        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        // test Aggregate()
        var c = numbers.Select(n => n).Aggregate((a, b) => a + b);

        // test AggregateAsQuery()
        var q = numbers.Select(n => n).AggregateAsQuery((a, b) => a + b).ToStore(outFile);
        DryadLinqJobInfo info = DryadLinqQueryable.Submit(q);
        info.Wait();

        passed &= Utils.FileExists(Config.accountName, Config.storageKey, Config.containerName, outFile);
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
/// <summary>
/// Returns the cached value for <paramref name="filePath"/>, computing and caching it
/// via <paramref name="populateFunction"/> under a named mutex when absent.
/// Returns default(T) when the underlying file does not exist.
/// </summary>
public T GetValue(string filePath, Func<string, T> populateFunction)
{
    // Fast path: already cached.
    T cacheValue = this.GetValue(filePath, true);
    if (cacheValue != null)
    {
        return cacheValue;
    }

    if (!this.azureManager.FileExists(filePath))
    {
        return default(T);
    }

    string name = AzureUtils.FilePathToName(filePath);

    // FIX: the named Mutex was never disposed, leaking an OS handle on every
    // cache miss (CA2000). The using block guarantees disposal.
    using (var mutex = new Mutex(false, $"blob-{name}"))
    {
        try
        {
            mutex.WaitOne();

            // Re-check after acquiring the mutex: another holder may have populated the cache.
            T cacheValueAgain = this.GetValue(filePath, true);
            if (cacheValueAgain != null)
            {
                return cacheValueAgain;
            }

            T calculatedValue = populateFunction(filePath);
            this.AddOrUpdateCache(filePath, calculatedValue);
            return calculatedValue;
        }
        finally
        {
            mutex.ReleaseMutex();
        }
    }
}
public static bool ToStoreSubmitGetEnumerator() // pass
{
    var context = new DryadLinqContext(Config.cluster);
    context.LocalExecution = false;

    bool passed = true;
    try
    {
        string outFile = "unittest/output/ToStoreSubmitGetEnumerator.txt";

        IQueryable<LineRecord> source = context.FromStore<LineRecord>(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName,
                                  "unittest/inputdata/SimpleFile.txt"));
        IQueryable<IEnumerable<int>> sets = source.Apply(s => DataGenerator.CreateSimpleFileSets());
        IQueryable<int> numbers = sets.Select(s => s.First());

        var partitioned = numbers.Select(n => 100 + n).HashPartition(n => n);
        var filtered = partitioned.Where(n => true);
        IQueryable<int> output = filtered.ToStore(
            AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName, outFile),
            true);
        DryadLinqJobInfo info = output.SubmitAndWait();

        foreach (int value in output) // should not run a new dryad job.
        {
            //Console.WriteLine(value);
        }
    }
    catch (DryadLinqException)
    {
        passed &= false;
    }
    return passed;
}
public static bool Bug15371_NoDataMembersForSerialization(DryadLinqContext context)
{
    string testName = "Bug15371_NoDataMembersForSerialization";
    TestLog.TestStart(testName);

    bool passed = true;
    try
    {
        // cluster
        {
            context.LocalDebug = false;
            string outFile = "unittest/output/Bug15371_NoDataMembersForSerialization";

            IQueryable<int> data = DataGenerator.GetSimpleFileSets(context);
            IQueryable<NoDataMembersClass> output = data.Select(n => new NoDataMembersClass());
            var jobInfo = output
                          .ToStore(AzureUtils.ToAzureUri(Config.accountName, Config.containerName, outFile), true)
                          .Submit();
            jobInfo.Wait();

            var result = context.FromStore<NoDataMembersClass>(
                AzureUtils.ToAzureUri(Config.accountName, Config.storageKey, Config.containerName, outFile)).ToArray();

            passed &= false; // an exception should have been thrown before this point
        }
    }
    catch (DryadLinqException ex)
    {
        // Accept the expected error code either directly or wrapped one level deep.
        passed &= (ex.ErrorCode == ReflectionHelper.GetDryadLinqErrorCode("TypeMustHaveDataMembers") ||
                   ex.InnerException != null &&
                   ((DryadLinqException)ex.InnerException).ErrorCode ==
                   ReflectionHelper.GetDryadLinqErrorCode("TypeMustHaveDataMembers"));
    }
    TestLog.LogResult(new TestResult(testName, context, passed));
    return passed;
}
private void TransferFromAzureToAzure(string inputAzureUri, string outputAzureRootUri, string localDailyRootFolder)
{
    Console.WriteLine($"Processing {inputAzureUri}");

    // Derive the sub-folder name and locale from the last two URI segments.
    var segments = inputAzureUri.Trim('/').Split('/');
    string subFolderName = segments.Last().Trim().Replace(":", "");
    string locale = Dict[segments[segments.Length - 2]];

    string downloadFolder = Path.Combine(WorkFolder, "Wave", subFolderName, "Download");
    string intermediaFolder = Path.Combine(WorkFolder, "Wave", subFolderName, "Intermedia");
    string uploadFolder = Path.Combine(Cfg.AudioRootFolder, locale, subFolderName);
    string localDailyFolder = Path.Combine(localDailyRootFolder, subFolderName);
    string reportFolderPath = Path.Combine(Cfg.ReportRootFolderPath, subFolderName);
    string reportFilePath = Path.Combine(reportFolderPath, "Report.txt");
    string errorFilePath = Path.Combine(reportFolderPath, "Error.txt");
    string outputAzureUri = AzureUtils.PathCombine(outputAzureRootUri, locale, subFolderName);

    // Ensure every local working directory exists before the pipeline runs.
    Directory.CreateDirectory(downloadFolder);
    Directory.CreateDirectory(intermediaFolder);
    Directory.CreateDirectory(uploadFolder);
    Directory.CreateDirectory(localDailyFolder);
    Directory.CreateDirectory(reportFolderPath);

    // Download -> check/transfer -> copy to daily folder -> upload.
    Download(inputAzureUri, downloadFolder);
    CheckAndTransfer(reportFilePath, errorFilePath, downloadFolder, intermediaFolder, uploadFolder);
    Copy(uploadFolder, localDailyFolder);
    Upload(uploadFolder, outputAzureUri);
}
public void DirectoryPathToPrefix(string filePath)
{
    string result = AzureUtils.DirectoryPathToPrefix(filePath);

    // The generated prefix is rooted at "media" and ends with a path separator.
    Assert.StartsWith("media", result);
    Assert.EndsWith("/", result);
}
/// <summary>
/// Downloads the desired-config blob to <paramref name="downloadedZipPath"/>, guarded
/// by a blob lease. Returns false when the blob is missing, the lease cannot be
/// acquired, or any storage/IO error occurs (errors are logged, not thrown).
/// </summary>
private bool DownloadConfig_Azure(string downloadedZipPath, string AzureAccountName, string AzureAccountKey)
{
    CloudStorageAccount storageAccount = null;
    CloudBlobClient blobClient = null;
    CloudBlobContainer container = null;
    CloudBlockBlob blockBlob = null;
    string leaseId = null;
    try
    {
        storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(AzureAccountName, AzureAccountKey), true);
        blobClient = storageAccount.CreateCloudBlobClient();
        container = blobClient.GetContainerReference(AzureConfigContainerName);
        container.CreateIfNotExist();
        blockBlob = container.GetBlockBlobReference(DesiredConfigBlobName());

        bool blobExists = AzureUtils.BlockBlobExists(logger, blockBlob);
        if (blobExists)
        {
            leaseId = AzureUtils.AcquireLease(logger, blockBlob, AzureBlobLeaseTimeout); // Acquire Lease on Blob
        }
        else
        {
            return(false);
        }

        if (blobExists && leaseId == null)
        {
            Utils.structuredLog(logger, "ER", "AcquireLease on Blob: " + DesiredConfigBlobName() + " Failed");
            return(false);
        }

        string url = blockBlob.Uri.ToString();
        if (blockBlob.ServiceClient.Credentials.NeedsTransformUri)
        {
            url = blockBlob.ServiceClient.Credentials.TransformUri(url);
        }

        var req = BlobRequest.Get(new Uri(url), AzureBlobLeaseTimeout, null, leaseId);
        blockBlob.ServiceClient.Credentials.SignRequest(req);

        // FIX: dispose the response, its stream, and the FileStream deterministically.
        // The original leaked the FileStream on exception and called req.GetResponse()
        // a second time solely to close the stream.
        using (var response = req.GetResponse())
        using (var responseStream = response.GetResponseStream())
        using (var zipFile = new FileStream(downloadedZipPath, FileMode.OpenOrCreate))
        {
            responseStream.CopyTo(zipFile);
        }

        AzureUtils.ReleaseLease(logger, blockBlob, leaseId, AzureBlobLeaseTimeout); // Release Lease on Blob
        return(true);
    }
    catch (Exception e)
    {
        Utils.structuredLog(logger, "E", e.Message + ". DownloadConfig_Azure, downloadZipPath: " + downloadedZipPath + ". " + e);
        // FIX: only release when a lease was actually acquired; the original could call
        // ReleaseLease with a null blob reference if the failure happened early.
        if (blockBlob != null && leaseId != null)
        {
            AzureUtils.ReleaseLease(logger, blockBlob, leaseId, AzureBlobLeaseTimeout);
        }
        return(false);
    }
}
public void FilePathToName(string filePath)
{
    string result = AzureUtils.FilePathToName(filePath);

    // The generated name keeps the "media" root and carries a ".yml" extension.
    Assert.StartsWith("media", result);
    Assert.EndsWith(".yml", result);
}
/// <summary>
/// Uploads the zipped config at configZipPath to the actual-config blob, guarded by a
/// blob lease when the blob already exists. Returns true on success; all failures are
/// logged and reported as false rather than thrown.
/// </summary>
private bool UploadConfig_Azure(string configZipPath, string AzureAccountName, string AzureAccountKey)
{
    CloudStorageAccount storageAccount = null; CloudBlobClient blobClient = null; CloudBlobContainer container = null; CloudBlockBlob blockBlob = null; string leaseId = null;
    try
    {
        storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(AzureAccountName, AzureAccountKey), true);
        blobClient = storageAccount.CreateCloudBlobClient();
        container = blobClient.GetContainerReference(AzureConfigContainerName);
        container.CreateIfNotExist();
        blockBlob = container.GetBlockBlobReference(ActualConfigBlobName());
        bool blobExists = AzureUtils.BlockBlobExists(logger, blockBlob);
        if (blobExists)
        {
            leaseId = AzureUtils.AcquireLease(logger, blockBlob, AzureBlobLeaseTimeout); // Acquire Lease on Blob
        }
        else
        {
            // New blob: no lease needed; ensure the container exists before the Put.
            blockBlob.Container.CreateIfNotExist();
        }
        if (blobExists && leaseId == null)
        {
            Utils.structuredLog(logger, "ER", "AcquireLease on Blob: " + ActualConfigBlobName() + " Failed");
            return(false);
        }
        // Build and sign a raw Put request; the lease id (when held) authorizes the write.
        string url = blockBlob.Uri.ToString();
        if (blockBlob.ServiceClient.Credentials.NeedsTransformUri)
        {
            url = blockBlob.ServiceClient.Credentials.TransformUri(url);
        }
        var req = BlobRequest.Put(new Uri(url), AzureBlobLeaseTimeout, new BlobProperties(), BlobType.BlockBlob, leaseId, 0);
        // NOTE(review): body is written before SignRequest, and the request is only sent
        // on GetResponse() — this ordering matches the legacy storage client protocol;
        // do not reorder without verifying against that SDK.
        using (var writer = new BinaryWriter(req.GetRequestStream()))
        {
            writer.Write(File.ReadAllBytes(configZipPath));
            writer.Close();
        }
        blockBlob.ServiceClient.Credentials.SignRequest(req);
        req.GetResponse().Close();
        AzureUtils.ReleaseLease(logger, blockBlob, leaseId, AzureBlobLeaseTimeout); // Release Lease on Blob
        return(true);
    }
    catch (Exception e)
    {
        Utils.structuredLog(logger, "E", e.Message + ". UploadConfig_Azure, configZipPath: " + configZipPath + ". " + e);
        // NOTE(review): blockBlob/leaseId may still be null here if the failure happened
        // early — this assumes AzureUtils.ReleaseLease tolerates nulls; verify.
        AzureUtils.ReleaseLease(logger, blockBlob, leaseId, AzureBlobLeaseTimeout);
        return(false);
    }
}