/// <summary>
/// Builds the <see cref="UploadContext"/> for uploading the local VHD to the destination blob.
/// Validates the local VHD, ensures the destination container exists, takes a machine-wide
/// mutex keyed on the destination URI, and then either resumes an interrupted upload
/// (blob exists without an MD5 hash and with matching upload metadata) or creates a fresh
/// remote blob. On any failure after the mutex is acquired, the mutex is released so other
/// processes are not blocked.
/// </summary>
/// <returns>A fully populated context holding the destination blob, the held single-instance mutex,
/// the uploadable ranges/data, and the local VHD's MD5 hash.</returns>
/// <exception cref="InvalidOperationException">Thrown when a completed image (one with an MD5 hash)
/// already exists at the destination and Overwrite was not requested.</exception>
public UploadContext Create()
{
    AssertIfValidhVhd(localVhd);
    AssertIfValidVhdSize(localVhd);
    this.blobObjectFactory.CreateContainer(blobDestination);
    UploadContext context = null;
    // 'completed' gates the finally block: only release the mutex on a failed path.
    bool completed = false;
    try
    {
        // Acquire the cross-process mutex as part of context construction; if this throws,
        // context stays null and the finally block has nothing to clean up.
        context = new UploadContext
        {
            DestinationBlob = destinationBlob,
            SingleInstanceMutex = AcquireSingleInstanceMutex(destinationBlob.Uri)
        };
        if (overWrite)
        {
            // Overwrite requested: remove the blob (and snapshots) before the Exists check,
            // so the flow below falls into the fresh-blob branch.
            destinationBlob.DeleteIfExists(DeleteSnapshotsOption.IncludeSnapshots, null, requestOptions);
        }
        if (destinationBlob.Exists(requestOptions))
        {
            Program.SyncOutput.MessageResumingUpload();
            // A stored MD5 hash marks a COMPLETED upload — refuse to clobber it silently.
            if (destinationBlob.GetBlobMd5Hash(requestOptions) != null)
            {
                throw new InvalidOperationException(
                    "An image already exists in blob storage with this name. If you want to upload again, use the Overwrite option.");
            }
            // Partial upload found: verify its metadata matches this operation before resuming.
            var metaData = destinationBlob.GetUploadMetaData();
            AssertMetaDataExists(metaData);
            AssertMetaDataMatch(metaData, OperationMetaData);
            PopulateContextWithUploadableRanges(localVhd, context, true);
            PopulateContextWithDataToUpload(localVhd, context);
        }
        else
        {
            CreateRemoteBlobAndPopulateContext(context);
        }
        context.Md5HashOfLocalVhd = MD5HashOfLocalVhd;
        completed = true;
    }
    finally
    {
        // Failure path only: release and close the single-instance mutex so other
        // uploaders are not locked out. On success, the caller owns the mutex via context.
        if (!completed && context != null)
        {
            context.SingleInstanceMutex.ReleaseMutex();
            context.SingleInstanceMutex.Close();
        }
    }
    return(context);
}
/// <summary>
/// Removes a VHD page blob from a storage account and then removes the container that held it.
/// </summary>
/// <param name="containerName">Name of the Azure Storage container that holds the VHD file.</param>
/// <param name="vhdFileUri">Absolute <see cref="Uri"/> of the VHD blob inside the storage account.</param>
/// <param name="useDevelopment">True to target the Azure Storage Emulator; false to authenticate with the supplied account credentials.</param>
/// <param name="accountName">Storage account name, used only when <paramref name="useDevelopment"/> is false.</param>
/// <param name="accountKey">Storage account key, used only when <paramref name="useDevelopment"/> is false.</param>
public static void DeleteVhdFileInContainer(string containerName, Uri vhdFileUri, bool useDevelopment = true, string accountName = null, string accountKey = null)
{
    var storageAccount = GetStorageAccount(useDevelopment, accountName, accountKey);
    var blobClient = storageAccount.CreateCloudBlobClient();
    var vhdContainer = blobClient.GetContainerReference(containerName);
    var vhdBlob = new CloudPageBlob(vhdFileUri, blobClient);
    // Delete the blob first, then its container; both calls are no-ops if already gone.
    vhdBlob.DeleteIfExists();
    vhdContainer.DeleteIfExists();
}
/// <summary>
/// Deletes every blob in the supplied list. Blobs that no longer exist are skipped silently.
/// </summary>
/// <param name="blobs">Blobs to delete; an empty list is a no-op.</param>
static void DeleteOldBlobs(List <ICloudBlob> blobs)
{
    // ICloudBlob declares DeleteIfExists directly, so no downcast is needed.
    // The previous implementation cast every non-CloudBlockBlob element to
    // CloudPageBlob, which threw InvalidCastException for any other blob kind
    // (e.g. CloudAppendBlob).
    foreach (ICloudBlob blob in blobs)
    {
        blob.DeleteIfExists();
    }
}
/// <summary>
/// Verifies that the *IfExists create/delete operations — synchronous and APM
/// (Begin/End) variants alike — are sent only to the primary endpoint for
/// containers, block blobs, and page blobs.
/// </summary>
public void BlobIfExistsShouldNotHitSecondary()
{
    AssertSecondaryEndpoint();

    BlobRequestOptions requestOptions = new BlobRequestOptions();
    CloudBlobContainer testContainer = BlobTestBase.GetRandomContainerReference();

    // Container-level create/delete must stay on the primary endpoint.
    TestPrimaryOnlyCommand((opt, ctx) => testContainer.CreateIfNotExists(opt, ctx), requestOptions);
    TestPrimaryOnlyCommand((opt, ctx) => testContainer.EndCreateIfNotExists(testContainer.BeginCreateIfNotExists(opt, ctx, null, null)), requestOptions);
    TestPrimaryOnlyCommand((opt, ctx) => testContainer.DeleteIfExists(null, opt, ctx), requestOptions);
    TestPrimaryOnlyCommand((opt, ctx) => testContainer.EndDeleteIfExists(testContainer.BeginDeleteIfExists(null, opt, ctx, null, null)), requestOptions);

    // Block blob delete (sync and APM) must also stay on the primary.
    CloudBlockBlob blockBlob = testContainer.GetBlockBlobReference("blob1");
    TestPrimaryOnlyCommand((opt, ctx) => blockBlob.DeleteIfExists(DeleteSnapshotsOption.None, null, opt, ctx), requestOptions);
    TestPrimaryOnlyCommand((opt, ctx) => blockBlob.EndDeleteIfExists(blockBlob.BeginDeleteIfExists(DeleteSnapshotsOption.None, null, opt, ctx, null, null)), requestOptions);

    // Same expectation for page blobs.
    CloudPageBlob pageBlob = testContainer.GetPageBlobReference("blob2");
    TestPrimaryOnlyCommand((opt, ctx) => pageBlob.DeleteIfExists(DeleteSnapshotsOption.None, null, opt, ctx), requestOptions);
    TestPrimaryOnlyCommand((opt, ctx) => pageBlob.EndDeleteIfExists(pageBlob.BeginDeleteIfExists(DeleteSnapshotsOption.None, null, opt, ctx, null, null)), requestOptions);
}
/// <summary>
/// Creates a page blob backing an <see cref="AzureBlobDataSet"/> with the given schema and
/// writes the supplied per-variable arrays into it. Blob layout: page 1 holds the JSON schema
/// size, subsequent pages hold the JSON schema, then each variable's data follows at a
/// 512-byte-aligned offset (variables with provided data are placed first).
/// </summary>
/// <param name="uri">A DataSet URI or plain URI identifying the target container/blob and credentials.</param>
/// <param name="schema">Dimensions, variables, and metadata of the set being created.</param>
/// <param name="dataToPut">Variable name → data array; only these variables get their data written.</param>
/// <returns>An <see cref="AzureBlobDataSet"/> bound to the freshly written blob.</returns>
public static AzureBlobDataSet CreateSetWithSmallData(string uri, SerializableDataSetSchema schema, IDictionary <string, Array> dataToPut)
{
    SerializableDataSetSchema info = schema;
    List <SerializableDimension> dimensions = schema.Dimensions.ToList();
    List <SerializableVariableSchema> varsUnsorted = schema.Variables.ToList();
    List <SerializableVariableSchema> vars = new List <SerializableVariableSchema>(varsUnsorted.Count);
    //vars for which data is provided should go first
    int varsWithDataCount = 0;
    foreach (var v in varsUnsorted)
    {
        if (dataToPut.ContainsKey(v.Name))
        {
            vars.Add(v);
            ++varsWithDataCount;
        }
    }
    foreach (var v in varsUnsorted)
    {
        if (!dataToPut.ContainsKey(v.Name))
        {
            vars.Add(v);
        }
    }
    // Dimension name -> length lookup used for all size/offset arithmetic below.
    Dictionary <string, int> dimLengthDictionary = new Dictionary <string, int>(dimensions.Count);
    foreach (var i in dimensions)
    {
        dimLengthDictionary.Add(i.Name, i.Length);
    }
    long estimatedBlobSize = 512;//only scheme size on 1st page
    long[] varOffsets = new long[vars.Count];
    AzureBlobDataSetUri azureUri = null;
    if (DataSetUri.IsDataSetUri(uri))
    {
        azureUri = new AzureBlobDataSetUri(uri);
    }
    else
    {
        azureUri = AzureBlobDataSetUri.ToUri(uri);
    }
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(azureUri.ConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    // Retrieve a reference to a container
    CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
    container.CreateIfNotExist();
    CloudPageBlob blob = container.GetPageBlobReference(azureUri.Blob);
    blob.DeleteIfExists();
    int schemeSize;
    // bufferStream accumulates the whole blob image (schema + data) in memory;
    // it is flushed to the page blob in maxBlobChunk-sized WritePages calls at the end.
    using (MemoryStream bufferStream = new MemoryStream())
    {
        using (MemoryStream memStream = new MemoryStream())
        {
            // Serialize the schema to JSON so its exact byte length is known.
            DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(SerializableDataSetSchema));
            serializer.WriteObject(memStream, info);
            schemeSize = (int)memStream.Length;
            estimatedBlobSize += 512 * ((schemeSize + 511) / 512);//remembering the need to align data
            // Compute each variable's 512-aligned starting offset and the total blob size.
            for (int i = 0; i < vars.Count; ++i)
            {
                varOffsets[i] = estimatedBlobSize;
                if (vars[i].Dimensions.Length == 1)
                {
                    estimatedBlobSize += ((dimLengthDictionary[vars[i].Dimensions[0]] * vars[i].ValueSize + 511) / 512) * 512;
                }
                else
                {
                    // Multi-dimensional: each outer-dimension "row" is stored 512-aligned.
                    int rowSize = 1;
                    for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                    {
                        rowSize *= dimLengthDictionary[vars[i].Dimensions[j]];
                    }
                    estimatedBlobSize += (long)dimLengthDictionary[vars[i].Dimensions[0]] * (long)(((rowSize * vars[i].ValueSize + 511) / 512) * 512);
                }
            }
            blob.Create(estimatedBlobSize);
            //writing scheme size into the 1st page
            UTF8Encoding utf8 = new UTF8Encoding();
            using (MemoryStream sizeStream = new MemoryStream(new byte[512], true))
            {
                byte[] sizeBuf = utf8.GetBytes(schemeSize.ToString());
                sizeStream.Write(sizeBuf, 0, sizeBuf.Length);
                sizeStream.Seek(0, SeekOrigin.Begin);
                //writing scheme starting with 2nd page
                // First 512 bytes: the schema size as UTF-8 text; then the schema JSON itself.
                int sizeAligned = ((schemeSize + 511) / 512) * 512 + 512;
                byte[] scheme = new byte[sizeAligned];
                sizeStream.Seek(0, SeekOrigin.Begin);
                sizeStream.Read(scheme, 0, 512);
                memStream.Seek(0, SeekOrigin.Begin);
                memStream.Read(scheme, 512, schemeSize);
                bufferStream.Write(scheme, 0, sizeAligned);
            }
        }
        // Append the data for the variables that have it (they were sorted to the front).
        for (int i = 0; i < varsWithDataCount; ++i)
        {
            if (vars[i].Dimensions.Length == 1)
            {
                int len = dimLengthDictionary[vars[i].Dimensions[0]];
                var data = dataToPut[vars[i].Name];
                // DateTime values are stored as their Ticks (Int64) representation.
                if (vars[i].Type == typeof(DateTime))
                {
                    var temp = new Int64[data.Length];
                    for (int j = 0; j < temp.Length; ++j)
                    {
                        temp[j] = ((DateTime)data.GetValue(j)).Ticks;
                    }
                    data = temp;
                }
                int bufferSize = 512 * ((len * vars[i].ValueSize + 511) / 512);
                byte[] buffer = new byte[bufferSize];
                Buffer.BlockCopy(data, 0, buffer, 0, len * vars[i].ValueSize);
                bufferStream.Write(buffer, 0, bufferSize);
            }
            else
            {
                int outerDimLen = dimLengthDictionary[vars[i].Dimensions[0]];
                int rowLen = vars[i].ValueSize;
                for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                {
                    rowLen *= dimLengthDictionary[vars[i].Dimensions[j]];
                }
                int rowLenUnaligned = rowLen;
                rowLen = 512 * ((rowLen + 511) / 512);
                byte[] buffer = new byte[rowLen];
                Array data = dataToPut[vars[i].Name];
                // DateTime arrays of any rank are converted element-wise to Int64 Ticks.
                if (vars[i].Type == typeof(DateTime))
                {
                    int[] shapeTemp = new int[data.Rank];
                    for (int k = 0; k < shapeTemp.Length; ++k)
                    {
                        shapeTemp[k] = data.GetUpperBound(k) + 1;
                    }
                    Array temp = Array.CreateInstance(typeof(Int64), shapeTemp);
                    int[] resPos = new int[shapeTemp.Length];
                    for (int k = 0; k < resPos.Length; ++k)
                    {
                        resPos[k] = 0;
                    }
                    do
                    {
                        temp.SetValue(((DateTime)data.GetValue(resPos)).Ticks, resPos);
                    }while (Move(resPos, shapeTemp));
                    data = temp;
                }
                // Write one 512-aligned row per outer-dimension index.
                for (int j = 0; j < outerDimLen; ++j)
                {
                    Buffer.BlockCopy(data, j * rowLenUnaligned, buffer, 0, rowLenUnaligned);
                    bufferStream.Write(buffer, 0, rowLen);
                }
            }
        }
        // Flush the in-memory image to the page blob in maxBlobChunk pieces.
        int bufferStreamSize = (int)bufferStream.Length;
        int bufferStreamSizeAligned = ((bufferStreamSize + 511) / 512) * 512;
        byte[] bufferAligned = new byte[bufferStreamSizeAligned + 512];
        bufferStream.Seek(0, SeekOrigin.Begin);
        bufferStream.Read(bufferAligned, 0, bufferStreamSize);
        for (int i = 0; i < bufferStreamSizeAligned; i += maxBlobChunk)
        {
            blob.WritePages(new MemoryStream(bufferAligned, i, Math.Min(maxBlobChunk, bufferStreamSizeAligned - i)), i);
        }
    }
    return(new AzureBlobDataSet(uri, schemeSize, info));
}
/// <summary>
/// Creates a page blob sized to hold the given schema's variables (data pages left
/// unwritten) and returns an <see cref="AzureBlobDataSet"/> over it. Blob layout:
/// page 1 holds the JSON schema size as UTF-8 text, subsequent pages hold the JSON
/// schema; each variable's data region starts at a 512-byte-aligned offset.
/// </summary>
/// <param name="uri">A DataSet URI or plain URI identifying the target container/blob and credentials.</param>
/// <param name="schema">Dimensions, variables, and metadata of the set being created.</param>
/// <returns>An <see cref="AzureBlobDataSet"/> bound to the freshly created blob.</returns>
public static AzureBlobDataSet CreateEmptySet(string uri, SerializableDataSetSchema schema)
{
    SerializableDataSetSchema info = schema;
    List <SerializableDimension> dimensions = schema.Dimensions.ToList();
    List <SerializableVariableSchema> vars = schema.Variables.ToList();
    // Dimension name -> length lookup used for all size arithmetic below.
    Dictionary <string, int> dimLengthDictionary = new Dictionary <string, int>(dimensions.Count);
    foreach (var i in dimensions)
    {
        dimLengthDictionary.Add(i.Name, i.Length);
    }
    long estimatedBlobSize = 512;//only scheme size on 1st page
    long[] varOffsets = new long[vars.Count];
    AzureBlobDataSetUri azureUri = null;
    if (DataSetUri.IsDataSetUri(uri))
    {
        azureUri = new AzureBlobDataSetUri(uri);
    }
    else
    {
        azureUri = AzureBlobDataSetUri.ToUri(uri);
    }
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(azureUri.ConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    // Retrieve a reference to a container
    CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
    container.CreateIfNotExist();
    CloudPageBlob blob = container.GetPageBlobReference(azureUri.Blob);
    blob.DeleteIfExists();
    int schemeSize;
    using (MemoryStream memStream = new MemoryStream())
    {
        // Serialize the schema to JSON so its exact byte length is known.
        DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(SerializableDataSetSchema));
        serializer.WriteObject(memStream, info);
        schemeSize = (int)memStream.Length;
        estimatedBlobSize += 512 * ((schemeSize + 511) / 512);//remembering the need to align data
        // Compute each variable's 512-aligned starting offset and the total blob size.
        for (int i = 0; i < vars.Count; ++i)
        {
            varOffsets[i] = estimatedBlobSize;
            if (vars[i].Dimensions.Length == 1)
            {
                estimatedBlobSize += ((dimLengthDictionary[vars[i].Dimensions[0]] * vars[i].ValueSize + 511) / 512) * 512;
            }
            else
            {
                // Multi-dimensional: each outer-dimension "row" is stored 512-aligned.
                int rowSize = 1;
                for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                {
                    rowSize *= dimLengthDictionary[vars[i].Dimensions[j]];
                }
                estimatedBlobSize += (long)dimLengthDictionary[vars[i].Dimensions[0]] * (long)(((rowSize * vars[i].ValueSize + 511) / 512) * 512);
            }
        }
        blob.Create(estimatedBlobSize);
        //writing scheme size into the 1st page
        UTF8Encoding utf8 = new UTF8Encoding();
        using (MemoryStream sizeStream = new MemoryStream(new byte[512], true))
        {
            byte[] sizeBuf = utf8.GetBytes(schemeSize.ToString());
            sizeStream.Write(sizeBuf, 0, sizeBuf.Length);
            sizeStream.Seek(0, SeekOrigin.Begin);
            //writing scheme starting with 2nd page
            // Assemble one buffer: 512 bytes of size text followed by the aligned schema JSON,
            // then write it to the blob from offset 0 in maxBlobChunk pieces.
            int sizeAligned = ((schemeSize + 511) / 512) * 512 + 512;
            byte[] scheme = new byte[sizeAligned];
            sizeStream.Seek(0, SeekOrigin.Begin);
            sizeStream.Read(scheme, 0, 512);
            memStream.Seek(0, SeekOrigin.Begin);
            memStream.Read(scheme, 512, schemeSize);
            for (int i = 0; i < sizeAligned; i += maxBlobChunk)
            {
                blob.WritePages(new MemoryStream(scheme, i, Math.Min(maxBlobChunk, sizeAligned - i)), i);
            }
        }
    }
    return(new AzureBlobDataSet(uri, schemeSize, info));
}
/// <summary>
/// Copies all variables of <paramref name="source"/> (plus placeholder regions for
/// <paramref name="emptyVariables"/>) into a new page blob and returns an
/// <see cref="AzureBlobDataSet"/> over it. Blob layout: page 1 holds the JSON schema size,
/// subsequent pages hold the JSON schema, then each variable's data at a 512-byte-aligned offset.
/// The container is made publicly readable.
/// </summary>
/// <param name="uri">A DataSet URI or plain URI identifying the target container/blob and credentials.</param>
/// <param name="source">DataSet whose dimensions, metadata, and variable data are copied into the blob.</param>
/// <param name="emptyVariables">Additional variables to reserve space for without writing data.</param>
/// <returns>An <see cref="AzureBlobDataSet"/> bound to the freshly written blob.</returns>
public static AzureBlobDataSet ArrangeData(string uri, DataSet source, SerializableVariableSchema[] emptyVariables)
{
    List <SerializableDimension> dimensions = new List <SerializableDimension>();
    foreach (var i in source.Dimensions)
    {
        dimensions.Add(new SerializableDimension(i.Name, i.Length));
    }
    // Existing variables first, then the empty placeholders; for i < oldVars.Count,
    // vars[i] and oldVars[i] refer to the same schema.
    List <SerializableVariableSchema> oldVars = source.Variables.Select <Variable, SerializableVariableSchema>(x => x.GetSchema().AsSerializable()).ToList();
    List <SerializableVariableSchema> vars = new List <SerializableVariableSchema>(oldVars);
    vars.AddRange(emptyVariables);
    SerializableDataSetSchema info = new SerializableDataSetSchema(dimensions.ToArray(), vars.ToArray(), source.Metadata.AsDictionary());
    Dictionary <string, int> dimLengthDictionary = new Dictionary <string, int>(dimensions.Count);
    foreach (var i in dimensions)
    {
        dimLengthDictionary.Add(i.Name, i.Length);
    }
    long estimatedBlobSize = 512;//only scheme size on 1st page
    long[] varOffsets = new long[vars.Count];
    AzureBlobDataSetUri azureUri = null;
    if (DataSetUri.IsDataSetUri(uri))
    {
        azureUri = new AzureBlobDataSetUri(uri);
    }
    else
    {
        azureUri = AzureBlobDataSetUri.ToUri(uri);
    }
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(azureUri.ConnectionString);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    // Retrieve a reference to a container
    CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
    container.CreateIfNotExist();
    container.SetPermissions(new BlobContainerPermissions { PublicAccess = BlobContainerPublicAccessType.Container });
    CloudPageBlob blob;
    int schemeSize;
    using (MemoryStream memStream = new MemoryStream())
    {
        // Serialize the schema to JSON so its exact byte length is known.
        DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(SerializableDataSetSchema));
        serializer.WriteObject(memStream, info);
        schemeSize = (int)memStream.Length;
        estimatedBlobSize += 512 * ((schemeSize + 511) / 512);//remembering the need to align data
        for (int i = 0; i < vars.Count; ++i)
        {
            varOffsets[i] = estimatedBlobSize;
            if (vars[i].Dimensions.Length == 1)
            {
                estimatedBlobSize += ((dimLengthDictionary[vars[i].Dimensions[0]] * vars[i].ValueSize + 511) / 512) * 512;
            }
            else
            {
                int rowSize = 1;
                for (int j = 1; j < vars[i].Dimensions.Length; ++j)
                {
                    rowSize *= dimLengthDictionary[vars[i].Dimensions[j]];
                }
                // FIX: promote to long BEFORE multiplying by the outer dimension length.
                // The previous all-int expression overflowed for large variables; this now
                // matches CreateEmptySet/CreateSetWithSmallData.
                estimatedBlobSize += (long)dimLengthDictionary[vars[i].Dimensions[0]] * (long)(((rowSize * vars[i].ValueSize + 511) / 512) * 512);
            }
        }
        blob = container.GetPageBlobReference(azureUri.Blob);
        blob.DeleteIfExists(); // CRITICAL: some may interfere between calls
        blob.Create(estimatedBlobSize);
        //writing scheme size into the 1st page
        UTF8Encoding utf8 = new UTF8Encoding();
        using (MemoryStream sizeStream = new MemoryStream(new byte[512], true))
        {
            byte[] sizeBuf = utf8.GetBytes(schemeSize.ToString());
            sizeStream.Write(sizeBuf, 0, sizeBuf.Length);
            sizeStream.Seek(0, SeekOrigin.Begin);
            blob.WritePages(sizeStream, 0);
        }
        //writing scheme starting with 2nd page
        int sizeAligned = ((schemeSize + 511) / 512) * 512;
        byte[] scheme = new byte[sizeAligned];
        memStream.Seek(0, SeekOrigin.Begin);
        memStream.Read(scheme, 0, schemeSize);
        for (int i = 0; i < sizeAligned; i += maxBlobChunk)
        {
            blob.WritePages(new MemoryStream(scheme, i, Math.Min(maxBlobChunk, sizeAligned - i)), 512 + i);
        }
    }
    //populating blob with values from source
    for (int i = 0; i < oldVars.Count; ++i)
    {
        if (oldVars[i].Dimensions.Length == 1)
        {
            int len = dimLengthDictionary[oldVars[i].Dimensions[0]];
            var data = source[oldVars[i].Name].GetData();
            // DateTime values are stored as their Ticks (Int64) representation.
            if (oldVars[i].Type == typeof(DateTime))
            {
                var temp = new Int64[data.Length];
                for (int j = 0; j < temp.Length; ++j)
                {
                    temp[j] = ((DateTime)data.GetValue(j)).Ticks;
                }
                data = temp;
            }
            int bufferSize = 512 * ((len * oldVars[i].ValueSize + 511) / 512);
            byte[] buffer = new byte[bufferSize];
            Buffer.BlockCopy(data, 0, buffer, 0, len * oldVars[i].ValueSize);
            for (int j = 0; j < bufferSize; j += maxBlobChunk)
            {
                blob.WritePages(new MemoryStream(buffer, j, Math.Min(maxBlobChunk, bufferSize - j)), varOffsets[i] + j);
            }
        }
        else
        {
            // Multi-dimensional: read and write one outer-dimension slice at a time so that
            // only one 512-aligned row needs to be held in memory.
            int outerDimLen = dimLengthDictionary[oldVars[i].Dimensions[0]];
            int rowLen = oldVars[i].ValueSize; // vars[i] == oldVars[i] here; use oldVars consistently
            for (int j = 1; j < oldVars[i].Dimensions.Length; ++j)
            {
                rowLen *= dimLengthDictionary[oldVars[i].Dimensions[j]];
            }
            int rowLenUnaligned = rowLen;
            rowLen = 512 * ((rowLen + 511) / 512);
            int[] origin = new int[oldVars[i].Dimensions.Length];
            int[] shape = new int[oldVars[i].Dimensions.Length];
            shape[0] = 1;
            for (int j = 1; j < shape.Length; ++j)
            {
                shape[j] = dimLengthDictionary[oldVars[i].Dimensions[j]];
            }
            byte[] buffer = new byte[rowLen];
            for (int j = 0; j < outerDimLen; ++j)
            {
                origin[0] = j;
                Array data = source[oldVars[i].Name].GetData(origin, shape);
                // DateTime slices of any rank are converted element-wise to Int64 Ticks.
                if (oldVars[i].Type == typeof(DateTime))
                {
                    int[] shapeTemp = new int[data.Rank];
                    for (int k = 0; k < shapeTemp.Length; ++k)
                    {
                        shapeTemp[k] = data.GetUpperBound(k) + 1;
                    }
                    Array temp = Array.CreateInstance(typeof(Int64), shapeTemp);
                    int[] resPos = new int[shapeTemp.Length];
                    do
                    {
                        temp.SetValue(((DateTime)data.GetValue(resPos)).Ticks, resPos);
                    }while (Move(resPos, shapeTemp));
                    data = temp;
                }
                Buffer.BlockCopy(data, 0, buffer, 0, rowLenUnaligned);
                for (int k = 0; k < rowLen; k += maxBlobChunk)
                {
                    blob.WritePages(new MemoryStream(buffer, k, Math.Min(maxBlobChunk, rowLen - k)), varOffsets[i] + (long)rowLen * (long)j + (long)k);
                }
            }
        }
    }
    //blob is prepared: values are where they gotta be, trash is everwhere else!
    return(new AzureBlobDataSet(uri, schemeSize, info));
}
/// <summary>
/// Manual smoke test: creates an empty 30000x30000 AzureBlobDataSet, writes one row of
/// doubles at the last outer index, reads it back, verifies equality, then deletes the
/// test blob. Pauses for a key press after each phase.
/// </summary>
static void Main(string[] args)
{
    var schema = new SerializableDataSetSchema(
        new SerializableDimension[] { new SerializableDimension("i", 30000), new SerializableDimension("j", 30000) },
        new SerializableVariableSchema[] { new SerializableVariableSchema("vals", typeof(double), new string[] { "i", "j" }, null) },
        null
        );
    // SECURITY NOTE(review): a real storage account key is hardcoded here. It should be
    // considered compromised, rotated, and moved to configuration/environment instead
    // of source control.
    string BlobConnectionAccountName = @"fetchclimate2";
    string BlobConnectionAccountKey = @"vQpyUA7h5QFX6VlEH944gyv/h2Kx//WDy32brNip+YKDpsrN5/pxcSOnP2igQQ5pkA8lRXkmqmAYrgB29nwo/w==";
    string uri = @"msds:ab?DefaultEndpointsProtocol=http&Container=testcontainer&Blob=testBlob30000x30000&AccountName=" + BlobConnectionAccountName + @"&AccountKey=" + BlobConnectionAccountKey;
    try
    {
        var ds = AzureBlobDataSet.CreateEmptySet(uri, schema);
        // Write a single row (1 x 30000) at the last outer index, then read it back.
        double[,] data = new double[1, 30000];
        for (int i = 0; i < 30000; ++i)
        {
            data[0, i] = (double)i;
        }
        ds["vals"].PutData(new int[] { 29999, 0 }, data);
        var recvData = (double[, ])ds["vals"].GetData(new int[] { 29999, 0 }, new int[] { 1, 30000 });
        // Round-trip check: every element must come back unchanged.
        for (int i = 0; i < 30000; ++i)
        {
            if (data[0, i] != recvData[0, i])
            {
                throw new Exception("difference at " + i.ToString());
            }
        }
        Console.WriteLine("Everything is successful!");
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    Console.ReadLine();
    //delete test blob
    try
    {
        AzureBlobDataSetUri azureUri = null;
        if (DataSetUri.IsDataSetUri(uri))
        {
            azureUri = new AzureBlobDataSetUri(uri);
        }
        else
        {
            azureUri = AzureBlobDataSetUri.ToUri(uri);
        }
        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(azureUri.ConnectionString);
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference(azureUri.Container);
        CloudPageBlob blob = container.GetPageBlobReference(azureUri.Blob);
        blob.DeleteIfExists();
        Console.WriteLine("Deleted test blob successfully!");
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    Console.ReadLine();
}