/// <summary>
/// Calculates the size of the blob-serialized form of this object.
/// </summary>
/// <returns>The needed size in bytes.</returns>
public virtual System.Int32 CalculateBlobSize()
{
    if (m_InitialSize == -1)
    {
        // Lazily look up the size once and cache it for later calls.
        m_InitialSize = BlobOperations.GetInitialSize(ClassTypeId);
    }
    return m_InitialSize;
}
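The -1 sentinel makes the lookup lazy: the first call pays for BlobOperations.GetInitialSize, and every later call returns the cached value. A minimal caller sketch, assuming a hypothetical derived type named BlobHeader (not part of the snippet):

// BlobHeader is an assumed derived type, used only for illustration.
var header = new BlobHeader();
int size = header.CalculateBlobSize();     // first call: queries BlobOperations.GetInitialSize
byte[] buffer = new byte[size];            // e.g. allocate a serialization buffer
int cached = header.CalculateBlobSize();   // later calls: return the cached m_InitialSize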
private static async Task MonitorAsync(BatchJob batchJob)
{
    // Create the Batch and Blob operation clients.
    BatchOperations batchOperations = new BatchOperations(authConfig.BatchAccountName,
        authConfig.BatchAccountKey, authConfig.BatchAccountUrl);
    BlobOperations blobOperations = new BlobOperations(authConfig.StorageAccountName,
        authConfig.StorageAccountKey);

    // Wait for all tasks in the job to complete, up to the configured timeout.
    var taskResult = await batchOperations.MonitorTasks(
        jobId: JobId,
        timeout: TimeSpan.FromHours(config.TaskWaitHours));

    var outputFolderPath = Path.GetFullPath(Environment.ExpandEnvironmentVariables(config.OutputFolderPath));
    await blobOperations.DownloadOutputFiles(outputFolderPath, batchJob.OutputContainerID);

    try
    {
        PSharpOperations.MergeOutputCoverageReport(outputFolderPath,
            Path.GetFullPath(Environment.ExpandEnvironmentVariables(config.PSharpBinariesFolderPath)));
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        Console.WriteLine(e.StackTrace);
    }

    // All tasks completed; clean up according to the configuration flags.
    Console.WriteLine();
    if (config.DeleteJobAfterDone)
    {
        await batchOperations.DeleteJobAsync(JobId);
    }

    Console.WriteLine();
    if (config.DeleteContainerAfterDone)
    {
        await blobOperations.DeleteAllContainers(batchJob);
    }

    if (config.DeletePoolAfterDone)
    {
        await blobOperations.DeleteNodeContainer(config.PoolId);
        await batchOperations.DeletePoolAsync(config.PoolId);
    }
}
public static async Task BlobStorage()
{
    var blobSettings = new SomeBlobSettings();
    var blobOperations = new BlobOperations(blobSettings);

    const string path = @"some/blobs";
    var filePath = Path.Combine(path, "blob.json");

    // Upload a small JSON document, then verify it exists and list the blobs under the path.
    await blobOperations.UploadTextToBlobAsync(filePath, "{ \"message\" : \"document\" }");
    var exists = await blobOperations.DoesFileExistsAsync(filePath);
    var listBlobItems = blobOperations.GetBlobs(path);
}
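Neither the existence flag nor the listing is consumed in the snippet; a hedged follow-up sketch, assuming GetBlobs returns an enumerable whose items expose a Name property (the item type is not shown above):

if (!exists)
{
    Console.WriteLine($"Upload of {filePath} did not land.");
}
foreach (var blobItem in listBlobItems)
{
    // Name is an assumption; substitute the actual member of the returned item type.
    Console.WriteLine(blobItem.Name);
}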
public RegistryClient(RegistryClientConfiguration configuration, AuthenticationProvider authenticationProvider)
{
    if (configuration == null)
    {
        throw new ArgumentNullException(nameof(configuration));
    }

    if (authenticationProvider == null)
    {
        throw new ArgumentNullException(nameof(authenticationProvider));
    }

    _client = new NetworkClient(configuration, authenticationProvider);

    Manifest = new ManifestOperations(_client);
    Catalog = new CatalogOperations(_client);
    Blobs = new BlobOperations(_client);
    BlobUploads = new BlobUploadOperations(_client);
    System = new SystemOperations(_client);
    Tags = new TagOperations(_client);
}
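Putting this constructor to use looks roughly like the sketch below; the configuration argument and the concrete AuthenticationProvider are assumptions for illustration, not the library's documented surface:

// Hypothetical setup; argument names and values are placeholders.
var configuration = new RegistryClientConfiguration("registry.example.com");
var authProvider = new AuthenticationProvider();
var registry = new RegistryClient(configuration, authProvider);
// The operation groups are then ready to use:
// registry.Manifest, registry.Catalog, registry.Blobs,
// registry.BlobUploads, registry.System, registry.Tags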
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
    V2Support = new V2SupportOperations(this);
    Manifests = new ManifestsOperations(this);
    Blob = new BlobOperations(this);
    Repository = new RepositoryOperations(this);
    Tag = new TagOperations(this);
    RefreshTokens = new RefreshTokensOperations(this);
    AccessTokens = new AccessTokensOperations(this);
    BaseUri = "{url}";
    AcceptLanguage = "en-US";
    LongRunningOperationRetryTimeout = 30;
    GenerateClientRequestId = true;
    SerializationSettings = new JsonSerializerSettings
    {
        Formatting = Newtonsoft.Json.Formatting.Indented,
        DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
        DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
        NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
        ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
        ContractResolver = new ReadOnlyJsonContractResolver(),
        Converters = new List<JsonConverter>
        {
            new Iso8601TimeSpanConverter()
        }
    };
    DeserializationSettings = new JsonSerializerSettings
    {
        DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
        DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
        NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
        ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
        ContractResolver = new ReadOnlyJsonContractResolver(),
        Converters = new List<JsonConverter>
        {
            new Iso8601TimeSpanConverter()
        }
    };
    CustomInitialize();
    DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
}
public ProfileManagerController()
{
    blobOperations = new BlobOperations();
    tableOperations = new TableOperations();
}
public OrderEntityController()
{
    tableOperations = new TableOperations();
    blobOperations = new BlobOperations();
}
private static async Task MainAsync()
{
    // Create the Batch and Blob operation clients.
    BatchOperations batchOperations = new BatchOperations(authConfig.BatchAccountName,
        authConfig.BatchAccountKey, authConfig.BatchAccountUrl);
    BlobOperations blobOperations = new BlobOperations(authConfig.StorageAccountName,
        authConfig.StorageAccountKey);

    // Pool operations: create the pool only if it doesn't already exist.
    if (!(await batchOperations.CheckIfPoolExists(config.PoolId)))
    {
        // Upload the application and its dependencies to Azure Storage and get the resource objects.
        var nodeFiles = await blobOperations.UploadNodeFiles(config.PSharpBinariesFolderPath, config.PoolId);

        // Create the pool.
        await batchOperations.CreatePoolIfNotExistAsync(
            poolId: config.PoolId,
            resourceFiles: nodeFiles,
            numberOfNodes: config.NumberOfNodesInPool,
            OSFamily: config.NodeOsFamily,
            VirtualMachineSize: config.NodeVirtualMachineSize,
            NodeStartCommand: PSharpBatchTestCommon.Constants.PSharpDefaultNodeStartCommand,
            NodeMaxConcurrentTasks: config.NodeMaxConcurrentTasks);
    }

    string executingDirectory = Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location);

    // Job details: the job manager executable shipped alongside this assembly.
    string jobManagerFilePath = Path.Combine(executingDirectory, @".\PSharpBatchJobManager\PSharpBatchJobManager.exe");

    string jobTimeStamp = PSharpBatchTestCommon.Constants.GetTimeStamp();
    JobId = config.JobDefaultId + jobTimeStamp;

    // Create the BatchJob object.
    var batchJob = new BatchJob();
    batchJob.PoolID = config.PoolId;
    batchJob.JobID = JobId;

    // Upload the input files to Azure Storage and get the resource objects.
    var inputTupleRes = await blobOperations.UploadInputFilesFromTestEntities(config.TestEntities, config.PoolId, JobId);
    var inputFilesDict = inputTupleRes.Item1;
    batchJob.InputContainerIDs = inputTupleRes.Item2;

    // Upload the job manager files.
    var jobTupleRes = await blobOperations.UploadJobManagerFiles(jobManagerFilePath, config.PoolId, JobId);
    var jobManagerFiles = jobTupleRes.Item1;
    batchJob.JobManagerContainerID = jobTupleRes.Item2;

    batchJob.OutputContainerID = await blobOperations.CreateOutputContainer(config.PoolId, JobId);
    var outputContainerSasUrl = blobOperations.GetOutputContainerSasUrl(batchJob.OutputContainerID);

    var numberOfTasks = config.TestEntities.Select(t => t.NumberOfTasks()).Sum();

    // Create the job.
    await batchOperations.CreateJobAsync(
        jobId: JobId,
        poolId: config.PoolId,
        resourceFiles: jobManagerFiles,
        outputContainerSasUrl: outputContainerSasUrl,
        numberOfTasks: numberOfTasks,
        timeoutInHours: config.TaskWaitHours);

    // Add the tasks.
    await batchOperations.AddTasksFromTestEntities(
        jobId: JobId,
        taskIDPrefix: config.TaskDefaultId,
        inputFilesDict: inputFilesDict,
        TestEntities: config.TestEntities);

    var outputFolderPath = Path.GetFullPath(Environment.ExpandEnvironmentVariables(config.OutputFolderPath));

    // Persist the BatchJob descriptor so a later run can monitor or clean up this job.
    Directory.CreateDirectory(outputFolderPath);
    var batchJobPath = Path.Combine(outputFolderPath, "batchjob.psbatch");
    batchJob.SaveAsXML(batchJobPath);

    Logger.FlushLogs();

    if (config.MonitorBatch)
    {
        await MonitorAsync(batchJob);
    }
}
public AzureStorageBlobController()
{
    blobOperations = new BlobOperations();
    tableOperations = new TableOperations();
}
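The three controllers above new up BlobOperations and TableOperations directly, which makes them hard to test in isolation. A common alternative is constructor injection; the sketch below assumes hypothetical IBlobOperations/ITableOperations abstractions that the original code does not define:

// Hypothetical interfaces; the snippets above construct concrete types directly.
public class AzureStorageBlobController : Controller
{
    private readonly IBlobOperations blobOperations;
    private readonly ITableOperations tableOperations;

    public AzureStorageBlobController(IBlobOperations blobOperations, ITableOperations tableOperations)
    {
        this.blobOperations = blobOperations;
        this.tableOperations = tableOperations;
    }
}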