static async System.Threading.Tasks.Task Main(string[] args) { var configuration = new ConfigurationBuilder() .AddJsonFile("appsettings.json") .AddEnvironmentVariables() .Build(); Console.WriteLine(configuration.GetConnectionString("DestinationIotHubConnectionString")); Console.WriteLine(configuration.GetConnectionString("BlobSasToken")); RegistryManager sourceRegistryManager = RegistryManager.CreateFromConnectionString(configuration.GetConnectionString("SourceIotHubConnectionString")); RegistryManager destinationRegistryManager = RegistryManager.CreateFromConnectionString(configuration.GetConnectionString("DestinationIotHubConnectionString")); string containerSasUri = configuration.GetConnectionString("BlobSasToken"); Console.WriteLine($"Start export of IoT Hub devices to blob"); // Call an export job on the IoT Hub to retrieve all devices JobProperties exportJob = await sourceRegistryManager.ExportDevicesAsync(containerSasUri, false); var exportJobResult = await WaitForJobCompletion(sourceRegistryManager, exportJob); Console.WriteLine($"Start import of IoT Hub devices from blob"); JobProperties importJob = await destinationRegistryManager.ImportDevicesAsync(containerSasUri, containerSasUri); var importJobResult = await WaitForJobCompletion(destinationRegistryManager, importJob); }
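The Main method above calls a WaitForJobCompletion helper that is not included in the snippet. A minimal sketch of what such a helper might look like, assuming it simply polls GetJobAsync every few seconds and returns the job's final JobProperties (the body below is an assumption, not part of the original sample):

static async Task<JobProperties> WaitForJobCompletion(RegistryManager registryManager, JobProperties job)
{
    // Poll the hub until the import/export job reaches a terminal state.
    while (true)
    {
        job = await registryManager.GetJobAsync(job.JobId);
        Console.WriteLine($"Job {job.JobId} is {job.Status}");
        if (job.Status == JobStatus.Completed
            || job.Status == JobStatus.Failed
            || job.Status == JobStatus.Cancelled)
        {
            return job;
        }
        await Task.Delay(TimeSpan.FromSeconds(5));
    }
}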
public async Task <bool> ImportAllDevices(string connString, string containerSasUri) { bool bret = false; // Call an import job on the IoT Hub to create the devices listed in the blob RegistryManager registryManager = RegistryManager.CreateFromConnectionString(connString); JobProperties importJob = await registryManager.ImportDevicesAsync(containerSasUri, containerSasUri); // Wait until job is finished while (true) { importJob = await registryManager.GetJobAsync(importJob.JobId); if (importJob.Status == JobStatus.Completed || importJob.Status == JobStatus.Failed || importJob.Status == JobStatus.Cancelled) { if (importJob.Status == JobStatus.Completed) { bret = true; } break; } await Task.Delay(TimeSpan.FromSeconds(5)); } return(bret); }
public async Task <Response <JobProperties> > CreateImportExportJobAsync(JobProperties jobProperties, CancellationToken cancellationToken = default) { if (jobProperties == null) { throw new ArgumentNullException(nameof(jobProperties)); } using var message = CreateCreateImportExportJobRequest(jobProperties); await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false); switch (message.Response.Status) { case 200: { JobProperties value = default; using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, cancellationToken).ConfigureAwait(false); if (document.RootElement.ValueKind == JsonValueKind.Null) { value = null; } else { value = JobProperties.DeserializeJobProperties(document.RootElement); } return(Response.FromValue(value, message.Response)); }
/// Get the list of devices registered to the IoT Hub /// and export it to a blob as serialized objects. private async Task ExportDevices(string containerURI, string hubConnectionString) { try { Console.WriteLine("Creating and running registry manager job to retrieve the devices from the hub."); // Create an instance of the registry manager class. using RegistryManager registryManager = RegistryManager.CreateFromConnectionString(hubConnectionString); // Call an export job on the IoT Hub to retrieve all devices. // This writes them to devices.txt in the container. // The second parameter (excludeKeys) is false here, so authentication keys are included in the export. JobProperties exportJob = await registryManager.ExportDevicesAsync(containerURI, false).ConfigureAwait(false); // Poll every 5 seconds to see if the job has finished executing. while (true) { exportJob = await registryManager.GetJobAsync(exportJob.JobId).ConfigureAwait(false); if (exportJob.Status == JobStatus.Completed || exportJob.Status == JobStatus.Failed || exportJob.Status == JobStatus.Cancelled) { // Job has finished executing break; } await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false); } } catch (Exception ex) { Debug.Print("Error exporting devices to blob storage. Exception message = {0}", ex.Message); } }
/// Get the list of devices registered to the IoT Hub /// and export it to a blob as serialized objects. public static async Task ExportDevices(string containerURI, string hubConnectionString) { // Create an instance of the registry manager class. RegistryManager registryManager = RegistryManager.CreateFromConnectionString(hubConnectionString); // Call an export job on the IoT Hub to retrieve all devices. // This writes them to devices.txt in the container. // The second parameter (excludeKeys) is false here, so authentication keys are included in the export. JobProperties exportJob = await registryManager.ExportDevicesAsync(containerURI, false); // Poll every 5 seconds to see if the job has finished executing. while (true) { exportJob = await registryManager.GetJobAsync(exportJob.JobId); if (exportJob.Status == JobStatus.Completed || exportJob.Status == JobStatus.Failed || exportJob.Status == JobStatus.Cancelled) { // Job has finished executing break; } await Task.Delay(TimeSpan.FromSeconds(5)); } // Note: could add twin data here if you want to export it. }
private async Task <JobProperties> CreateAndWaitForJobAsync( StorageAuthenticationType storageAuthenticationType, string devicesFileName, string configsFileName, RegistryManager registryManager, Uri containerUri, ManagedIdentity identity) { int tryCount = 0; JobProperties importJobResponse = null; JobProperties jobProperties = JobProperties.CreateForImportJob( containerUri.ToString(), containerUri.ToString(), devicesFileName, storageAuthenticationType, identity); jobProperties.ConfigurationsBlobName = configsFileName; jobProperties.IncludeConfigurations = true; while (tryCount < MaxIterationWait) { try { importJobResponse = await registryManager.ImportDevicesAsync(jobProperties).ConfigureAwait(false); if (!string.IsNullOrWhiteSpace(importJobResponse.FailureReason)) { Logger.Trace($"Job failed due to {importJobResponse.FailureReason}"); } break; } // Concurrent jobs can be rejected, so implement a retry mechanism to handle conflicts with other tests catch (JobQuotaExceededException) when(++tryCount < MaxIterationWait) { Logger.Trace($"JobQuotaExceededException... waiting."); await Task.Delay(s_waitDuration).ConfigureAwait(false); continue; } } // wait for job to complete for (int i = 0; i < MaxIterationWait; ++i) { await Task.Delay(1000).ConfigureAwait(false); importJobResponse = await registryManager.GetJobAsync(importJobResponse?.JobId).ConfigureAwait(false); Logger.Trace($"Job {importJobResponse.JobId} is {importJobResponse.Status} with progress {importJobResponse.Progress}%"); if (!s_incompleteJobs.Contains(importJobResponse.Status)) { break; } } return(importJobResponse); }
public JobProperties getDefaultJobProperties() { JobProperties properties = new JobProperties(); properties.topMargin = 0; properties.interspacing = "1/8"; return(properties); }
public void PrintDocumentToFile(String Filename, String Type, String Template, Int64 documentId, String connectionstring) { datacollection = new DataSet(); port = new FilePort(Filename); printer = PrinterFactory.getPrinter(Type); port.SetDocumentName("CDS DOC " + documentId); job = printer.getDefaultJobProperties(); PrintDocumentFromTemplate(Template, documentId, connectionstring); printer.endJob(); }
public JobWindow(JobObjectHandle jobHandle) { InitializeComponent(); this.AddEscapeToClose(); this.SetTopMost(); _jobProps = new JobProperties(jobHandle); _jobProps.Dock = DockStyle.Fill; panelJob.Controls.Add(_jobProps); }
public async Task ImportDevicesAsync(string hubConnectionString, string blobContainerUri, string userDefinedManagedIdentityResourceId = null) { using RegistryManager destRegistryManager = RegistryManager.CreateFromConnectionString(hubConnectionString); // If StorageAuthenticationType is set to IdentityBased and userAssignedIdentity property is // not null, the jobs will use user defined managed identity. If the IoT hub is not // configured with the user defined managed identity specified in userAssignedIdentity, // the job will fail. // If StorageAuthenticationType is set to IdentityBased and userAssignedIdentity property is // null, the jobs will use system defined identity by default. If the IoT hub is configured with the // system defined managed identity, the job will succeed but will not use the user defined managed identity. // If the IoT hub is not configured with system defined managed identity, the job will fail. // If StorageAuthenticationType is set to IdentityBased and neither user defined nor system defined // managed identities are configured on the hub, the job will fail. JobProperties jobProperties = JobProperties.CreateForImportJob( inputBlobContainerUri: blobContainerUri, outputBlobContainerUri: blobContainerUri, storageAuthenticationType: StorageAuthenticationType.IdentityBased, identity: new ManagedIdentity { userAssignedIdentity = userDefinedManagedIdentityResourceId }); JobProperties jobResult = await destRegistryManager .ImportDevicesAsync(jobProperties); // Poll every 5 seconds to see if the job has finished executing. while (true) { jobResult = await destRegistryManager.GetJobAsync(jobResult.JobId); if (jobResult.Status == JobStatus.Completed) { break; } else if (jobResult.Status == JobStatus.Failed) { throw new Exception("Import job failed."); } else if (jobResult.Status == JobStatus.Cancelled) { throw new Exception("Import job was canceled."); } else { await Task.Delay(TimeSpan.FromSeconds(5)); } } }
public static async Task RunAsync([TimerTrigger("0 */2 * * * *")] TimerInfo myTimer, ILogger log, ExecutionContext context) { log.LogInformation($"C# Timer trigger function executed at: {DateTime.Now}"); var config = new ConfigurationBuilder() .SetBasePath(context.FunctionAppDirectory) .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true) .AddEnvironmentVariables() .Build(); var BlobConStr = config["blobConStr"]; var BlobContainer = config["blobContainer"]; var BlobURL = config["blobURL"]; var IotHubConStr = config["iotHubConStr"]; CloudStorageAccount storageAccount = CloudStorageAccount.Parse(BlobConStr); CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient(); CloudBlobContainer container = blobClient.GetContainerReference(BlobContainer); string storedPolicyName = null; string sasContainerToken; if (storedPolicyName == null) { SharedAccessBlobPolicy adHocPolicy = new SharedAccessBlobPolicy() { SharedAccessExpiryTime = DateTime.UtcNow.AddHours(1), Permissions = SharedAccessBlobPermissions.Read | SharedAccessBlobPermissions.Write | SharedAccessBlobPermissions.List | SharedAccessBlobPermissions.Delete }; sasContainerToken = container.GetSharedAccessSignature(adHocPolicy, null); Console.WriteLine("SAS for blob container (ad hoc): {0}", sasContainerToken); } else { sasContainerToken = container.GetSharedAccessSignature(null, storedPolicyName); Console.WriteLine("SAS for blob container (stored access policy): {0}", sasContainerToken); } string containerSasUri = container.Uri + sasContainerToken; Console.WriteLine("Blob SAS URI: {0}", containerSasUri); RegistryManager registryManager = RegistryManager.CreateFromConnectionString(IotHubConStr); JobProperties exportJob = await registryManager.ExportDevicesAsync(containerSasUri, false); }
static async Task <string> BulkCreateListAsync(IList <string> deviceIds) { var serializedDevices = deviceIds .Select(id => new ExportImportDevice { Id = id, ImportMode = ImportMode.CreateOrUpdate }) .Select(JsonConvert.SerializeObject); CloudBlockBlob blob = await WriteDevicesToBlobAsync(serializedDevices); string containerUri = blob.Container.StorageUri.PrimaryUri.AbsoluteUri + GetSasToken(); JobProperties job = await registry.ImportDevicesAsync(containerUri, containerUri, blob.Name); return(job.JobId); }
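BulkCreateListAsync above depends on WriteDevicesToBlobAsync and GetSasToken helpers that are not shown. A minimal sketch of what the GetSasToken helper might look like, assuming the classic WindowsAzure.Storage SDK and a CloudBlobContainer field (the field name, expiry, and permissions below are assumptions, not part of the original snippet):

static string GetSasToken()
{
    // Hypothetical helper: build an ad hoc SAS for the container that holds the devices blob.
    // "container" is an assumed CloudBlobContainer field; adjust expiry and permissions as needed.
    var policy = new SharedAccessBlobPolicy
    {
        SharedAccessExpiryTime = DateTime.UtcNow.AddHours(1),
        Permissions = SharedAccessBlobPermissions.Read
            | SharedAccessBlobPermissions.Write
            | SharedAccessBlobPermissions.List
            | SharedAccessBlobPermissions.Delete
    };
    // Returns a query string of the form "?sv=...&sig=..." that is appended to the container URI.
    return container.GetSharedAccessSignature(policy, null);
}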
/// <summary> /// Get job properties. /// </summary> /// <param name="updateJobParams">Job properties specified via PowerShell.</param> /// <param name="existingJobProperties">Existing job properties.</param> /// <returns>The resulting job properties.</returns> private JobProperties GetExistingJobPropertiesParams(PSJobParams updateJobParams, JobProperties existingJobProperties) { var newJobProperties = new JobProperties() { Action = this.GetExistingJobAction(updateJobParams.JobAction, existingJobProperties.Action), Recurrence = this.GetExistingJobRecurrence(updateJobParams.JobRecurrence, existingJobProperties.Recurrence), StartTime = updateJobParams.StartTime ?? existingJobProperties.StartTime, }; newJobProperties.Action.ErrorAction = this.GetExistingJobErrorAction(updateJobParams.JobErrorAction, existingJobProperties.Action.ErrorAction); newJobProperties.State = updateJobParams.JobState.GetValueOrDefaultEnum <JobState?>(defaultValue: null); return(newJobProperties); }
public void PrintDocument(String Printer, String Type, String Template, Int64 documentId, String connectionstring) { datacollection = new DataSet(); port = new WindowsPrinter(Printer); printer = PrinterFactory.getPrinter(Type); port.SetDocumentName("CDS DOC " + documentId); job = printer.getDefaultJobProperties(); job.draftQuality = true; job.pitch = 10; //job.paperSize = PaperSize.LETTER; PrintDocumentFromTemplate(Template, documentId, connectionstring); printer.endJob(); }
static async Task WaitForJobToComplete(RegistryManager registryManager, JobProperties job) { while (true) { job = await registryManager.GetJobAsync(job.JobId); Console.WriteLine("\t Job " + job.Status); if (job.Status == JobStatus.Completed || job.Status == JobStatus.Failed || job.Status == JobStatus.Cancelled) { break; } await Task.Delay(TimeSpan.FromSeconds(5)); } }
internal HttpMessage CreateCreateImportExportJobRequest(JobProperties jobProperties) { var message = _pipeline.CreateMessage(); var request = message.Request; request.Method = RequestMethod.Post; var uri = new RawRequestUriBuilder(); uri.Reset(endpoint); uri.AppendPath("/jobs/create", false); uri.AppendQuery("api-version", apiVersion, true); request.Uri = uri; request.Headers.Add("Content-Type", "application/json"); var content = new Utf8JsonRequestContent(); content.JsonWriter.WriteObjectValue(jobProperties); request.Content = content; return(message); }
/// <summary> /// Creates a job to export device registrations to the container. /// </summary> /// <param name="outputBlobContainerUri">URI containing SAS token to a blob container. This is used to output the results of the export job.</param> /// <param name="excludeKeys">If true, authorization keys are NOT included in export output.</param> /// <param name="options">The optional settings for this request.</param> /// <param name="cancellationToken">Task cancellation token.</param> /// <returns>JobProperties of the newly created job.</returns> public virtual Task <Response <JobProperties> > CreateExportDevicesJobAsync( Uri outputBlobContainerUri, bool excludeKeys, ExportJobRequestOptions options = default, CancellationToken cancellationToken = default) { Argument.AssertNotNull(outputBlobContainerUri, nameof(outputBlobContainerUri)); var jobProperties = new JobProperties { Type = JobPropertiesType.Export, OutputBlobContainerUri = outputBlobContainerUri.ToString(), ExcludeKeysInExport = excludeKeys, StorageAuthenticationType = options?.AuthenticationType, OutputBlobName = options?.OutputBlobName }; return(_jobsRestClient.CreateImportExportJobAsync(jobProperties, cancellationToken)); }
/// <summary> /// Creates a job to import device registrations into the IoT Hub. /// </summary> /// <param name="importBlobContainerUri">URI containing SAS token to a blob container that contains registry data to sync.</param> /// <param name="outputBlobContainerUri">URI containing SAS token to a blob container. This is used to output the status of the job.</param> /// <param name="options">The optional settings for this request.</param> /// <param name="cancellationToken">Task cancellation token.</param> /// <returns>JobProperties of the newly created job.</returns> public virtual Response <JobProperties> CreateImportDevicesJob( Uri importBlobContainerUri, Uri outputBlobContainerUri, ImportJobRequestOptions options = default, CancellationToken cancellationToken = default) { Argument.AssertNotNull(importBlobContainerUri, nameof(importBlobContainerUri)); Argument.AssertNotNull(outputBlobContainerUri, nameof(outputBlobContainerUri)); var jobProperties = new JobProperties { Type = JobPropertiesType.Import, InputBlobContainerUri = importBlobContainerUri.ToString(), OutputBlobContainerUri = outputBlobContainerUri.ToString(), StorageAuthenticationType = options?.AuthenticationType, InputBlobName = options?.InputBlobName, OutputBlobName = options?.OutputBlobName, }; return(_jobsRestClient.CreateImportExportJob(jobProperties, cancellationToken)); }
public void Test_InputOperations_ReferenceBlob() { BasicDelegatingHandler handler = new BasicDelegatingHandler(); using (var undoContext = UndoContext.Current) { undoContext.Start(); string resourceGroupName = TestUtilities.GenerateName("StreamAnalytics"); string resourceName = TestUtilities.GenerateName("MyStreamingJobSubmittedBySDK"); string serviceLocation = TestHelper.GetDefaultLocation(); var resourceClient = TestHelper.GetResourceClient(handler); var client = TestHelper.GetStreamAnalyticsManagementClient(handler); try { ResourceGroup resourceGroup = new ResourceGroup() { Location = serviceLocation }; resourceClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup); Job job = new Job(); job.Name = resourceName; job.Location = serviceLocation; // Construct the general properties for JobProperties JobProperties jobProperties = new JobProperties(); jobProperties.Sku = new Sku() { Name = "standard" }; jobProperties.EventsOutOfOrderPolicy = EventsOutOfOrderPolicy.Drop; jobProperties.EventsOutOfOrderMaxDelayInSeconds = 0; job.Properties = jobProperties; // Construct the JobCreateProperties JobCreateOrUpdateParameters jobCreateOrUpdateParameters = new JobCreateOrUpdateParameters(); jobCreateOrUpdateParameters.Job = job; // Create a streaming job JobCreateOrUpdateResponse jobCreateOrUpdateResponse = client.StreamingJobs.CreateOrUpdate(resourceGroupName, jobCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, jobCreateOrUpdateResponse.StatusCode); // Get a streaming job to check JobGetParameters jobGetParameters = new JobGetParameters(string.Empty); JobGetResponse jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(serviceLocation, jobGetResponse.Job.Location); Assert.Equal(resourceName, jobGetResponse.Job.Name); // Construct the Input StorageAccount storageAccount = new StorageAccount(); storageAccount.AccountName = TestHelper.AccountName; storageAccount.AccountKey = TestHelper.AccountKey; InputProperties inputProperties = new ReferenceInputProperties() { Serialization = new CsvSerialization() { Properties = new CsvSerializationProperties() { FieldDelimiter = ",", Encoding = "UTF8" } }, DataSource = new BlobReferenceInputDataSource() { Properties = new BlobReferenceInputDataSourceProperties() { StorageAccounts = new[] { storageAccount }, Container = "state", PathPattern = "{date}", DateFormat = "yyyy/MM/dd" } } }; string inputName = TestUtilities.GenerateName("inputtest"); Input input1 = new Input(inputName) { Properties = inputProperties }; // Add an input InputCreateOrUpdateParameters inputCreateOrUpdateParameters = new InputCreateOrUpdateParameters(); inputCreateOrUpdateParameters.Input = input1; InputCreateOrUpdateResponse inputCreateOrUpdateResponse = client.Inputs.CreateOrUpdate(resourceGroupName, resourceName, inputCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, inputCreateOrUpdateResponse.StatusCode); Assert.Equal(inputName, inputCreateOrUpdateResponse.Input.Name); Assert.Equal("Reference", inputCreateOrUpdateResponse.Input.Properties.Type); Assert.True(inputCreateOrUpdateResponse.Input.Properties is ReferenceInputProperties); ReferenceInputProperties referenceInputPropertiesInResponse1 = (ReferenceInputProperties)inputCreateOrUpdateResponse.Input.Properties; Assert.True(referenceInputPropertiesInResponse1.DataSource is BlobReferenceInputDataSource); BlobReferenceInputDataSource blobReferenceInputDataSourceInResponse1 = 
(BlobReferenceInputDataSource)referenceInputPropertiesInResponse1.DataSource; Assert.Equal("{date}", blobReferenceInputDataSourceInResponse1.Properties.PathPattern); Assert.Equal("yyyy/MM/dd", blobReferenceInputDataSourceInResponse1.Properties.DateFormat); Assert.NotNull(inputCreateOrUpdateResponse.Input.Properties.Etag); // Get the input InputGetResponse inputGetResponse = client.Inputs.Get(resourceGroupName, resourceName, inputName); Assert.Equal(HttpStatusCode.OK, inputGetResponse.StatusCode); Assert.Equal(inputName, inputGetResponse.Input.Name); Assert.True(inputGetResponse.Input.Properties is ReferenceInputProperties); ReferenceInputProperties referenceInputPropertiesInResponse2 = (ReferenceInputProperties)inputGetResponse.Input.Properties; Assert.True(referenceInputPropertiesInResponse2.DataSource is BlobReferenceInputDataSource); BlobReferenceInputDataSource blobReferenceInputDataSourceInResponse2 = (BlobReferenceInputDataSource)referenceInputPropertiesInResponse2.DataSource; Assert.Equal("{date}", blobReferenceInputDataSourceInResponse2.Properties.PathPattern); Assert.Equal("yyyy/MM/dd", blobReferenceInputDataSourceInResponse2.Properties.DateFormat); Assert.Equal(inputCreateOrUpdateResponse.Input.Properties.Etag, inputGetResponse.Input.Properties.Etag); // List inputs InputListResponse inputListResponse = client.Inputs.ListInputInJob(resourceGroupName, resourceName, new InputListParameters()); Assert.Equal(HttpStatusCode.OK, inputListResponse.StatusCode); Assert.Equal(1, inputListResponse.Value.Count); // Test input connectivity DataSourceTestConnectionResponse response = client.Inputs.TestConnection(resourceGroupName, resourceName, inputName); Assert.Equal(OperationStatus.Succeeded, response.Status); Assert.Equal(DataSourceTestStatus.TestSucceeded, response.DataSourceTestStatus); // Update the input BlobReferenceInputDataSource blobReferenceInputDataSource = new BlobReferenceInputDataSource() { Properties = new BlobReferenceInputDataSourceProperties() { StorageAccounts = new[] { storageAccount }, Container = "state", PathPattern = "test.csv", DateFormat = "yyyy/MM/dd" } }; ((ReferenceInputProperties)inputProperties).DataSource = blobReferenceInputDataSource; inputProperties.Etag = inputCreateOrUpdateResponse.Input.Properties.Etag; InputPatchParameters inputPatchParameters = new InputPatchParameters(inputProperties); InputPatchResponse inputPatchResponse = client.Inputs.Patch(resourceGroupName, resourceName, inputName, inputPatchParameters); Assert.Equal(HttpStatusCode.OK, inputPatchResponse.StatusCode); Assert.True(inputPatchResponse.Properties is ReferenceInputProperties); ReferenceInputProperties referenceInputPropertiesInResponse3 = (ReferenceInputProperties)inputPatchResponse.Properties; Assert.True(referenceInputPropertiesInResponse3.DataSource is BlobReferenceInputDataSource); BlobReferenceInputDataSource blobReferenceInputDataSourceInResponse3 = (BlobReferenceInputDataSource)referenceInputPropertiesInResponse3.DataSource; Assert.Equal("test.csv", blobReferenceInputDataSourceInResponse3.Properties.PathPattern); Assert.Equal("yyyy/MM/dd", blobReferenceInputDataSourceInResponse3.Properties.DateFormat); Assert.NotNull(inputPatchResponse.Properties.Etag); Assert.NotEqual(inputCreateOrUpdateResponse.Input.Properties.Etag, inputPatchResponse.Properties.Etag); // Delete the inputs AzureOperationResponse deleteInputOperationResponse = client.Inputs.Delete(resourceGroupName, resourceName, inputName); Assert.Equal(HttpStatusCode.OK, deleteInputOperationResponse.StatusCode); // Check 
that there are 0 inputs in the job jobGetParameters = new JobGetParameters("inputs"); jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(0, jobGetResponse.Job.Properties.Inputs.Count); } finally { client.StreamingJobs.Delete(resourceGroupName, resourceName); resourceClient.ResourceGroups.Delete(resourceGroupName); } } }
// This shows how to copy devices from one IoT Hub to another. // First, export the list from the source hub to devices.txt (ExportDevices). // Next, read in that file. Each row is a serialized object; // read them into the generic list serializedDevices. // Delete the devices.txt in blob storage, because you're going to recreate it. // For each serializedDevice, deserialize it, set ImportMode to CREATE, // reserialize it, and write it to a StringBuilder. The ImportMode field is what // tells the job framework to add each device. // Write the new StringBuilder to the block blob. // This essentially replaces the list with a list of devices that have ImportMode = Create. // Call ImportDevicesAsync, which will read in the list in devices.txt, then add each one // because it doesn't already exist. If it already exists, it will write an entry to // the import error log and not add the new one. private async Task CopyAllDevicesToNewHub(string sourceHubConnectionString, string destHubConnectionString, string containerUri, string deviceListFile) { Console.WriteLine("Exporting devices on current hub"); // Read the devices from the hub and write them to devices.txt in blob storage. await ExportDevices(containerUri, sourceHubConnectionString).ConfigureAwait(false); // Read devices.txt which contains serialized objects. // Write each line to the serializedDevices list. (List<string>). CloudBlockBlob blockBlob = _cloudBlobContainer.GetBlockBlobReference(deviceListFile); // Get the URI for the blob. string blobUri = blockBlob.Uri.ToString(); // Instantiate the generic list. var serializedDevices = new List <string>(); Console.WriteLine("Read in list of devices from blob storage."); // Read the blob file of devices, import each row into serializedDevices. using Stream blobStream = await blockBlob.OpenReadAsync(AccessCondition.GenerateIfExistsCondition(), null, null).ConfigureAwait(false); using var streamReader = new StreamReader(blobStream, Encoding.UTF8); while (streamReader.Peek() != -1) { string line = await streamReader.ReadLineAsync().ConfigureAwait(false); serializedDevices.Add(line); } // Delete the blob containing the list of devices, because you're going to recreate it. CloudBlockBlob blobToDelete = _cloudBlobContainer.GetBlockBlobReference("devices.txt"); Console.WriteLine("Update ImportMode to be Create."); // Step 1: Update each device's ImportMode to be Create var sb = new StringBuilder(); serializedDevices.ForEach(serializedDevice => { // Deserialize back to an ExportImportDevice. var device = JsonConvert.DeserializeObject <ExportImportDevice>(serializedDevice); // Update the property. device.ImportMode = ImportMode.Create; // Re-serialize the object now that you've updated the property. sb.AppendLine(JsonConvert.SerializeObject(device)); }); // Step 2: Delete the blob if it already exists, then write the list in memory to the blob. await blobToDelete.DeleteIfExistsAsync().ConfigureAwait(false); using CloudBlobStream stream = await blobToDelete.OpenWriteAsync().ConfigureAwait(false); byte[] bytes = Encoding.UTF8.GetBytes(sb.ToString()); for (var i = 0; i < bytes.Length; i += 500) { int length = Math.Min(bytes.Length - i, 500); await stream.WriteAsync(bytes, i, length).ConfigureAwait(false); } Console.WriteLine("Creating and running registry manager job to import the entries from the text file to the new hub"); // Step 3: Call import using the same blob to create all devices. // Loads devices.txt and adds the devices to the destination hub. 
using RegistryManager registryManager = RegistryManager.CreateFromConnectionString(destHubConnectionString); JobProperties importJob = await registryManager.ImportDevicesAsync(containerUri, containerUri).ConfigureAwait(false); // Wait until job is finished while (true) { importJob = await registryManager.GetJobAsync(importJob.JobId).ConfigureAwait(false); Console.WriteLine($"Import job status is {importJob.Status}"); if (importJob.Status == JobStatus.Completed || importJob.Status == JobStatus.Failed || importJob.Status == JobStatus.Cancelled) { // Job has finished executing break; } await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false); } }
public void Test_OutputOperations_E2E() { BasicDelegatingHandler handler = new BasicDelegatingHandler(); using (var undoContext = UndoContext.Current) { undoContext.Start(); string resourceGroupName = TestUtilities.GenerateName("StreamAnalytics"); string resourceName = TestUtilities.GenerateName("MyStreamingJobSubmittedBySDK"); string serviceLocation = TestHelper.GetDefaultLocation(); var resourceClient = TestHelper.GetResourceClient(handler); var client = TestHelper.GetStreamAnalyticsManagementClient(handler); try { ResourceGroup resourceGroup = new ResourceGroup() { Location = serviceLocation }; resourceClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup); Job job = new Job(); job.Name = resourceName; job.Location = serviceLocation; // Construct the general properties for JobProperties JobProperties jobProperties = new JobProperties(); jobProperties.Sku = new Sku() { Name = "standard" }; jobProperties.EventsOutOfOrderPolicy = EventsOutOfOrderPolicy.Drop; jobProperties.EventsOutOfOrderMaxDelayInSeconds = 0; job.Properties = jobProperties; // Construct the JobCreateProperties JobCreateOrUpdateParameters jobCreateOrUpdateParameters = new JobCreateOrUpdateParameters(); jobCreateOrUpdateParameters.Job = job; // Create a streaming job JobCreateOrUpdateResponse jobCreateOrUpdateResponse = client.StreamingJobs.CreateOrUpdate(resourceGroupName, jobCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, jobCreateOrUpdateResponse.StatusCode); // Get a streaming job to check JobGetParameters jobGetParameters = new JobGetParameters(string.Empty); JobGetResponse jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(serviceLocation, jobGetResponse.Job.Location); Assert.Equal(resourceName, jobGetResponse.Job.Name); // Construct the Output OutputProperties outputProperties = new OutputProperties(); string outputName = TestUtilities.GenerateName("outputtest"); string tableName = "StateInfo"; SqlAzureOutputDataSource sqlAzureOutputDataSource = new SqlAzureOutputDataSource() { Properties = new SqlAzureOutputDataSourceProperties() { Server = TestHelper.Server, Database = TestHelper.Database, User = TestHelper.User, Password = TestHelper.Password, Table = tableName } }; outputProperties.DataSource = sqlAzureOutputDataSource; Output output1 = new Output(outputName) { Properties = outputProperties }; // Add an output OutputCreateOrUpdateParameters outputCreateOrUpdateParameters = new OutputCreateOrUpdateParameters(); outputCreateOrUpdateParameters.Output = output1; OutputCreateOrUpdateResponse outputCreateOrUpdateResponse = client.Outputs.CreateOrUpdate(resourceGroupName, resourceName, outputCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, outputCreateOrUpdateResponse.StatusCode); Assert.Equal(outputName, outputCreateOrUpdateResponse.Output.Name); Assert.True(outputCreateOrUpdateResponse.Output.Properties.DataSource is SqlAzureOutputDataSource); SqlAzureOutputDataSource sqlAzureOutputDataSourceInResponse1 = (SqlAzureOutputDataSource)outputCreateOrUpdateResponse.Output.Properties.DataSource; Assert.Equal(tableName, sqlAzureOutputDataSourceInResponse1.Properties.Table); Assert.NotNull(outputCreateOrUpdateResponse.Output.Properties.Etag); // Get the output OutputGetResponse outputGetResponse = client.Outputs.Get(resourceGroupName, resourceName, outputName); Assert.Equal(HttpStatusCode.OK, outputGetResponse.StatusCode); Assert.Equal(outputName, 
outputGetResponse.Output.Name); Assert.True(outputGetResponse.Output.Properties.DataSource is SqlAzureOutputDataSource); SqlAzureOutputDataSource sqlAzureOutputDataSourceInResponse2 = (SqlAzureOutputDataSource)outputGetResponse.Output.Properties.DataSource; Assert.Equal(tableName, sqlAzureOutputDataSourceInResponse2.Properties.Table); // List outputs OutputListResponse outputListResponse = client.Outputs.ListOutputInJob(resourceGroupName, resourceName, new OutputListParameters()); Assert.Equal(HttpStatusCode.OK, outputListResponse.StatusCode); Assert.Equal(1, outputListResponse.Value.Count); // Check that there is 1 output in the job jobGetParameters = new JobGetParameters("outputs"); jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(1, jobGetResponse.Job.Properties.Outputs.Count); // Test output connectivity DataSourceTestConnectionResponse response = client.Outputs.TestConnection(resourceGroupName, resourceName, outputName); Assert.Equal(OperationStatus.Succeeded, response.Status); Assert.Equal(DataSourceTestStatus.TestSucceeded, response.DataSourceTestStatus); // Update the output string newTableName = TestUtilities.GenerateName("NewTableName"); sqlAzureOutputDataSource.Properties.Table = newTableName; outputProperties.DataSource = sqlAzureOutputDataSource; outputProperties.Etag = outputCreateOrUpdateResponse.Output.Properties.Etag; OutputPatchParameters outputPatchParameters = new OutputPatchParameters(outputProperties); OutputPatchResponse outputPatchResponse = client.Outputs.Patch(resourceGroupName, resourceName, outputName, outputPatchParameters); Assert.Equal(HttpStatusCode.OK, outputPatchResponse.StatusCode); Assert.True(outputPatchResponse.Properties.DataSource is SqlAzureOutputDataSource); SqlAzureOutputDataSource sqlAzureOutputDataSourceInResponse3 = (SqlAzureOutputDataSource)outputPatchResponse.Properties.DataSource; Assert.Equal(newTableName, sqlAzureOutputDataSourceInResponse3.Properties.Table); Assert.NotNull(outputPatchResponse.Properties.Etag); Assert.NotEqual(outputCreateOrUpdateResponse.Output.Properties.Etag, outputPatchResponse.Properties.Etag); // Add second output string outputName2 = TestUtilities.GenerateName("outputtest"); Output output2 = new Output(outputName2) { Properties = outputProperties }; outputCreateOrUpdateParameters.Output = output2; outputCreateOrUpdateResponse = client.Outputs.CreateOrUpdate(resourceGroupName, resourceName, outputCreateOrUpdateParameters); // List outputs outputListResponse = client.Outputs.ListOutputInJob(resourceGroupName, resourceName, new OutputListParameters()); Assert.Equal(HttpStatusCode.OK, outputListResponse.StatusCode); Assert.Equal(2, outputListResponse.Value.Count); // Check that there are 2 outputs in the job jobGetParameters = new JobGetParameters("outputs"); jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(2, jobGetResponse.Job.Properties.Outputs.Count); // Delete the outputs AzureOperationResponse deleteInputOperationResponse = client.Outputs.Delete(resourceGroupName, resourceName, outputName); Assert.Equal(HttpStatusCode.OK, deleteInputOperationResponse.StatusCode); deleteInputOperationResponse = client.Outputs.Delete(resourceGroupName, resourceName, outputName2); Assert.Equal(HttpStatusCode.OK, deleteInputOperationResponse.StatusCode); // Check that there are 0 outputs in the job 
jobGetParameters = new JobGetParameters("outputs"); jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(0, jobGetResponse.Job.Properties.Outputs.Count); } finally { client.StreamingJobs.Delete(resourceGroupName, resourceName); resourceClient.ResourceGroups.Delete(resourceGroupName); } } }
/// <summary> /// Generate NumToAdd devices and add them to the hub. /// To do this, generate each identity. /// Include authentication keys. /// Write the device info to a block blob. /// Import the devices into the identity registry by calling the import job. /// </summary> private async Task GenerateAndAddDevices(string hubConnectionString, string containerURI, int NumToAdd, string devicesToAdd) { int interimProgressCount = 0; int displayProgressCount = 1000; int totalProgressCount = 0; //generate reference for list of new devices you're going to add, will write list to this blob CloudBlockBlob generatedListBlob = _cloudBlobContainer.GetBlockBlobReference(devicesToAdd); // define serializedDevices as a generic list<string> List <string> serializedDevices = new List <string>(); for (var i = 1; i <= NumToAdd; i++) { // Create device name with this format: Hub_00000000 + a new guid. // This should be large enough to display the largest number (1 million). //string deviceName = "Hub_" + i.ToString("D8") + "-" + Guid.NewGuid().ToString(); string deviceName = $"Hub_{i.ToString("D8")}-{Guid.NewGuid().ToString()}"; Debug.Print($"device = '{deviceName}'\n"); // Create a new ExportImportDevice. // CryptoKeyGenerator is in the Microsoft.Azure.Devices.Common namespace. var deviceToAdd = new ExportImportDevice() { Id = deviceName, Status = DeviceStatus.Enabled, Authentication = new AuthenticationMechanism { SymmetricKey = new SymmetricKey { PrimaryKey = CryptoKeyGenerator.GenerateKey(32), SecondaryKey = CryptoKeyGenerator.GenerateKey(32) } }, // This indicates that the entry should be added as a new device. ImportMode = ImportMode.Create }; // Add device to the list as a serialized object. serializedDevices.Add(JsonConvert.SerializeObject(deviceToAdd)); // Not real progress as you write the new devices, but will at least show *some* progress. interimProgressCount++; totalProgressCount++; if (interimProgressCount >= displayProgressCount) { Console.WriteLine("Added {0} devices.", totalProgressCount); interimProgressCount = 0; } } // Now have a list of devices to be added, each one has been serialized. // Write the list to the blob. var sb = new StringBuilder(); serializedDevices.ForEach(serializedDevice => sb.AppendLine(serializedDevice)); // Before writing the new file, make sure there's not already one there. await generatedListBlob.DeleteIfExistsAsync().ConfigureAwait(false); // Write list of serialized objects to the blob. using (CloudBlobStream stream = await generatedListBlob.OpenWriteAsync().ConfigureAwait(false)) { byte[] bytes = Encoding.UTF8.GetBytes(sb.ToString()); for (var i = 0; i < bytes.Length; i += 500) { int length = Math.Min(bytes.Length - i, 500); await stream.WriteAsync(bytes, i, length).ConfigureAwait(false); } } Console.WriteLine("Creating and running registry manager job to write the new devices."); // Should now have a file with all the new devices in it as serialized objects in blob storage. // generatedListBlob has the list of devices to be added as serialized objects. // Call import using the blob to add the new devices. // Log information related to the job is written to the same container. // This normally takes 1 minute per 100 devices (according to the docs). // First, initiate an import job. // This reads in the rows from the text file and writes them to IoT Devices. // If you want to add devices from a file, you can create a file and use this to import it. // They have to be in the exact right format. 
JobProperties importJob = new JobProperties(); RegistryManager registryManager = RegistryManager.CreateFromConnectionString(hubConnectionString); try { // First URL is the container to import from; by default the job reads a file called devices.txt. // Second URL points to the container to write errors to as a block blob. // The third argument overrides the default blob name, which lets you import the devices from any file name. // Since we wrote the new devices to [devicesToAdd], we need to read the list from there as well. importJob = await registryManager.ImportDevicesAsync(containerURI, containerURI, devicesToAdd).ConfigureAwait(false); // Wait for the job to finish; the surrounding try/catch will catch any errors if something bad happens to interrupt the job. while (true) { importJob = await registryManager.GetJobAsync(importJob.JobId).ConfigureAwait(false); if (importJob.Status == JobStatus.Completed || importJob.Status == JobStatus.Failed || importJob.Status == JobStatus.Cancelled) { // Job has finished executing break; } await Task.Delay(TimeSpan.FromSeconds(5)).ConfigureAwait(false); } } catch (Exception ex) { Debug.Print("exception message {0}", ex.Message); } }
public void Test_TransformationOperations_E2E() { BasicDelegatingHandler handler = new BasicDelegatingHandler(); using (var undoContext = UndoContext.Current) { undoContext.Start(); string resourceGroupName = TestUtilities.GenerateName("StreamAnalytics"); string resourceName = TestUtilities.GenerateName("MyStreamingJobSubmittedBySDK"); string serviceLocation = TestHelper.GetDefaultLocation(); var resourceClient = TestHelper.GetResourceClient(handler); var client = TestHelper.GetStreamAnalyticsManagementClient(handler); try { ResourceGroup resourceGroup = new ResourceGroup() { Location = serviceLocation }; resourceClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup); Job job = new Job(); job.Name = resourceName; job.Location = serviceLocation; // Construct the general properties for JobProperties JobProperties jobProperties = new JobProperties(); jobProperties.Sku = new Sku() { Name = "standard" }; jobProperties.EventsOutOfOrderPolicy = EventsOutOfOrderPolicy.Drop; jobProperties.EventsOutOfOrderMaxDelayInSeconds = 0; job.Properties = jobProperties; // Construct the JobCreateProperties JobCreateOrUpdateParameters jobCreateOrUpdateParameters = new JobCreateOrUpdateParameters(); jobCreateOrUpdateParameters.Job = job; // Create a streaming job JobCreateOrUpdateResponse jobCreateOrUpdateResponse = client.StreamingJobs.CreateOrUpdate(resourceGroupName, jobCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, jobCreateOrUpdateResponse.StatusCode); // Get a streaming job to check JobGetParameters jobGetParameters = new JobGetParameters(string.Empty); JobGetResponse jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(serviceLocation, jobGetResponse.Job.Location); Assert.Equal(resourceName, jobGetResponse.Job.Name); // Construct the Transformation string transformationName = TestUtilities.GenerateName("transformationtest"); int numberOfStreamingUnits = 1; Transformation transformation = new Transformation() { Name = transformationName, Properties = new TransformationProperties() { Query = "Select Id, Name from inputtest", StreamingUnits = numberOfStreamingUnits } }; // Add an Transformation TransformationCreateOrUpdateParameters transformationCreateOrUpdateParameters = new TransformationCreateOrUpdateParameters(); transformationCreateOrUpdateParameters.Transformation = transformation; TransformationCreateOrUpdateResponse transformationCreateOrUpdateResponse = client.Transformations.CreateOrUpdate(resourceGroupName, resourceName, transformationCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, transformationCreateOrUpdateResponse.StatusCode); Assert.Equal(numberOfStreamingUnits, transformationCreateOrUpdateResponse.Transformation.Properties.StreamingUnits); Assert.NotNull(transformationCreateOrUpdateResponse.Transformation.Properties.Etag); // Update the Transformation transformation.Properties.StreamingUnits = 3; transformation.Properties.Etag = transformationCreateOrUpdateResponse.Transformation.Properties.Etag; TransformationPatchParameters transformationPatchParameters = new TransformationPatchParameters(transformation.Properties); TransformationPatchResponse transformationPatchResponse = client.Transformations.Patch(resourceGroupName, resourceName, transformationName, transformationPatchParameters); Assert.Equal(HttpStatusCode.OK, transformationPatchResponse.StatusCode); Assert.Equal(3, transformationPatchResponse.Properties.StreamingUnits); 
Assert.NotNull(transformationPatchResponse.Properties.Etag); Assert.NotEqual(transformationCreateOrUpdateResponse.Transformation.Properties.Etag, transformationPatchResponse.Properties.Etag); } finally { client.StreamingJobs.Delete(resourceGroupName, resourceName); resourceClient.ResourceGroups.Delete(resourceGroupName); } } }
public async Task RegistryManager_ImportDevices(StorageAuthenticationType storageAuthenticationType, bool isUserAssignedMsi) { // arrange const string idPrefix = nameof(RegistryManager_ImportDevices); string deviceId = $"{idPrefix}-device-{StorageContainer.GetRandomSuffix(4)}"; string configId = $"{idPrefix}-config-{StorageContainer.GetRandomSuffix(4)}".ToLower(); // Configuration Id characters must be all lower-case. Logger.Trace($"Using Ids {deviceId} and {configId}."); string devicesFileName = $"{idPrefix}-devices-{StorageContainer.GetRandomSuffix(4)}.txt"; string configsFileName = $"{idPrefix}-configs-{StorageContainer.GetRandomSuffix(4)}.txt"; using RegistryManager registryManager = RegistryManager.CreateFromConnectionString(TestConfiguration.IoTHub.ConnectionString); try { string containerName = StorageContainer.BuildContainerName(nameof(RegistryManager_ImportDevices)); using StorageContainer storageContainer = await StorageContainer.GetInstanceAsync(containerName).ConfigureAwait(false); Logger.Trace($"Using devices container {storageContainer.Uri}"); Uri containerUri = storageAuthenticationType == StorageAuthenticationType.KeyBased ? storageContainer.SasUri : storageContainer.Uri; using Stream devicesStream = ImportExportHelpers.BuildImportStream( new List <ExportImportDevice> { new ExportImportDevice( new Device(deviceId) { Authentication = new AuthenticationMechanism { Type = AuthenticationType.Sas } }, ImportMode.Create), }); await UploadFileAndConfirmAsync(storageContainer, devicesStream, devicesFileName).ConfigureAwait(false); using Stream configsStream = ImportExportHelpers.BuildImportStream( new List <ImportConfiguration> { new ImportConfiguration(configId) { ImportMode = ConfigurationImportMode.CreateOrUpdateIfMatchETag, Priority = 3, Labels = { { "labelName", "labelValue" } }, TargetCondition = "*", Content = { DeviceContent = { { "properties.desired.x", 5L } }, }, Metrics = { Queries = { { "successfullyConfigured", "select deviceId from devices where properties.reported.x = 5" } } }, }, }); await UploadFileAndConfirmAsync(storageContainer, configsStream, configsFileName).ConfigureAwait(false); ManagedIdentity identity = isUserAssignedMsi ? 
new ManagedIdentity { UserAssignedIdentity = TestConfiguration.IoTHub.UserAssignedMsiResourceId } : null; // act JobProperties importJobResponse = await CreateAndWaitForJobAsync( storageAuthenticationType, devicesFileName, configsFileName, registryManager, containerUri, identity) .ConfigureAwait(false); // assert importJobResponse.Status.Should().Be(JobStatus.Completed, "Otherwise import failed"); importJobResponse.FailureReason.Should().BeNullOrEmpty("Otherwise import failed"); // should not throw due to 404, but device may not immediately appear in registry Device device = null; Configuration config = null; for (int i = 0; i < MaxIterationWait; ++i) { await Task.Delay(s_waitDuration).ConfigureAwait(false); try { device = await registryManager.GetDeviceAsync(deviceId).ConfigureAwait(false); config = await registryManager.GetConfigurationAsync(configId).ConfigureAwait(false); break; } catch (Exception ex) { Logger.Trace($"Could not find device/config on iteration {i} due to [{ex.Message}]"); } } if (device == null) { Assert.Fail($"Device {deviceId} not found in registry manager"); } if (config == null) { Assert.Fail($"Config {configId} not found in registry manager"); } } finally { try { await registryManager.RemoveDeviceAsync(deviceId).ConfigureAwait(false); await registryManager.RemoveConfigurationAsync(configId).ConfigureAwait(false); } catch (Exception ex) { Logger.Trace($"Failed to clean up device/config due to {ex}"); } } }
public static Job CreateJob(IDatabaseSource src, IDatabaseSource dst, DataSynDef datasyn, string outFile, List <IJobReportConfiguration> reports, JobProperties jobProps, DataSynGuiEnv guienv) { //UsageStats.Usage("datasyn_job", "src", src.ToString(), "dst", dst.ToString()); return(Job.FromCommand(new DataSynJobCommand(src, dst, datasyn, outFile, reports, guienv), jobProps)); }
private async Task CleanupDevices() { Console.WriteLine($"Using storage container {_blobContainerClient.Name}" + $" for exporting device identities to and importing device identities from."); // Retrieve the SAS Uri that will be used to grant access to the storage containers. string storageAccountSasUri = GetStorageAccountSasUriForCleanupJob(_blobContainerClient).ToString(); // Step 1: Export all device identities. JobProperties exportAllDevicesProperties = JobProperties .CreateForExportJob( outputBlobContainerUri: storageAccountSasUri, excludeKeysInExport: true, storageAuthenticationType: StorageAuthenticationType.KeyBased); JobProperties exportAllDevicesJob = null; int tryCount = 0; while (true) { try { exportAllDevicesJob = await _registryManager.ExportDevicesAsync(exportAllDevicesProperties); break; } // Wait for pending jobs to finish. catch (JobQuotaExceededException) when(++tryCount < MaxIterationWait) { Console.WriteLine($"JobQuotaExceededException... waiting."); await Task.Delay(WaitDuration); } } if (exportAllDevicesJob == null) { throw new Exception("Export devices job failed."); } // Wait until the export job is finished. while (true) { exportAllDevicesJob = await _registryManager.GetJobAsync(exportAllDevicesJob.JobId); if (s_completedJobs.Contains(exportAllDevicesJob.Status)) { // Job has finished executing. break; } Console.WriteLine($"Job {exportAllDevicesJob.JobId} is {exportAllDevicesJob.Status} with progress {exportAllDevicesJob.Progress}%"); await Task.Delay(s_waitDuration); } Console.WriteLine($"Job {exportAllDevicesJob.JobId} is {exportAllDevicesJob.Status}."); if (exportAllDevicesJob.Status != JobStatus.Completed) { throw new Exception("Exporting devices failed, exiting."); } // Step 2: Download the exported devices list from the blob created in Step 1. BlobClient blobClient = _blobContainerClient.GetBlobClient(ImportExportDevicesFileName); BlobDownloadInfo download = await blobClient.DownloadAsync(); IEnumerable <ExportImportDevice> exportedDevices = ImportExportDevicesHelpers.BuildExportImportDeviceFromStream(download.Content); // Step 3: Collect the devices that need to be deleted and update their ImportMode to be Delete. // This step will create an ExportImportDevice identity for each device/module identity registered on the hub. // If your hub instance has IoT Hub module or Edge module instances registered, then they will be counted as separate entities // from the corresponding IoT Hub device/Edge device that they are associated with. // As a result, the count of ExportImportDevice identities to be deleted might be greater than the // count of IoT hub devices retrieved in PrintDeviceCountAsync(). var devicesToBeDeleted = new List <ExportImportDevice>(); foreach (var device in exportedDevices) { string deviceId = device.Id; foreach (string prefix in _deleteDevicesWithPrefix) { if (deviceId.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)) { devicesToBeDeleted.Add(device); } } } devicesToBeDeleted .ForEach(device => device.ImportMode = ImportMode.Delete); Console.WriteLine($"Retrieved {devicesToBeDeleted.Count} devices for deletion."); if (devicesToBeDeleted.Count > 0) { // Step 3a: Write the new import data back to the blob. using Stream devicesFile = ImportExportDevicesHelpers.BuildDevicesStream(devicesToBeDeleted); await blobClient.UploadAsync(devicesFile, overwrite : true); // Step 3b: Call import using the same blob to delete all devices. 
JobProperties importDevicesToBeDeletedProperties = JobProperties .CreateForImportJob( inputBlobContainerUri: storageAccountSasUri, outputBlobContainerUri: storageAccountSasUri, storageAuthenticationType: StorageAuthenticationType.KeyBased); JobProperties importDevicesToBeDeletedJob = null; tryCount = 0; while (true) { try { importDevicesToBeDeletedJob = await _registryManager.ImportDevicesAsync(importDevicesToBeDeletedProperties); break; } // Wait for pending jobs to finish. catch (JobQuotaExceededException) when(++tryCount < MaxIterationWait) { Console.WriteLine($"JobQuotaExceededException... waiting."); await Task.Delay(WaitDuration); } } if (importDevicesToBeDeletedJob == null) { throw new Exception("Import devices job failed."); } // Wait until job is finished. while (true) { importDevicesToBeDeletedJob = await _registryManager.GetJobAsync(importDevicesToBeDeletedJob.JobId); if (s_completedJobs.Contains(importDevicesToBeDeletedJob.Status)) { // Job has finished executing. break; } Console.WriteLine($"Job {importDevicesToBeDeletedJob.JobId} is {importDevicesToBeDeletedJob.Status} with progress {importDevicesToBeDeletedJob.Progress}%"); await Task.Delay(s_waitDuration); } Console.WriteLine($"Job {importDevicesToBeDeletedJob.JobId} is {importDevicesToBeDeletedJob.Status}."); } // Step 4: Delete the storage container created. await _blobContainerClient.DeleteAsync(); Console.WriteLine($"Storage container {_blobContainerClient.Name} deleted."); }
// This shows how to delete all of the devices for the IoT Hub. // First, export the list to devices.txt (ExportDevices). // Next, read in that file. Each row is a serialized object; // read them into the generic list serializedDevices. // Delete the devices.txt in blob storage, because you're going to recreate it. // For each serializedDevice, deserialize it, set ImportMode to Delete, // reserialize it, and write it to a StringBuilder. The ImportMode field is what // tells the job framework to delete each one. // Write the new StringBuilder to the block blob. // This essentially replaces the list with a list of devices that have ImportMode = Delete. // Call ImportDevicesAsync, which will read in the list in devices.txt, then delete each one. public static async Task DeleteAllDevicesFromHub(string hubConnectionString, CloudBlobContainer cloudBlobContainer, string containerURI, string deviceListFile) { // Read the devices from the hub and write them to devices.txt in blob storage. await ExportDevices(containerURI, hubConnectionString); // Read devices.txt which contains serialized objects. // Write each line to the serializedDevices list. (List<string>). CloudBlockBlob blockBlob = cloudBlobContainer.GetBlockBlobReference(deviceListFile); // Get the URI for the blob. string blobURI = blockBlob.Uri.ToString(); // Instantiate the generic list. var serializedDevices = new List <string>(); // Read the blob file of devices, import each row into serializedDevices. using (var streamReader = new StreamReader(await blockBlob.OpenReadAsync(AccessCondition.GenerateIfExistsCondition(), null, null), Encoding.UTF8)) { while (streamReader.Peek() != -1) { string line = await streamReader.ReadLineAsync(); serializedDevices.Add(line); } } // Delete the blob containing the list of devices, // because you're going to recreate it. CloudBlockBlob blobToDelete = cloudBlobContainer.GetBlockBlobReference("devices.txt"); // Step 1: Update each device's ImportMode to be Delete StringBuilder sb = new StringBuilder(); serializedDevices.ForEach(serializedDevice => { // Deserialize back to an ExportImportDevice. var device = JsonConvert.DeserializeObject <ExportImportDevice>(serializedDevice); // Update the property. device.ImportMode = ImportMode.Delete; // Re-serialize the object now that you've updated the property. sb.AppendLine(JsonConvert.SerializeObject(device)); }); // Step 2: Delete the blob if it already exists, then write the list in memory to the blob. await blobToDelete.DeleteIfExistsAsync(); using (CloudBlobStream stream = await blobToDelete.OpenWriteAsync()) { byte[] bytes = Encoding.UTF8.GetBytes(sb.ToString()); for (var i = 0; i < bytes.Length; i += 500) { int length = Math.Min(bytes.Length - i, 500); await stream.WriteAsync(bytes, i, length); } } // Step 3: Call import using the same blob to delete all devices. // Loads devices.txt and applies that change. RegistryManager registryManager = RegistryManager.CreateFromConnectionString(hubConnectionString); JobProperties importJob = await registryManager.ImportDevicesAsync(containerURI, containerURI); // Wait until job is finished while (true) { importJob = await registryManager.GetJobAsync(importJob.JobId); if (importJob.Status == JobStatus.Completed || importJob.Status == JobStatus.Failed || importJob.Status == JobStatus.Cancelled) { // Job has finished executing break; } await Task.Delay(TimeSpan.FromSeconds(5)); } }
public void Test_OutputOperations_EventHub() { BasicDelegatingHandler handler = new BasicDelegatingHandler(); using (var undoContext = UndoContext.Current) { undoContext.Start(); string resourceGroupName = TestUtilities.GenerateName("StreamAnalytics"); string resourceName = TestUtilities.GenerateName("MyStreamingJobSubmittedBySDK"); string serviceLocation = TestHelper.GetDefaultLocation(); var resourceClient = TestHelper.GetResourceClient(handler); var client = TestHelper.GetStreamAnalyticsManagementClient(handler); try { ResourceGroup resourceGroup = new ResourceGroup() { Location = serviceLocation }; resourceClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup); Job job = new Job(); job.Name = resourceName; job.Location = serviceLocation; // Construct the general properties for JobProperties JobProperties jobProperties = new JobProperties(); jobProperties.Sku = new Sku() { Name = "standard" }; jobProperties.EventsOutOfOrderPolicy = EventsOutOfOrderPolicy.Drop; jobProperties.EventsOutOfOrderMaxDelayInSeconds = 0; job.Properties = jobProperties; // Construct the JobCreateProperties JobCreateOrUpdateParameters jobCreateOrUpdateParameters = new JobCreateOrUpdateParameters(); jobCreateOrUpdateParameters.Job = job; // Create a streaming job JobCreateOrUpdateResponse jobCreateOrUpdateResponse = client.StreamingJobs.CreateOrUpdate(resourceGroupName, jobCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, jobCreateOrUpdateResponse.StatusCode); // Get a streaming job to check JobGetParameters jobGetParameters = new JobGetParameters(string.Empty); JobGetResponse jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(serviceLocation, jobGetResponse.Job.Location); Assert.Equal(resourceName, jobGetResponse.Job.Name); // Construct the Output OutputProperties outputProperties = new OutputProperties(); string outputName = TestUtilities.GenerateName("outputtest"); string partitionKey = "partitionKey"; EventHubOutputDataSource eventHubOutputDataSource = new EventHubOutputDataSource() { Properties = new EventHubOutputDataSourceProperties() { ServiceBusNamespace = "sdktest", EventHubName = "sdkeventhub", SharedAccessPolicyName = TestHelper.SharedAccessPolicyName, SharedAccessPolicyKey = TestHelper.SharedAccessPolicyKey, PartitionKey = partitionKey } }; JsonSerialization jsonSerialization = new JsonSerialization() { Properties = new JsonSerializationProperties() { Encoding = "UTF8", Format = Format.LineSeparated } }; outputProperties.DataSource = eventHubOutputDataSource; outputProperties.Serialization = jsonSerialization; Output output1 = new Output(outputName) { Properties = outputProperties }; // Add an output OutputCreateOrUpdateParameters outputCreateOrUpdateParameters = new OutputCreateOrUpdateParameters(); outputCreateOrUpdateParameters.Output = output1; OutputCreateOrUpdateResponse outputCreateOrUpdateResponse = client.Outputs.CreateOrUpdate(resourceGroupName, resourceName, outputCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, outputCreateOrUpdateResponse.StatusCode); Assert.Equal(outputName, outputCreateOrUpdateResponse.Output.Name); Assert.True(outputCreateOrUpdateResponse.Output.Properties.Serialization is JsonSerialization); JsonSerialization jsonSerializationInResponse1 = (JsonSerialization)outputCreateOrUpdateResponse.Output.Properties.Serialization; Assert.Equal(Format.LineSeparated, jsonSerializationInResponse1.Properties.Format); 
Assert.True(outputCreateOrUpdateResponse.Output.Properties.DataSource is EventHubOutputDataSource); EventHubOutputDataSource eventHubOutputDataSourceInResponse1 = (EventHubOutputDataSource)outputCreateOrUpdateResponse.Output.Properties.DataSource; Assert.Equal(partitionKey, eventHubOutputDataSourceInResponse1.Properties.PartitionKey); Assert.NotNull(outputCreateOrUpdateResponse.Output.Properties.Etag); // Get the output OutputGetResponse outputGetResponse = client.Outputs.Get(resourceGroupName, resourceName, outputName); Assert.Equal(HttpStatusCode.OK, outputGetResponse.StatusCode); Assert.Equal(outputName, outputGetResponse.Output.Name); Assert.True(outputGetResponse.Output.Properties.Serialization is JsonSerialization); JsonSerialization jsonSerializationInResponse2 = (JsonSerialization)outputGetResponse.Output.Properties.Serialization; Assert.Equal(Format.LineSeparated, jsonSerializationInResponse2.Properties.Format); Assert.True(outputGetResponse.Output.Properties.DataSource is EventHubOutputDataSource); EventHubOutputDataSource eventHubOutputDataSourceInResponse2 = (EventHubOutputDataSource)outputGetResponse.Output.Properties.DataSource; Assert.Equal(partitionKey, eventHubOutputDataSourceInResponse2.Properties.PartitionKey); // Test output connectivity DataSourceTestConnectionResponse response = client.Outputs.TestConnection(resourceGroupName, resourceName, outputName); Assert.Equal(OperationStatus.Succeeded, response.Status); Assert.Equal(DataSourceTestStatus.TestSucceeded, response.DataSourceTestStatus); // Update the output jsonSerialization = new JsonSerialization() { Properties = new JsonSerializationProperties() { Encoding = "UTF8", Format = Format.Array } }; string newPartitionKey = TestUtilities.GenerateName("NewPartitionKey"); eventHubOutputDataSource.Properties.PartitionKey = newPartitionKey; outputProperties.DataSource = eventHubOutputDataSource; outputProperties.Serialization = jsonSerialization; outputProperties.Etag = outputCreateOrUpdateResponse.Output.Properties.Etag; OutputPatchParameters outputPatchParameters = new OutputPatchParameters(outputProperties); OutputPatchResponse outputPatchResponse = client.Outputs.Patch(resourceGroupName, resourceName, outputName, outputPatchParameters); Assert.Equal(HttpStatusCode.OK, outputPatchResponse.StatusCode); Assert.True(outputPatchResponse.Properties.Serialization is JsonSerialization); JsonSerialization jsonSerializationInResponse3 = (JsonSerialization)outputPatchResponse.Properties.Serialization; Assert.Equal(Format.Array, jsonSerializationInResponse3.Properties.Format); Assert.True(outputPatchResponse.Properties.DataSource is EventHubOutputDataSource); EventHubOutputDataSource eventHubOutputDataSourceInResponse3 = (EventHubOutputDataSource)outputPatchResponse.Properties.DataSource; Assert.Equal(newPartitionKey, eventHubOutputDataSourceInResponse3.Properties.PartitionKey); Assert.NotNull(outputPatchResponse.Properties.Etag); Assert.NotEqual(outputCreateOrUpdateResponse.Output.Properties.Etag, outputPatchResponse.Properties.Etag); // Delete the output AzureOperationResponse deleteInputOperationResponse = client.Outputs.Delete(resourceGroupName, resourceName, outputName); Assert.Equal(HttpStatusCode.OK, deleteInputOperationResponse.StatusCode); // Check that there are 0 outputs in the job jobGetParameters = new JobGetParameters("outputs"); jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(0, 
jobGetResponse.Job.Properties.Outputs.Count); } finally { client.StreamingJobs.Delete(resourceGroupName, resourceName); resourceClient.ResourceGroups.Delete(resourceGroupName); } } }
public void Test_JobOperations_E2E() { BasicDelegatingHandler handler = new BasicDelegatingHandler(); using (var undoContext = UndoContext.Current) { undoContext.Start(); string resourceGroupName = TestUtilities.GenerateName("StreamAnalytics"); string resourceName = TestUtilities.GenerateName("MyStreamingJobSubmittedBySDK"); string serviceLocation = TestHelper.GetDefaultLocation(); var resourceClient = TestHelper.GetResourceClient(handler); var client = TestHelper.GetStreamAnalyticsManagementClient(handler); try { ResourceGroup resourceGroup = new ResourceGroup() { Location = serviceLocation }; resourceClient.ResourceGroups.CreateOrUpdate(resourceGroupName, resourceGroup); Job job = new Job(); job.Name = resourceName; job.Location = serviceLocation; // Construct the general properties for JobProperties JobProperties jobProperties = new JobProperties(); jobProperties.Sku = new Sku() { Name = "standard" }; jobProperties.EventsOutOfOrderPolicy = EventsOutOfOrderPolicy.Drop; jobProperties.EventsOutOfOrderMaxDelayInSeconds = 0; // Construct the Input StorageAccount storageAccount = new StorageAccount { AccountName = TestHelper.AccountName, AccountKey = TestHelper.AccountKey }; InputProperties inputProperties = new StreamInputProperties() { Serialization = new CsvSerialization() { Properties = new CsvSerializationProperties() { FieldDelimiter = ",", Encoding = "UTF8" } }, DataSource = new BlobStreamInputDataSource() { Properties = new BlobStreamInputDataSourceProperties() { StorageAccounts = new[] { storageAccount }, Container = "state", PathPattern = "" } } }; Input input1 = new Input("inputtest") { Properties = inputProperties }; jobProperties.Inputs = new[] { input1 }; // Construct the Output OutputProperties outputProperties = new OutputProperties(); SqlAzureOutputDataSource sqlAzureOutputDataSource = new SqlAzureOutputDataSource() { Properties = new SqlAzureOutputDataSourceProperties() { Server = TestHelper.Server, Database = TestHelper.Database, User = TestHelper.User, Password = TestHelper.Password, Table = "StateInfo" } }; outputProperties.DataSource = sqlAzureOutputDataSource; Output output1 = new Output("outputtest") { Properties = outputProperties }; jobProperties.Outputs = new Output[] { output1 }; // Construct the transformation Transformation transformation = new Transformation() { Name = "transformationtest", Properties = new TransformationProperties() { Query = "Select Id, Name from inputtest", StreamingUnits = 1 } }; jobProperties.Transformation = transformation; job.Properties = jobProperties; // Construct the JobCreateProperties JobCreateOrUpdateParameters jobCreateOrUpdateParameters = new JobCreateOrUpdateParameters(); jobCreateOrUpdateParameters.Job = job; // Create a streaming job JobCreateOrUpdateResponse jobCreateOrUpdateResponse = client.StreamingJobs.CreateOrUpdate(resourceGroupName, jobCreateOrUpdateParameters); Assert.Equal(HttpStatusCode.OK, jobCreateOrUpdateResponse.StatusCode); Assert.NotNull(jobCreateOrUpdateResponse.Job.Properties.Etag); // Get a streaming job to check JobGetParameters jobGetParameters = new JobGetParameters("inputs,transformation,outputs"); JobGetResponse jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(serviceLocation, jobGetResponse.Job.Location); Assert.Equal(resourceName, jobGetResponse.Job.Name); Assert.True(jobGetResponse.Job.Properties.Inputs[0].Properties is StreamInputProperties); StreamInputProperties 
streamInputProperties = jobGetResponse.Job.Properties.Inputs[0].Properties as StreamInputProperties; Assert.Equal("Stream", jobGetResponse.Job.Properties.Inputs[0].Properties.Type); Assert.Equal("Microsoft.Storage/Blob", streamInputProperties.DataSource.Type); Assert.Equal("Csv", streamInputProperties.Serialization.Type); Assert.Equal(EventsOutOfOrderPolicy.Drop, jobGetResponse.Job.Properties.EventsOutOfOrderPolicy); Assert.NotNull(jobGetResponse.Job.Properties.Etag); Assert.Equal(jobCreateOrUpdateResponse.Job.Properties.Etag, jobGetResponse.Job.Properties.Etag); // Patch the streaming job JobPatchParameters jobPatchParameters = new JobPatchParameters() { JobPatchRequest = new JobPatchRequest() { Properties = new JobProperties() { EventsOutOfOrderPolicy = EventsOutOfOrderPolicy.Adjust } } }; var jobPatchResponse = client.StreamingJobs.Patch(resourceGroupName, resourceName, jobPatchParameters); jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobPatchResponse.StatusCode); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(EventsOutOfOrderPolicy.Adjust, jobPatchResponse.Job.Properties.EventsOutOfOrderPolicy); Assert.Equal(EventsOutOfOrderPolicy.Adjust, jobGetResponse.Job.Properties.EventsOutOfOrderPolicy); JobListParameters parameters = new JobListParameters(string.Empty); JobListResponse response = client.StreamingJobs.ListJobsInResourceGroup(resourceGroupName, parameters); Assert.Equal(HttpStatusCode.OK, response.StatusCode); // Start a streaming job JobStartParameters jobStartParameters = new JobStartParameters() { OutputStartMode = OutputStartMode.LastOutputEventTime }; CloudException cloudException = Assert.Throws <CloudException>(() => client.StreamingJobs.Start(resourceGroupName, resourceName, jobStartParameters)); Assert.Equal("LastOutputEventTime must be available when OutputStartMode is set to LastOutputEventTime. Please make sure at least one output event has been processed. 
", cloudException.Error.Message); jobStartParameters.OutputStartMode = OutputStartMode.CustomTime; jobStartParameters.OutputStartTime = DateTime.Now; AzureOperationResponse jobStartOperationResponse = client.StreamingJobs.Start(resourceGroupName, resourceName, jobStartParameters); Assert.Equal(HttpStatusCode.OK, jobStartOperationResponse.StatusCode); // Get a streaming job to check jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.True(IsRunning(jobGetResponse.Job.Properties.JobState)); // Check diagnostics InputListResponse inputListResponse = client.Inputs.ListInputInJob(resourceGroupName, resourceName, new InputListParameters("*")); Assert.Equal(HttpStatusCode.OK, inputListResponse.StatusCode); Assert.NotEqual(0, inputListResponse.Value.Count); Assert.NotNull(inputListResponse.Value[0].Properties.Diagnostics); Assert.NotEqual(0, inputListResponse.Value[0].Properties.Diagnostics.Conditions.Count); Assert.NotNull(inputListResponse.Value[0].Properties.Diagnostics.Conditions[0].Code); Assert.NotNull(inputListResponse.Value[0].Properties.Diagnostics.Conditions[0].Message); // Stop a streaming job AzureOperationResponse jobStopOperationResponse = client.StreamingJobs.Stop(resourceGroupName, resourceName); Assert.Equal(HttpStatusCode.OK, jobStopOperationResponse.StatusCode); // Get a streaming job to check jobGetResponse = client.StreamingJobs.Get(resourceGroupName, resourceName, jobGetParameters); Assert.Equal(HttpStatusCode.OK, jobGetResponse.StatusCode); Assert.Equal(JobRunningState.Stopped, jobGetResponse.Job.Properties.JobState); // Delete a streaming job AzureOperationResponse jobDeleteOperationResponse = client.StreamingJobs.Delete(resourceGroupName, resourceName); Assert.Equal(HttpStatusCode.OK, jobDeleteOperationResponse.StatusCode); } finally { client.StreamingJobs.Delete(resourceGroupName, resourceName); resourceClient.ResourceGroups.Delete(resourceGroupName); } } }
public async Task RegistryManager_ImportDevices(StorageAuthenticationType storageAuthenticationType) { // arrange StorageContainer storageContainer = null; string deviceId = $"{nameof(RegistryManager_ImportDevices)}-{StorageContainer.GetRandomSuffix(4)}"; var registryManager = RegistryManager.CreateFromConnectionString(Configuration.IoTHub.ConnectionString); Logger.Trace($"Using deviceId {deviceId}"); try { string containerName = StorageContainer.BuildContainerName(nameof(RegistryManager_ImportDevices)); storageContainer = await StorageContainer .GetInstanceAsync(containerName) .ConfigureAwait(false); Logger.Trace($"Using container {storageContainer.Uri}"); Uri containerUri = storageAuthenticationType == StorageAuthenticationType.KeyBased ? storageContainer.SasUri : storageContainer.Uri; Stream devicesFile = ImportExportDevicesHelpers.BuildDevicesStream( new List <ExportImportDevice> { new ExportImportDevice( new Device(deviceId) { Authentication = new AuthenticationMechanism { Type = AuthenticationType.Sas } }, ImportMode.Create), }); await UploadFileAndConfirmAsync(storageContainer, devicesFile).ConfigureAwait(false); // act JobProperties importJobResponse = null; int tryCount = 0; while (true) { try { importJobResponse = await registryManager .ImportDevicesAsync( JobProperties.CreateForImportJob( containerUri.ToString(), containerUri.ToString(), null, storageAuthenticationType)) .ConfigureAwait(false); break; } // Concurrent jobs can be rejected, so implement a retry mechanism to handle conflicts with other tests catch (JobQuotaExceededException) when(++tryCount < MaxIterationWait) { Logger.Trace($"JobQuotaExceededException... waiting."); await Task.Delay(s_waitDuration).ConfigureAwait(false); continue; } } // wait for job to complete for (int i = 0; i < MaxIterationWait; ++i) { await Task.Delay(1000).ConfigureAwait(false); importJobResponse = await registryManager.GetJobAsync(importJobResponse.JobId).ConfigureAwait(false); Logger.Trace($"Job {importJobResponse.JobId} is {importJobResponse.Status} with progress {importJobResponse.Progress}%"); if (!s_incompleteJobs.Contains(importJobResponse.Status)) { break; } } // assert importJobResponse.Status.Should().Be(JobStatus.Completed, "Otherwise import failed"); importJobResponse.FailureReason.Should().BeNullOrEmpty("Otherwise import failed"); // should not throw due to 404, but device may not immediately appear in registry Device device = null; for (int i = 0; i < MaxIterationWait; ++i) { await Task.Delay(s_waitDuration).ConfigureAwait(false); try { device = await registryManager.GetDeviceAsync(deviceId).ConfigureAwait(false); break; } catch (Exception ex) { Logger.Trace($"Could not find device on iteration {i} due to [{ex.Message}]"); } } if (device == null) { Assert.Fail($"Device {deviceId} not found in registry manager"); } } finally { try { storageContainer?.Dispose(); await registryManager.RemoveDeviceAsync(deviceId).ConfigureAwait(false); } catch { } } }
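ImportExportDevicesHelpers.BuildDevicesStream, used by the test above, is a test helper that is not shown. Its job is to turn the device list into the newline-delimited JSON that an import job consumes. A minimal sketch of such a helper follows; the Newtonsoft.Json serializer and the class layout are assumptions, not the SDK's actual implementation.

using System.Collections.Generic;
using System.IO;
using System.Text;
using Microsoft.Azure.Devices;
using Newtonsoft.Json;

internal static class ImportExportDevicesHelpers
{
    // Serializes each device as one JSON line, matching the devices.txt format
    // consumed by ImportDevicesAsync, and returns the result as a readable stream.
    public static Stream BuildDevicesStream(IReadOnlyList<ExportImportDevice> devices)
    {
        var sb = new StringBuilder();
        foreach (ExportImportDevice device in devices)
        {
            sb.AppendLine(JsonConvert.SerializeObject(device));
        }

        var stream = new MemoryStream(Encoding.UTF8.GetBytes(sb.ToString()));
        stream.Position = 0;
        return stream;
    }
}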
/// <summary> /// Create a job. /// </summary> /// <param name="createJobParams">Job properties entered via powershell.</param> /// <returns>The Job definition.</returns> public PSSchedulerJobDefinition CreateJob(PSJobParams createJobParams) { if (string.IsNullOrWhiteSpace(createJobParams.ResourceGroupName)) { throw new PSManagement.PSArgumentNullException(paramName: "ResourceGroupName"); } if (string.IsNullOrWhiteSpace(createJobParams.JobCollectionName)) { throw new PSManagement.PSArgumentNullException(paramName: "JobCollectionName"); } if (string.IsNullOrWhiteSpace(createJobParams.JobName)) { throw new PSManagement.PSArgumentNullException(paramName: "JobName"); } if (!DoesResourceGroupExists(createJobParams.ResourceGroupName)) { throw new PSManagement.PSArgumentException(Resources.SchedulerInvalidResourceGroup); } IList<JobCollectionDefinition> jobCollection = ListJobCollection(createJobParams.ResourceGroupName, createJobParams.JobCollectionName); if (jobCollection == null || jobCollection.Count < 1) { throw new PSManagement.PSInvalidOperationException(string.Format(Resources.JobCollectionDoesnotExist, createJobParams.JobCollectionName, createJobParams.ResourceGroupName)); } else { if (JobExists(createJobParams.ResourceGroupName, createJobParams.JobCollectionName, createJobParams.JobName)) { throw new PSManagement.PSArgumentException(string.Format(Resources.SchedulerExistingJob, createJobParams.JobName, createJobParams.JobCollectionName)); } IList<JobDefinition> listOfJobs = ListJobs(createJobParams.ResourceGroupName, createJobParams.JobCollectionName, jobState: null); if (listOfJobs != null) { Validate(jobCollection[0], listOfJobs.Count); } } JobAction jobAction = this.GetJobAction(createJobParams); JobRecurrence jobRecurrence = this.GetJobRecurrence(createJobParams.JobRecurrence); var properties = new JobProperties() { Action = jobAction, Recurrence = jobRecurrence, StartTime = createJobParams.StartTime, State = createJobParams.JobState.GetValueOrDefaultEnum<JobState?>(defaultValue: null) }; var jobDefinition = new JobDefinition(name: createJobParams.JobName) { Properties = properties }; JobDefinition jobDefinitionResult = this.SchedulerManagementClient.Jobs.CreateOrUpdate(createJobParams.ResourceGroupName, createJobParams.JobCollectionName, createJobParams.JobName, jobDefinition); return Converter.ConvertJobDefinitionToPS(jobDefinitionResult); }
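For reference, the JobDefinition that CreateJob ultimately hands to SchedulerManagementClient.Jobs.CreateOrUpdate looks roughly like the following for an hourly HTTP GET action. This is illustrative only: the job name and target URI are placeholders, and the model members are assumed from the Microsoft.Azure.Management.Scheduler models used in the method above.

using System;
using Microsoft.Azure.Management.Scheduler.Models;

// Illustrative only: a JobDefinition equivalent to what CreateJob builds when the
// caller requests an hourly HTTP GET action. Names and the URI are placeholders.
var jobDefinition = new JobDefinition(name: "MyJob")
{
    Properties = new JobProperties
    {
        Action = new JobAction
        {
            Type = JobActionType.Http,
            Request = new HttpRequest
            {
                Method = "GET",
                Uri = "https://example.com/health",
            },
        },
        Recurrence = new JobRecurrence
        {
            Frequency = RecurrenceFrequency.Hour,
            Interval = 1,
        },
        StartTime = DateTime.UtcNow,
        State = JobState.Enabled,
    },
};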