/// <summary>
/// Polls each WAD diagnostics table for the given OS type until every table has at
/// least one row for this deployment/host written in the last five minutes, or the
/// timeout elapses.
/// </summary>
/// <param name="storageAccountName">Diagnostics storage account to query.</param>
/// <param name="resId">Deployment id used in the table filter.</param>
/// <param name="host">Host name used in the table filter.</param>
/// <param name="waitChar">Character echoed to the console on each poll.</param>
/// <param name="osType">Key into AEMExtensionConstants.WADTablesV2.</param>
/// <param name="TimeoutinMinutes">Overall timeout shared across all tables.</param>
/// <returns>
/// True when all tables contain recent data; also true when the storage account
/// cannot be resolved (the check is skipped). False when any table times out.
/// </returns>
internal bool CheckDiagnosticsTable(string storageAccountName, string resId, string host, string waitChar, string osType, int TimeoutinMinutes = 15)
{
    var tableExists = true;

    StorageAccount account = null;
    if (!String.IsNullOrEmpty(storageAccountName))
    {
        account = this.GetStorageAccountFromCache(storageAccountName);
    }

    if (account != null)
    {
        var endpoint = this.GetCoreEndpoint(storageAccountName);
        var key = this.GetAzureStorageKeyFromCache(storageAccountName);
        var credentials = new StorageCredentials(storageAccountName, key);
        var cloudStorageAccount = new CloudStorageAccount(credentials, endpoint, true);
        var tableClient = cloudStorageAccount.CreateCloudTableClient();

        // One shared deadline for all tables; only rows from the last 5 minutes count.
        var checkStart = DateTime.Now;
        var searchTime = DateTime.UtcNow.AddMinutes(-5);

        foreach (var tableName in AEMExtensionConstants.WADTablesV2[osType])
        {
            var query = TableQuery.CombineFilters(
                TableQuery.GenerateFilterCondition("DeploymentId", QueryComparisons.Equal, resId),
                TableOperators.And,
                TableQuery.CombineFilters(
                    TableQuery.GenerateFilterCondition("Host", QueryComparisons.Equal, host),
                    TableOperators.And,
                    TableQuery.GenerateFilterConditionForDate("Timestamp", QueryComparisons.GreaterThanOrEqual, searchTime)));

            var perfCounterTable = tableClient.GetTableReference(tableName);

            bool wait = true;
            while (wait)
            {
                var results = perfCounterTable.ExecuteQuerySegmentedAsync(new TableQuery() { FilterString = query }, token: null)
                    .ConfigureAwait(false).GetAwaiter().GetResult();

                if (results.Any())
                {
                    // Recent data found for this table - move on to the next one.
                    // (The original "tableExists &= true;" here was a no-op and was removed.)
                    break;
                }

                WriteHost(waitChar, newLine: false);
                TestMockSupport.Delay(5000);
                wait = ((DateTime.Now) - checkStart).TotalMinutes < TimeoutinMinutes;
            }

            if (!wait)
            {
                WriteVerbose("PerfCounter Table " + tableName + " not found");
                tableExists = false;
                break;
            }
        }
    }

    return tableExists;
}
/// <summary>
/// Tracks the upload progress in the PowerShell console.
/// </summary>
/// <param name="uploadTask">The task that tracks the upload.</param>
/// <param name="uploadProgress">The upload progress that will be displayed in the console.</param>
/// <param name="commandToUpdateProgressFor">Cmdlet whose progress bar is updated; may be null.</param>
/// <param name="token">Token used to observe cancellation requests.</param>
private void TrackUploadProgress(Task uploadTask, ProgressRecord uploadProgress, Cmdlet commandToUpdateProgressFor, CancellationToken token)
{
    // Seed the timestamp in the past so the first progress update is written immediately.
    var lastUpdate = DateTime.Now.Subtract(TimeSpan.FromSeconds(2));
    while (!uploadTask.IsCompleted && !uploadTask.IsCanceled)
    {
        if (token.IsCancellationRequested)
        {
            // we are done tracking progress and will just break and let the task clean itself up.
            try
            {
                uploadTask.Wait();
            }
            catch (OperationCanceledException)
            {
                if (uploadTask.IsCanceled)
                {
                    uploadTask.Dispose();
                }
            }
            catch (AggregateException ex)
            {
                // Only swallow cancellation; anything else is a real failure.
                if (ex.InnerExceptions.OfType<OperationCanceledException>().Any())
                {
                    if (uploadTask.IsCanceled)
                    {
                        uploadTask.Dispose();
                    }
                }
                else
                {
                    throw;
                }
            }

            break;
        }

        if (DateTime.Now - lastUpdate > TimeSpan.FromSeconds(1))
        {
            lock (ConsoleOutputLock)
            {
                if (commandToUpdateProgressFor != null && !token.IsCancellationRequested && !commandToUpdateProgressFor.Stopping)
                {
                    commandToUpdateProgressFor.WriteProgress(uploadProgress);
                }
            }

            // FIX: refresh the timestamp so the one-second throttle above actually
            // limits the update rate. It was previously never reassigned, so the
            // progress bar was rewritten on every 250 ms poll.
            lastUpdate = DateTime.Now;
        }

        TestMockSupport.Delay(250);
    }

    if (uploadTask.IsCanceled || token.IsCancellationRequested)
    {
        uploadProgress.RecordType = ProgressRecordType.Completed;
    }
    else if (uploadTask.IsFaulted && uploadTask.Exception != null)
    {
        // If there are errors, raise them to the user.
        if (uploadTask.Exception.InnerException != null)
        {
            // we only go three levels deep. This is the Inception rule.
            if (uploadTask.Exception.InnerException.InnerException != null)
            {
                throw uploadTask.Exception.InnerException.InnerException;
            }

            throw uploadTask.Exception.InnerException;
        }

        throw uploadTask.Exception;
    }
    else
    {
        // finally execution is finished, set progress state to completed.
        uploadProgress.PercentComplete = 100;
        uploadProgress.RecordType = ProgressRecordType.Completed;
        if (commandToUpdateProgressFor != null)
        {
            commandToUpdateProgressFor.WriteProgress(uploadProgress);
        }
    }
}
/// <summary>
/// Waits until the target WAD table exists and contains at least one row matching
/// <paramref name="FilterString"/>, polling every 5 seconds until the timeout elapses.
/// </summary>
/// <param name="StorageAccountName">Diagnostics storage account to query.</param>
/// <param name="TableName">Table to check when <paramref name="UseNewTableNames"/> is false.</param>
/// <param name="FilterString">OData filter applied to the table query.</param>
/// <param name="WaitChar">Character echoed to the console on each poll.</param>
/// <param name="UseNewTableNames">When true, look for the first table named "WADMetricsPT1M*" instead of <paramref name="TableName"/>.</param>
/// <param name="TimeoutinMinutes">Overall polling timeout.</param>
/// <returns>True when a matching row was found before the timeout; false otherwise (including when the account cannot be resolved).</returns>
internal bool CheckTableAndContent(string StorageAccountName, string TableName, string FilterString, string WaitChar, bool UseNewTableNames, int TimeoutinMinutes = 15)
{
    var tableExists = false;

    StorageAccount account = null;
    if (!String.IsNullOrEmpty(StorageAccountName))
    {
        account = this.GetStorageAccountFromCache(StorageAccountName);
    }

    if (account != null)
    {
        var endpoint = this.GetCoreEndpoint(StorageAccountName);
        var key = this.GetAzureStorageKeyFromCache(StorageAccountName);
        var credentials = new StorageCredentials(StorageAccountName, key);
        var cloudStorageAccount = new CloudStorageAccount(credentials, endpoint, true);
        var tableClient = cloudStorageAccount.CreateCloudTableClient();

        // Resolves the table to inspect. Best-effort by design: on failure the
        // previously resolved reference (possibly null) is kept and the next poll
        // retries. This factors out the snippet that was duplicated before and
        // after the polling loop.
        Func<CloudTable, CloudTable> resolveTable = current =>
        {
            try
            {
                if (UseNewTableNames)
                {
                    //#table name should be sorted
                    return tableClient.ListTablesSegmentedAsync(currentToken: null)
                        .ConfigureAwait(false).GetAwaiter().GetResult()
                        .FirstOrDefault(tab => tab.Name.StartsWith("WADMetricsPT1M"));
                }

                return tableClient.GetTableReference(TableName);
            }
            catch
            {
                return current;
            }
        };

        var checkStart = DateTime.Now;
        var wait = true;
        CloudTable table = resolveTable(null);

        while (wait)
        {
            if (table != null && table.ExistsAsync().ConfigureAwait(false).GetAwaiter().GetResult())
            {
                TableQuery query = new TableQuery();
                query.FilterString = FilterString;

                var results = table.ExecuteQuerySegmentedAsync(query, token: null)
                    .ConfigureAwait(false).GetAwaiter().GetResult();
                if (results.Any())
                {
                    tableExists = true;
                    break;
                }
            }

            WriteHost(WaitChar, newLine: false);
            TestMockSupport.Delay(5000);

            table = resolveTable(table);
            wait = ((DateTime.Now) - checkStart).TotalMinutes < TimeoutinMinutes;
        }
    }

    return tableExists;
}
/// <summary>
/// Scenario test: creates a new AAD user under the admin account, grants it the
/// Reader role on the first resource group, then logs in as that user and verifies
/// its effective permissions ("*/read"). The user and role assignment are removed
/// in the finally block regardless of test outcome.
/// </summary>
public void RaUserPermissions()
{
    User newUser = null;
    ResourceGroup resourceGroup = null;
    // Fixed GUID so the recorded test session replays deterministically.
    string roleAssignmentId = "A807281A-2F74-44B9-B862-C0D3683ADCC9";
    string userName = null;
    string userPass = null;
    string userPermission = "*/read";
    string roleDefinitionName = "Reader";
    string newUserObjectId = null;
    var controllerAdmin = ResourcesController.NewInstance;
    try
    {
        // Generate new user under admin account
        controllerAdmin.RunPsTestWorkflow(
            _logger,
            // scriptBuilder
            () =>
            {
                userName = TestUtilities.GenerateName("aduser");
                userPass = TestUtilities.GenerateName("adpass") + "0#$";
                var upn = userName + "@" + controllerAdmin.UserDomain;
                var parameter = new UserCreateParameters
                {
                    UserPrincipalName = upn,
                    DisplayName = userName,
                    AccountEnabled = true,
                    MailNickname = userName + "test",
                    PasswordProfile = new PasswordProfile
                    {
                        ForceChangePasswordNextLogin = false,
                        Password = userPass
                    }
                };
                newUser = controllerAdmin.GraphClient.Users.Create(parameter);
                newUserObjectId = newUser.ObjectId;
                resourceGroup = controllerAdmin.ResourceManagementClient.ResourceGroups
                    .List()
                    .First();
                // Wait to allow newly created object changes to propagate
                TestMockSupport.Delay(20000);
                return (new[]
                {
                    string.Format(
                        "CreateRoleAssignment '{0}' '{1}' '{2}' '{3}'",
                        roleAssignmentId,
                        newUserObjectId,
                        roleDefinitionName,
                        resourceGroup.Name)
                });
            },
            // cleanup
            null,
            MethodBase.GetCurrentMethod().ReflectedType?.ToString(),
            MethodBase.GetCurrentMethod().Name + "_Setup");

        // login as different user and run the test
        var controllerUser = ResourcesController.NewInstance;
        controllerUser.RunPsTestWorkflow(
            _logger,
            // scriptBuilder
            () =>
            {
                // Wait to allow for the role assignment to propagate
                TestMockSupport.Delay(20000);
                return (new[]
                {
                    string.Format(
                        "Test-RaUserPermissions '{0}' '{1}'",
                        resourceGroup.Name,
                        userPermission)
                });
            },
            // cleanup
            null,
            MethodBase.GetCurrentMethod().ReflectedType?.ToString(),
            MethodBase.GetCurrentMethod().Name + "_Test");
    }
    finally
    {
        // remove created user and assignment
        controllerAdmin = ResourcesController.NewInstance;
        controllerAdmin.RunPsTestWorkflow(
            // scriptBuilder
            null,
            // initialize
            null,
            // cleanup
            () =>
            {
                if (newUser != null)
                {
                    controllerAdmin.GraphClient.Users.Delete(newUser.ObjectId);
                }
                if (resourceGroup != null)
                {
                    // NOTE(review): the .ToString() on the delete result is discarded;
                    // presumably a leftover - confirm it can be removed.
                    controllerAdmin.AuthorizationManagementClient.RoleAssignments.Delete(resourceGroup.Id, roleAssignmentId).ToString();
                }
            },
            MethodBase.GetCurrentMethod().ReflectedType?.ToString(),
            MethodBase.GetCurrentMethod().Name + "_Cleanup");
    }
}
/// <summary>
/// Waits for the given backup job(s) to finish, polling every 30 seconds until all
/// jobs complete or the optional timeout (in seconds) elapses, then writes the final
/// job objects to the pipeline. Accepts a single JobBase, a List&lt;JobBase&gt;, or an
/// object[] of JobBase (optionally wrapped in PSObject).
/// </summary>
public override void ExecuteCmdlet()
{
    ExecutionBlock(() =>
    {
        base.ExecuteCmdlet();

        List<string> jobsToWaitOn = new List<string>();
        List<JobBase> finalJobs = new List<JobBase>();
        // FIX: jobs that finish are removed from jobsToWaitOn, but finalJobs used
        // to be rebuilt each pass from that shrinking list - so jobs completed in
        // earlier passes were silently dropped from the output. Completed jobs are
        // now accumulated here and merged into every rebuild of finalJobs.
        List<JobBase> completedJobs = new List<JobBase>();

        object castedObj;
        if (GetCastedObjFromPSObj<JobBase>(Job, out castedObj))
        {
            JobBase justJob = castedObj as JobBase;
            jobsToWaitOn.Add(justJob.JobId);
        }
        else if (GetCastedObjFromPSObj<List<JobBase>>(Job, out castedObj))
        {
            List<JobBase> jobsList = castedObj as List<JobBase>;
            foreach (var job in jobsList)
            {
                jobsToWaitOn.Add(job.JobId);
            }
        }
        else if (Job.GetType() == typeof(object[]))
        {
            object[] castedJobsList = Job as object[];
            object castedJob;
            foreach (var job in castedJobsList)
            {
                if (GetCastedObjFromPSObj<JobBase>(job, out castedJob))
                {
                    jobsToWaitOn.Add((castedJob as JobBase).JobId);
                }
                else
                {
                    throw new Exception(string.Format(Resources.JobWaitJobInvalidInput, Job.GetType().FullName));
                }
            }
        }
        else
        {
            // not a valid object. throw exception.
            throw new Exception(string.Format(Resources.JobWaitJobInvalidInput, Job.GetType().FullName));
        }

        // now wait until timeout happens or all jobs complete execution
        DateTime waitBeginning = DateTime.UtcNow;
        while (true)
        {
            if (Timeout.HasValue)
            {
                if (DateTime.UtcNow.Subtract(waitBeginning) >= TimeSpan.FromSeconds(Timeout.Value))
                {
                    break;
                }
            }

            bool hasUnfinishedJob = false;
            finalJobs.Clear();
            finalJobs.AddRange(completedJobs);
            for (int i = 0; i < jobsToWaitOn.Count; i++)
            {
                string jobId = jobsToWaitOn[i];
                var updatedJob = JobConversions.GetPSJob(
                    ServiceClientAdapter.GetJob(jobId)
                    );
                if (IsJobInProgress(updatedJob))
                {
                    hasUnfinishedJob = true;
                }
                else
                {
                    // removing finished job from the polling list; remember its
                    // final state for the output.
                    jobsToWaitOn.RemoveAt(i);
                    i--;
                    completedJobs.Add(updatedJob);
                }

                finalJobs.Add(updatedJob);
            }

            if (!hasUnfinishedJob)
            {
                break;
            }

            // sleep for 30 seconds before checking again
            TestMockSupport.Delay(30 * 1000);
        }

        WriteObject(finalJobs, enumerateCollection: true);
    });
}
/// <summary>
/// Removes the Service Bus migration configuration for the given namespace and
/// waits briefly for the operation to settle. Always returns true.
/// </summary>
/// <param name="resourceGroupName">Resource group containing the namespace.</param>
/// <param name="namespaceName">Service Bus namespace name.</param>
/// <returns>Always true; failures surface as exceptions from the client call.</returns>
// NOTE(review): despite the "Delete" name this invokes MigrationConfigs.Revert -
// the same call as SetServiceBusRevertMigrationConfiguration. Confirm whether
// MigrationConfigs.Delete was intended here.
public bool DeleteServiceBusMigrationConfiguration(string resourceGroupName, string namespaceName)
{
    Client.MigrationConfigs.Revert(resourceGroupName, namespaceName);
    // Pause to let the service-side operation settle - TODO confirm this delay is still needed.
    TestMockSupport.Delay(5000);
    return (true);
}
/// <summary>
/// Reverts the migration configuration of the given Service Bus namespace, then
/// pauses briefly so the service-side change can settle before the caller proceeds.
/// </summary>
/// <param name="resourceGroupName">Resource group containing the namespace.</param>
/// <param name="namespaceName">Service Bus namespace name.</param>
public void SetServiceBusRevertMigrationConfiguration(string resourceGroupName, string namespaceName)
{
    const int settleDelayMilliseconds = 5000;

    Client.MigrationConfigs.Revert(resourceGroupName, namespaceName);
    TestMockSupport.Delay(settleDelayMilliseconds);
}
/// <summary>
/// Deletes the geo-disaster-recovery alias on the given Service Bus namespace and
/// pauses briefly so the deletion can settle before the caller proceeds.
/// </summary>
/// <param name="resourceGroupName">Resource group containing the namespace.</param>
/// <param name="namespaceName">Service Bus namespace name.</param>
/// <param name="alias">Disaster-recovery alias to delete.</param>
/// <returns>Always true; failures surface as exceptions from the client call.</returns>
public bool DeleteServiceBusDRConfiguration(string resourceGroupName, string namespaceName, string alias)
{
    const int settleDelayMilliseconds = 5000;

    Client.DisasterRecoveryConfigs.Delete(resourceGroupName, namespaceName, alias);
    TestMockSupport.Delay(settleDelayMilliseconds);

    return true;
}
/// <summary>
/// Breaks the geo-disaster-recovery pairing for the given alias on a Service Bus
/// namespace, then pauses briefly so the change can settle before the caller proceeds.
/// </summary>
/// <param name="resourceGroupName">Resource group containing the namespace.</param>
/// <param name="namespaceName">Service Bus namespace name.</param>
/// <param name="alias">Disaster-recovery alias whose pairing is broken.</param>
public void SetServiceBusDRConfigurationBreakPairing(string resourceGroupName, string namespaceName, string alias)
{
    const int settleDelayMilliseconds = 5000;

    Client.DisasterRecoveryConfigs.BreakPairing(resourceGroupName, namespaceName, alias);
    TestMockSupport.Delay(settleDelayMilliseconds);
}
/// <summary>
/// Scenario test: creates a new AAD user under the admin account, grants it the
/// Reader role on the first resource group, logs in as that user (credentials are
/// injected through the test factory), verifies its effective "*/read" permission,
/// and finally deletes the user and the role assignment.
/// </summary>
public void RaUserPermissions()
{
    User newUser = null;
    ResourceGroup resourceGroup = null;
    // Fixed GUID so the recorded test session replays deterministically.
    string roleAssignmentId = "1BAF0B29-608A-424F-B54F-92FCDB343FFF";
    string userName = null;
    string userPass = null;
    string userPermission = "*/read";
    string roleDefinitionName = "Reader";
    var controllerAdmin = ResourcesController.NewInstance;

    // Generate new user under admin account
    controllerAdmin.RunPsTestWorkflow(
        // scriptBuilder
        () =>
        {
            userName = TestUtilities.GenerateName("aduser");
            userPass = TestUtilities.GenerateName("adpass") + "0#$";
            var upn = userName + "@" + controllerAdmin.UserDomain;
            var parameter = new UserCreateParameters
            {
                UserPrincipalName = upn,
                DisplayName = userName,
                AccountEnabled = true,
                MailNickname = userName + "test",
                PasswordProfile = new PasswordProfile
                {
                    ForceChangePasswordNextLogin = false,
                    Password = userPass
                }
            };
            newUser = controllerAdmin.GraphClient.Users.Create(parameter);
            resourceGroup = controllerAdmin.ResourceManagementClient.ResourceGroups
                .List()
                .First();
            // Wait to allow newly created object changes to propagate
            TestMockSupport.Delay(20000);
            return (new[]
            {
                string.Format(
                    "CreateRoleAssignment '{0}' '{1}' '{2}' '{3}'",
                    roleAssignmentId,
                    newUser.ObjectId,
                    roleDefinitionName,
                    resourceGroup.Name)
            });
        },
        // initialize
        null,
        // cleanup
        null,
        TestUtilities.GetCallingClass(),
        TestUtilities.GetCurrentMethodName() + "_Setup");

    // login as different user and run the test
    var controllerUser = ResourcesController.NewInstance;
    controllerUser.RunPsTestWorkflow(
        // scriptBuilder
        () =>
        {
            return (new[]
            {
                string.Format(
                    "Test-RaUserPermissions '{0}' '{1}'",
                    resourceGroup.Name,
                    userPermission)
            });
        },
        // initialize: run the test session under the newly created user's credentials
        (testFactory) =>
        {
            if (newUser != null)
            {
                testFactory.CustomEnvValues[TestEnvironment.UserIdKey] = userName + "@" + controllerAdmin.UserDomain;
                testFactory.CustomEnvValues[TestEnvironment.AADPasswordKey] = userPass;
            }
        },
        // cleanup
        null,
        TestUtilities.GetCallingClass(),
        TestUtilities.GetCurrentMethodName() + "_Test");

    // remove created user
    controllerAdmin = ResourcesController.NewInstance;
    controllerAdmin.RunPsTestWorkflow(
        // scriptBuilder
        null,
        // initialize
        null,
        // cleanup
        () =>
        {
            if (newUser != null)
            {
                controllerAdmin.GraphClient.Users.Delete(newUser.ObjectId);
            }
            controllerAdmin.AuthorizationManagementClient.RoleAssignments.Delete(resourceGroup.Id, new Guid(roleAssignmentId));
        },
        TestUtilities.GetCallingClass(),
        TestUtilities.GetCurrentMethodName() + "_Cleanup");
}
/// <summary>
/// Creates an AAD application (unless an ApplicationId was supplied), a service
/// principal for it, and - unless SkipAssignment is set - retries assigning the
/// requested role over the requested scope to the new principal, warning the user
/// if the assignment never succeeds.
/// </summary>
private void CreateSimpleServicePrincipal()
{
    var subscriptionId = DefaultProfile.DefaultContext.Subscription.Id;
    if (!this.IsParameterBound(c => c.Scope))
    {
        // Default scope: the whole current subscription.
        Scope = string.Format("/subscriptions/{0}", subscriptionId);
        WriteVerbose(string.Format("No scope provided - using the default scope '{0}'", Scope));
    }

    AuthorizationClient.ValidateScope(Scope, true);

    if (!this.IsParameterBound(c => c.Role))
    {
        Role = "Contributor";
        WriteVerbose(string.Format("No role provided - using the default role '{0}'", Role));
    }

    if (!this.IsParameterBound(c => c.StartDate))
    {
        DateTime currentTime = DateTime.UtcNow;
        StartDate = currentTime;
        WriteVerbose("No start date provided - using the current time as default.");
    }

    if (!this.IsParameterBound(c => c.EndDate))
    {
        EndDate = StartDate.AddYears(1);
        WriteVerbose("No end date provided - using the default value of one year after the start date.");
    }

    if (!this.IsParameterBound(c => c.DisplayName))
    {
        DisplayName = "azure-powershell-" + StartDate.ToString("MM-dd-yyyy-HH-mm-ss");
        WriteVerbose(string.Format("No display name provided - using the default display name of '{0}'", DisplayName));
    }

    var identifierUri = "http://" + DisplayName;

    // Handle credentials
    if (!this.IsParameterBound(c => c.Password))
    {
        // If no credentials provided, set the password to a randomly generated GUID
        Password = Guid.NewGuid().ToString().ConvertToSecureString();
    }

    // Create an application and get the applicationId
    var passwordCredential = new PSADPasswordCredential()
    {
        StartDate = StartDate,
        EndDate = EndDate,
        KeyId = Guid.NewGuid(),
        Password = SecureStringExtensions.ConvertToString(Password)
    };

    if (!this.IsParameterBound(c => c.ApplicationId))
    {
        CreatePSApplicationParameters appParameters = new CreatePSApplicationParameters
        {
            DisplayName = DisplayName,
            IdentifierUris = new[] { identifierUri },
            HomePage = identifierUri,
            PasswordCredentials = new PSADPasswordCredential[] { passwordCredential }
        };

        if (ShouldProcess(target: appParameters.DisplayName, action: string.Format("Adding a new application for with display name '{0}'", appParameters.DisplayName)))
        {
            var application = ActiveDirectoryClient.CreateApplication(appParameters);
            ApplicationId = application.ApplicationId;
            WriteVerbose(string.Format("No application id provided - created new AD application with application id '{0}'", ApplicationId));
        }
    }

    CreatePSServicePrincipalParameters createParameters = new CreatePSServicePrincipalParameters
    {
        ApplicationId = ApplicationId,
        AccountEnabled = true,
        PasswordCredentials = new PSADPasswordCredential[] { passwordCredential }
    };

    if (ShouldProcess(target: createParameters.ApplicationId.ToString(), action: string.Format("Adding a new service principal to be associated with an application having AppId '{0}'", createParameters.ApplicationId)))
    {
        var servicePrincipal = ActiveDirectoryClient.CreateServicePrincipal(createParameters);
        WriteObject(servicePrincipal);
        if (this.IsParameterBound(c => c.SkipAssignment))
        {
            WriteVerbose("Skipping role assignment for the service principal.");
            return;
        }

        FilterRoleAssignmentsOptions parameters = new FilterRoleAssignmentsOptions()
        {
            Scope = this.Scope,
            RoleDefinitionName = this.Role,
            ADObjectFilter = new ADObjectFilterOptions
            {
                SPN = servicePrincipal.ApplicationId.ToString(),
                Id = servicePrincipal.Id.ToString()
            },
            ResourceIdentifier = new ResourceIdentifier()
            {
                Subscription = subscriptionId
            }
        };

        // The new principal may take a while to propagate through the directory,
        // so retry the role assignment a bounded number of times.
        var roleAssigned = false;
        for (var i = 0; i < 6; i++)
        {
            try
            {
                TestMockSupport.Delay(5000);
                PoliciesClient.CreateRoleAssignment(parameters);
                var ra = PoliciesClient.FilterRoleAssignments(parameters, subscriptionId);
                if (ra != null)
                {
                    WriteVerbose(string.Format("Role assignment with role '{0}' and scope '{1}' successfully created for the created service principal.", this.Role, this.Scope));
                    roleAssigned = true;
                    break;
                }
            }
            catch (Exception)
            {
                // Swallow and retry - propagation delays routinely fail the first attempts.
            }
        }

        if (!roleAssigned)
        {
            // FIX: the loop previously exhausted all retries silently; surface the
            // failure so the user knows the principal exists without the role.
            WriteWarning(string.Format("Failed to create role assignment with role '{0}' over scope '{1}' for the new service principal after 6 attempts.", this.Role, this.Scope));
        }
    }
}
/// <summary>
/// Track resource upgrade operation: polls the vault upgrade status until it leaves
/// the InProgress state, the 6000-second timeout elapses, or the user stops the
/// cmdlet, reporting progress along the way, then writes the upgrade outcome.
/// </summary>
/// <param name="details">StartVaultUpgrade response.</param>
private void WaitForJobCompletion(ResourceUpgradeDetails details)
{
    TrackResourceUpgradeResponse response = null;
    DateTime startTime = DateTime.Now;
    double taskTimeoutInSeconds = 6000;
    double elapsedSeconds = 0;
    ProgressRecord record = new ProgressRecord(
        0,
        Properties.Resources.VaultUpgradeInProgress,
        Properties.Resources.WaitingForCompletion);

    do
    {
        TestMockSupport.Delay(PSRecoveryServicesClient.TimeToSleepBeforeFetchingJobDetailsAgain);
        response = this.RecoveryServicesClient.TrackVaultUpgrade(
            this.VaultName,
            this.Location,
            this.ResourceType);

        elapsedSeconds = DateTime.Now.Subtract(startTime).TotalSeconds;
        // FIX: clamp to 100. elapsedSeconds can overshoot the timeout before the
        // loop condition is evaluated, and ProgressRecord.PercentComplete throws
        // ArgumentOutOfRangeException for values greater than 100.
        record.PercentComplete = Math.Min(100, (int)elapsedSeconds * 100 / (int)taskTimeoutInSeconds);
        this.WriteProgress(record);
    }
    while (response.OperationStatus == Constants.InProgress &&
           elapsedSeconds < taskTimeoutInSeconds &&
           !this.StopProcessingFlag);

    record.RecordType = ProgressRecordType.Completed;
    this.WriteProgress(record);

    string operationResult = string.Empty;
    string operationStatus = string.Empty;
    string message = string.Empty;

    if (response.OperationStatus == Constants.Completed)
    {
        operationResult = response.OperationResult;
        operationStatus = response.OperationStatus;
        if (response.OperationResult == Constants.Succeeded)
        {
            message = string.Format(
                Properties.Resources.VaultUpgradeSucceded,
                this.ResourceType,
                this.VaultName);
        }
        else
        {
            message = Properties.Resources.VaultUpgradeNotSucceded;
        }
    }
    else if (this.StopProcessingFlag)
    {
        // User pressed Ctrl+C: the upgrade keeps running server-side.
        operationResult = VaultUpgradeOperationResult.Unavailable.ToString();
        operationStatus = VaultUpgradeOperationResult.InProgress.ToString();
        message = Properties.Resources.VaultUpgradeTerminated;
    }
    else
    {
        operationResult = VaultUpgradeOperationResult.Failed.ToString();
        operationStatus = VaultUpgradeOperationResult.TimedOut.ToString();
        message = Properties.Resources.VaultUpgradeTimedOut;
    }

    this.WriteObject(
        new ASRVaultUpgradeResponse(details, operationResult, operationStatus, message));
}
/// <summary>
/// Tracks the task and shows the task progress or debug messages after a regular interval in the PowerShell console.
/// Call this method only if you want to do something for a task - like show progress, show debug messages
/// </summary>
/// <param name="task">The task that tracks the upload.</param>
/// <param name="commandToUpdateProgressFor">Commandlet to write to</param>
/// <param name="taskProgress">The upload progress that will be displayed in the console.</param>
/// <param name="token">Cancellation token</param>
private void TrackTaskProgress(Task task, Cmdlet commandToUpdateProgressFor, ProgressRecord taskProgress, CancellationToken token)
{
    var pscommandToUpdateProgressFor = (DataLakeStoreFileSystemCmdletBase)commandToUpdateProgressFor;
    // Seed the timestamp in the past so the first progress update is written immediately.
    var lastUpdate = DateTime.Now.Subtract(TimeSpan.FromSeconds(2));
    while (!task.IsCompleted && !task.IsCanceled)
    {
        if (token.IsCancellationRequested)
        {
            // we are done tracking progress and will just break and let the task clean itself up.
            WaitForTask(task, token);
            break;
        }

        if (DateTime.Now - lastUpdate > TimeSpan.FromSeconds(1))
        {
            if (taskProgress != null && !token.IsCancellationRequested && !commandToUpdateProgressFor.Stopping)
            {
                lock (ConsoleOutputLock)
                {
                    commandToUpdateProgressFor.WriteProgress(taskProgress);
                }
            }

            // FIX: refresh the timestamp so the one-second throttle above actually
            // limits the update rate. It was previously never reassigned, so
            // progress was rewritten on every 250 ms poll.
            lastUpdate = DateTime.Now;
        }

        // If debug is enabled then flush debug messsages
        if (_isDebugEnabled)
        {
            if (!token.IsCancellationRequested && !commandToUpdateProgressFor.Stopping &&
                pscommandToUpdateProgressFor.DebugMessages.Count > DebugMessageFlushThreshold)
            {
                lock (ConsoleOutputLock)
                {
                    FlushDebugMessages(DebugMessageFlushThreshold, pscommandToUpdateProgressFor);
                }
            }
        }

        TestMockSupport.Delay(250);
    }

    if (taskProgress != null && (task.IsCanceled || token.IsCancellationRequested))
    {
        taskProgress.RecordType = ProgressRecordType.Completed;
    }
    else if (task.IsFaulted && task.Exception != null)
    {
        // If there are errors, raise them to the user.
        if (task.Exception.InnerException != null)
        {
            // we only go three levels deep. This is the Inception rule.
            if (task.Exception.InnerException.InnerException != null)
            {
                throw task.Exception.InnerException.InnerException;
            }

            throw task.Exception.InnerException;
        }

        throw task.Exception;
    }
    else if (taskProgress != null)
    {
        // finally execution is finished, set progress state to completed.
        taskProgress.PercentComplete = 100;
        taskProgress.RecordType = ProgressRecordType.Completed;
        commandToUpdateProgressFor?.WriteProgress(taskProgress);
    }
}
/// <summary>
/// Creates an AAD application (unless an ApplicationId was supplied) with a
/// generated password credential, a service principal for it, and - unless role
/// assignment is skipped - retries assigning the requested role over the requested
/// scope, warning the user if the assignment never succeeds.
/// </summary>
private void CreateSimpleServicePrincipal()
{
    var subscriptionId = DefaultContext.Subscription?.Id;
    if (!this.IsParameterBound(c => c.StartDate))
    {
        DateTime currentTime = DateTime.UtcNow;
        StartDate = currentTime;
        WriteVerbose("No start date provided - using the current time as default.");
    }

    if (!this.IsParameterBound(c => c.EndDate))
    {
        EndDate = StartDate.AddYears(1);
        WriteVerbose(Resources.Properties.Resources.DefaultEndDateUsed);
    }

    if (!this.IsParameterBound(c => c.DisplayName))
    {
        DisplayName = "azure-powershell-" + StartDate.ToString("MM-dd-yyyy-HH-mm-ss");
        WriteVerbose(string.Format("No display name provided - using the default display name of '{0}'", DisplayName));
    }

    var identifierUri = "http://" + DisplayName;
    // printPassword: a fresh app (and secret) was created here, so show the secret.
    // printUseExistingSecret: an existing app was reused, so tell the user to use its secret.
    bool printPassword = false;
    bool printUseExistingSecret = true;

    // Handle credentials
    var Password = Guid.NewGuid().ToString().ConvertToSecureString();

    // Create an application and get the applicationId
    if (!this.IsParameterBound(c => c.ApplicationId))
    {
        printUseExistingSecret = false;
        CreatePSApplicationParameters appParameters = new CreatePSApplicationParameters
        {
            DisplayName = DisplayName,
            IdentifierUris = new[] { identifierUri },
            HomePage = identifierUri,
            PasswordCredentials = new PSADPasswordCredential[]
            {
                new PSADPasswordCredential()
                {
                    StartDate = StartDate,
                    EndDate = EndDate,
                    KeyId = Guid.NewGuid(),
                    Password = SecureStringExtensions.ConvertToString(Password)
                }
            }
        };

        if (ShouldProcess(target: appParameters.DisplayName, action: string.Format("Adding a new application for with display name '{0}'", appParameters.DisplayName)))
        {
            var application = ActiveDirectoryClient.CreateApplication(appParameters);
            ApplicationId = application.ApplicationId;
            WriteVerbose(string.Format("No application id provided - created new AD application with application id '{0}'", ApplicationId));
            printPassword = true;
        }
    }

    CreatePSServicePrincipalParameters createParameters = new CreatePSServicePrincipalParameters
    {
        ApplicationId = ApplicationId,
        AccountEnabled = true,
    };

    var shouldProcessMessage = string.Format("Adding a new service principal to be associated with an application " +
                                             "having AppId '{0}' with no permissions.", createParameters.ApplicationId);

    if (!SkipRoleAssignment())
    {
        if (!this.IsParameterBound(c => c.Scope))
        {
            // Default scope: the whole current subscription.
            Scope = string.Format("/subscriptions/{0}", subscriptionId);
            WriteVerbose(string.Format("No scope provided - using the default scope '{0}'", Scope));
        }

        AuthorizationClient.ValidateScope(Scope, true);

        if (!this.IsParameterBound(c => c.Role))
        {
            Role = "Contributor";
            WriteVerbose(string.Format("No role provided - using the default role '{0}'", Role));
        }

        shouldProcessMessage = string.Format("Adding a new service principal to be associated with an application " +
                                             "having AppId '{0}' with '{1}' role over scope '{2}'.", createParameters.ApplicationId, this.Role, this.Scope);
    }

    if (ShouldProcess(target: createParameters.ApplicationId.ToString(), action: shouldProcessMessage))
    {
        PSADServicePrincipalWrapper servicePrincipal = new PSADServicePrincipalWrapper(ActiveDirectoryClient.CreateServicePrincipal(createParameters));
        if (printPassword)
        {
            servicePrincipal.Secret = Password;
        }
        else if (printUseExistingSecret)
        {
            WriteVerbose(String.Format(ProjectResources.ServicePrincipalCreatedWithCredentials, ApplicationId));
        }

        WriteObject(servicePrincipal);
        if (SkipRoleAssignment())
        {
            WriteVerbose("Skipping role assignment for the service principal.");
            return;
        }

        WriteWarning(string.Format("Assigning role '{0}' over scope '{1}' to the new service principal.", this.Role, this.Scope));
        FilterRoleAssignmentsOptions parameters = new FilterRoleAssignmentsOptions()
        {
            Scope = this.Scope,
            RoleDefinitionName = this.Role,
            ADObjectFilter = new ADObjectFilterOptions
            {
                SPN = servicePrincipal.ApplicationId.ToString(),
                Id = servicePrincipal.Id.ToString()
            },
            ResourceIdentifier = new ResourceIdentifier()
            {
                Subscription = subscriptionId
            },
            CanDelegate = false
        };

        // The new principal may take a while to propagate through the directory,
        // so retry the role assignment a bounded number of times.
        var roleAssigned = false;
        for (var i = 0; i < 6; i++)
        {
            try
            {
                TestMockSupport.Delay(5000);
                PoliciesClient.CreateRoleAssignment(parameters);
                var ra = PoliciesClient.FilterRoleAssignments(parameters, subscriptionId);
                if (ra != null)
                {
                    WriteVerbose(string.Format("Role assignment with role '{0}' and scope '{1}' successfully created for the created service principal.", this.Role, this.Scope));
                    roleAssigned = true;
                    break;
                }
            }
            catch (Exception)
            {
                // Swallow and retry - propagation delays routinely fail the first attempts.
            }
        }

        if (!roleAssigned)
        {
            // FIX: the loop previously exhausted all retries silently; surface the
            // failure so the user knows the principal exists without the role.
            WriteWarning(string.Format("Failed to create role assignment with role '{0}' over scope '{1}' for the new service principal after 6 attempts.", this.Role, this.Scope));
        }
    }
}
/// <summary>
/// Waits for the given Azure Backup job(s) to finish, polling every 30 seconds
/// until all jobs complete or the optional TimeOut (seconds) elapses, then writes
/// the refreshed job objects to the pipeline (a single object when exactly one job
/// was supplied). Accepts AzureRMBackupJob / AzureRMBackupJobDetails instances,
/// lists of them, optionally wrapped in PSObject.
/// </summary>
public override void ExecuteCmdlet()
{
    List<string> specifiedJobs = new List<string>();
    // Vault is taken from the last job examined; presumably all supplied jobs
    // belong to the same vault - TODO confirm with callers.
    AzureRMBackupVault Vault = null;

    // Unwrap the Job parameter: PSObject-wrapped or raw, single job or list,
    // summary (AzureRMBackupJob) or detail (AzureRMBackupJobDetails) flavor.
    if (Job != null)
    {
        if ((Job is PSObject) && (((PSObject)Job).ImmediateBaseObject is List<AzureRMBackupJob>))
        {
            foreach (AzureRMBackupJob jobToWait in (((PSObject)Job).ImmediateBaseObject as List<AzureRMBackupJob>))
            {
                Vault = new AzureRMBackupVault(jobToWait.ResourceGroupName, jobToWait.ResourceName, jobToWait.Location);
                specifiedJobs.Add(jobToWait.InstanceId);
            }
        }
        else if (Job is List<AzureRMBackupJob>)
        {
            WriteDebug(Resources.AzureBackupJobInputType);
            foreach (AzureRMBackupJob jobToWait in (Job as List<AzureRMBackupJob>))
            {
                Vault = new AzureRMBackupVault(jobToWait.ResourceGroupName, jobToWait.ResourceName, jobToWait.Location);
                specifiedJobs.Add(jobToWait.InstanceId);
            }
        }
        else if ((Job is PSObject) && (((PSObject)Job).ImmediateBaseObject is AzureRMBackupJob))
        {
            AzureRMBackupJob azureJob = ((Job as PSObject).ImmediateBaseObject as AzureRMBackupJob);
            Vault = new AzureRMBackupVault(azureJob.ResourceGroupName, azureJob.ResourceName, azureJob.Location);
            specifiedJobs.Add(azureJob.InstanceId);
        }
        else if (Job is AzureRMBackupJob)
        {
            Vault = new AzureRMBackupVault((Job as AzureRMBackupJob).ResourceGroupName, (Job as AzureRMBackupJob).ResourceName, (Job as AzureRMBackupJob).Location);
            specifiedJobs.Add((Job as AzureRMBackupJob).InstanceId);
        }
        else if ((Job is PSObject) && (((PSObject)Job).ImmediateBaseObject is AzureRMBackupJobDetails))
        {
            AzureRMBackupJob azureJob = ((Job as PSObject).ImmediateBaseObject as AzureRMBackupJobDetails);
            Vault = new AzureRMBackupVault(azureJob.ResourceGroupName, azureJob.ResourceName, azureJob.Location);
            specifiedJobs.Add(azureJob.InstanceId);
        }
        else if (Job is AzureRMBackupJobDetails)
        {
            Vault = new AzureRMBackupVault((Job as AzureRMBackupJobDetails).ResourceGroupName, (Job as AzureRMBackupJobDetails).ResourceName, (Job as AzureRMBackupJobDetails).Location);
            specifiedJobs.Add((Job as AzureRMBackupJobDetails).InstanceId);
        }
    }

    WriteDebug(String.Format(Resources.NumberOfJobsForWaiting, specifiedJobs.Count));

    // Unrecognized input shapes fall through to an empty list and a silent return.
    if (specifiedJobs.Count == 0)
    {
        WriteDebug(Resources.QuittingWaitJob);
        return;
    }

    InitializeAzureBackupCmdlet(Vault);

    ExecutionBlock(() =>
    {
        if (!TimeOut.HasValue)
        {
            // No timeout supplied: effectively wait forever.
            TimeOut = Int64.MaxValue;
        }

        // Poll only the jobs still running; finished jobs are dropped from this list.
        List<string> pendingJobs = new List<string>(specifiedJobs);
        DateTime waitingStartTime = DateTime.UtcNow;

        while (true)
        {
            WriteDebug(Resources.QueryingJobs);
            if (DateTime.UtcNow.Subtract(waitingStartTime).TotalSeconds >= TimeOut)
            {
                WriteDebug(Resources.TimeOutWaitInJob);
                break;
            }

            bool areJobsRunning = false;

            for (int i = 0; i < pendingJobs.Count; i++)
            {
                Mgmt.CSMJobDetailsResponse retrievedJob = AzureBackupClient.GetJobDetails(Vault.ResourceGroupName, Vault.Name, pendingJobs[i]);
                if (AzureBackupJobHelper.IsJobRunning(retrievedJob.JobDetailedProperties.Status))
                {
                    areJobsRunning = true;
                }
                else
                {
                    // Removal during indexed iteration; i-- compensates for the shift.
                    pendingJobs.RemoveAt(i);
                    i--;
                }
            }

            if (!areJobsRunning)
            {
                WriteDebug(Resources.AllJobsCompleted);
                break;
            }

            TestMockSupport.Delay(30 * 1000);
        }

        // Re-fetch every originally specified job so the output reflects final state.
        IList<AzureRMBackupJob> finalJobs = new List<AzureRMBackupJob>();
        foreach (string jobId in specifiedJobs)
        {
            Mgmt.CSMJobDetailsResponse retrievedJob = AzureBackupClient.GetJobDetails(Vault.ResourceGroupName, Vault.Name, jobId);
            finalJobs.Add(new AzureRMBackupJob(Vault, retrievedJob.JobDetailedProperties, retrievedJob.Name));
        }

        if (finalJobs.Count == 1)
        {
            WriteObject(finalJobs.First());
        }
        else
        {
            WriteObject(finalJobs);
        }
    });
}
/// <summary>
/// Copies a local folder (optionally recursively) into a folder in a Data Lake account,
/// uploading the files of each directory in parallel and reporting progress and warnings
/// through the running cmdlet (or the console when no cmdlet is supplied).
/// </summary>
/// <param name="destinationFolderPath">Target folder path in the Data Lake account.</param>
/// <param name="accountName">Name of the Data Lake account to upload into.</param>
/// <param name="sourceFolderPath">Local source folder to copy from.</param>
/// <param name="cmdletCancellationToken">Token used to cancel the copy.</param>
/// <param name="folderThreadCount">Folder-level parallelism; values &lt;= 0 use the processor count.</param>
/// <param name="perFileThreadCount">Per-file parallelism; values &lt;= 0 use the processor count.</param>
/// <param name="recursive">Whether subdirectories are copied as well.</param>
/// <param name="overwrite">Whether existing destination files are overwritten.</param>
/// <param name="resume">Whether to attempt to resume a previously interrupted copy.</param>
/// <param name="forceBinaryOrText">When true, <paramref name="isBinary"/> decides binary vs. text for every file.</param>
/// <param name="isBinary">Forced binary/text mode, honored only when <paramref name="forceBinaryOrText"/> is set.</param>
/// <param name="cmdletRunningRequest">Cmdlet used for progress/warning output; may be null (console fallback).</param>
public void CopyDirectory(
    string destinationFolderPath,
    string accountName,
    string sourceFolderPath,
    CancellationToken cmdletCancellationToken,
    int folderThreadCount = -1,
    int perFileThreadCount = -1,
    bool recursive = false,
    bool overwrite = false,
    bool resume = false,
    bool forceBinaryOrText = false,
    bool isBinary = false,
    Cmdlet cmdletRunningRequest = null)
{
    var allDirectories = new Stack<string>();
    var allFailedFiles = new ConcurrentDictionary<string, string>();
    var allFailedDirs = new List<string>();
    var fileCount = 0;
    var lastReportedCount = 0;
    var totalBytes = GetByteCountInDirectory(sourceFolderPath, recursive);
    var totalFiles = GetFileCountInDirectory(sourceFolderPath, recursive);

    var folderPathStartIndex = Path.GetDirectoryName(sourceFolderPath).Length;
    if (folderPathStartIndex < 1)
    {
        // Copying from the root of a drive (e.g. "C:\" or ".\"): there is no parent
        // directory to strip, so treat the end of the root as the relative-path start.
        folderPathStartIndex = sourceFolderPath.Length;
    }

    allDirectories.Push(sourceFolderPath);

    var progress = new ProgressRecord(
        uniqueActivityIdGenerator.Next(0, 10000000),
        string.Format("Copying Folder: {0}{1}. Total bytes to be copied: {2}. Total files to be copied: {3}",
            sourceFolderPath, recursive ? " recursively" : string.Empty, totalBytes, totalFiles),
        "Copy in progress...")
    {
        PercentComplete = 0
    };

    UpdateProgress(progress, cmdletRunningRequest);

    // Routes a warning to the cmdlet when one is running, otherwise to the console.
    Action<string> writeWarning = message =>
    {
        if (cmdletRunningRequest != null)
        {
            cmdletRunningRequest.WriteWarning(message);
        }
        else
        {
            Console.WriteLine(message);
        }
    };

    var internalFolderThreads = folderThreadCount <= 0 ? Environment.ProcessorCount : folderThreadCount;
    var internalFileThreads = perFileThreadCount <= 0 ? Environment.ProcessorCount : perFileThreadCount;

    // We need to override the default .NET per-host connection limit to match our thread
    // count, otherwise we cannot achieve the requested parallelism. Restored in 'finally'.
    var previousDefaultConnectionLimit = ServicePointManager.DefaultConnectionLimit;
    var previousExpect100 = ServicePointManager.Expect100Continue;
    try
    {
        ServicePointManager.DefaultConnectionLimit =
            Math.Max((internalFolderThreads * internalFileThreads) + internalFolderThreads,
                ServicePointManager.DefaultConnectionLimit);
        ServicePointManager.Expect100Continue = false;

        // TODO: defect: 4259238 (located here: http://vstfrd:8080/Azure/RD/_workitems/edit/4259238)
        // needs to be resolved or the tracingadapter work around needs to be put back in
        while (allDirectories.Count > 0)
        {
            var currentDir = allDirectories.Pop();
            string[] files;
            try
            {
                files = Directory.GetFiles(currentDir);
                if (recursive)
                {
                    // Push the subdirectories onto the stack for later traversal.
                    foreach (var str in Directory.GetDirectories(currentDir))
                    {
                        allDirectories.Push(str);
                    }
                }
            }
            catch
            {
                // Record folders that could not be accessed for later reporting to the user.
                allFailedDirs.Add(currentDir);
                continue;
            }

            // TODO: enable resumability in the event that copy fails somewhere in the middle
            var folderOptions = new ParallelOptions { CancellationToken = cmdletCancellationToken };
            if (folderThreadCount > 0)
            {
                folderOptions.MaxDegreeOfParallelism = folderThreadCount;
            }

            // The parallel upload is wrapped in a container task so the main thread can
            // monitor it and write progress in a meaningful way.
            var task = Task.Run(
                () =>
                {
                    Parallel.ForEach(
                        files,
                        folderOptions,
                        () => 0,
                        (file, loopState, localCount) =>
                        {
                            cmdletCancellationToken.ThrowIfCancellationRequested();
                            var dataLakeFilePath = string.Format(
                                "{0}/{1}",
                                destinationFolderPath,
                                file.Substring(folderPathStartIndex).TrimStart('\\').Replace('\\', '/'));

                            // Either honor a forced binary/text conversion or determine the mode
                            // from the file extension (defaulting to text).
                            // FIX: compute into a per-file local instead of writing back to the
                            // captured 'isBinary' parameter, which raced between parallel file
                            // threads (one thread's determination could clobber another's before
                            // its CopyFile call).
                            var isFileBinary = forceBinaryOrText
                                ? isBinary
                                : GlobalMembers.BinaryFileExtension.Contains(
                                    Path.GetExtension(file).ToLowerInvariant());

                            try
                            {
                                CopyFile(dataLakeFilePath, accountName, file, cmdletCancellationToken,
                                    internalFileThreads, overwrite, resume, isFileBinary, null, progress);
                            }
                            catch (Exception e)
                            {
                                allFailedFiles.GetOrAdd(file, e.Message);
                            }

                            // Always increment: the file was seen and attempted, even if the
                            // attempt failed (failures surface as warnings afterwards).
                            return ++localCount;
                        },
                        c => Interlocked.Add(ref fileCount, c));
                },
                cmdletCancellationToken);

            while (!task.IsCompleted && !task.IsCanceled)
            {
                // If we somehow made it in here prior to the cancel, issue the standard throw.
                cmdletCancellationToken.ThrowIfCancellationRequested();

                // Only update progress when the percentage has actually changed.
                // FIX: guard totalFiles > 0 — decimal division throws DivideByZeroException
                // for an empty source tree.
                if (totalFiles > 0 &&
                    (int)Math.Ceiling((decimal)lastReportedCount / totalFiles * 100) <
                    (int)Math.Ceiling((decimal)fileCount / totalFiles * 100))
                {
                    lastReportedCount = fileCount;

                    // Ceiling can yield 101%; clamp so we round up without exceeding 100.
                    progress.PercentComplete =
                        Math.Min((int)Math.Ceiling((decimal)fileCount / totalFiles * 100), 100);
                    UpdateProgress(progress, cmdletRunningRequest);
                }

                // Sleep for half a second between progress polls.
                TestMockSupport.Delay(500);
            }

            if (task.IsFaulted && !task.IsCanceled)
            {
                var ae = task.Exception;
                if (ae != null)
                {
                    writeWarning("The following errors were encountered during the copy:");
                    ae.Handle(
                        ex =>
                        {
                            // Unwrap one nested level of aggregation before reporting.
                            var secondLevel = ex as AggregateException;
                            if (secondLevel != null)
                            {
                                secondLevel.Handle(
                                    secondEx =>
                                    {
                                        writeWarning(secondEx.ToString());
                                        return true;
                                    });
                            }
                            else
                            {
                                writeWarning(ex.ToString());
                            }

                            return true;
                        });
                }
            }
        }

        if (allFailedDirs.Count > 0 && !cmdletCancellationToken.IsCancellationRequested)
        {
            writeWarning(string.Format(
                "The following {0} directories could not be opened and their contents must be copied up with the single file copy command: {1}",
                allFailedDirs.Count, string.Join(",\r\n", allFailedDirs)));
        }

        if (allFailedFiles.Count > 0 && !cmdletCancellationToken.IsCancellationRequested)
        {
            writeWarning(string.Format(
                "The following {0} files could not be copied and must be copied up with the single file copy command: {1}",
                allFailedFiles.Count, string.Join(",\r\n", allFailedFiles)));
        }

        if (!cmdletCancellationToken.IsCancellationRequested)
        {
            progress.PercentComplete = 100;
            progress.RecordType = ProgressRecordType.Completed;
            UpdateProgress(progress, cmdletRunningRequest);
        }
    }
    finally
    {
        // Always restore the process-wide networking settings we overrode above.
        ServicePointManager.DefaultConnectionLimit = previousDefaultConnectionLimit;
        ServicePointManager.Expect100Continue = previousExpect100;
    }
}