public void PublishAzureServiceSimpleDeployTest()
{
    // Sandbox directory that tracks and cleans up everything we "publish".
    using (FileSystemHelper testFiles = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Bring in the default publish settings so the command can authenticate.
        testFiles.CreateAzureSdkDirectoryAndImportPublishSettings();

        // Mock the Azure service management channel and record which
        // operations the publish pipeline actually invokes.
        bool hostedServiceCreated = false;
        bool deploymentCreatedOrUpdated = false;
        SimpleServiceManagement mockChannel = new SimpleServiceManagement
        {
            GetStorageServiceThunk = ar => new StorageService(),
            CreateHostedServiceThunk = ar => hostedServiceCreated = true,
            // No existing hosted service: lookup fails, forcing the create path.
            GetHostedServiceWithDetailsThunk = ar => { throw new EndpointNotFoundException(); },
            GetStorageKeysThunk = ar => new StorageService()
            {
                StorageServiceKeys = new StorageServiceKeys() { Primary = "VGVzdEtleSE=" }
            },
            CreateOrUpdateDeploymentThunk = ar => deploymentCreatedOrUpdated = true,
            GetDeploymentBySlotThunk = ar => new Deployment()
            {
                Status = DeploymentStatus.Starting,
                RoleInstanceList = new RoleInstanceList(
                    new RoleInstance[]
                    {
                        new RoleInstance()
                        {
                            InstanceName = "Role_IN_0",
                            InstanceStatus = RoleInstanceStatus.Ready
                        }
                    })
            }
        };

        // Scaffold the service that will be published.
        string serviceName = "TEST_SERVICE_NAME";
        NewAzureServiceCommand newServiceCommand = new NewAzureServiceCommand();
        newServiceCommand.NewAzureServiceProcess(testFiles.RootPath, serviceName);
        string servicePath = testFiles.CreateDirectory(serviceName);

        // Publish through the mocked channel; skip the real blob upload.
        PublishAzureServiceCommand publishCommand = new PublishAzureServiceCommand(mockChannel)
        {
            ShareChannel = true,
            SkipUpload = true
        };
        publishCommand.PublishService(servicePath);
        AzureService service = new AzureService(Path.Combine(testFiles.RootPath, serviceName), null);

        // The pipeline must have created the hosted service, created/updated
        // its deployment, and preserved the service name.
        Assert.IsTrue(hostedServiceCreated);
        Assert.IsTrue(deploymentCreatedOrUpdated);
        Assert.AreEqual <string>(serviceName, service.ServiceName);
    }
}
private void WriteSiteNodesToFileSystem(IEnumerable <SiteNode> siteNodes, string outPath)
{
    // Writes every site node under outPath, raising a progress event per node;
    // a node that fails with InvalidOperationException is reported (message in
    // place of the URI) and the run continues.
    FileSystemHelper.CreateDirectory(outPath);

    // FIX: materialize once — the sequence was previously enumerated twice
    // (Count() and then foreach), which re-runs any deferred/lazy query and
    // can produce inconsistent totals.
    var nodes = siteNodes.ToList();
    var totalNodesCount = nodes.Count;
    var nodeNumber = 1;
    foreach (var node in nodes)
    {
        try
        {
            // Map the node's URI onto a file-system path under outPath and write it.
            var uriParts = SiteNodeHelper.GetSiteNodeUriParts(node);
            var writePath = SiteNodeHelper.CreateAndGetWritePathForAUri(uriParts, outPath);
            SiteNodeHelper.WriteSiteNodeToFileSystem(node, writePath);
            OnSiteNodeCopiedToFileSystem(this, new SiteNodeCopiedToFileSystemEventArgs(node.Uri, nodeNumber, totalNodesCount));
        }
        catch (InvalidOperationException exc)
        {
            // Failed node: surface the error text through the same event.
            OnSiteNodeCopiedToFileSystem(this, new SiteNodeCopiedToFileSystemEventArgs(exc.Message, nodeNumber, totalNodesCount));
        }
        finally
        {
            // Progress counter advances whether the node succeeded or not.
            nodeNumber++;
        }
    }
}
public void SavePackageWithOneNodeWebRoleTest()
{
    // Temp directory that monitors and cleans up everything the test writes.
    using (FileSystemHelper testFiles = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Scaffold the service that will be packaged locally.
        string serviceName = "TEST_SERVICE_NAME";
        NewAzureServiceCommand newServiceCommand = new NewAzureServiceCommand();
        newServiceCommand.NewAzureServiceProcess(testFiles.RootPath, serviceName);
        string servicePath = testFiles.CreateDirectory(serviceName);

        // Add a Node web role with two instances.
        string roleName = "TEST_WEB_ROLE";
        int instanceCount = 2;
        AddAzureNodeWebRoleCommand addWebRoleCommand = new AddAzureNodeWebRoleCommand();
        addWebRoleCommand.AddAzureNodeWebRoleProcess(roleName, instanceCount, servicePath);

        // Produce the .cspkg without publishing anything.
        SaveAzureServicePackageCommand savePackageCommand = new SaveAzureServicePackageCommand();
        savePackageCommand.CreatePackage(servicePath);

        // The role directory must match the Node web-role scaffolding.
        AzureAssert.ScaffoldingExists(
            Path.Combine(testFiles.RootPath, serviceName, roleName),
            Path.Combine(Resources.NodeScaffolding, Resources.WebRole));

        // Check each generated file; the package itself must contain six parts.
        testFiles.AssertFiles(new Dictionary <string, Action <string> >()
        {
            { serviceName + @"\deploymentSettings.json", null },
            { serviceName + @"\ServiceDefinition.csdef", p => File.ReadAllText(p).Contains(serviceName) },
            { serviceName + @"\ServiceConfiguration.Cloud.cscfg", p => File.ReadAllText(p).Contains(serviceName) },
            { serviceName + @"\ServiceConfiguration.Local.cscfg", p => File.ReadAllText(p).Contains(serviceName) },
            {
                serviceName + @"\cloud_package.cspkg",
                p =>
                {
                    using (Package package = Package.Open(p))
                    {
                        Assert.AreEqual(6, package.GetParts().Count());
                    }
                }
            }
        });
    }
}
private void SaveMessage(Email email)
{
    // Persists a copy of the original raw message under OutputPath.
    // NOTE(review): attachmentPath and attachmentFile are written here as side
    // effects (they are not declared locally, so presumably instance fields) —
    // confirm no concurrent callers share this instance.
    attachmentPath = Path.Combine(OutputPath, "OriginalMessage");
    attachmentFile = GetAttachmentFileName("MailMessage.eml");
    // Presumably ensures attachmentPath exists; semantics of the bool flag on
    // this project helper are not visible here — TODO confirm.
    FileSystemHelper.CreateDirectory(attachmentPath, true);
    // Copy the original .eml, overwriting any previous copy.
    File.Copy(email.MessageFilePath, attachmentFile, true);
}
public void PublishAzureServiceCreateBasicPackageTest()
{
    // Sandbox directory that monitors and cleans up the test's output.
    using (FileSystemHelper testFiles = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Default publish settings so the command can initialize.
        testFiles.CreateAzureSdkDirectoryAndImportPublishSettings();

        // No thunks are required: the packaging phase never calls Azure.
        SimpleServiceManagement mockChannel = new SimpleServiceManagement();

        // Scaffold the service to package.
        string serviceName = "TEST_SERVICE_NAME";
        NewAzureServiceCommand newServiceCommand = new NewAzureServiceCommand();
        newServiceCommand.NewAzureServiceProcess(testFiles.RootPath, serviceName);
        string servicePath = testFiles.CreateDirectory(serviceName);

        // Run only the settings-initialization/package-creation phase.
        PublishAzureServiceCommand publishCommand = new PublishAzureServiceCommand(mockChannel);
        publishCommand.InitializeSettingsAndCreatePackage(servicePath);

        // Check the generated files; the basic package must contain five parts.
        testFiles.AssertFiles(new Dictionary <string, Action <string> >()
        {
            { serviceName + @"\deploymentSettings.json", null },
            { serviceName + @"\ServiceDefinition.csdef", p => File.ReadAllText(p).Contains(serviceName) },
            { serviceName + @"\ServiceConfiguration.Cloud.cscfg", p => File.ReadAllText(p).Contains(serviceName) },
            { serviceName + @"\ServiceConfiguration.Local.cscfg", p => File.ReadAllText(p).Contains(serviceName) },
            {
                serviceName + @"\cloud_package.cspkg",
                p =>
                {
                    using (Package package = Package.Open(p))
                    {
                        Assert.AreEqual(5, package.GetParts().Count());
                    }
                }
            }
        });
    }
}
private void SaveState()
{
    // Persists the current UI state (enabled checkbox + captured barcodes).
    // NOTE(review): filePath is a field and is a *file* path — presumably the
    // project helper creates its parent directory; TODO confirm
    // FileSystemHelper.CreateDirectory semantics.
    FileSystemHelper.CreateDirectory(filePath);
    // Serializes a fresh State snapshot and overwrites the file.
    File.WriteAllText(filePath, converter.Serialize(new State() { Enabled = cbEnabled.Checked, Barcodes = barcodes.ToArray() }));
}
public void Write(string path, IEnumerable <TRecord> records, bool hasHeaderRecord = true)
{
    // Writes the given records to a CSV file using the class map TMap.
    // NOTE(review): the helper receives the *file* path — presumably it
    // creates the parent directory; TODO confirm.
    FileSystemHelper.CreateDirectory(path);
    // NOTE(review): the StreamWriter is opened in append mode (second arg
    // true), so repeated calls append, and with hasHeaderRecord the header
    // may be emitted again on each call — confirm this is intended.
    using (var csv = new CsvWriter(new StreamWriter(path, true, encoding)))
    {
        csv.Configuration.HasHeaderRecord = hasHeaderRecord;
        csv.Configuration.RegisterClassMap <TMap>();
        csv.WriteRecords(records);
        // Explicit flush before dispose so all buffered rows hit the stream.
        csv.Flush();
    }
}
/// <summary>
/// Persists an object to a binary file. The object's type (and any nested
/// types) must be marked with the [Serializable] attribute.
/// </summary>
/// <param name="filePath">Destination file path.</param>
/// <param name="value">Object to persist.</param>
public static void SaveBin(string filePath, object value)
{
    // NOTE(review): presumably creates the parent directory of filePath —
    // confirm FileSystemHelper.CreateDirectory semantics.
    FileSystemHelper.CreateDirectory(filePath);
    // SECURITY: BinaryFormatter is unsafe for untrusted data and is removed
    // in .NET 9 — keep this only for trusted local state and consider
    // migrating to System.Text.Json or another safe serializer.
    using (var file = File.Open(filePath, FileMode.Create, FileAccess.Write))
    {
        BinaryFormatter bf = new BinaryFormatter();
        // FIX: dropped the redundant Seek(0) — FileMode.Create truncates the
        // file and positions the stream at offset 0 already.
        bf.Serialize(file, value);
        file.Flush();
    }
}
public void CreateLocalPackageWithOnePHPWebRoleTest()
{
    using (FileSystemHelper testFiles = new FileSystemHelper(this))
    {
        // Build a service with a single PHP web role.
        CloudServiceProject service = new CloudServiceProject(testFiles.RootPath, serviceName, null);
        RoleInfo phpWebRole = service.AddWebRole(Data.PHPWebRoleScaffoldingPath);

        // Seed a fake server.js log file so we can verify packaging carries
        // it through into the role's approot.
        string logsDirectory = Path.Combine(service.Paths.RootPath, phpWebRole.Name, "server.js.logs");
        string logFilePath = Path.Combine(logsDirectory, "0.txt");
        string packagedLogFile = Path.Combine(
            service.Paths.LocalPackage,
            "roles",
            phpWebRole.Name,
            @"approot\server.js.logs\0.txt");
        testFiles.CreateDirectory(logsDirectory);
        testFiles.CreateEmptyFile(logFilePath);

        service.CreatePackage(DevEnv.Local);

        // The packaged role must match the PHP scaffolding and include the log.
        AzureAssert.ScaffoldingExists(
            Path.Combine(service.Paths.LocalPackage, @"roles\WebRole1\approot"),
            Path.Combine(Resources.PHPScaffolding, Resources.WebRole));
        Assert.IsTrue(File.Exists(packagedLogFile));
    }
}
public void CreateLocalPackageWithOnePHPWebRoleTest()
{
    using (FileSystemHelper testFiles = new FileSystemHelper(this))
    {
        // Build a service with a single PHP web role.
        CloudServiceProject service = new CloudServiceProject(testFiles.RootPath, serviceName, null);
        RoleInfo phpWebRole = service.AddWebRole(Test.Utilities.Common.Data.PHPWebRoleScaffoldingPath);

        // Seed a fake server.js log file so we can verify packaging carries
        // it through into the role's approot.
        string logsDirectory = Path.Combine(service.Paths.RootPath, phpWebRole.Name, "server.js.logs");
        string logFilePath = Path.Combine(logsDirectory, "0.txt");
        string packagedLogFile = Path.Combine(
            service.Paths.LocalPackage,
            "roles",
            phpWebRole.Name,
            @"approot\server.js.logs\0.txt");
        testFiles.CreateDirectory(logsDirectory);
        testFiles.CreateEmptyFile(logFilePath);

        service.CreatePackage(DevEnv.Local);

        // The packaged role must match the PHP scaffolding and include the log.
        AzureAssert.ScaffoldingExists(
            Path.Combine(service.Paths.LocalPackage, @"roles\WebRole1\approot"),
            Path.Combine(Resources.PHPScaffolding, Resources.WebRole));
        Assert.True(File.Exists(packagedLogFile));
    }
}
public void ValidateSiteApplications(Site site)
{
    // Validates each application on the site, accumulating property errors
    // via AddPropertyError instead of throwing; also normalizes virtual paths
    // and (as a side effect) creates missing disk paths.
    for (int i = 0; i < site.Applications.Count; i++)
    {
        var application = site.Applications[i];

        // Normalize: application virtual paths are always rooted.
        if (!application.Path.StartsWith("/"))
        {
            application.Path = "/" + application.Path;
        }

        if (string.IsNullOrWhiteSpace(application.DiskPath))
        {
            AddPropertyError("diskpath[" + i + "]", "Disk Path is required.");
        }
        else
        {
            // Side effect during validation: a missing disk path is created.
            if (!FileSystemHelper.DirectoryExists(application.DiskPath))
            {
                FileSystemHelper.CreateDirectory(application.DiskPath);
            }
        }

        if (string.IsNullOrWhiteSpace(application.Path))
        {
            AddPropertyError("path[" + i + "]", "Path is required.");
        }

        if (!FileSystemHelper.IsPathValid(application.Path))
        {
            AddPropertyError("path[" + i + "]", "Path cannot contain the following characters: ?, ;, :, @, &, =, +, $, ,, |, \", <, >, *.");
        }

        // FIX: was SingleOrDefault, which throws InvalidOperationException
        // when three or more applications share the same path — crashing
        // validation instead of reporting the duplicate. FirstOrDefault
        // behaves identically for zero/one duplicate and fixes the 3+ case.
        var existingApplicationByPath = site.Applications.FirstOrDefault(x => x != site.Applications[i] && x.Path == site.Applications[i].Path);
        if (site.SitePath != null && existingApplicationByPath != null)
        {
            AddPropertyError("path[" + i + "]", "There's already an application with this path.");
        }

        if (!FileSystemHelper.IsPathValid(application.DiskPath))
        {
            AddPropertyError("diskpath[" + i + "]", "Path cannot contain the following characters: ?, ;, :, @, &, =, +, $, ,, |, \", <, >, *.");
        }
    }
}
public void Write(string path, IEnumerable <string> head, IEnumerable <string> records)
{
    // Appends one CSV data row to the file, emitting the header row first
    // when the file does not yet exist. Any failure is wrapped so callers
    // deal with a single exception type.
    try
    {
        FileSystemHelper.CreateDirectory(path);
        if (!FileSystemHelper.IsFileExists(path))
        {
            // Brand-new file: write the header line before any data.
            string headerLine = ConvertCsvFormat(head);
            File.WriteAllLines(path, new[] { headerLine }, Encoding.Default);
        }
        string dataLine = ConvertCsvFormat(records) + Environment.NewLine;
        File.AppendAllText(path, dataLine, Encoding.Default);
    }
    catch (Exception e)
    {
        throw new ApplicationException($"保存Csv文件错误", e);
    }
}
public void NewAzureRoleTemplateWithDirectoryExists()
{
    using (FileSystemHelper testFiles = new FileSystemHelper(this))
    {
        // Pre-create the output directory: the cmdlet must still succeed when
        // its target already exists.
        string outputPath = testFiles.CreateDirectory("test");
        addTemplateCmdlet = new NewAzureRoleTemplateCommand()
        {
            Worker = true,
            CommandRuntime = mockCommandRuntime,
            Output = outputPath
        };

        addTemplateCmdlet.ExecuteCmdlet();

        // The cmdlet reports the path it scaffolded into...
        Assert.AreEqual<string>(
            outputPath,
            ((PSObject)mockCommandRuntime.OutputPipeline[0]).GetVariableValue<string>(Parameters.Path));
        // ...and that directory must be identical to the worker-role template.
        Testing.AssertDirectoryIdentical(
            Path.Combine(Resources.GeneralScaffolding, RoleType.WorkerRole.ToString()),
            outputPath);
    }
}
public void CreateLocalPackageWithOneWebRoleTest()
{
    using (FileSystemHelper testFiles = new FileSystemHelper(this))
    {
        string standardOutput;
        string standardError;

        // Service with one default web role.
        AzureService service = new AzureService(testFiles.RootPath, serviceName, null);
        RoleInfo webRole = service.AddWebRole();

        // Seed a fake server.js log file that packaging must carry through
        // into the role's approot.
        string logsDirectory = Path.Combine(service.Paths.RootPath, webRole.Name, "server.js.logs");
        string logFilePath = Path.Combine(logsDirectory, "0.txt");
        string packagedLogFile = Path.Combine(
            service.Paths.LocalPackage,
            "roles",
            webRole.Name,
            @"approot\server.js.logs\0.txt");
        testFiles.CreateDirectory(logsDirectory);
        testFiles.CreateEmptyFile(logFilePath);

        service.CreatePackage(DevEnv.Local, out standardOutput, out standardError);

        // Packaged role matches the Node scaffolding and includes the log file.
        AzureAssert.ScaffoldingExists(
            Path.Combine(service.Paths.LocalPackage, @"roles\WebRole1\approot"),
            Path.Combine(Resources.NodeScaffolding, Resources.WebRole));
        Assert.IsTrue(File.Exists(packagedLogFile));
    }
}
public void CreateLocalPackageWithOneNodeWebRoleTest()
{
    using (FileSystemHelper testFiles = new FileSystemHelper(this))
    {
        string standardOutput;
        string standardError;

        // Service with one Node web role built from the Node scaffolding.
        AzureService service = new AzureService(testFiles.RootPath, serviceName, null);
        RoleInfo nodeWebRole = service.AddWebRole(Resources.NodeScaffolding);

        // Seed a fake server.js log file that packaging must carry through
        // into the role's approot.
        string logsDirectory = Path.Combine(service.Paths.RootPath, nodeWebRole.Name, "server.js.logs");
        string logFilePath = Path.Combine(logsDirectory, "0.txt");
        string packagedLogFile = Path.Combine(
            service.Paths.LocalPackage,
            "roles",
            nodeWebRole.Name,
            @"approot\server.js.logs\0.txt");
        testFiles.CreateDirectory(logsDirectory);
        testFiles.CreateEmptyFile(logFilePath);

        service.CreatePackage(DevEnv.Local, out standardOutput, out standardError);

        // Packaged role matches the Node scaffolding and includes the log file.
        AzureAssert.ScaffoldingExists(
            Path.Combine(service.Paths.LocalPackage, @"roles\WebRole1\approot"),
            Path.Combine(Resources.NodeScaffolding, Resources.WebRole));
        Assert.IsTrue(File.Exists(packagedLogFile));
    }
}
public void NewAzureRoleTemplateWithDirectoryExists()
{
    using (FileSystemHelper testFiles = new FileSystemHelper(this))
    {
        // Pre-create the output directory: the cmdlet must still succeed when
        // its target already exists.
        string outputPath = testFiles.CreateDirectory("test");
        addTemplateCmdlet = new NewAzureRoleTemplateCommand()
        {
            Worker = true,
            CommandRuntime = mockCommandRuntime,
            Output = outputPath
        };

        addTemplateCmdlet.ExecuteCmdlet();

        // The cmdlet reports the path it scaffolded into...
        Assert.Equal <string>(
            outputPath,
            ((PSObject)mockCommandRuntime.OutputPipeline[0]).GetVariableValue <string>(Parameters.Path));
        // ...and that directory must be identical to the worker-role template.
        Testing.AssertDirectoryIdentical(
            Path.Combine(Resources.GeneralScaffolding, RoleType.WorkerRole.ToString()),
            outputPath);
    }
}
private void WriteSiteNodesToFileSystem(IEnumerable <SiteNode> siteNodes, string outPath)
{
    // Writes every site node under outPath, raising a progress event and a
    // log entry per node; a node that fails with InvalidOperationException is
    // logged and reported (message in place of the URI) and the run continues.
    // FIX: the start-of-run message previously read "Successfully writing…" —
    // a copy-paste of the completion message, misleading before any work.
    NLogger.Logger.Info("Started writing site nodes to file system...");
    FileSystemHelper.CreateDirectory(outPath);

    // FIX: materialize once — the sequence was previously enumerated twice
    // (Count() and then foreach), which re-runs any deferred/lazy query and
    // can produce inconsistent totals.
    var nodes = siteNodes.ToList();
    var totalNodesCount = nodes.Count;
    var nodeNumber = 1;
    foreach (var node in nodes)
    {
        try
        {
            // Map the node's URI onto a file-system path under outPath and write it.
            var uriParts = SiteNodeHelper.GetSiteNodeUriParts(node);
            var writePath = SiteNodeHelper.CreateAndGetWritePathForAUri(uriParts, outPath);
            SiteNodeHelper.WriteSiteNodeToFileSystem(node, writePath);
            OnSiteNodeCopiedToFileSystem(this, new SiteNodeCopiedToFileSystemEventArgs(node.Uri, nodeNumber, totalNodesCount));
            NLogger.Logger.Info($"Copied a site node with uri {node.Uri}");
        }
        catch (InvalidOperationException exc)
        {
            // Failed node: log and surface the error text through the same event.
            NLogger.Logger.Error($"Copying a site node with uri {node.Uri} finished with exception: {exc.Message}");
            OnSiteNodeCopiedToFileSystem(this, new SiteNodeCopiedToFileSystemEventArgs(exc.Message, nodeNumber, totalNodesCount));
        }
        finally
        {
            // Progress counter advances whether the node succeeded or not.
            nodeNumber++;
        }
    }
    NLogger.Logger.Info("Successfully finished writing site nodes to file system.");
}
public void PublishAzureServiceManifestTest()
{
    // Create a temp directory that we'll use to "publish" our service.
    using (FileSystemHelper files = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Import our default publish settings.
        files.CreateAzureSdkDirectoryAndImportPublishSettings();

        // Mock the Azure service management calls (packaging doesn't use them).
        SimpleServiceManagement channel = new SimpleServiceManagement();

        // Create a new service that we're going to publish.
        string serviceName = "TEST_SERVICE_NAME";
        NewAzureServiceProjectCommand newService = new NewAzureServiceProjectCommand();
        newService.NewAzureServiceProcess(files.RootPath, serviceName);
        string servicePath = files.CreateDirectory(serviceName);

        // Add web/worker role pairs covering the three runtime-resolution
        // paths in the manifest: default version, exact version match, and
        // URL override. (The returned role paths were never used, so the
        // unused local variables have been removed.)
        AddAzureNodeWebRoleCommand newWebRole = new AddAzureNodeWebRoleCommand();
        string defaultWebRoleName = "WebRoleDefault";
        newWebRole.AddAzureNodeWebRoleProcess(defaultWebRoleName, 2, servicePath);

        AddAzureNodeWorkerRoleCommand newWorkerRole = new AddAzureNodeWorkerRoleCommand();
        string defaultWorkerRoleName = "WorkerRoleDefault";
        newWorkerRole.AddAzureNodeWorkerRoleProcess(defaultWorkerRoleName, 2, servicePath);

        AddAzureNodeWebRoleCommand matchWebRole = new AddAzureNodeWebRoleCommand();
        string matchWebRoleName = "WebRoleExactMatch";
        matchWebRole.AddAzureNodeWebRoleProcess(matchWebRoleName, 2, servicePath);

        AddAzureNodeWorkerRoleCommand matchWorkerRole = new AddAzureNodeWorkerRoleCommand();
        string matchWorkerRoleName = "WorkerRoleExactMatch";
        matchWorkerRole.AddAzureNodeWorkerRoleProcess(matchWorkerRoleName, 2, servicePath);

        AddAzureNodeWebRoleCommand overrideWebRole = new AddAzureNodeWebRoleCommand();
        string overrideWebRoleName = "WebRoleOverride";
        overrideWebRole.AddAzureNodeWebRoleProcess(overrideWebRoleName, 2, servicePath);

        AddAzureNodeWorkerRoleCommand overrideWorkerRole = new AddAzureNodeWorkerRoleCommand();
        string overrideWorkerRoleName = "WorkerRoleOverride";
        // BUG FIX: this call was previously made on matchWorkerRole (a
        // copy-paste slip), leaving the overrideWorkerRole command unused.
        overrideWorkerRole.AddAzureNodeWorkerRoleProcess(overrideWorkerRoleName, 2, servicePath);

        // Pin runtime versions / override URLs on the match and override roles.
        AzureService testService = new AzureService(Path.Combine(files.RootPath, serviceName), null);
        RuntimePackageHelper.SetRoleRuntime(testService.Components.Definition, matchWebRoleName, testService.Paths, version: "0.8.2");
        RuntimePackageHelper.SetRoleRuntime(testService.Components.Definition, matchWorkerRoleName, testService.Paths, version: "0.8.2");
        RuntimePackageHelper.SetRoleRuntime(testService.Components.Definition, overrideWebRoleName, testService.Paths, overrideUrl: "http://OVERRIDE");
        RuntimePackageHelper.SetRoleRuntime(testService.Components.Definition, overrideWorkerRoleName, testService.Paths, overrideUrl: "http://OVERRIDE");
        testService.Components.Save(testService.Paths);

        // Get the publishing process started by creating the package with the
        // test runtime manifest.
        PublishAzureServiceProjectCommand publishService = new PublishAzureServiceProjectCommand(channel);
        publishService.InitializeSettingsAndCreatePackage(servicePath, RuntimePackageHelper.GetTestManifest(files));

        // Each role's resolved runtime URL (or override) must match the manifest.
        AzureService updatedService = new AzureService(testService.Paths.RootPath, null);
        RuntimePackageHelper.ValidateRoleRuntime(updatedService.Components.Definition, defaultWebRoleName, "http://DATACENTER/node/default.exe;http://DATACENTER/iisnode/default.exe", null);
        RuntimePackageHelper.ValidateRoleRuntime(updatedService.Components.Definition, defaultWorkerRoleName, "http://DATACENTER/node/default.exe", null);
        RuntimePackageHelper.ValidateRoleRuntime(updatedService.Components.Definition, matchWorkerRoleName, "http://DATACENTER/node/foo.exe", null);
        RuntimePackageHelper.ValidateRoleRuntime(updatedService.Components.Definition, matchWebRoleName, "http://DATACENTER/node/foo.exe;http://DATACENTER/iisnode/default.exe", null);
        RuntimePackageHelper.ValidateRoleRuntime(updatedService.Components.Definition, overrideWebRoleName, null, "http://OVERRIDE");
        RuntimePackageHelper.ValidateRoleRuntime(updatedService.Components.Definition, overrideWorkerRoleName, null, "http://OVERRIDE");
    }
}
/// <summary>
/// Persists an object to a file as serialized text (via the configured converter).
/// </summary>
/// <param name="filePath">Destination file path.</param>
/// <param name="value">Object to persist.</param>
public static void SaveJson(string filePath, object value)
{
    // NOTE(review): the helper receives the *file* path — presumably it
    // creates the parent directory; TODO confirm FileSystemHelper semantics.
    FileSystemHelper.CreateDirectory(filePath);
    // Overwrites any existing file with the serialized representation.
    File.WriteAllText(filePath, ConverterManager.GetConverter().Serialize(value));
}
// Type initializer: loads all service settings from the database once, with
// hard-coded fallbacks where a row is missing or unparsable. Throws (and so
// fails type initialization) only when the mandatory escalation address is absent.
static Settings()
{
    #region General Settings
    using (var ctx = new EmailImportDataContext())
    {
        // Each interval/level falls back to a default if the row is absent or malformed.
        try { ImapCollectorInterval = TimeSpan.Parse(ctx.Settings.Single(s => s.Name == "Interval.ImapCollector").Value); }
        catch { ImapCollectorInterval = TimeSpan.FromMinutes(5); }
        try { EmailMonitorInterval = TimeSpan.Parse(ctx.Settings.Single(s => s.Name == "Interval.EmailMonitor").Value); }
        catch { EmailMonitorInterval = TimeSpan.FromMinutes(1); }
        try { ConcurrencyLevel = int.Parse(ctx.Settings.Single(s => s.Name == "ConcurrencyLevel").Value); }
        catch { ConcurrencyLevel = Environment.ProcessorCount; }
        // Stored value is scaled by 1024*1024 (presumably MB -> bytes).
        // NOTE(review): int multiplication overflows for values > 2047; the
        // <= 0 guard below only catches negative wrapped results — confirm
        // the expected setting range.
        try { SmtpSizeLimit = int.Parse(ctx.Settings.Single(s => s.Name == "SmtpSizeLimit").Value) * 1024 * 1024; }
        catch { }
        try { LiteViewerAuthorisedPCs = ctx.Settings.Single(s => s.Name == "LiteViewerAuthorisedPCs").Value; }
        catch { LiteViewerAuthorisedPCs = null; }
        // Missing/zero/negative limit means "no limit".
        if (SmtpSizeLimit <= 0) { SmtpSizeLimit = int.MaxValue; }
        var setting = ctx.Settings.SingleOrDefault(s => s.Name == "DefaultStoragePath");
        DefaultStoragePath = (setting == null) ? null : setting.Value;
        // NOTE(review): DefaultStoragePath may be null here — confirm
        // FileSystemHelper.CreateDirectory tolerates a null argument.
        FileSystemHelper.CreateDirectory(DefaultStoragePath);
        setting = ctx.Settings.SingleOrDefault(s => s.Name == "DefaultEscalationEmail");
        DefaultEscalationEmail = (setting == null) ? null : setting.Value;
        // The escalation address is mandatory: fail fast at type initialization.
        if (String.IsNullOrWhiteSpace(DefaultEscalationEmail))
        {
            throw new Exception("DefaultEscalationEmail setting must contain a valid email address.");
        }
    }
    #endregion
    #region Mailboxes
    // Mailbox profiles are loaded after the general settings are in place.
    MailboxProfiles = new Dictionary <Guid, MailboxProfile>();
    LoadMailboxProfiles();
    #endregion
}
public void ValidateSite(ref Site site)
{
    // Rebuilds site.Bindings from the raw form fields and accumulates
    // validation errors via AddPropertyError instead of throwing.
    // NOTE(review): the three form fields are split on ',' independently and
    // indexed in lockstep — if their lengths ever differ this throws
    // IndexOutOfRangeException; confirm the form always posts matched lists.
    string[] bindingsUserInputs = Request.Form.BindingsUserInput.ToString().Split(',');
    string[] bindingsCertificateThumbprint = Request.Form.BindingsCertificateThumbprint.ToString().Split(',');
    string[] bindingsIpAddresses = Request.Form.BindingsIpAddress.ToString().Split(',');
    site.Bindings = new List <Binding>();
    var certificates = SiteManager.GetCertificates();
    for (var i = 0; i < bindingsUserInputs.Length; i++)
    {
        // Blank rows (e.g. trailing separators) are skipped entirely.
        if (string.IsNullOrWhiteSpace(bindingsUserInputs[i]))
        {
            continue;
        }
        var isValid = true;
        var userinput = bindingsUserInputs[i];
        var finalizedHost = BindingHelper.SafeFinializeBinding(userinput);
        var ip = bindingsIpAddresses[i];
        // An empty IP is treated as the wildcard address.
        if (string.IsNullOrWhiteSpace(ip))
        {
            ip = "*";
        }
        // Validation short-circuits: only the first failing check per row is reported.
        if (finalizedHost == null)
        {
            AddPropertyError("bindingsuserinput[" + i + "]", "The binding is invalid.");
            isValid = false;
        }
        else if (!BindingHelper.IsIpValid(ip))
        {
            AddPropertyError("bindingsipaddress[" + i + "]", string.Format("The IP {0} is not valid.", ip));
            isValid = false;
        }
        else if (SiteManager.IsBindingInUse(finalizedHost, bindingsIpAddresses[i], site.IisId))
        {
            AddPropertyError("bindingsuserinput[" + i + "]", string.Format("The binding {0} is already in use.", finalizedHost));
            isValid = false;
        }
        Binding binding;
        if (isValid)
        {
            // NOTE(review): SingleOrDefault throws if two certificates share a
            // thumbprint — presumably thumbprints are unique; confirm.
            var certificate = certificates.SingleOrDefault(x => x.Thumbprint == bindingsCertificateThumbprint[i]);
            binding = BindingHelper.ConvertToBinding(finalizedHost, ip, certificate);
        }
        else
        {
            // Invalid rows are echoed back as-entered so the form can re-render them.
            binding = new Binding() { CertificateName = bindingsCertificateThumbprint[i], UserInput = bindingsUserInputs[i], IpAddress = ip };
        }
        site.Bindings.Add(binding);
    }
    if (!site.Bindings.Any())
    {
        AddPropertyError("bindingsipaddress[0]", "Minimum one binding is required.");
        // Placeholder row so the form re-renders with one empty binding.
        site.Bindings.Add(new Binding() { UserInput = "" });
    }
    if (string.IsNullOrWhiteSpace(site.Name))
    {
        AddPropertyError("name", "Name is required.");
    }
    // A name collision only counts when it belongs to a different IIS site.
    var existingSite = SiteManager.GetSiteByName(site.Name);
    if (site.Name != null && existingSite != null &&
        existingSite.IisId != site.IisId)
    {
        AddPropertyError("name", "There's already a site with this name.");
    }
    if (string.IsNullOrWhiteSpace(site.SitePath))
    {
        AddPropertyError("sitepath", "Site path is required.");
    }
    else
    {
        if (!FileSystemHelper.IsPathValid(site.SitePath))
        {
            AddPropertyError("sitepath", "Path cannot contain the following characters: ?, ;, :, @, &, =, +, $, ,, |, \", <, >, *.");
        }
        else
        {
            // Side effect during validation: a missing site path is created.
            if (!FileSystemHelper.DirectoryExists(site.SitePath))
            {
                FileSystemHelper.CreateDirectory(site.SitePath);
            }
        }
    }
}
// Downloads all messages matching the profile's IMAP query, persists each one
// to disk under a dated folder, records it in the database with a per-group
// batch number (inside a transaction), and archives it on the server.
// Returns the number of messages successfully downloaded.
// NOTE(review): `timer` appears to be an instance field used to stop the loop
// on shutdown — confirm.
private int DownloadEmail(Imap imap, MailboxProfile profile, String storagePath)
{
    int count = 0;
    // Build the MailQuery; always exclude deleted messages, AND-ing in any
    // profile-specific query.
    var query = new MailQuery(String.IsNullOrEmpty(profile.ImapQuery) ? "('Deleted' = 'False')" : String.Format("({0}&('Deleted' = 'False'))", profile.ImapQuery));
    // Get all messages matching to the query
    var infos = imap.ListMessages(query);
    // If there are any messages to process, then process them
    if (infos.Any())
    {
        ConfigLogger.Instance.LogInfo("ImapCollector", String.Format("Downloading {0} message{1} from {2}.", infos.Count, infos.Count == 1 ? "" : "s", profile.Description));
        // Download each message
        foreach (var info in infos)
        {
            // Stop promptly when the collector timer is disabled (shutdown).
            if (!timer.Enabled)
            {
                break;
            }
            // Just check to ensure its valid
            if (info.Deleted || String.IsNullOrWhiteSpace(info.UniqueId))
            {
                continue;
            }
            // Track the start time for debug purposes
            var start = DateTime.Now;
            MailMessage message = null;
            try
            {
                // Download the message
                message = imap.FetchMessage(info.UniqueId);
                // Calculate the time taken to fetch the message
                var fetchTime = DateTime.Now.Subtract(start);
                // Process the message (So long as the fetch succeeded)
                if (message != null)
                {
                    // Setup the data context
                    using (var ctx = new EmailImportDataContext())
                    {
                        long emailID = 0;
                        // Truncate the subject to avoid data commit errors
                        message.Subject = Truncate(message.Subject, 500);
                        // Check for duplicate
                        if (IsDuplicate(ctx, profile.MailboxGUID, message, ref emailID))
                        {
                            // Log the duplicate error
                            ConfigLogger.Instance.LogWarning("ImapCollector", String.Format("Message already downloaded, moved to duplicate folder (existing EmailID = {0}).", emailID));
                            // Move the message to the duplicate sub folder
                            imap.MoveMessage(info.UniqueId, "Duplicate", true, false);
                        }
                        else
                        {
                            // Create an instance of the email database object
                            var email = new Email();
                            // Assign properties
                            email.MailboxGUID = profile.MailboxGUID;
                            email.DateSent = message.DateSent();
                            email.DateReceived = message.DateReceived();
                            email.From = message.From.GetAddressOrDisplayName();
                            email.MessageID = message.MessageId;
                            // Mask any credit-card numbers found in the subject before storing.
                            if (CreditCardHelper.ExistsCCNumber(message.Subject))
                            {
                                email.Subject = CreditCardHelper.MaskCCNumbers(message.Subject, '#');
                            }
                            else
                            {
                                email.Subject = message.Subject;
                            }
                            email.Timestamp = DateTime.Now;
                            // Create the dated storage path
                            var path = Path.Combine(storagePath, email.Timestamp.Value.ToString("yyyyMMdd"));
                            FileSystemHelper.CreateDirectory(path);
                            // Insert the new record
                            ctx.Emails.InsertOnSubmit(email);
                            // Submit the email (we need to get the email ID)
                            using (TransactionScope scope = new TransactionScope())
                            {
                                // Initial submit of changes
                                ctx.SubmitChanges();
                                // Build the mail message file name
                                email.MessageFilePath = Path.Combine(path, String.Format("{0:00000000}.eml", email.EmailID));
                                // Save to disk (delete anything that already exists)
                                message.Save(email.MessageFilePath, MessageFormat.Eml);
                                // Get the batch number - THIS SHOULD NEVER HAPPEN IN A MULTI THREAD SCENARIO WITHOUT A LOCK
                                var batchNumber = ctx.BatchNumbers.SingleOrDefault(b => b.Group == profile.Group);
                                // If there is no batchNumber defined yet, create and insert one
                                if (batchNumber == null)
                                {
                                    batchNumber = new BatchNumber();
                                    batchNumber.Group = profile.Group;
                                    ctx.BatchNumbers.InsertOnSubmit(batchNumber);
                                }
                                // Init to 0 if null
                                if (!batchNumber.Value.HasValue)
                                {
                                    batchNumber.Value = 0;
                                }
                                // Set the new batch number to this email
                                email.BatchNumber = String.Format(String.IsNullOrWhiteSpace(profile.BatchNumberFormat) ? "{0:00000000}" : profile.BatchNumberFormat, ++batchNumber.Value);
                                // Final submit of updates
                                ctx.SubmitChanges();
                                // Finally, commit to the database
                                scope.Complete();
                            }
                            // Move the email to the archive (if this fails, but the download is complete this
                            // will just result in a duplicate next time round if the deleted flag is not set)
                            imap.MoveMessage(info.UniqueId, "Archive", true, false);
                            // Log message level download stats
                            ConfigLogger.Instance.LogDebug("ImapCollector", String.Format("Message downloaded (EmailID = {0}, Fetch Time = {1}s, Total Time = {2}s).", email.EmailID, (int)fetchTime.TotalSeconds, (int)DateTime.Now.Subtract(start).TotalSeconds));
                            // Increment the download count
                            count++;
                        }
                    }
                }
            }
            catch (OutOfMemoryException)
            {
                // Out of memory is unrecoverable here — let it propagate.
                throw;
            }
            catch (Exception e)
            {
                // Any other failure: log and continue with the next message.
                ConfigLogger.Instance.LogError("ImapCollector", e);
            }
            finally
            {
                // Always release the fetched message, success or failure.
                if (message != null)
                {
                    message.Dispose();
                }
            }
        }
    }
    return(count);
}
public void PublishAzureServiceRemovesNodeLogs()
{
    // Sandbox directory that monitors and cleans up everything we "publish".
    using (FileSystemHelper testFiles = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Default publish settings so the command can initialize.
        testFiles.CreateAzureSdkDirectoryAndImportPublishSettings();

        // Packaging never talks to Azure, so an empty mock channel suffices.
        SimpleServiceManagement mockChannel = new SimpleServiceManagement();

        // Scaffold the service to publish.
        string serviceName = "TEST_SERVICE_NAME";
        NewAzureServiceCommand newServiceCommand = new NewAzureServiceCommand();
        newServiceCommand.NewAzureServiceProcess(testFiles.RootPath, serviceName);
        string servicePath = testFiles.CreateDirectory(serviceName);

        // Web role that will receive a top-level log directory.
        AddAzureNodeWebRoleCommand addWebRoleCommand = new AddAzureNodeWebRoleCommand();
        string webRoleName = "NODE_WEB_ROLE";
        addWebRoleCommand.AddAzureNodeWebRoleProcess(webRoleName, 2, servicePath);
        string webRolePath = Path.Combine(servicePath, webRoleName);

        // Worker role that will receive a nested log directory.
        AddAzureNodeWorkerRoleCommand addWorkerRoleCommand = new AddAzureNodeWorkerRoleCommand();
        string workerRoleName = "NODE_WORKER_ROLE";
        addWorkerRoleCommand.AddAzureNodeWorkerRoleProcess(workerRoleName, 2, servicePath);
        string workerRolePath = Path.Combine(servicePath, workerRoleName);

        // Control roles that never get any log entries.
        new AddAzureNodeWebRoleCommand().AddAzureNodeWebRoleProcess("SECOND_WEB_ROLE", 2, servicePath);
        new AddAzureNodeWorkerRoleCommand().AddAzureNodeWorkerRoleProcess("SECOND_WORKER_ROLE", 2, servicePath);

        // Plant fake server.js log files in both roles.
        string logName = "server.js.logs";
        string webLogDirectory = Path.Combine(webRolePath, logName);
        Directory.CreateDirectory(webLogDirectory);
        File.WriteAllText(Path.Combine(webLogDirectory, "0.txt"), "secret web role debug details were logged here");
        string workerLogDirectory = Path.Combine(Path.Combine(workerRolePath, "NestedDirectory"), logName);
        Directory.CreateDirectory(workerLogDirectory);
        File.WriteAllText(Path.Combine(workerLogDirectory, "0.txt"), "secret worker role debug details were logged here");

        // Build the package.
        PublishAzureServiceCommand publishCommand = new PublishAzureServiceCommand(mockChannel);
        publishCommand.InitializeSettingsAndCreatePackage(servicePath);

        // Rip open the package and prove neither role part carries a log file.
        string packagePath = Path.Combine(servicePath, "cloud_package.cspkg");
        using (Package package = Package.Open(packagePath))
        {
            Action <string> assertRoleHasNoLogs = roleName =>
            {
                PackagePart rolePart = package.GetParts().First(p => p.Uri.ToString().Contains(roleName));
                using (Package rolePackage = Package.Open(rolePart.GetStream()))
                {
                    Assert.IsFalse(
                        rolePackage.GetParts().Any(p => p.Uri.ToString().Contains(logName)),
                        "Found {0} part in {1} package!",
                        logName,
                        roleName);
                }
            };
            assertRoleHasNoLogs(webRoleName);
            assertRoleHasNoLogs(workerRoleName);
        }
    }
}
public void PublishAzureServiceUpgradeTest()
{
    // Create a temp directory that we'll use to "publish" our service
    using (FileSystemHelper files = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Import our default publish settings
        files.CreateAzureSdkDirectoryAndImportPublishSettings();

        // Flags recording which service management operations were invoked
        bool hostedServiceCreated = false;
        bool deploymentCreatedOrUpdated = false;
        bool deploymentUpgraded = false;

        // Mock the Azure calls.  Reporting an existing "Production"
        // deployment should force the publish down the upgrade path.
        SimpleServiceManagement channel = new SimpleServiceManagement
        {
            GetStorageServiceThunk = ar => new StorageService(),
            CreateHostedServiceThunk = ar => hostedServiceCreated = true,
            GetHostedServiceWithDetailsThunk = ar => new HostedService
            {
                Deployments = new DeploymentList()
                {
                    new Deployment { DeploymentSlot = "Production" }
                }
            },
            GetStorageKeysThunk = ar => new StorageService()
            {
                StorageServiceKeys = new StorageServiceKeys()
                {
                    Primary = "VGVzdEtleSE="
                }
            },
            CreateOrUpdateDeploymentThunk = ar => deploymentCreatedOrUpdated = true,
            UpgradeDeploymentThunk = ar => deploymentUpgraded = true,
            GetDeploymentBySlotThunk = ar => new Deployment()
            {
                Status = DeploymentStatus.Starting,
                RoleInstanceList = new RoleInstanceList(
                    new RoleInstance[]
                    {
                        new RoleInstance()
                        {
                            InstanceName = "Role_IN_0",
                            InstanceStatus = RoleInstanceStatus.Ready
                        }
                    })
            },
            ListCertificatesThunk = ar => new CertificateList()
        };

        // Create the service we're going to publish
        string serviceName = "TEST_SERVICE_NAME";
        new NewAzureServiceCommand().NewAzureServiceProcess(files.RootPath, serviceName);
        string servicePath = files.CreateDirectory(serviceName);

        // Publish with uploads skipped so only the mocked calls run
        PublishAzureServiceCommand publishService = new PublishAzureServiceCommand(channel);
        publishService.ShareChannel = true;
        publishService.SkipUpload = true;
        publishService.PublishService(servicePath);

        // The existing deployment should be upgraded, not re-created
        Assert.IsFalse(hostedServiceCreated);
        Assert.IsFalse(deploymentCreatedOrUpdated);
        Assert.IsTrue(deploymentUpgraded);
    }
}
public void PublishAzureServiceCreateWorkersPackageTest()
{
    // Verifies that locally packaging a service with both a web role and
    // a worker role generates all expected files and a seven-part package.
    using (FileSystemHelper files = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Import our default publish settings
        files.CreateAzureSdkDirectoryAndImportPublishSettings();

        // Mock channel for the service management calls; no thunks are
        // needed because the package is only created locally.
        SimpleServiceManagement channel = new SimpleServiceManagement();

        // Create a new service that we're going to publish
        string serviceName = "TEST_SERVICE_NAME";
        NewAzureServiceCommand newService = new NewAzureServiceCommand();
        newService.NewAzureServiceProcess(files.RootPath, serviceName);
        string servicePath = files.CreateDirectory(serviceName);

        // Add web and worker roles.  The returned role paths were
        // previously captured into unused locals; they are intentionally
        // discarded here.
        AddAzureNodeWebRoleCommand newWebRole = new AddAzureNodeWebRoleCommand();
        string webRoleName = "NODE_WEB_ROLE";
        newWebRole.AddAzureNodeWebRoleProcess(webRoleName, 2, servicePath);

        AddAzureNodeWorkerRoleCommand newWorkerRole = new AddAzureNodeWorkerRoleCommand();
        string workerRoleName = "NODE_WORKER_ROLE";
        newWorkerRole.AddAzureNodeWorkerRoleProcess(workerRoleName, 2, servicePath);

        // Get the publishing process started by creating the package
        PublishAzureServiceCommand publishService = new PublishAzureServiceCommand(channel);
        publishService.InitializeSettingsAndCreatePackage(servicePath);

        // Every generated configuration file must mention both role names
        Action<string> verifyContainsNames = p =>
        {
            string contents = File.ReadAllText(p);
            Assert.IsTrue(contents.Contains(webRoleName));
            Assert.IsTrue(contents.Contains(workerRoleName));
        };

        // Verify the generated files
        files.AssertFiles(new Dictionary<string, Action<string>>()
        {
            { serviceName + @"\deploymentSettings.json", null },
            { serviceName + '\\' + webRoleName + @"\server.js", null },
            { serviceName + '\\' + workerRoleName + @"\server.js", null },
            { serviceName + @"\ServiceDefinition.csdef", verifyContainsNames },
            { serviceName + @"\ServiceConfiguration.Cloud.cscfg", verifyContainsNames },
            { serviceName + @"\ServiceConfiguration.Local.cscfg", verifyContainsNames },
            {
                serviceName + @"\cloud_package.cspkg",
                p =>
                {
                    // The finished package should contain exactly seven parts
                    using (Package package = Package.Open(p))
                    {
                        Assert.AreEqual(7, package.GetParts().Count());
                    }
                }
            }
        });
    }
}
public static ManageSiteResult ValidateSite(Site site, Site originalSite) { var certificates = SiteManager.GetCertificates(); var result = new ManageSiteResult(); if (!site.Bindings.Any()) { result.Errors.Add("Minimum one binding is required."); } if (string.IsNullOrWhiteSpace(site.Name)) { result.Errors.Add("Name is required."); } var existingSite = SiteManager.GetSiteByName(site.Name); if (originalSite == null) { originalSite = new Site() { IisId = 0 }; } if (site.Name != null && existingSite != null && site.Name.ToLower() == existingSite.Name.ToLower() && existingSite.IisId != originalSite.IisId) { result.Errors.Add("There's already a site with this name."); } if (string.IsNullOrWhiteSpace(site.SitePath)) { result.Errors.Add("Site path is required."); } else { if (!FileSystemHelper.IsPathValid(site.SitePath)) { result.Errors.Add("Path cannot contain the following characters: ?, ;, :, @, &, =, +, $, ,, |, \", <, >, *."); } else { if (!FileSystemHelper.DirectoryExists(site.SitePath)) { FileSystemHelper.CreateDirectory(site.SitePath); } } if (!FileSystemHelper.IsPathValid(site.LogFileDirectory)) { result.Errors.Add("Log File Directory cannot contain the following characters: ?, ;, :, @, &, =, +, $, ,, |, \", <, >, *."); } else { if (!FileSystemHelper.DirectoryExists(site.LogFileDirectory)) { FileSystemHelper.CreateDirectory(site.LogFileDirectory); } } } result.Result = result.Errors.Any() ? SiteResult.ValidationError : SiteResult.Success; return(result); }
public void SavePackageWithMultipleRolesTest()
{
    // Packs a service with one web role and one worker role locally and
    // verifies the scaffolding, generated files, and package contents.
    using (FileSystemHelper files = new FileSystemHelper(this) { EnableMonitoring = true })
    {
        // Create a new service that we're going to pack locally
        string serviceName = "TEST_SERVICE_NAME";
        NewAzureServiceCommand newService = new NewAzureServiceCommand();
        newService.NewAzureServiceProcess(files.RootPath, serviceName);
        string servicePath = files.CreateDirectory(serviceName);

        // Add a Node web role to the solution
        string webRoleName = "TEST_WEB_ROLE";
        int webRoleInstanceCount = 2;
        AddAzureNodeWebRoleCommand addAzureNodeWebRole = new AddAzureNodeWebRoleCommand();
        addAzureNodeWebRole.AddAzureNodeWebRoleProcess(webRoleName, webRoleInstanceCount, servicePath);

        // Add a Node worker role to the solution
        string workerRoleName = "TEST_WORKER_ROLE";
        int workerRoleInstanceCount = 2;
        AddAzureNodeWorkerRoleCommand addAzureNodeWorkerRole = new AddAzureNodeWorkerRoleCommand();
        addAzureNodeWorkerRole.AddAzureNodeWorkerRoleProcess(workerRoleName, workerRoleInstanceCount, servicePath);

        // Run our packaging command
        SaveAzureServicePackageCommand saveServicePackage = new SaveAzureServicePackageCommand();
        saveServicePackage.CreatePackage(servicePath);

        // Assert that the service structure is as expected
        AzureAssert.ScaffoldingExists(
            Path.Combine(files.RootPath, serviceName, workerRoleName),
            Path.Combine(Resources.NodeScaffolding, Resources.WorkerRole));
        AzureAssert.ScaffoldingExists(
            Path.Combine(files.RootPath, serviceName, webRoleName),
            Path.Combine(Resources.NodeScaffolding, Resources.WebRole));

        // BUG FIX: the original verifiers were Action<string> lambdas of
        // the form `p => File.ReadAllText(p).Contains(serviceName)` whose
        // boolean result was silently discarded, so the content check
        // never asserted anything.  Wrap the check in Assert.IsTrue so a
        // missing service name actually fails the test.
        Action<string> verifyContainsServiceName =
            p => Assert.IsTrue(File.ReadAllText(p).Contains(serviceName));

        // Verify the generated files
        files.AssertFiles(new Dictionary<string, Action<string>>()
        {
            { serviceName + @"\deploymentSettings.json", null },
            { serviceName + @"\ServiceDefinition.csdef", verifyContainsServiceName },
            { serviceName + @"\ServiceConfiguration.Cloud.cscfg", verifyContainsServiceName },
            { serviceName + @"\ServiceConfiguration.Local.cscfg", verifyContainsServiceName },
            {
                serviceName + @"\cloud_package.cspkg",
                p =>
                {
                    // The finished package should contain exactly seven parts
                    using (Package package = Package.Open(p))
                    {
                        Assert.AreEqual(7, package.GetParts().Count());
                    }
                }
            }
        });
    }
}