/// <summary>
/// Builds the Sass output for the theme content node with the given id.
/// </summary>
/// <param name="id">The content id of the theme node.</param>
/// <returns>
/// The build result, an empty string when the node is not a <see cref="Theme"/>,
/// or an error message when the node is missing or the build throws.
/// </returns>
public string BuildSass(int id)
{
    try
    {
        var themeNode = _contentService.GetById(id);
        if (themeNode == null)
        {
            _logger.Error<SassService>("Theme doesn't exist.");
            return $"Sass Build Error: doesn't exist.";
        }

        // Only theme nodes can be compiled; anything else produces no output.
        if (!(themeNode is Theme theme))
        {
            return string.Empty;
        }

        // Writing the custom variables returns a non-empty string only on failure,
        // in which case that message is surfaced instead of building.
        var variablesResult = WriteCustomSassVariables(theme);
        return string.IsNullOrEmpty(variablesResult)
            ? BuildThemeSass(theme)
            : variablesResult;
    }
    catch (Exception ex)
    {
        _logger.Error<SassService>(ex);
        return $"Sass Build Error: {ex.Message}";
    }
}
/// <summary>
/// Performs a GET against the given url (relative to the application url) and
/// reports whether the endpoint answered with 200 OK.
/// </summary>
/// <param name="url">The task url to call.</param>
/// <param name="token">Token used to cancel the outgoing request.</param>
/// <returns>True when the response status is 200 OK; false on any other status or on error.</returns>
private async Task<bool> GetTaskByHttpAync(string url, CancellationToken token)
{
    // Lazily create and reuse a single HttpClient instance.
    if (_httpClient == null)
    {
        _httpClient = new HttpClient { BaseAddress = _runtime.ApplicationUrl };
    }

    //TODO: pass custom the authorization header, currently these aren't really secured!
    //request.Headers.Authorization = AdminTokenAuthorizeAttribute.GetAuthenticationHeaderValue(_appContext);

    // HttpRequestMessage is IDisposable — was previously leaked.
    using (var request = new HttpRequestMessage(HttpMethod.Get, url))
    {
        try
        {
            // ConfigureAwait(false) is recommended? http://blog.stephencleary.com/2012/07/dont-block-on-async-code.html
            var result = await _httpClient.SendAsync(request, token).ConfigureAwait(false);
            return result.StatusCode == HttpStatusCode.OK;
        }
        catch (Exception ex)
        {
            _logger.Error<ScheduledTasks>(ex, "An error occurred calling web task for url: {Url}", url);
        }
    }

    return false;
}
/// <summary>
/// Indexes the given value sets into Elasticsearch in bulk batches, swapping the
/// alias over to the freshly-built index when a reindex is in progress.
/// </summary>
/// <param name="op">The value sets to index.</param>
/// <param name="onComplete">Callback invoked with the total number of indexed items.</param>
protected override void PerformIndexItems(IEnumerable<ValueSet> op, Action<IndexOperationEventArgs> onComplete)
{
    // Snapshot the indexes currently behind the alias so stale ones can be deleted after a reindex.
    var indexesMappedToAlias = _client.Value.GetAlias(descriptor => descriptor.Name(indexAlias))
        .Indices.Select(x => x.Key).ToList();

    // While reindexing, write to the concrete index; otherwise write through the alias.
    var indexTarget = isReindexing ? indexName : indexAlias;

    if (isReindexing)
    {
        // Create the new index with explicit field mappings built from the field definitions.
        var index = _client.Value.CreateIndex(indexName, c => c
            .Mappings(ms => ms.Map<Document>(m => m.AutoMap()
                .Properties(ps => CreateFieldsMapping(ps, FieldDefinitionCollection)))));
    }

    var indexer = GetIndexClient();
    var totalResults = 0;

    // batches can only contain 1000 records
    // (was InGroupsOf(1), which issued one bulk request per document)
    foreach (var rowGroup in op.InGroupsOf(1000))
    {
        var batch = ToElasticSearchDocs(rowGroup, indexTarget);
        var indexResult = indexer.Bulk(e => batch);
        totalResults += indexResult.Items.Count;

        if (indexResult.Errors)
        {
            foreach (var itemWithError in indexResult.ItemsWithErrors)
            {
                _logger.Error<ElasticSearchIndex>("Failed to index document {NodeID}: {Error}",
                    itemWithError.Id, itemWithError.Error);
            }
        }
    }

    if (isReindexing)
    {
        // Atomically repoint the alias at the new index, then drop the old ones.
        indexer.Alias(ba => ba
            .Remove(remove => remove.Index("*").Alias(indexAlias))
            .Add(add => add.Index(indexName).Alias(indexAlias)));

        indexesMappedToAlias.Where(e => e.Name != indexName).ToList()
            .ForEach(e => _client.Value.DeleteIndex(e));
    }

    onComplete(new IndexOperationEventArgs(this, totalResults));
}
/// <summary>
/// Gets the Cloudflare zones for the account, optionally filtered to a single domain name.
/// </summary>
/// <param name="domainName">Optional domain name to filter the zone list by.</param>
/// <param name="throwExceptionOnFail">When true, rethrows any failure instead of returning an empty list.</param>
/// <returns>The matching zones, or an empty sequence when the call fails and rethrow is not requested.</returns>
public IEnumerable<Zone> ListZones(string domainName = null, bool throwExceptionOnFail = false)
{
    try
    {
        using (var client = new HttpClient())
        {
            client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));

            string url = CLOUDFLARE_API_BASE_URL + "zones";
            if (!String.IsNullOrEmpty(domainName))
            {
                url += "?name=" + HttpUtility.UrlEncode(domainName);
            }

            HttpRequestMessage request = new HttpRequestMessage()
            {
                RequestUri = new Uri(url),
                Method = HttpMethod.Get,
            };

            //Add in the ApiKey and AccountEmail
            AddRequestHeaders(request);

            // NOTE(review): blocking on .Result can deadlock under a synchronization context;
            // consider making this method async. Left as-is to keep the interface unchanged.
            var responseContent = client.SendAsync(request).Result.Content;
            ListZonesResponse response = responseContent.ReadAsAsync<ListZonesResponse>().Result;

            if (!response.Success)
            {
                //Something went wrong log the response
                logger.Error<CloudflareService>(String.Format("Could not get the list of zones for name {0} because of {1}", domainName, response.Messages.ToString()));
                return Enumerable.Empty<Zone>();
            }

            //Return the zones from the response.
            return response.Zones;
        }
    }
    catch (Exception e)
    {
        logger.Error<CloudflareService>(String.Format("Could not get the List of zones for name {0}", domainName), e);
        if (throwExceptionOnFail)
        {
            // "throw;" preserves the original stack trace; "throw e;" resets it.
            throw;
        }
        else
        {
            //We didn't want to throw an exception so just return an empty list
            return Enumerable.Empty<Zone>();
        }
    }
}
/// <summary>
/// Purges the entire Cloudflare cache for the zone that serves the given domain.
/// </summary>
/// <param name="domain">The domain whose zone cache should be purged; any uri form is accepted, only the authority is used.</param>
/// <returns>A status describing whether the purge succeeded, with a message explaining any failure.</returns>
public StatusWithMessage PurgeEverything(string domain)
{
    //If the setting is turned off, then don't do anything.
    if (!configuration.PurgeCacheOn)
    {
        return new StatusWithMessage()
        {
            Success = false,
            // message previously read "Clould flare ... turned of" — typos fixed
            Message = "Cloudflare for Umbraco is turned off as indicated in the config file."
        };
    }

    //We only want the host and not the scheme or port number so just to ensure that is what we are getting we will
    //proccess it as a uri.
    try
    {
        var domainAsUri = new Uri(domain);
        domain = domainAsUri.Authority;
    }
    catch (Exception e)
    {
        logger.Error<CloudflareManager>(e);
        //So if we are here it didn't parse as an uri so we will assume that it was given in the correct format (without http://)
    }

    //Get the zone for the given domain
    Zone websiteZone = GetZone(domain);

    if (websiteZone == null)
    {
        //this will already be logged in the GetZone method so just relay that it was bad.
        return new StatusWithMessage(false, String.Format("We could not purge the cache because the domain {0} is not valid with the provided api key and email combo. Please ensure this domain is registered under these credentials on your cloudflare dashboard.", domain));
    }

    bool statusFromApi = this.cloudflareService.PurgeCache(websiteZone.Id, null, true);

    if (!statusFromApi)
    {
        return new StatusWithMessage(false, CloudflareMessages.CLOUDFLARE_API_ERROR);
    }

    return new StatusWithMessage(true, "");
}
/// <summary>
/// Deletes every file under the given folder (recursively) whose last write
/// time is older than the configured maximum age.
/// </summary>
/// <param name="dir">The folder to clean up.</param>
private void CleanupFolder(DirectoryInfo dir)
{
    dir.Refresh(); //in case it's changed during runtime

    if (!dir.Exists)
    {
        _logger.Debug<TempFileCleanup, string>("The cleanup folder doesn't exist {Folder}", dir.FullName);
        return;
    }

    foreach (var tempFile in dir.GetFiles("*.*", SearchOption.AllDirectories))
    {
        // Only files older than the configured age are removed.
        if (DateTime.UtcNow - tempFile.LastWriteTimeUtc <= _age)
            continue;

        try
        {
            // Clear the read-only flag first so Delete cannot fail on it.
            tempFile.IsReadOnly = false;
            tempFile.Delete();
        }
        catch (Exception ex)
        {
            _logger.Error<TempFileCleanup, string>(ex, "Could not delete temp file {FileName}", tempFile.FullName);
        }
    }
}
/// <summary>
/// Recreates and repopulates Examine indexes, optionally restricted to indexes
/// that do not yet exist on disk.
/// </summary>
/// <param name="onlyEmptyIndexes">When true, only missing/empty indexes are rebuilt.</param>
public void RebuildIndexes(bool onlyEmptyIndexes)
{
    var candidates = onlyEmptyIndexes
        ? ExamineManager.Indexes.Where(x => !x.IndexExists())
        : ExamineManager.Indexes;
    var indexes = candidates.ToArray();

    if (indexes.Length == 0)
        return;

    // clear each index before repopulating
    foreach (var index in indexes)
        index.CreateIndex();

    // run each populator over the indexes; one failing populator must not stop the others
    foreach (var populator in _populators)
    {
        try
        {
            populator.Populate(indexes);
        }
        catch (Exception e)
        {
            _logger.Error<IndexRebuilder>(e, "Index populating failed for populator {Populator}", populator.GetType());
        }
    }
}
/// <summary>
/// Filters the known composer types by runtime level, applies enable/disable
/// rules, gathers dependency requirements and returns the composers
/// topologically sorted by those requirements.
/// </summary>
/// <returns>The composer types in dependency order.</returns>
/// <exception cref="Exception">Rethrows any sorting failure (e.g. a dependency cycle) after dumping the composer report to the log.</exception>
private IEnumerable<Type> PrepareComposerTypes()
{
    // create a list, remove those that cannot be enabled due to runtime level
    var composerTypeList = _composerTypes
        .Where(x =>
        {
            // use the min/max levels specified by the attribute if any
            // otherwise, min: user composers are Run, anything else is Unknown (always run)
            // max: everything is Run (always run)
            var attr = x.GetCustomAttribute<RuntimeLevelAttribute>();
            var minLevel = attr?.MinLevel ?? (x.Implements<IUserComposer>() ? RuntimeLevel.Run : RuntimeLevel.Unknown);
            var maxLevel = attr?.MaxLevel ?? RuntimeLevel.Run;
            return (_composition.RuntimeState.Level >= minLevel && _composition.RuntimeState.Level <= maxLevel);
        })
        .ToList();

    // enable or disable composers
    EnableDisableComposers(composerTypeList);

    // sort the composers according to their dependencies
    // seed every composer with no requirements first, then gather from both
    // Require and RequiredBy attributes
    var requirements = new Dictionary<Type, List<Type>>();
    foreach (var type in composerTypeList)
    {
        requirements[type] = null;
    }
    foreach (var type in composerTypeList)
    {
        GatherRequirementsFromRequireAttribute(type, composerTypeList, requirements);
        GatherRequirementsFromRequiredByAttribute(type, composerTypeList, requirements);
    }

    // only for debugging, this is verbose
    //_logger.Debug<Composers>(GetComposersReport(requirements));

    // sort composers
    var graph = new TopoGraph<Type, KeyValuePair<Type, List<Type>>>(kvp => kvp.Key, kvp => kvp.Value);
    graph.AddItems(requirements);
    List<Type> sortedComposerTypes;
    try
    {
        sortedComposerTypes = graph.GetSortedItems().Select(x => x.Key).ToList();
    }
    catch (Exception e)
    {
        // in case of an error, force-dump everything to log
        _logger.Info<Composers>("Composer Report:\r\n{ComposerReport}", GetComposersReport(requirements));
        _logger.Error<Composers>(e, "Failed to sort composers.");
        throw;
    }

    // bit verbose but should help for troubleshooting
    //var text = "Ordered Composers: " + Environment.NewLine + string.Join(Environment.NewLine, sortedComposerTypes) + Environment.NewLine;
    _logger.Debug<Composers>("Ordered Composers: {SortedComposerTypes}", sortedComposerTypes);

    return (sortedComposerTypes);
}
/// <summary>
/// Writes a freshly generated telemetry site identifier (a GUID) onto the
/// backOffice element of umbracoSettings.config, adding the "id" attribute
/// when it does not already exist.
/// </summary>
/// <param name="model">The install step model (unused).</param>
/// <returns>Always a completed task with a null result; this step produces no view.</returns>
public override Task<InstallSetupResult> ExecuteAsync(object model)
{
    // Generate GUID
    var siteIdentifier = Guid.NewGuid();

    // Modify the XML to add a new GUID site identifier
    // hack: ensure this does not trigger a restart
    using (ChangesMonitor.Suspended())
    {
        var settingsPath = IOHelper.MapPath(SystemFiles.UmbracoSettings);
        if (!File.Exists(settingsPath))
        {
            // Log an error
            _logger.Error<TelemetryIdentifierStep>("Unable to find umbracoSettings.config file to add telemetry site identifier");
            return Task.FromResult<InstallSetupResult>(null);
        }

        try
        {
            var configXml = XDocument.Load(settingsPath, LoadOptions.PreserveWhitespace);
            if (configXml.Root != null)
            {
                var backOffice = configXml.Root.Element("backOffice");
                if (backOffice == null)
                {
                    return Task.FromResult<InstallSetupResult>(null);
                }

                // Will add ID attribute if it does not exist
                backOffice.SetAttributeValue("id", siteIdentifier.ToString());

                // Save file back down
                configXml.Save(settingsPath, SaveOptions.DisableFormatting);
            }
        }
        catch (Exception ex)
        {
            _logger.Error<TelemetryIdentifierStep>(ex, "Couldn't update umbracoSettings.config with a backoffice with a telemetry site identifier");
        }
    }

    return Task.FromResult<InstallSetupResult>(null);
}
/// <summary>
/// Pings the configured keep-alive url to keep the site warm.
/// Only runs on the MainDom of non-replica, known-role servers.
/// </summary>
/// <param name="token">Cancellation token for the outgoing request.</param>
/// <returns>True to repeat the task later; false to stop repeating.</returns>
public override async Task<bool> PerformRunAsync(CancellationToken token)
{
    // not on replicas nor unknown role servers
    switch (_runtime.ServerRole)
    {
        case ServerRole.Replica:
            _logger.Debug<KeepAlive>("Does not run on replica servers.");
            return true; // role may change!
        case ServerRole.Unknown:
            _logger.Debug<KeepAlive>("Does not run on servers with unknown role.");
            return true; // role may change!
    }

    // ensure we do not run if not main domain, but do NOT lock it
    if (_runtime.IsMainDom == false)
    {
        _logger.Debug<KeepAlive>("Does not run if not MainDom.");
        return false; // do NOT repeat, going down
    }

    using (_logger.DebugDuration<KeepAlive>("Keep alive executing", "Keep alive complete"))
    {
        var keepAlivePingUrl = _keepAliveSection.KeepAlivePingUrl;
        try
        {
            if (keepAlivePingUrl.Contains("{umbracoApplicationUrl}"))
            {
                var umbracoAppUrl = _runtime.ApplicationUrl.ToString();
                if (umbracoAppUrl.IsNullOrWhiteSpace())
                {
                    _logger.Warn<KeepAlive>("No umbracoApplicationUrl for service (yet), skip.");
                    return true; // repeat
                }

                keepAlivePingUrl = keepAlivePingUrl.Replace("{umbracoApplicationUrl}", umbracoAppUrl.TrimEnd(Constants.CharArrays.ForwardSlash));
            }

            // HttpRequestMessage is IDisposable and was previously leaked; the
            // response body is irrelevant (the ping itself is the point), so discard it.
            using (var request = new HttpRequestMessage(HttpMethod.Get, keepAlivePingUrl))
            {
                _ = await _httpClient.SendAsync(request, token);
            }
        }
        catch (Exception ex)
        {
            _logger.Error<KeepAlive>(ex, "Keep alive failed (at '{keepAlivePingUrl}').", keepAlivePingUrl);
        }
    }

    return true; // repeat
}
/// <summary>
/// Reads the configured maximum log age in minutes, falling back to 24 hours
/// when the setting is absent or unreadable.
/// </summary>
/// <param name="settings">The Umbraco settings section to read from.</param>
/// <returns>The maximum age, in minutes.</returns>
private int GetLogScrubbingMaximumAge(IUmbracoSettingsSection settings)
{
    const int defaultMaximumAge = 24 * 60; // 24 hours, in minutes
    try
    {
        // A value of -1 (or lower) means "not configured" — keep the default.
        if (settings.Logging.MaxLogAge > -1)
            return settings.Logging.MaxLogAge;
    }
    catch (Exception ex)
    {
        _logger.Error<LogScrubber>(ex, "Unable to locate a log scrubbing maximum age. Defaulting to 24 hours.");
    }
    return defaultMaximumAge;
}
/// <summary>
/// Topologically sorts composer types according to their gathered requirements,
/// excluding interface types from the result.
/// </summary>
/// <param name="requirements">Map of composer type to the types it requires.</param>
/// <returns>The concrete composer types in dependency order.</returns>
/// <exception cref="Exception">Rethrows any sorting failure (e.g. a cycle) after dumping the composer report to the log.</exception>
internal IEnumerable<Type> SortComposers(Dictionary<Type, List<Type>> requirements)
{
    // sort composers
    var graph = new TopoGraph<Type, KeyValuePair<Type, List<Type>>>(kvp => kvp.Key, kvp => kvp.Value);
    graph.AddItems(requirements);

    try
    {
        return graph.GetSortedItems()
            .Select(kvp => kvp.Key)
            .Where(type => !type.IsInterface)
            .ToList();
    }
    catch (Exception e)
    {
        // in case of an error, force-dump everything to log
        _logger.Info<Composers>("Composer Report:\r\n{ComposerReport}", GetComposersReport(requirements));
        _logger.Error<Composers>(e, "Failed to sort composers.");
        throw;
    }
}
/// <summary>
/// Terminates and disposes all components in reverse initialization order,
/// logging any individual component whose termination exceeds the threshold.
/// A failing component does not prevent the remaining ones from terminating.
/// </summary>
public void Terminate()
{
    using (_logger.DebugDuration<ComponentCollection>(
        $"Terminating. (log components when >{LogThresholdMilliseconds}ms)",
        "Terminated."))
    {
        // terminate components in reverse order
        foreach (var component in this.Reverse())
        {
            var componentType = component.GetType();

            using (_logger.DebugDuration<ComponentCollection>(
                $"Terminating {componentType.FullName}.",
                $"Terminated {componentType.FullName}.",
                thresholdMilliseconds: LogThresholdMilliseconds))
            {
                try
                {
                    component.Terminate();
                    component.DisposeIfDisposable();
                }
                catch (Exception ex)
                {
                    _logger.Error<ComponentCollection, string>(ex, "Error while terminating component {ComponentType}.", componentType.FullName);
                }
            }
        }
    }
}
/// <summary>
/// Removes the documents with the given ids from the index in a single bulk
/// request, logging any per-item failures.
/// </summary>
/// <param name="itemIds">The document ids to delete; blank ids are skipped.</param>
/// <param name="onComplete">Completion callback (not invoked by this implementation).</param>
protected override void PerformDeleteFromIndex(IEnumerable<string> itemIds, Action<IndexOperationEventArgs> onComplete)
{
    var bulk = new BulkDescriptor();

    foreach (var itemId in itemIds)
    {
        if (string.IsNullOrWhiteSpace(itemId))
            continue;

        bulk.Index(indexName)
            .Delete<Document>(d => d.Id(itemId))
            .Refresh(Refresh.WaitFor);
    }

    var response = _client.Value.Bulk(bulk);
    if (!response.Errors)
        return;

    foreach (var failed in response.ItemsWithErrors)
    {
        _logger.Error<ElasticSearchBaseIndex>("Failed to remove from index document {NodeID}: {Error}", failed.Id, failed.Error);
    }
}
// ensure that the factory is running with the lastest generation of models
/// <summary>
/// Returns the current model infos, building/loading the models assembly first
/// when required. Thread-safe: a fast read-locked path returns cached infos,
/// otherwise the BuildManager lock is taken BEFORE the factory's RW lock to
/// avoid deadlocks, and the models are (re)built under a write lock.
/// </summary>
/// <returns>The model infos; on build failure, an empty Infos (and _hasModels is still set so the build is not retried).</returns>
internal Infos EnsureModels()
{
    if (_debugLevel > 0)
    {
        _logger.Debug<PureLiveModelFactory>("Ensuring models.");
    }

    // don't use an upgradeable lock here because only 1 thread at a time could enter it
    // fast path: models already built — return them under a plain read lock
    try
    {
        _locker.EnterReadLock();
        if (_hasModels)
        {
            return (_infos);
        }
    }
    finally
    {
        if (_locker.IsReadLockHeld)
        {
            _locker.ExitReadLock();
        }
    }

    var buildManagerLocked = false;
    try
    {
        // always take the BuildManager lock *before* taking the _locker lock
        // to avoid possible deadlock situations (see notes above)
        Monitor.Enter(TheBuildManager, ref buildManagerLocked);

        _locker.EnterUpgradeableReadLock();

        // another thread may have built the models while we waited for the locks
        if (_hasModels)
        {
            return (_infos);
        }

        _locker.EnterWriteLock();

        // we don't have models,
        // either they haven't been loaded from the cache yet
        // or they have been reseted and are pending a rebuild
        using (_logger.DebugDuration<PureLiveModelFactory>("Get models.", "Got models."))
        {
            try
            {
                var assembly = GetModelsAssembly(_pendingRebuild);

                // the one below can be used to simulate an issue with BuildManager, ie it will register
                // the models with the factory but NOT with the BuildManager, which will not recompile views.
                // this is for U4-8043 which is an obvious issue but I cannot replicate
                //_modelsAssembly = _modelsAssembly ?? assembly;

                // the one below is the normal one
                _modelsAssembly = assembly;

                var types = assembly.ExportedTypes.Where(x => x.Inherits<PublishedContentModel>() || x.Inherits<PublishedElementModel>());
                _infos = RegisterModels(types);
                _errors.Clear();
            }
            catch (Exception e)
            {
                try
                {
                    _logger.Error<PureLiveModelFactory>(e, "Failed to build models.");
                    _logger.Warn<PureLiveModelFactory>("Running without models."); // be explicit
                    _errors.Report("Failed to build PureLive models.", e);
                }
                finally
                {
                    // fall back to an empty model set so the site keeps running
                    _modelsAssembly = null;
                    _infos = new Infos { ModelInfos = null, ModelTypeMap = new Dictionary<string, Type>() };
                }
            }

            // don't even try again
            _hasModels = true;
        }

        return (_infos);
    }
    finally
    {
        // unwind the locks in reverse acquisition order, releasing only what is actually held
        if (_locker.IsWriteLockHeld)
        {
            _locker.ExitWriteLock();
        }
        if (_locker.IsUpgradeableReadLockHeld)
        {
            _locker.ExitUpgradeableReadLock();
        }
        if (buildManagerLocked)
        {
            Monitor.Exit(TheBuildManager);
        }
    }
}
/// <summary>
/// Imports podcast episodes from the Spreaker API into the CMS episodes folder,
/// skipping episodes that already exist.
/// </summary>
/// <returns>True when the import ran to completion; false when disabled or a fatal error occurred.</returns>
public override bool PerformRun()
{
    const string spreakerApiEpisodesUrlFormat = "https://api.spreaker.com/v2/shows/{0}/episodes";
    const string spreakerApiEpisodeUrlFormat = "https://api.spreaker.com/v2/episodes/{0}";

    if (!AppSettings.CandidContribs.SpreakerApi.Enabled)
    {
        _logger.Info<SpreakerFeed>("Spreaker episode import disabled");
        return false;
    }

    _logger.Info<SpreakerFeed>("Spreaker episode import started");

    using (var cref = _context.EnsureUmbracoContext())
    {
        // get episodes folder to add episodes to
        var cache = cref.UmbracoContext.Content;
        var cmsEpisodesFolder = (EpisodesFolder)cache.GetByXPath("//episodesFolder").FirstOrDefault();
        if (cmsEpisodesFolder == null)
        {
            _logger.Error<SpreakerFeed>("Spreaker episode import failed: no EpisodesFolder found");
            return false;
        }

        var episodesApiUrl = string.Format(spreakerApiEpisodesUrlFormat, AppSettings.CandidContribs.SpreakerApi.ShowId);
        // NOTE(review): blocking on .Result; consider an async task base class.
        var response = client.GetAsync(episodesApiUrl).Result;
        if (!response.IsSuccessStatusCode)
        {
            _logger.Error<SpreakerFeed>("Spreaker episode import failed: response code {0}, url {1}", response.StatusCode, episodesApiUrl);
            return false;
        }

        // get API response for all episodes
        var episodesString = response.Content.ReadAsStringAsync().Result;
        var convertedEps = JsonConvert.DeserializeObject<APIResponse>(episodesString);

        // get episodes in ascending date order before trying to add to CMS
        var episodes = convertedEps.Response.Items.OrderBy(x => x.PublishedDate);
        foreach (var episode in episodes)
        {
            // is this the best way to find by API id?
            var cmsEpisode = cmsEpisodesFolder.SearchChildren(episode.Id.ToString()).FirstOrDefault();
            if (cmsEpisode != null)
            {
                // already exists so nothing to do
                continue;
            }

            var episodeDetailsUrl = string.Format(spreakerApiEpisodeUrlFormat, episode.Id);
            var episodeDetailsResponse = client.GetAsync(episodeDetailsUrl).Result;
            if (!episodeDetailsResponse.IsSuccessStatusCode)
            {
                // bug fix: was logging the outer list response's status code here;
                // report the failing per-episode response instead
                _logger.Error<SpreakerFeed>("Spreaker episode import failed: response code {0}, url {1}", episodeDetailsResponse.StatusCode, episodeDetailsUrl);
                continue;
            }

            var episodeString = episodeDetailsResponse.Content.ReadAsStringAsync().Result;
            var convertedEp = JsonConvert.DeserializeObject<APIResponse>(episodeString);
            AddNewEpisode(convertedEp.Response.Episode, cmsEpisodesFolder);
        }
    }

    _logger.Info<SpreakerFeed>("Spreaker episode import finished");
    return true;
}