// Converts the given date node into the stream, first capturing the
// reporting context (URL map, error sink, reporting flag) before handing
// off to the core Convert overload.
public void Convert(ufDateNode node, Stream stream, bool multiples, Urls urls, Errors errors, bool reporting)
{
    m_oUrls = urls;
    m_oErrors = errors;
    m_bReporting = reporting;
    Convert(node, stream, multiples);
}
// Verifies that the disaggregated post-secondary graduation points (earned
// and eligible) equal the sum of the ELL/IEP/minority/FRL subgroup columns,
// for both the 1-year and 3-year windows.
public override void Test(Row row, Errors errors)
{
    foreach (var yr in new[] { "1YR", "3YR" })
    {
        foreach (var kind in new[] { "EARN", "ELIG" })
        {
            AssertSum(
                row,
                yr + "_PS_GRAD_DSAG_PTS_" + kind,
                new[]
                {
                    yr + "_PS_GRAD_ELL_PTS_" + kind,
                    yr + "_PS_GRAD_IEP_PTS_" + kind,
                    yr + "_PS_GRAD_MIN_PTS_" + kind,
                    yr + "_PS_GRAD_FRL_PTS_" + kind
                },
                errors);
        }
    }
}
// Verifies that the achievement points totals (earned, then eligible) equal
// the sum of the per-subject columns, for the 1-year and 3-year windows.
// Loop order (kind outer, year inner) preserves the original check order.
public override void Test(Row row, Errors errors)
{
    foreach (var kind in new[] { "EARN", "ELIG" })
    {
        foreach (var yr in new[] { "1YR", "3YR" })
        {
            AssertSum(
                row,
                yr + "_ACH_PTS_" + kind + "_TTL",
                new[]
                {
                    yr + "_ACH_PTS_" + kind + "_READ",
                    yr + "_ACH_PTS_" + kind + "_MATH",
                    yr + "_ACH_PTS_" + kind + "_WRITE",
                    yr + "_ACH_PTS_" + kind + "_SCI"
                },
                errors);
        }
    }
}
// Validates that pointsEarned == percentPoints * pointsEligible / 100 for a
// (percent, eligible, earned) column triple, reporting discrepancies via
// errors. Returns true when the row passes (or is legitimately blank).
bool CheckPointsEarned(Row row, string percentPointsColumn, string pointsEligibleColumn, string pointsEarnedColumn, Errors errors)
{
    // Percent blank AND at least one of eligible/earned blank: nothing to
    // cross-check, treat as a pass.
    if (!Defined(row, percentPointsColumn, errors) && (!Defined(row, pointsEligibleColumn, errors) || !Defined(row, pointsEarnedColumn, errors)))
    {
        return true;
    }
    // From here on both eligible and earned must be present; AssertDefined
    // records an error when they are not.
    if (!AssertDefined(row, pointsEligibleColumn, errors) || !AssertDefined(row, pointsEarnedColumn, errors))
    {
        return false;
    }
    // A blank percent is tolerated when either operand is "0" — the
    // percentage is trivially zero/undefined in that case.
    if(!Defined(row, percentPointsColumn, errors) && (row[pointsEligibleColumn] == "0" || row[pointsEarnedColumn] == "0"))
    {
        return true;
    }
    if (AssertDefined(row, new[] { percentPointsColumn, pointsEligibleColumn, pointsEarnedColumn }, errors))
    {
        var percentPoints = double.Parse(row[percentPointsColumn]);
        var pointsEligible = double.Parse(row[pointsEligibleColumn]);
        var pointsEarned = double.Parse(row[pointsEarnedColumn]);
        var pointsEarnedExpected = percentPoints * pointsEligible / 100.0;
        // Compare via Format() rather than raw doubles — presumably to
        // round away floating-point noise; confirm Format()'s precision.
        return AssertTrue(row, pointsEarnedExpected.Format() == pointsEarned.Format(), percentPointsColumn + ", " + pointsEligibleColumn + ", " + pointsEarnedColumn, errors);
    }
    return false;
}
/// <summary>
/// Verifies that adding a single message string to an Errors collection
/// stores it as the first error's Message.
/// </summary>
public void Add_Test2_Adding_string()
{
    Errors target = new Errors();
    string newError = "Test";

    target.Add(newError);

    // Fixed: Assert.AreEqual takes (expected, actual) — the original passed
    // the actual value first, producing misleading failure messages.
    Assert.AreEqual(newError, target[0].Message);
}
// Runs the subtraction assertion only for rows whose exceed column reads
// "yes" (case-insensitively); other rows are skipped entirely.
void AssertSubtract(Row row, string excdColumn, string panColumn, string pctnColumn, string nexcdColumn, Errors errors)
{
    var exceedFlag = row[excdColumn];
    if (exceedFlag.ToLower() != "yes")
    {
        return;
    }
    AssertSubtract(row, panColumn, pctnColumn, nexcdColumn, errors);
}
// Verifies that the post-secondary points totals (earned and eligible) equal
// the sum of the graduation, disaggregated-graduation, dropout, and ACT
// columns, for both the 1-year and 3-year windows.
public override void Test(Row row, Errors errors)
{
    foreach (var yr in new[] { "1YR", "3YR" })
    {
        foreach (var kind in new[] { "EARN", "ELIG" })
        {
            AssertSum(
                row,
                yr + "_PS_PTS_" + kind + "_TTL",
                new[]
                {
                    yr + "_PS_GRAD_PTS_" + kind,
                    yr + "_PS_GRAD_DSAG_PTS_" + kind,
                    yr + "_PS_DROP_PTS_" + kind,
                    yr + "_PS_ACT_PTS_" + kind
                },
                errors);
        }
    }
}
/// <summary>
/// Verifies that adding a string (even an empty one) to an Errors
/// collection increases its count to one.
/// </summary>
public void Add_Test1_Adding_string()
{
    Errors target = new Errors();
    string newError = string.Empty;

    target.Add(newError);

    // Fixed: Assert.AreEqual takes (expected, actual) — the original passed
    // the actual count first.
    Assert.AreEqual(1, target.Count);
}
// A freshly-constructed Errors collection must render as an empty string.
public void When_ToString_is_called_and_the_errors_are_not_initialized_then_the_result_contains_an_empty_string()
{
    var sut = new Errors();

    var rendered = sut.ToString();

    Assert.IsTrue(string.IsNullOrEmpty(rendered));
}
// Runs the greater-than assertion only for rows whose exceed column reads
// "yes" (case-insensitively); other rows are skipped entirely.
void AssertGreaterThan(Row row, string panColumn, string pctnColumn, string excdColumn, Errors errors)
{
    var exceedFlag = row[excdColumn];
    if (exceedFlag.ToLower() != "yes")
    {
        return;
    }
    AssertGreaterThan(row, panColumn, pctnColumn, errors);
}
// Files the given node under an error-category node in this collection,
// creating the category node (keyed by the error's ToString) on first use.
internal void ReportError(ExplorerNode node, Errors error)
{
    ErrorNode parent = (ErrorNode)this[error.ToString()];
    if (parent == null)
        parent = new ErrorNode(this, error.ToString(), getMessage(error));
    // Instantiates a node of the same runtime type parented to the error
    // node; the instance is discarded — presumably the constructor attaches
    // itself to 'parent' as a side effect. TODO confirm.
    Activator.CreateInstance(node.GetType(), (ExplorerNode)parent, node);
}
// Validates the Made-AGP triple: when all three columns are blank the row
// passes trivially; otherwise the Made-AGP flag selects the rating lookup
// ("Yes"/"No", case-insensitive) used to validate the rating against the
// median growth percentile. Unknown flag values are reported as errors.
bool AssertAGP(Row row, string madeAgpColumn, string ratingColumn, string mgpColumn, Errors errors)
{
    if (!Defined(row, madeAgpColumn, errors) && !Defined(row, ratingColumn, errors) && !Defined(row, mgpColumn, errors))
    {
        return true;
    }

    if (!AssertDefined(row, madeAgpColumn, errors))
    {
        return false;
    }

    var madeAgp = row[madeAgpColumn];
    Func<double, string> lookup;
    switch (madeAgp.ToLower())
    {
        case "yes":
            lookup = RatingYes;
            break;
        case "no":
            lookup = RatingNo;
            break;
        default:
            errors.Add(row, "Unknown Made MGP State (should be 'Yes' or 'No'):" + madeAgp, GetPrettyName());
            return false;
    }
    return AssertRating(row, ratingColumn, mgpColumn, lookup, errors);
}
/// <summary>
/// Verifies that adding a one-element Errors collection to another
/// produces a count of one.
/// </summary>
public void Add_Test2_Adding_Non_Empty_Collection()
{
    Errors target = new Errors();
    Errors newErrors = new Errors { "Test Error" };

    target.Add(newErrors);

    // Fixed: Assert.AreEqual takes (expected, actual) — the original passed
    // the actual count first.
    Assert.AreEqual(1, target.Count);
}
/// <summary>
/// Report errors to the subscriber via the OnError event.
/// </summary>
/// <param name="errors">The errors to publish.</param>
internal virtual void NotifyError(Errors errors)
{
    // Snapshot the delegate so a concurrent unsubscribe between the null
    // check and the invocation cannot cause a NullReferenceException.
    var subscribers = OnError;
    if (subscribers == null)
    {
        return;
    }
    subscribers(this, new ErrorArgs(errors));
}
/// <summary>
/// Passes trivially when both the rating and value columns are blank;
/// otherwise delegates to the base rating validation.
/// </summary>
protected override bool AssertRating(Row row, string ratingColumn, string valueColumn, Func<double, string> ratingLookup, Errors errors, bool passIfBlank = false)
{
    // BUG FIX: the original tested valueColumn twice (copy/paste error),
    // so a blank value with a populated rating column was silently treated
    // as a pass. The sibling overrides (AssertEqual, the cutoffs-based
    // AssertRating) all require BOTH columns to be blank.
    if (!Defined(row, ratingColumn, errors) && !Defined(row, valueColumn, errors))
    {
        return true;
    }
    return base.AssertRating(row, ratingColumn, valueColumn, ratingLookup, errors, passIfBlank);
}
// Validates the indicator count against the four category rating columns,
// for both the 1-year and 3-year windows.
public override void Test(Row row, Errors errors)
{
    foreach (var yr in new[] { "1YR", "3YR" })
    {
        var ratingColumns = new[]
        {
            yr + "_ACHIEVE_RATING",
            yr + "_GROWTH_RATING",
            yr + "_GRO_GAPS_RATING",
            yr + "_POST_SEC_RATING"
        };
        AssertRating(row, yr + "_INDICATOR_NCOUNT", ratingColumns, errors, GetIndicatorCount);
    }
}
/// <summary>
/// Verifies that adding an empty Errors collection to another leaves the
/// target empty.
/// </summary>
public void Add_Test1_Adding_Empty_Collection()
{
    Errors target = new Errors();
    Errors newErrors = new Errors();

    target.Add(newErrors);

    // Fixed: Assert.AreEqual takes (expected, actual) — the original passed
    // the actual count first.
    Assert.AreEqual(0, target.Count);
}
// Validates the test-participation did-not-meet count against the five
// per-subject participation rating columns, for 1-year and 3-year windows.
public override void Test(Row row, Errors errors)
{
    foreach (var yr in new[] { "1YR", "3YR" })
    {
        var ratingColumns = new[]
        {
            yr + "_PARTIC_RATING_READ",
            yr + "_PARTIC_RATING_MATH",
            yr + "_PARTIC_RATING_WRITE",
            yr + "_PARTIC_RATING_SCI",
            yr + "_PARTIC_RATING_ACT"
        };
        AssertRating(row, yr + "_TEST_PARTIC_DNM_COUNT", ratingColumns, errors);
    }
}
// Passes trivially when both columns are blank; otherwise validates the
// rating column against the percent value using level-specific cutoffs.
bool AssertRating(Row row, string percentColumn, string ratingColumn, Dictionary<string, double[]> cutoffs, Errors errors)
{
    // Short-circuit order preserved: the rating column is only probed when
    // the percent column is blank.
    if (!Defined(row, percentColumn, errors) && !Defined(row, ratingColumn, errors))
    {
        return true;
    }
    return AssertRating(row, ratingColumn, percentColumn, value => GetRating(row.Level, value, cutoffs), errors);
}
// Two blank cells are considered trivially equal; the base comparison is
// invoked only when at least one of the columns holds a value.
protected override bool AssertEqual(Row row, string column1, string column2, Errors errors)
{
    return (!Defined(row, column1, errors) && !Defined(row, column2, errors))
        || base.AssertEqual(row, column1, column2, errors);
}
// Applies the district rating checks only to district-level aggregate rows
// (non-school rows at level "A"); all other rows are skipped.
public override void Test(Row row, Errors errors)
{
    var isDistrictAggregate = row.Type != EDataType.School && row.Level == "A";
    if (!isDistrictAggregate)
    {
        return;
    }
    Columns.ForEach(pair => AssertRating(row, pair.Item1, pair.Item2, RatingDistrict, errors));
}
// A blank result cell is interpreted as zero before the subtraction check
// is delegated to the base implementation.
protected override bool AssertSubtract(Row row, string column1, string column2, string resultColumn, Errors errors)
{
    var hasResult = Defined(row, resultColumn, errors);
    if (!hasResult)
    {
        row[resultColumn] = "0";
    }
    return base.AssertSubtract(row, column1, column2, resultColumn, errors);
}
// No recorded result means there is nothing to verify — pass trivially;
// otherwise delegate the division check to the base implementation.
protected override bool AssertDivide(Row row, string resultColumn, string numeratorColumn, string denominatorColumn, Errors errors)
{
    return !Defined(row, resultColumn, errors)
        || base.AssertDivide(row, resultColumn, numeratorColumn, denominatorColumn, errors);
}
/// <summary>
/// Builds an XML language definition bound to the given symbol table and
/// error sink, with one option slot per <c>Options</c> value and empty
/// tag / attribute / processing-instruction registries.
/// </summary>
public XmlLangDefinition(Tab tab, Errors errors)
{
    this.tab = tab;
    this.errors = errors;

    var optionCount = Enum.GetValues(typeof(Options)).Length;
    useVector = new bool[optionCount];

    Tags = new Dictionary<string, TagInfo>();
    Attrs = new Dictionary<string, int>();
    PInstructions = new Dictionary<string, int>();
}
// When a school-frame (_SPF_) rating column is blank, falls back to its
// district-frame (_DPF_) counterpart before validating the rating.
protected override bool AssertRating(Row row, string ratingColumn, string valueColumn, Func<double, string> ratingLookup, Errors errors, bool passIfBlank = false)
{
    // Operand order preserved: Defined is probed first, as in the original.
    var useDistrictFallback = !Defined(row, ratingColumn, errors) && ratingColumn.Contains("_SPF_");
    var effectiveColumn = useDistrictFallback
        ? ratingColumn.Replace("_SPF_", "_DPF_")
        : ratingColumn;
    return base.AssertRating(row, effectiveColumn, valueColumn, ratingLookup, errors, passIfBlank);
}
/// <summary>
/// Passes trivially when the result column and every part column are blank;
/// otherwise delegates the sum check to the base implementation.
/// </summary>
public override bool AssertSum(Row row, string resultColumn, IEnumerable<string> partColumns, Errors errors)
{
    // Materialize once: partColumns is enumerated here (All) and again by
    // the base call — a lazily-produced sequence would otherwise be
    // re-computed or, worse, consumed between the two passes.
    var parts = partColumns as IList<string> ?? partColumns.ToList();

    if (!Defined(row, resultColumn, errors) && parts.All(c => !Defined(row, c, errors)))
    {
        return true;
    }
    return base.AssertSum(row, resultColumn, parts, errors);
}
// For each scoring window, checks that total points earned is the sum of
// the four category columns, then that the percent column equals
// earned / eligible. Two sequential loops preserve the original ordering
// (all sums first, then all divisions).
public override void Test(Row row, Errors errors)
{
    var windows = new[] { "1_3", "1YR", "3YR" };

    foreach (var w in windows)
    {
        AssertSum(
            row,
            w + "_TOTAL_PTS_EARN",
            new[]
            {
                w + "_ACHIEVE_PTS_EARN",
                w + "_GROWTH_PTS_EARN",
                w + "_GRO_GAPS_PTS_EARN",
                w + "_POST_SEC_PTS_EARN"
            },
            errors);
    }

    foreach (var w in windows)
    {
        AssertDivide(row, w + "_TOTAL_PCT_PTS_EARN", w + "_TOTAL_PTS_EARN", w + "_TOTAL_PTS_ELIG", errors);
    }
}
// Each top-level category percent column must equal its detail total
// column; checks run per category, 1-year then 3-year, matching the
// original order.
public override void Test(Row row, Errors errors)
{
    var categoryPairs = new[]
    {
        new[] { "ACHIEVE", "ACH" },
        new[] { "GROWTH", "GRO" },
        new[] { "GRO_GAPS", "GG" },
        new[] { "POST_SEC", "PS" }
    };

    foreach (var pair in categoryPairs)
    {
        foreach (var yr in new[] { "1YR", "3YR" })
        {
            AssertEqual(row, yr + "_" + pair[0] + "_PCT_PTS_EARN", yr + "_" + pair[1] + "_PCT_PTS_EARN_TTL", errors);
        }
    }
}
// Runs the configured tests against the row and returns the accumulated
// errors. "All" runs the full suite; any other name selects only the
// tests whose pretty name matches.
public Errors Run(Row row, string testName)
{
    var errors = new Errors();
    var selected = testName == "All"
        ? Tests
        : Tests.Where(t => t.GetPrettyName() == testName);
    selected.ForEach(t => t.Test(row, errors));
    return errors;
}
/// <summary>
/// Creates and logs the error context information for the occurred error.
/// </summary>
/// <param name="error">Error code</param>
/// <param name="method">Method where the error occurred</param>
/// <param name="additionalInfo">Additional information for the error</param>
/// <param name="exception">Inner exception</param>
/// <param name="descriptionParameters">Description parameters</param>
/// <param name="additionalDescriptionParameters">Additional description parameters</param>
/// <param name="shortName">Short name of the business error occurred for</param>
/// <param name="arguments">Arguments to log of the method where the error occurred</param>
/// <returns>Error context information</returns>
public static ErrorContextInformation CreateAndLogError(Errors error, string method, string additionalInfo = null, Exception exception = null, object[] descriptionParameters = null, object[] additionalDescriptionParameters = null, string shortName = "", params object[] arguments)
{
    // Additional-information resolution:
    //   - no explicit additionalInfo          -> the error's own additional description
    //   - additionalInfo without extra params -> used verbatim
    //   - additionalInfo with extra params    -> treated as a format string
    return new ErrorContextInformation(error.ToString(), error.GetDescription(descriptionParameters), method, additionalInformation: string.IsNullOrEmpty(additionalInfo) ? error.GetAdditionalDescription(additionalDescriptionParameters) : (additionalDescriptionParameters == null ? additionalInfo : string.Format(additionalInfo, additionalDescriptionParameters)), innerException: exception, shortName: shortName, arguments: arguments);
}
// Creates the SARIF output log (when an output path was requested) and
// attaches the resulting logger to the context's aggregating logger.
// IO exceptions during file creation are routed through the error handler,
// which then aborts the application run.
private void InitializeOutputFile(TOptions analyzeOptions, TContext context, HashSet <string> targets)
{
    string filePath = analyzeOptions.OutputFilePath;
    AggregatingLogger aggregatingLogger = (AggregatingLogger)context.Logger;

    if (!string.IsNullOrEmpty(filePath))
    {
        InvokeCatchingRelevantIOExceptions
        (
            () =>
            {
                LoggingOptions loggingOptions;
                loggingOptions = analyzeOptions.ConvertToLoggingOptions();

                OptionallyEmittedData dataToInsert = analyzeOptions.DataToInsert.ToFlags();

                // This code is required in order to support the obsoleted ComputeFileHashes argument
                // on the analyze command-line;
                if (analyzeOptions.ComputeFileHashes)
                {
                    dataToInsert |= OptionallyEmittedData.Hashes;
                }

                // Pick the logger matching the requested SARIF schema version.
                SarifLogger sarifLogger;
                if (analyzeOptions.SarifVersion == SarifVersion.Current)
                {
                    sarifLogger = new SarifLogger(
                        analyzeOptions.OutputFilePath,
                        loggingOptions,
                        dataToInsert,
                        tool: null,
                        run: null,
                        analysisTargets: targets,
                        invocationTokensToRedact: GenerateSensitiveTokensList(),
                        invocationPropertiesToLog: analyzeOptions.InvocationPropertiesToLog);
                }
                else
                {
                    sarifLogger = new SarifOneZeroZeroLogger(
                        analyzeOptions.OutputFilePath,
                        loggingOptions,
                        dataToInsert,
                        tool: null,
                        run: null,
                        analysisTargets: targets,
                        invocationTokensToRedact: GenerateSensitiveTokensList(),
                        invocationPropertiesToLog: analyzeOptions.InvocationPropertiesToLog);
                }
                sarifLogger.AnalysisStarted();
                aggregatingLogger.Loggers.Add(sarifLogger);
            },
            (ex) =>
            {
                Errors.LogExceptionCreatingLogFile(context, filePath, ex);
                ThrowExitApplicationException(context, ExitReason.ExceptionCreatingLogFile, ex);
            }
        );
    }
}
// Rejects the branch selection unless an item is selected AND the typed
// text matches one of the list entries; shows or clears the validation
// error accordingly.
private void Branches_Validating(object sender, System.ComponentModel.CancelEventArgs e)
{
    var noSelection = Branches.SelectedIndex == -1;
    var unknownName = !Branches.Items.Contains(Branches.Text);
    e.Cancel = noSelection || unknownName;
    Errors.SetError(Branches, e.Cancel ? _invalidBranchName.ToString() : "");
}
// Worker-thread entry point: saves CSV values (ADMIN or PBR) for every GTP
// component, then clears the response table and refreshes the current tab.
// 'type' carries the CONN_SETT_TYPE of the values being loaded.
private void threadCSVValues(object type)
{
    Errors errRes = Errors.NoError;
    Thread.CurrentThread.CurrentCulture = Thread.CurrentThread.CurrentUICulture = ProgramBase.ss_MainCultureInfo; //new System.Globalization.CultureInfo(@"en-US")
    // Determine the type of values being loaded
    CONN_SETT_TYPE typeValues = (CONN_SETT_TYPE)type;
    int indxEv = -1
        , prevIndxTECComponents = indxTECComponents;
    string strPBRNumber = string.Empty; // ...PBR only
    if (typeValues == CONN_SETT_TYPE.PBR)
    {// PBR only
        // Mirror of the operations performed when the
        // 'threadPPBRCSVValues' thread completes
        // Allow writing of PBR values
        if (m_markSavedValues.IsMarked((int)INDEX_MARK_PPBRVALUES.PBR_ENABLED) == true)
        {
            m_markSavedValues.Marked((int)INDEX_MARK_PPBRVALUES.PBR_SAVED);
        }
        else
        {
            ;
        }
        // Forbid writing of Admin values
        if (m_markSavedValues.IsMarked((int)INDEX_MARK_PPBRVALUES.ADMIN_ENABLED) == true)
        {
            m_markSavedValues.UnMarked((int)INDEX_MARK_PPBRVALUES.ADMIN_SAVED);
        }
        else
        {
            ;
        }
        strPBRNumber = getNamePBRNumber((int)GetPropertiesOfNameFilePPBRCSVValues()[1] - 1);
    }
    else
    {
        ;
    }
    // Clear all flags signalling reasons to stop event processing
    for (HHandler.INDEX_WAITHANDLE_REASON i = HHandler.INDEX_WAITHANDLE_REASON.ERROR; i < (HHandler.INDEX_WAITHANDLE_REASON.ERROR + 1); i++)
    {
        ((ManualResetEvent)m_waitHandleState[(int)i]).Reset();
    }
    foreach (TECComponent comp in allTECComponents)
    {
        if (comp.IsGTP == true) // Is a GTP
        {
            indxEv = WaitHandle.WaitAny(m_waitHandleState);
            if (indxEv == 0)
            {
                switch (typeValues)
                {
                    case CONN_SETT_TYPE.ADMIN:
                        errRes = saveCSVValues(allTECComponents.IndexOf(comp), typeValues);
                        break;
                    case CONN_SETT_TYPE.PBR:
                        errRes = saveCSVValues(allTECComponents.IndexOf(comp), strPBRNumber);
                        break;
                    default:
                        break;
                }
                //if (! (errRes == Errors.NoError))
                //    ; // Error ???
                //else
                //    ;
            }
            else
            {
                // Error ???
                //break; //completeHandleStates();
                ;
            }
        }
        else
        {
            ;
        }
    }
    // Clear the table obtained from the CSV file
    m_tableValuesResponse.Clear();
    m_tableValuesResponse = null;
    if (typeValues == CONN_SETT_TYPE.PBR)
    {// PBR only
        // Mirror of the operations in 'ImpPPBRCSVValuesRequest'
        // Forbid writing of PBR values
        //   , the prohibition is applied automatically
        // Allow writing of Admin values
        if (m_markSavedValues.IsMarked((int)INDEX_MARK_PPBRVALUES.ADMIN_ENABLED) == true)
        {
            m_markSavedValues.Marked((int)INDEX_MARK_PPBRVALUES.ADMIN_SAVED);
        }
        else
        {
            ;
        }
    }
    else
    {
        ;
    }
    // Refresh the values on the tab
    GetRDGValues(/*m_typeFields,*/ prevIndxTECComponents);
}
/// <summary>
/// Marks the underlying JavaScript object as non-extensible, converting
/// any engine error code into a thrown exception.
/// </summary>
public void PreventExtensions()
{
    var errorCode = api_.JsPreventExtension(handle_);
    Errors.ThrowIfIs(errorCode);
}
// Validates a fueling DTO through a result pipeline: the DTO must be
// non-null, its fuel amount positive, and its fuel price positive; the
// first failing rule short-circuits with the corresponding error.
public static Result <FuelingEditDto> ValidateFuelingDto(FuelingEditDto fueling)
{
    return fueling
        .ToResult(() => Errors.Error("Fueling cannot be null"))
        .Bind(dto => dto.FuelAmount > 0 ? Ok(dto) : Error <FuelingEditDto>(Errors.Error("Fuel amount must be > 0")))
        .Bind(dto => dto.FuelPrice > 0 ? Ok(dto) : Error <FuelingEditDto>(Errors.Error("Fuel price must be > 0")));
}
/// <summary>
/// Saves the theme's sub-models: imports template assets, creates default
/// templates for a new theme, persists the active-theme configuration, and
/// injects discovered CSS/JS assets into the _Layout template.
/// </summary>
public override RepositoryResponse <bool> SaveSubModels(MixTheme parent, MixCmsContext _context = null, IDbContextTransaction _transaction = null)
{
    RepositoryResponse <bool> result = new RepositoryResponse <bool>() { IsSucceed = true };

    // import templates + assets
    if (TemplateAsset.Content != null || TemplateAsset.FileStream != null)
    {
        result = ImportTheme(_context, _transaction);
    }

    // Create default template if create new without import template assets
    if (result.IsSucceed && Id == 0 && TemplateAsset.Content == null)
    {
        string defaultFolder = CommonHelper.GetFullPath(new string[]
        {
            MixConstants.Folder.TemplatesFolder,
            Name == "Default" ? "Default" : MixService.GetConfig <string>(MixConstants.ConfigurationKeyword.DefaultTemplateFolder)
        });
        bool copyResult = FileRepository.Instance.CopyDirectory(defaultFolder, TemplateFolder);
        var files = copyResult ? FileRepository.Instance.GetFilesWithContent(TemplateFolder) : new List <FileViewModel>();
        //TODO: Create default asset
        foreach (var file in files)
        {
            MixTemplates.InitViewModel template = new MixTemplates.InitViewModel(
                new MixTemplate()
                {
                    FileFolder = file.FileFolder,
                    FileName = file.Filename,
                    Content = file.Content,
                    Extension = file.Extension,
                    CreatedDateTime = DateTime.UtcNow,
                    LastModified = DateTime.UtcNow,
                    ThemeId = Model.Id,
                    ThemeName = Model.Name,
                    FolderType = file.FolderName,
                    ModifiedBy = CreatedBy
                }, _context, _transaction);
            var saveResult = template.SaveModel(true, _context, _transaction);
            result.IsSucceed = result.IsSucceed && saveResult.IsSucceed;
            if (!saveResult.IsSucceed)
            {
                result.Exception = saveResult.Exception;
                result.Errors.AddRange(saveResult.Errors);
                break;
            }
        }
    }

    // Actived Theme: persist theme name and id configuration entries.
    if (result.IsSucceed && IsActived)
    {
        SystemConfigurationViewModel config = (SystemConfigurationViewModel.Repository.GetSingleModel(
            c => c.Keyword == MixConstants.ConfigurationKeyword.ThemeName && c.Specificulture == Specificulture
            , _context, _transaction)).Data;
        if (config == null)
        {
            config = new SystemConfigurationViewModel(new MixConfiguration()
            {
                Keyword = MixConstants.ConfigurationKeyword.ThemeName,
                Specificulture = Specificulture,
                Category = "Site",
                DataType = (int)DataType.Text,
                Description = "Cms Theme",
                Value = Name
            }, _context, _transaction);
        }
        else
        {
            config.Value = Name;
        }
        var saveConfigResult = config.SaveModel(false, _context, _transaction);
        if (!saveConfigResult.IsSucceed)
        {
            Errors.AddRange(saveConfigResult.Errors);
        }
        else
        {
            //MixCmsService.Instance.RefreshConfigurations(_context, _transaction);
        }
        result.IsSucceed = result.IsSucceed && saveConfigResult.IsSucceed;

        SystemConfigurationViewModel configId = (SystemConfigurationViewModel.Repository.GetSingleModel(
            c => c.Keyword == MixConstants.ConfigurationKeyword.ThemeId && c.Specificulture == Specificulture,
            _context, _transaction)).Data;
        if (configId == null)
        {
            configId = new SystemConfigurationViewModel(new MixConfiguration()
            {
                Keyword = MixConstants.ConfigurationKeyword.ThemeId,
                Specificulture = Specificulture,
                Category = "Site",
                DataType = (int)DataType.Text,
                Description = "Cms Theme Id",
                Value = Model.Id.ToString()
            }, _context, _transaction);
        }
        else
        {
            configId.Value = Model.Id.ToString();
        }
        var saveResult = configId.SaveModel(false, _context, _transaction);
        if (!saveResult.IsSucceed)
        {
            Errors.AddRange(saveResult.Errors);
        }
        else
        {
            //MixCmsService.Instance.RefreshConfigurations(_context, _transaction);
        }
        result.IsSucceed = result.IsSucceed && saveResult.IsSucceed;
    }

    // BUG FIX: the original condition was
    //   result.IsSucceed && TemplateAsset.Content != null || TemplateAsset.FileStream != null
    // '&&' binds tighter than '||', so asset injection ran even after an
    // earlier failure whenever FileStream was set. The guard used at the top
    // of this method shows the intended grouping.
    if (result.IsSucceed && (TemplateAsset.Content != null || TemplateAsset.FileStream != null))
    {
        var files = FileRepository.Instance.GetWebFiles(AssetFolder);
        // Collect stylesheet and script tags for every imported web asset.
        StringBuilder strStyles = new StringBuilder();
        foreach (var css in files.Where(f => f.Extension == ".css"))
        {
            strStyles.Append($" <link href='{css.FileFolder}/{css.Filename}{css.Extension}' rel='stylesheet'/>");
        }
        StringBuilder strScripts = new StringBuilder();
        foreach (var js in files.Where(f => f.Extension == ".js"))
        {
            strScripts.Append($" <script src='{js.FileFolder}/{js.Filename}{js.Extension}'></script>");
        }
        var layout = MixTemplates.InitViewModel.Repository.GetSingleModel(
            t => t.FileName == "_Layout" && t.ThemeId == Model.Id
            , _context, _transaction);
        layout.Data.Content = layout.Data.Content.Replace("<!--[STYLES]-->"
            , string.Format(@"{0}"
            , strStyles));
        layout.Data.Content = layout.Data.Content.Replace("<!--[SCRIPTS]-->"
            , string.Format(@"{0}"
            , strScripts));
        layout.Data.SaveModel(true, _context, _transaction);
    }
    return (result);
}
/// <summary>
/// Reads the response stream's top-level attributes into <c>MetaData</c>,
/// stopping when the "results" array is reached (so rows can be streamed)
/// or when the response object ends.
/// </summary>
/// <param name="cancellationToken">Token observed between attribute reads.</param>
internal async Task ReadResponseAttributes(CancellationToken cancellationToken)
{
    if (_reader == null)
    {
        // Should not be possible
        throw new InvalidOperationException("_reader is null");
    }

    if (MetaData == null)
    {
        MetaData = new QueryMetaData();
    }

    _hasReadToResult = false;

    while (true)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var path = await _reader!.ReadToNextAttributeAsync(cancellationToken).ConfigureAwait(false);
        if (path == null)
        {
            // Reached the end
            break;
        }

        switch (path)
        {
            case "requestID" when _reader.ValueType == typeof(string):
                MetaData.RequestId = _reader.Value?.ToString();
                break;
            case "status" when _reader.ValueType == typeof(string):
                if (Enum.TryParse(_reader.Value?.ToString(), true, out QueryStatus status))
                {
                    MetaData.Status = status;
                    Success = status == QueryStatus.Success;
                }
                break;
            case "clientContextID" when _reader.ValueType == typeof(string):
                MetaData.ClientContextId = _reader.Value?.ToString();
                break;
            case "signature":
                MetaData.Signature = (await _reader.ReadTokenAsync(cancellationToken).ConfigureAwait(false))
                    .ToDynamic();
                break;
            case "prepared" when _reader.ValueType == typeof(string):
                // Fixed: removed stray empty statement (";;") left here.
                PreparedPlanName = _reader.Value?.ToString();
                break;
            case "profile":
                MetaData.Profile = (await _reader.ReadTokenAsync(cancellationToken).ConfigureAwait(false))
                    .ToDynamic();
                break;
            case "metrics":
                MetaData.Metrics = (await _reader.ReadObjectAsync <MetricsData>(cancellationToken).ConfigureAwait(false))
                    .ToMetrics();
                break;
            case "results":
                // We've reached the result rows, return now
                _hasReadToResult = true;
                return;
            case "warnings":
                await foreach (var warning in _reader.ReadObjectsAsync <QueryWarning>(cancellationToken)
                    .ConfigureAwait(false))
                {
                    MetaData.Warnings.Add(warning);
                }
                break;
            case "errors":
                await foreach (var error in _reader.ReadObjectsAsync <Error>(cancellationToken)
                    .ConfigureAwait(false))
                {
                    Errors.Add(error);
                }
                break;
        }
    }

    // We've reached the end of the object, mark that entire read is complete
    _hasFinishedReading = true;
}
/// <summary>
/// Adds an initialization error to the Errors list.
/// </summary>
/// <param name="message">The error message to be added</param>
/// <param name="inner">The inner exception being wrapped</param>
private void AddInitializationError(string message, Exception inner = null)
{
    const string prefix = "During the algorithm initialization, the following exception has occurred: ";
    var wrapped = new AlgorithmSetupException(prefix + message, inner);
    Errors.Add(wrapped);
}
// Bulk-copies every entry of dbSource into this store (overwrite by key).
// Returns Busy when the store is not idle, OK on success (or a self-copy
// no-op), and Error when the copy or the OnCopiedFrom callback throws;
// exceptions are recorded in Errors and the status is reset to Idle.
// NOTE(review): 'replace' is not referenced in this body — confirm intent.
public virtual DataStoreResult CopyFrom(IDictionary <TKey, TVal> dbSource, bool replace)
{
    try
    {
        // Reset the error list up front.
        lock (SyncRoot) { Errors.Free(true); }
        // NOTE(review): Status is read/written outside SyncRoot — confirm
        // it is safely published for concurrent callers.
        if (Status != DataStoreStatus.Idle)
        {
            return(DataStoreResult.Busy);
        }
        if (this == dbSource)
        {
            return(DataStoreResult.OK);
        }
        Status = DataStoreStatus.Copying;
        lock (SyncRoot)
        {
            foreach (var kvp in dbSource)
            {
                this[kvp.Key] = kvp.Value;
            }
        }
        try
        {
            // Post-copy hook, still under the lock.
            lock (SyncRoot) { OnCopiedFrom(dbSource); }
        }
        catch (Exception e1)
        {
            lock (SyncRoot) { Errors.Add(e1); }
            Status = DataStoreStatus.Idle;
            return(DataStoreResult.Error);
        }
        Status = DataStoreStatus.Idle;
        return(DataStoreResult.OK);
    }
    catch (Exception e2)
    {
        lock (SyncRoot) { Errors.Add(e2); }
        Status = DataStoreStatus.Idle;
        return(DataStoreResult.Error);
    }
}
// Builds a detailed error report (message, request/response URLs, status
// code, headers, response body, stack trace), appends it to Errors, logs
// it, and returns the response body text when one could be read.
private string ProcessError(Exception e, string requestURL)
{
    string responseText = null;

    if (e != null)
    {
        StringBuilder sb = new StringBuilder();

        sb.AppendLine("Error message:");
        sb.AppendLine(e.Message);

        if (!string.IsNullOrEmpty(requestURL))
        {
            sb.AppendLine();
            sb.AppendLine("Request URL:");
            sb.AppendLine(requestURL);
        }

        if (e is WebException webException)
        {
            try
            {
                // Dispose the response even though it is only read here.
                using (HttpWebResponse webResponse = (HttpWebResponse)webException.Response)
                {
                    ResponseInfo responseInfo = ProcessWebResponse(webResponse);

                    if (responseInfo != null)
                    {
                        responseText = responseInfo.ResponseText;

                        sb.AppendLine();
                        sb.AppendLine("Status code:");
                        sb.AppendLine($"({(int)responseInfo.StatusCode}) {responseInfo.StatusDescription}");

                        // Only show the response URL when it differs from the
                        // request URL (e.g. after a redirect).
                        if (!string.IsNullOrEmpty(requestURL) && !requestURL.Equals(responseInfo.ResponseURL))
                        {
                            sb.AppendLine();
                            sb.AppendLine("Response URL:");
                            sb.AppendLine(responseInfo.ResponseURL);
                        }

                        if (responseInfo.Headers != null)
                        {
                            sb.AppendLine();
                            sb.AppendLine("Headers:");
                            sb.AppendLine(responseInfo.Headers.ToString().TrimEnd());
                        }

                        sb.AppendLine();
                        sb.AppendLine("Response text:");
                        sb.AppendLine(responseInfo.ResponseText);
                    }
                }
            }
            catch (Exception nested)
            {
                // Best effort: report generation itself must never throw.
                DebugHelper.WriteException(nested, "ProcessError() WebException handler");
            }
        }

        sb.AppendLine();
        sb.AppendLine("Stack trace:");
        sb.Append(e.StackTrace);

        string errorText = sb.ToString();

        // Lazily create the error list on the first failure.
        if (Errors == null)
        {
            Errors = new List <string>();
        }

        Errors.Add(errorText);
        DebugHelper.WriteLine("Error:\r\n" + errorText);
    }

    return(responseText);
}
// Partial validation hook: throws ArgumentNullException when the supplied
// comparer is null.
static partial void CheckArguments(IEqualityComparer<T> comparer)
{
    Errors.ThrowArgumentNull(comparer, nameof(comparer));
}
// Partial validation hook: throws ArgumentNullException for a null comparer
// and ArgumentOutOfRangeException for a negative maxCapacity (null check
// runs first, matching the sibling overload).
static partial void CheckArguments(IEqualityComparer<T> comparer, int maxCapacity)
{
    Errors.ThrowArgumentNull(comparer, nameof(comparer));
    Errors.ThrowArgumentOutOfRangeIf(maxCapacity < 0, nameof(maxCapacity));
}
// Transitively parses every file included by the given module (a fixed-point
// loop: parsing a file may reveal further includes). Files in excludeFiles
// are pre-seeded so they are never re-parsed. Returns null on success or an
// error message string on failure.
// NOTE(review): 'module' is cast to LiteralModuleDecl unconditionally —
// callers presumably guarantee this; confirm.
public static string ParseIncludes(ModuleDecl module, BuiltIns builtIns, IList <string> excludeFiles, Errors errs)
{
    SortedSet <Include> includes = new SortedSet <Include>(new IncludeComparer());
    DependencyMap dmap = new DependencyMap();
    foreach (string fileName in excludeFiles)
    {
        includes.Add(new Include(null, null, fileName));
    }
    dmap.AddIncludes(includes);
    bool newlyIncluded;
    do
    {
        newlyIncluded = false;

        List <Include> newFilesToInclude = new List <Include>();
        dmap.AddIncludes(((LiteralModuleDecl)module).ModuleDef.Includes);
        foreach (Include include in ((LiteralModuleDecl)module).ModuleDef.Includes)
        {
            // SortedSet.Add returns false for duplicates, so each file is
            // parsed at most once.
            bool isNew = includes.Add(include);
            if (isNew)
            {
                newlyIncluded = true;
                newFilesToInclude.Add(include);
            }
        }

        foreach (Include include in newFilesToInclude)
        {
            DafnyFile file;
            try
            {
                file = new DafnyFile(include.includedFilename);
            }
            catch (IllegalDafnyFile)
            {
                return(String.Format("Include of file \"{0}\" failed.", include.includedFilename));
            }
            string ret = ParseFile(file, include, module, builtIns, errs, false);
            if (ret != null)
            {
                return(ret);
            }
        }
    } while (newlyIncluded);

    if (DafnyOptions.O.PrintIncludesMode != DafnyOptions.IncludesModes.None)
    {
        dmap.PrintMap();
    }

    return(null); // Success
}
/// <summary>
/// Binds the compiled expression: substitutes each placeholder token with
/// its bound data-list value, building a recordset result when any bound
/// value is itself a recordset.
/// </summary>
/// <returns>
/// The bound recordset entry, a single entry for the trivial one-token
/// case, or null when there is nothing to bind.
/// </returns>
public IBinaryDataListEntry BindCompiledExpression()
{
    // very short circuit if no items ;)
    if (_internalKeyMap.Keys.Count == 0)
    {
        CompiledExpression = null;
        return(null);
    }

    // short circuit the long eval for mix mode data ;)
    // (a lone placeholder token — e.g. "{0}"-style — is 3 characters long)
    if (_internalMap.Keys.Count <= 1 && FetchEvaluationIterationCount(Expression) == 1 && CompiledExpression.Length == 3)
    {
        return(_internalKeyMap.Values.FirstOrDefault());
    }

    var replaceValue = string.Empty;

    // Right now we assume there are not ;)
    foreach (var idx in _internalMap.Keys)
    {
        var token = BuildSubToken(idx);
        var otherKey = _internalMap[idx];
        IBinaryDataListEntry value;
        if (_internalKeyMap.TryGetValue(otherKey, out value))
        {
            if (value != null)
            {
                if (!value.IsRecordset)
                {
                    // Scalar case: splice the scalar value into the compiled
                    // expression (and into every existing result row).
                    var scalar = value.FetchScalar();
                    if (scalar != null)
                    {
                        if (_result == null)
                        {
                            try
                            {
                                var toReplace = scalar.TheValue;
                                CompiledExpression = CompiledExpression.Replace(token, toReplace);
                            }
                            catch (NullValueInVariableException)
                            {
                                CompiledExpression = CompiledExpression.Replace(token, null);
                            }
                        }
                        else
                        {
                            var itr = _result.FetchRecordsetIndexes();
                            string replaceVal;
                            try
                            {
                                replaceVal = scalar.TheValue;
                            }
                            catch (NullValueInVariableException)
                            {
                                replaceVal = null;
                            }
                            while (itr.HasMore())
                            {
                                var val = itr.FetchNextIndex();
                                // Fetch the next value from result ;)
                                try
                                {
                                    string error;
                                    string template = _result.TryFetchRecordsetColumnAtIndex(GlobalConstants.EvaluationRsField, val, out error).TheValue;
                                    Errors.AddError(error);
                                    template = template.Replace(token, replaceVal);
                                    _result.TryPutRecordItemAtIndex(new BinaryDataListItem(template, _ns, GlobalConstants.EvaluationRsField, val), val, out error);
                                    Errors.AddError(error);
                                }
                                catch (NullValueInVariableException)
                                {
                                    //Do nothing got null
                                }
                            }
                            CompiledExpression = CompiledExpression.Replace(token, replaceVal);
                        }
                    }
                }
                else
                {
                    string error;
                    // build up the complex expression result - this means debug will be out of sync of complex expressions ;)
                    if (_result == null)
                    {
                        // Lazily create the result recordset, pre-filled with
                        // the compiled expression for each row of the largest
                        // bound recordset (at least one row).
                        IList <Dev2Column> cols = new List <Dev2Column> { new Dev2Column(GlobalConstants.EvaluationRsField, enDev2ColumnArgumentDirection.Both) };
                        _result = Dev2BinaryDataListFactory.CreateEntry(_ns, string.Empty, cols, BinaryDataList.UID);
                        var max = _internalKeyMap.Values.OrderByDescending(c => c.ItemCollectionSize()).FirstOrDefault();
                        if (max != null)
                        {
                            var itrToVal = max.ItemCollectionSize();
                            if (itrToVal == 0)
                            {
                                itrToVal = 1;
                            }
                            for (int i = 0; i < itrToVal; i++)
                            {
                                int idxT = (i + 1);
                                _result.TryPutRecordItemAtIndex(new BinaryDataListItem(CompiledExpression, _ns, GlobalConstants.EvaluationRsField, idxT), idxT, out error);
                                Errors.AddError(error);
                            }
                        }
                        if (IsDebug)
                        {
                            // attach audit object for debug ;)
                            _result.ComplexExpressionAuditor = new ComplexExpressionAuditor();
                        }
                    }
                    var idxItr = value.FetchRecordsetIndexes();
                    int expIdx = 1;
                    // we need to treat this as a scalar ;)
                    if (idxItr.Count == 1)
                    {
                        int curVal = idxItr.FetchNextIndex();
                        int amt = _result.ItemCollectionSize();
                        // ensure we always iterate once ;)
                        if (amt == 0)
                        {
                            amt = 1;
                        }
                        idxItr = new LoopedIndexIterator(curVal, amt);
                    }
                    // else iterate across the recordset cuz it had a star ;)
                    while (idxItr.HasMore())
                    {
                        try
                        {
                            var val = idxItr.FetchNextIndex();
                            // Fetch the next value from result ;)
                            var template = _result.TryFetchRecordsetColumnAtIndex(GlobalConstants.EvaluationRsField, expIdx, out error).TheValue;
                            Errors.AddError(error);
                            var binaryValue = value.TryFetchIndexedRecordsetUpsertPayload(val, out error);
                            Errors.AddError(error);
                            // now bind this result row with the correct data list data ;)
                            if (binaryValue != null)
                            {
                                var preTemplate = template;
                                var toReplace = binaryValue.TheValue;
                                template = template.Replace(token, toReplace);
                                // In cases when [[[{0}]] is the result, we need to inject the template value
                                // In cases when [[rec({0}).a]] we need to replace the template pattern ;)
                                var tmp = CompiledExpression.Replace("[", "").Replace("]", "").Replace(token, string.Empty);
                                // ReSharper disable ConvertIfStatementToConditionalTernaryExpression
                                if (tmp.Length > 0)
                                // ReSharper restore ConvertIfStatementToConditionalTernaryExpression
                                {
                                    // we have a [[rec({0}.a]] case ;)
                                    replaceValue = toReplace;
                                }
                                else
                                {
                                    replaceValue = template;
                                }
                                _result.TryPutRecordItemAtIndex(new BinaryDataListItem(template, _ns, GlobalConstants.EvaluationRsField, expIdx), expIdx, out error);
                                Errors.AddError(error);
                                if (IsDebug)
                                {
                                    var displayValue = DataListUtil.AddBracketsToValueIfNotExist(binaryValue.DisplayValue);
                                    _result.ComplexExpressionAuditor.AddAuditStep(preTemplate, displayValue, token, idx, template, Expression);
                                    _result.ComplexExpressionAuditor.SetMaxIndex(expIdx);
                                }
                            }
                            expIdx++; // inc result index ;)
                        }
                        catch (NullValueInVariableException)
                        {
                            //Do Nothing got null value
                        }
                    }
                    replaceValue = DataListUtil.RemoveLanguageBrackets(replaceValue);
                    CompiledExpression = CompiledExpression.Replace(token, replaceValue);
                }
            }
            else
            {
                // No value bound to this token: strip it from the expression.
                CompiledExpression = CompiledExpression.Replace(token, string.Empty);
            }
        }
    }
    return(_result);
}
/// <summary>
/// Compiles <see cref="Expression"/> by substituting each intellisense token found in it
/// with a numbered sub-token placeholder (first-pass binding), then re-applies the same
/// substitution to every row already present in the internal result recordset
/// (multi-phase binding).
/// </summary>
/// <param name="tokens">The intellisense tokens to bind into the expression.</param>
public void CompileExpression(IEnumerable<IIntellisenseResult> tokens)
{
    // first pass binding ;)
    // ReSharper disable PossibleMultipleEnumeration
    if (!string.IsNullOrEmpty(Expression) && tokens != null && tokens.Any())
    // ReSharper restore PossibleMultipleEnumeration
    {
        int subVar = 0; // running index used to generate unique sub-token names
        var compiledExpression = Expression;
        HashSet<string> usedTokens = new HashSet<string>(); // we may have dups, avoid them ;)
        // ReSharper disable PossibleMultipleEnumeration
        foreach (var token in tokens)
        // ReSharper restore PossibleMultipleEnumeration
        {
            var subToken = token.Option.DisplayValue;
            if (!usedTokens.Contains(subToken))
            {
                // Only substitute tokens that actually occur in the expression text.
                if (compiledExpression.IndexOf(subToken, StringComparison.Ordinal) >= 0)
                {
                    usedTokens.Add(subToken);
                    compiledExpression = compiledExpression.Replace(subToken, BuildSubToken(subVar));
                    _internalMap[subVar] = token; // remember which token each sub-token stands for
                    subVar++;
                }
                else
                {
                    Errors.AddError("Could not locate token { " + subToken + " }");
                }
            }
        }
        if (subVar == 0)
        {
            // Nothing was substituted - treat the expression as not compilable.
            CompiledExpression = null;
            Errors.ClearErrors(); // clear for relevant errors ;)
        }
        else
        {
            // save the compiled expression ;)
            CompiledExpression = compiledExpression;
        }
    }
    else
    {
        Errors.AddError("Invalid Data : Either empty expression or empty token list. Please check that your variable list does not contain errors.");
        CompiledExpression = null;
    }
    // multi-phase binding: repeat the substitution on each row of the
    // intermediate result recordset ;)
    if (tokens != null && _result != null && CompiledExpression != null)
    {
        int subVar = 0;
        var idxItr = _result.FetchRecordsetIndexes(); // iterate each result row so far ;)
        while (idxItr.HasMore())
        {
            try
            {
                var val = idxItr.FetchNextIndex(); // Fetch the next row index from the result ;)
                string error;
                var compiledExpression = _result.TryFetchRecordsetColumnAtIndex(GlobalConstants.EvaluationRsField, val, out error).TheValue;
                Errors.AddError(error);
                HashSet<string> usedTokens = new HashSet<string>();
                // now process each token ;)
                // ReSharper disable PossibleMultipleEnumeration
                foreach (var token in tokens)
                // ReSharper restore PossibleMultipleEnumeration
                {
                    var subToken = token.Option.DisplayValue;
                    // we may have dups, avoid them ;)
                    if (!usedTokens.Contains(subToken))
                    {
                        if (compiledExpression.IndexOf(subToken, StringComparison.Ordinal) >= 0)
                        {
                            usedTokens.Add(subToken);
                            compiledExpression = compiledExpression.Replace(subToken, BuildSubToken(subVar));
                            _internalMap[subVar] = token;
                            subVar++;
                        }
                        else
                        {
                            Errors.AddError("Could not locate token { " + subToken + " }");
                        }
                    }
                }
                // Write the re-bound expression back into the same row it came from.
                _result.TryPutRecordItemAtIndex(new BinaryDataListItem(compiledExpression, _ns, GlobalConstants.EvaluationRsField, val), val, out error);
                Errors.AddError(error);
            }
            catch (NullValueInVariableException)
            {
                // Do nothing - a null value in a variable is deliberately ignored here.
            }
        }
    }
}
/// <summary>
/// Executes a set of objects as Insert or Update commands based on their property settings,
/// within a transaction. These objects can be POCOs, Anonymous types, NameValueCollections,
/// or Expandos. Objects with a PK property (whatever PrimaryKeyField is set to) will be
/// treated as UPDATEs.
/// </summary>
public virtual int Save(params object[] things)
{
    // Validate everything up front - reject the whole batch on the first invalid item.
    foreach (var thing in things)
    {
        if (IsValid(thing))
        {
            continue;
        }
        throw new InvalidOperationException("Can't save this item: " + String.Join("; ", Errors.ToArray()));
    }
    // All items passed validation; build and run the commands as one batch.
    return Execute(BuildCommands(things));
}
/// <summary>
/// Resolves namespaced queries gracefully.
///
/// Example
///
/// @context['hash'] = {"name" => 'tobi'}
/// assert_equal 'tobi', @context['hash.name']
/// assert_equal 'tobi', @context['hash["name"]']
/// </summary>
/// <param name="markup">The variable expression to resolve, e.g. <c>hash.name</c>.</param>
/// <param name="notifyNotFound">When true, a VariableNotFoundException is recorded in Errors for unresolved lookups.</param>
/// <returns>The resolved value, or null when any part of the path cannot be resolved.</returns>
private object Variable(string markup, bool notifyNotFound)
{
    // Split the markup into dotted/bracketed path segments.
    List<string> parts = R.Scan(markup, VariableParserRegex);
    // first item in list, if any
    string firstPart = parts.TryGetAtIndex(0);
    Match firstPartSquareBracketedMatch = SquareBracketedRegex.Match(firstPart);
    if (firstPartSquareBracketedMatch.Success)
    {
        // A bracketed first segment (e.g. ["name"]) is itself an expression - resolve it first.
        firstPart = Resolve(firstPartSquareBracketedMatch.Groups[1].Value).ToString();
    }
    object @object;
    if ((@object = FindVariable(firstPart)) == null)
    {
        if (notifyNotFound)
        {
            Errors.Add(new VariableNotFoundException(string.Format(Liquid.ResourceManager.GetString("VariableNotFoundException"), markup)));
        }
        return(null);
    }
    // try to resolve the rest of the parts (starting from the second item in the list)
    for (int i = 1; i < parts.Count; ++i)
    {
        var forEachPart = parts[i];
        Match partSquareBracketedMatch = SquareBracketedRegex.Match(forEachPart);
        bool partResolved = partSquareBracketedMatch.Success;
        object part = forEachPart;
        if (partResolved)
        {
            // Bracketed segments are expressions; evaluate them to get the actual key.
            part = Resolve(partSquareBracketedMatch.Groups[1].Value);
        }
        // If object is a KeyValuePair, we treat it a bit differently - we might be rendering
        // an included template.
        if (@object is KeyValuePair<string, object> && ((KeyValuePair<string, object>)@object).Key == (string)part)
        {
            object res = ((KeyValuePair<string, object>)@object).Value;
            @object = Liquidize(res);
        }
        // If object is a hash- or array-like object we look for the
        // presence of the key and if it's available we return it
        else if (IsHashOrArrayLikeObject(@object, part))
        {
            // If it's a proc we will replace the entry with the proc's result.
            object res = LookupAndEvaluate(@object, part);
            @object = Liquidize(res);
        }
        // Some special cases. If the part wasn't in square brackets and
        // no key with the same name was found we interpret following calls
        // as commands and call them on the current object
        else if (!partResolved && (@object is IEnumerable) && ((part as string) == "size" || (part as string) == "first" || (part as string) == "last"))
        {
            var castCollection = ((IEnumerable)@object).Cast<object>();
            if ((part as string) == "size")
            {
                @object = castCollection.Count();
            }
            else if ((part as string) == "first")
            {
                @object = castCollection.FirstOrDefault();
            }
            else if ((part as string) == "last")
            {
                @object = castCollection.LastOrDefault();
            }
        }
        // No key was present with the desired value and it wasn't one of the directly supported
        // keywords either. The only thing we got left is to return nil
        else
        {
            if (notifyNotFound)
            {
                Errors.Add(new VariableNotFoundException(string.Format(Liquid.ResourceManager.GetString("VariableNotFoundException"), markup)));
            }
            return(null);
        }
        // If we are dealing with a drop here we have to give it access to the current context.
        if (@object is IContextAware contextAwareObject)
        {
            contextAwareObject.Context = this;
        }
    }
    return(@object);
}
/// <summary>
/// Records an algorithm initialization failure on the Errors list.
/// </summary>
/// <param name="message">The error message to be recorded.</param>
private void AddInitializationError(string message) =>
    Errors.Add("Failed to initialize algorithm: " + message);
/// <summary>
/// Initializes a new instance of the <see cref="DataLakeServiceClient"/>
/// class.
/// </summary>
/// <param name="serviceUri">
/// A <see cref="Uri"/> referencing the Data Lake service.
/// </param>
/// <param name="credential">
/// The token credential used to sign requests.
/// </param>
/// <param name="options">
/// Optional client options that define the transport pipeline
/// policies for authentication, retries, etc., that are applied to
/// every request.
/// </param>
public DataLakeServiceClient(Uri serviceUri, TokenCredential credential, DataLakeClientOptions options)
    : this(serviceUri, credential.AsPolicy(), options, null)
{
    // Token (OAuth) credentials must only travel over a secure channel;
    // presumably this throws for non-HTTPS URIs - confirm against Errors.VerifyHttpsTokenAuth.
    Errors.VerifyHttpsTokenAuth(serviceUri);
}
// Records a single error message on the shared Errors collection.
void LogError(string error) => Errors.Add(error);
/// <summary>
/// Gets the validation errors recorded for the given property, or an empty
/// sequence when the property has no errors.
/// </summary>
/// <param name="propertyName">Name of the property whose errors are requested.</param>
public IEnumerable GetErrors(string propertyName) =>
    // TryGetValue performs a single dictionary lookup instead of the
    // ContainsKey + indexer double lookup of the previous implementation.
    Errors.TryGetValue(propertyName, out var propertyErrors)
        ? (IEnumerable)propertyErrors
        : Enumerable.Empty<string>();
/// <summary>
/// Creates a parser that reads its tokens from the supplied scanner.
/// </summary>
/// <param name="scanner">Source of tokens for this parser.</param>
public Parser(Scanner scanner)
{
    errors = new Errors();
    this.scanner = scanner;
}
/// <summary>
/// Copies hourly values parsed from the CSV response table into the current RDG value
/// array for the TEC component (GTP) at the given index, then persists the changes.
/// </summary>
/// <param name="indx">Index of the TEC component (GTP) whose values are being saved.</param>
/// <param name="pbr_number">Either the PBR number (a string) or a CONN_SETT_TYPE discriminator (e.g. ADMIN).</param>
/// <returns>Result of SaveChanges on success; Errors.ParseError on failure; Errors.NoSet when the type is unrecognized.</returns>
private Errors saveCSVValues(int indx, object pbr_number)
{
    Errors errRes = Errors.NoSet;
    RDGStruct[] curRDGValues = new RDGStruct[m_curRDGValues.Length];
    int hour = -1;
    double val = -1F; // NOTE(review): never used in this method
    string name_future = string.Empty;
    CONN_SETT_TYPE typeValues = CONN_SETT_TYPE.COUNT_CONN_SETT_TYPE;
    // The runtime type of pbr_number selects which kind of values is being saved.
    if (pbr_number is string)
    {
        typeValues = CONN_SETT_TYPE.PBR;
    }
    else if (pbr_number is CONN_SETT_TYPE)
    {
        typeValues = (CONN_SETT_TYPE)pbr_number; // ADMIN
    }
    else
    {
        ;
    }
    if ((typeValues == CONN_SETT_TYPE.PBR) || (typeValues == CONN_SETT_TYPE.ADMIN))
    {
        List<DataRow> rowsTECComponent = null;
        // Fetch the rows to save for this GTP (translated: "Get the values for saving").
        name_future = allTECComponents[indx].name_future;
        rowsTECComponent = new List<DataRow>(m_tableValuesResponse.Select(@"GTP_ID='" + name_future + @"'"));
        // Variant #2 - test implementation (kept for reference):
        //foreach (DataRow r in m_tableValuesResponse.Rows)
        //    if (name_future.Equals(r["GTP_ID"]) == true)
        //        rowsTECComponent.Add(r);
        //    else
        //        ;
        // Check that records exist for this GTP.
        if (rowsTECComponent.Count > 0)
        {
            // Padding of missing rows by copying the last one is currently disabled.
            if (rowsTECComponent.Count < 24)
            {
                // This fragment used to run during loading:
                //while (rowsTECComponent.Count < 24) {
                //    rowsTECComponent.Add(rowsTECComponent[rowsTECComponent.Count - 1]);
                //    if (m_tableValuesResponse.Columns.Contains(@"SESSION_INTERVAL") == true)
                //        rowsTECComponent[rowsTECComponent.Count - 1][@"SESSION_INTERVAL"] = rowsTECComponent.Count - 1;
                //    else
                //        ;
                //}
            }
            else
            {
                // NOTE(review): this logs an error whenever Count >= 24, which includes the
                // presumably-normal 24-row day - confirm the intended threshold.
                Logging.Logg().Error(string.Format(@"AdminTS_KomDisp::saveCSVValues () - для ГТП(ИД={0}) количество записей={1} ...", name_future, rowsTECComponent.Count), Logging.INDEX_MESSAGE.NOT_SET);
            }
            foreach (DataRow r in rowsTECComponent)
            {
                // SESSION_INTERVAL is the hour slot this row belongs to.
                hour = int.Parse(r[@"SESSION_INTERVAL"].ToString());
                try
                {
                    switch (typeValues)
                    {
                        case CONN_SETT_TYPE.PBR:
                            HMath.doubleParse(r[@"TotalBR"].ToString(), out curRDGValues[hour].pbr);
                            HMath.doubleParse(r[@"PminBR"].ToString(), out curRDGValues[hour].pmin);
                            HMath.doubleParse(r[@"PmaxBR"].ToString(), out curRDGValues[hour].pmax);
                            curRDGValues[hour].pbr_number = pbr_number as string;
                            ////Debugging
                            //Console.WriteLine(@"GTP_ID=" + allTECComponents[indx].name_future + @"(" + hour + @") TotalBR=" + curRDGValues[hour].pbr + @"; PBRNumber=" + curRDGValues[hour].pbr_number);
                            break;
                        case CONN_SETT_TYPE.ADMIN:
                            HMath.doubleParse(r[@"REC"].ToString(), out curRDGValues[hour].recomendation);
                            curRDGValues[hour].deviationPercent = Int16.Parse(r[@"IS_PER"].ToString()) == 1;
                            HMath.doubleParse(r[@"DIVIAT"].ToString(), out curRDGValues[hour].deviation);
                            curRDGValues[hour].fc = Int16.Parse(r[@"FC"].ToString()) == 1;
                            break;
                        default:
                            break;
                    }
                }
                catch (Exception e)
                {
                    Logging.Logg().Exception(e, @"AdminTS_KomDisp::saveCSVValues () - GTP_ID=" + allTECComponents[indx].name_future + @"(" + hour + @")", Logging.INDEX_MESSAGE.NOT_SET);
                    errRes = Errors.ParseError;
                }
                // Stop processing rows as soon as a parse error is seen.
                if (errRes == Errors.ParseError)
                {
                    break;
                }
                else
                {
                    ;
                }
            }
            if (errRes == Errors.NoSet)
            {
                // Clear the current data array (translated from Russian comment).
                ClearValues();
                // Copy the parsed values into the "current" array (translated from Russian comment).
                curRDGValues.CopyTo(m_curRDGValues, 0);
                indxTECComponents = indx;
                errRes = SaveChanges(); // original notes: Errors.NoSet / Errors.NoError
            }
            else
            {
                ; //errRes = Errors.ParseError;
            }
        }
        else
        {
            errRes = Errors.ParseError;
        }
        if (errRes == Errors.ParseError)
        {
            // Skip this GTP record and allow moving on to the next one:
            // pretend all pending events have been handled (translated from Russian comment).
            completeHandleStates(INDEX_WAITHANDLE_REASON.SUCCESS);
        }
        else
        {
            ;
        }
    }
    else
    {
        ;
    }
    return(errRes);
}
/// <summary>
/// Persists the parent and child portal-page navigation collections for the given page.
/// Activated items are saved (new items get a freshly allocated id); deactivated items
/// are removed. The child collection is skipped entirely when a parent save failed.
/// </summary>
/// <param name="parent">The portal page whose navigation links are being saved.</param>
/// <param name="_context">Database context shared with the caller's unit of work.</param>
/// <param name="_transaction">The ambient transaction all saves participate in.</param>
/// <returns>A response whose IsSucceed reflects whether every save/remove succeeded.</returns>
public override async Task<RepositoryResponse<bool>> SaveSubModelsAsync(MixPortalPage parent, MixCmsContext _context, IDbContextTransaction _transaction)
{
    var result = new RepositoryResponse<bool> { IsSucceed = true };
    if (result.IsSucceed)
    {
        foreach (var item in ParentNavs)
        {
            item.PageId = parent.Id;
            var startId = Lib.ViewModels.MixPortalPagePortalPages.UpdateViewModel.Repository.Max(m => m.Id, _context, _transaction).Data + 1;
            if (item.IsActived)
            {
                if (item.Id == 0)
                {
                    item.Id = startId;
                    startId += 1;
                }
                var saveResult = await item.SaveModelAsync(false, _context, _transaction);
                // BUGFIX: only flip the flag on failure, so a later successful item can
                // no longer overwrite (and hide) an earlier failure within this loop.
                if (!saveResult.IsSucceed)
                {
                    result.IsSucceed = false;
                    result.Exception = saveResult.Exception;
                    Errors.AddRange(saveResult.Errors);
                }
            }
            else
            {
                var saveResult = await item.RemoveModelAsync(true, _context, _transaction);
                if (!saveResult.IsSucceed)
                {
                    result.IsSucceed = false;
                    result.Exception = saveResult.Exception;
                    Errors.AddRange(saveResult.Errors);
                }
            }
        }
    }
    if (result.IsSucceed)
    {
        foreach (var item in ChildNavs)
        {
            item.ParentId = parent.Id;
            var startId = Lib.ViewModels.MixPortalPagePortalPages.UpdateViewModel.Repository.Max(m => m.Id, _context, _transaction).Data + 1;
            if (item.IsActived)
            {
                if (item.Id == 0)
                {
                    item.Id = startId;
                    startId += 1;
                }
                var saveResult = await item.SaveModelAsync(false, _context, _transaction);
                if (!saveResult.IsSucceed)
                {
                    result.IsSucceed = false;
                    result.Exception = saveResult.Exception;
                    Errors.AddRange(saveResult.Errors);
                }
            }
            else
            {
                var saveResult = await item.RemoveModelAsync(true, _context, _transaction);
                if (!saveResult.IsSucceed)
                {
                    result.IsSucceed = false;
                    result.Exception = saveResult.Exception;
                    Errors.AddRange(saveResult.Errors);
                }
            }
        }
    }
    return result;
}
/// <summary>
/// Runs validation for the item and reports whether no errors were recorded.
/// </summary>
/// <param name="item">The object to validate.</param>
/// <returns>True when validation produced no errors; otherwise false.</returns>
public bool IsValid(dynamic item)
{
    // Start from a clean slate so previous validation runs don't leak in.
    Errors.Clear();
    Validate(item);
    var hasNoErrors = Errors.Count == 0;
    return hasNoErrors;
}
/// <summary>
/// Setup the algorithm cash, dates and portfolio as desired.
/// </summary>
/// <param name="algorithm">Existing algorithm instance</param>
/// <param name="brokerage">New brokerage instance (unused by this backtest-only handler)</param>
/// <param name="baseJob">Backtesting job</param>
/// <param name="resultHandler">The configured result handler (unused by this backtest-only handler)</param>
/// <param name="transactionHandler">The configuration transaction handler (unused by this backtest-only handler)</param>
/// <param name="realTimeHandler">The configured real time handler</param>
/// <returns>Boolean true on successfully setting up the console.</returns>
public bool Setup(IAlgorithm algorithm, IBrokerage brokerage, AlgorithmNodePacket baseJob, IResultHandler resultHandler, ITransactionHandler transactionHandler, IRealTimeHandler realTimeHandler)
{
    var initializeComplete = false;
    try
    {
        //Set common variables for console programs:
        if (baseJob.Type == PacketType.BacktestNode)
        {
            var backtestJob = baseJob as BacktestNodePacket;
            // Console backtests place no cap on order count.
            algorithm.SetMaximumOrders(int.MaxValue);
            // set our parameters
            algorithm.SetParameters(baseJob.Parameters);
            algorithm.SetLiveMode(false);
            algorithm.SetAvailableDataTypes(GetConfiguredDataFeeds());
            //Set the source impl for the event scheduling
            algorithm.Schedule.SetEventSchedule(realTimeHandler);
            // set the option chain provider
            algorithm.SetOptionChainProvider(new CachingOptionChainProvider(new BacktestingOptionChainProvider()));
            // set the future chain provider
            algorithm.SetFutureChainProvider(new CachingFutureChainProvider(new BacktestingFutureChainProvider()));
            //Setup Base Algorithm:
            algorithm.Initialize();
            //Finalize Initialization
            algorithm.PostInitialize();
            //Set the time frontier of the algorithm
            algorithm.SetDateTime(algorithm.StartDate.ConvertToUtc(algorithm.TimeZone));
            //Construct the backtest job packet:
            backtestJob.PeriodStart = algorithm.StartDate;
            backtestJob.PeriodFinish = algorithm.EndDate;
            backtestJob.BacktestId = algorithm.GetType().Name;
            backtestJob.Type = PacketType.BacktestNode;
            backtestJob.UserId = baseJob.UserId;
            backtestJob.Channel = baseJob.Channel;
            //Backtest Specific Parameters:
            StartingDate = backtestJob.PeriodStart;
            StartingPortfolioValue = algorithm.Portfolio.Cash;
        }
        else
        {
            throw new Exception("The ConsoleSetupHandler is for backtests only. Use the BrokerageSetupHandler.");
        }
    }
    catch (Exception err)
    {
        // Any failure (including the non-backtest guard above) is recorded
        // rather than propagated; the caller inspects Errors.
        Log.Error(err);
        Errors.Add(new AlgorithmSetupException("During the algorithm initialization, the following exception has occurred: ", err));
    }
    // Setup is only considered complete when nothing was added to Errors.
    if (Errors.Count == 0)
    {
        initializeComplete = true;
    }
    return initializeComplete;
}
/// <summary>
/// Not used by this proxy implementation; invoking it indicates an internal error.
/// </summary>
protected override Action<IInvocation> CreateHandler<T>(IInvocation initialInvocation, MethodDef methodDef)
{
    throw Errors.InternalError("This method shouldn't be called.");
}
/// <summary>
/// Not used by this proxy implementation; invoking it indicates an internal error.
/// </summary>
protected override MethodDef? CreateMethodDef(MethodInfo methodInfo, IInvocation initialInvocation)
{
    throw Errors.InternalError("This method shouldn't be called.");
}
// Clears any validation error shown for the Branches control as soon as its text changes.
private void Branches_TextChanged(object sender, EventArgs e) =>
    Errors.SetError(Branches, "");
/// <summary>
/// Parses a single Dafny source file into the given module, reporting problems
/// through the supplied error object.
/// </summary>
/// <returns>Null on success; otherwise a human-readable failure message.</returns>
private static string ParseFile(DafnyFile dafnyFile, Include include, ModuleDecl module, BuiltIns builtIns, Errors errs, bool verifyThisFile = true, bool compileThisFile = true)
{
    // Honor the option that shortens paths to bare file names in messages.
    var displayName = DafnyOptions.Clo.UseBaseNameForFileName
        ? Path.GetFileName(dafnyFile.FilePath)
        : dafnyFile.FilePath;
    try
    {
        var parseErrorCount = Dafny.Parser.Parse(dafnyFile.SourceFileName, include, module, builtIns, errs, verifyThisFile, compileThisFile);
        if (parseErrorCount == 0)
        {
            return null; // Success
        }
        return string.Format("{0} parse errors detected in {1}", parseErrorCount, displayName);
    }
    catch (IOException e)
    {
        // Attribute the failure to the including file's token when available.
        Bpl.IToken errorToken = include == null ? Bpl.Token.NoToken : include.tok;
        errs.SemErr(errorToken, "Unable to open included file");
        return string.Format("Error opening file \"{0}\": {1}", displayName, e.Message);
    }
}