internal void GetBestTagSequence(IList<FeatureVector> words, out double[] prob_c_w, out int[] c_w)
{
    c_w = new int[words.Count];
    prob_c_w = new double[words.Count];
    double[][] probs_v_c = new double[words.Count][];

    for (int v_i = 0; v_i < words.Count; v_i++)
    {
        double[] details;
        int sysClass = classifier.Classify(words[v_i], out details);
        probs_v_c[v_i] = details;

        // Build the previous-tag features from the classes already assigned.
        string prevT_name = v_i - 1 < 0 ? "BOS" : classToClassId[c_w[v_i - 1]];
        string prevT_featureName = string.Format("prevT={0}", prevT_name);
        string prevTwoTags_name = v_i - 2 < 0 ? "BOS" : classToClassId[c_w[v_i - 2]];
        string prevTwoTags_featureName = string.Format("prevTwoTags={0}+{1}", prevTwoTags_name, prevT_name);
        var prevT_f = featureToFeatureId[prevT_featureName];
        var prevTwoTags_f = featureToFeatureId[prevTwoTags_featureName];

        // Combine the classifier's estimate with the tag-history features in log space.
        for (int c_i = 0; c_i < classToClassId.Count; c_i++)
        {
            double logProb = Math.Log(probs_v_c[v_i][c_i]);
            logProb += classifier.CalculateLogProb_c_f(c_i, prevT_f);
            logProb += classifier.CalculateLogProb_c_f(c_i, prevTwoTags_f);
            probs_v_c[v_i][c_i] = Math.Exp(logProb);
        }

        NormalizationHelper.Normalize(probs_v_c[v_i]);

        // Greedily pick the most probable class for this position.
        int bestClass = StatisticsHelper.ArgMax(probs_v_c[v_i]);
        c_w[v_i] = bestClass;
        prob_c_w[v_i] = probs_v_c[v_i][bestClass];
    }
}
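This method multiplies independent probability estimates by adding their logarithms, exponentiating, and renormalizing. The implementation of NormalizationHelper.Normalize is not shown; a minimal sketch of what such a routine presumably does (scale the values in place so they sum to 1; the body below is an assumption, not the actual helper):

using System;

static class NormalizationSketch
{
    // Hypothetical stand-in for NormalizationHelper.Normalize.
    public static void Normalize(double[] values)
    {
        double sum = 0;
        foreach (double v in values) sum += v;
        if (sum == 0) return; // nothing to scale
        for (int i = 0; i < values.Length; i++) values[i] /= sum;
    }
}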
public ActionResult Saaty([FromBody] Alternative[] array)
{
    array.ToList().ForEach(x => x.Mark = Math.Round(x.Mark));
    var saaty = NormalizationHelper.DefaultSaaty(array);
    return Ok(saaty);
}
public void training(string trainingExamplesPath)
{
    IVersatileDataSource source = new CSVDataSource(trainingExamplesPath, false, CSVFormat.DecimalPoint);
    var data = new VersatileMLDataSet(source);

    // Define the twelve continuous input columns; each needs a unique name.
    data.DefineSourceColumn("num1", 0, ColumnType.Continuous);
    data.DefineSourceColumn("num2", 1, ColumnType.Continuous);
    data.DefineSourceColumn("num3", 2, ColumnType.Continuous);
    data.DefineSourceColumn("num4", 3, ColumnType.Continuous);
    data.DefineSourceColumn("num5", 4, ColumnType.Continuous);
    data.DefineSourceColumn("num6", 5, ColumnType.Continuous);
    data.DefineSourceColumn("num7", 6, ColumnType.Continuous);
    data.DefineSourceColumn("num8", 7, ColumnType.Continuous);
    data.DefineSourceColumn("num9", 8, ColumnType.Continuous);
    data.DefineSourceColumn("num10", 9, ColumnType.Continuous);
    data.DefineSourceColumn("num11", 10, ColumnType.Continuous);
    data.DefineSourceColumn("num12", 11, ColumnType.Continuous);

    // Define the column that we are trying to predict.
    ColumnDefinition outputColumn = data.DefineSourceColumn("kind", 12, ColumnType.Nominal);

    // Analyze the data, then map the prediction column to the output and all others to the input.
    data.Analyze();
    data.DefineSingleOutputOthersInput(outputColumn);

    var model = new EncogModel(data);
    model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

    // Send any output to the console.
    model.Report = new ConsoleStatusReportable();

    // Now normalize the data. Encog will automatically determine the correct normalization
    // type based on the model you chose in the last step.
    data.Normalize();

    // Hold back 30% for validation; seed 1001 keeps the holdback reproducible.
    model.HoldBackValidation(0.3, true, 1001);

    // Choose whatever is the default training type for this model.
    model.SelectTrainingType(data);

    // Use a 5-fold cross-validated train. Return the best method found.
    bestMethod = (IMLRegression)model.Crossvalidate(5, true);

    //Console.WriteLine(@"Training error: " + model.CalculateError(bestMethod, model.TrainingDataset));
    //Console.WriteLine(@"Validation error: " + model.CalculateError(bestMethod, model.ValidationDataset));

    // Keep the normalization parameters so new data can be normalized the same way later.
    helper = data.NormHelper;
    // Console.WriteLine(helper.ToString());

    // Display the final model.
    //Console.WriteLine(@"Final model: " + bestMethod);

    source.Close();
    saveNetwork("save.eg");
    savehelper("helper.hp");
    //EncogFramework.Instance.Shutdown();
}
public ActionResult KolSaaty([FromBody] Alternative[] array)
{
    // Map every raw mark onto the Saaty comparison scale before computing weights.
    for (int i = 0; i < array.Length; i++)
    {
        array[i].Mark = NormalizationHelper.MarkToSaatyRange(array[i].Mark);
    }

    var saaty = NormalizationHelper.KolSaaty(array);
    return Ok(saaty);
}
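MarkToSaatyRange is a project-specific helper whose implementation is not shown. Purely as an illustration, a linear rescaling of a 0-100 mark onto Saaty's 1-9 intensity scale could look like the following sketch (the input range and the mapping are assumptions):

using System;

static class SaatyScaleSketch
{
    // Hypothetical mapping of a mark in [0, 100] onto Saaty's 1-9 scale.
    public static double MarkToSaatyRange(double mark)
    {
        double clamped = Math.Max(0, Math.Min(100, mark)); // guard against out-of-range marks
        return 1 + clamped / 100 * 8;                      // 0 -> 1, 100 -> 9
    }
}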
/// <summary>
/// Handles an inferred equality theorem by communicating it with the normalization helper and scheduler, and also handling proof
/// construction if the proof data is provided.
/// </summary>
/// <param name="equality">The equality theorem to be handled.</param>
/// <param name="helper">The normalization helper that verifies and normalizes the theorem.</param>
/// <param name="scheduler">The scheduler of inference rules used for the new objects and theorems this equality might have brought.</param>
/// <param name="proofData">Either the data needed to mark the theorem's inference in case it's correct; or null, if we are not constructing proofs.</param>
/// <param name="isValid">Indicates whether the theorem has been found geometrically valid.</param>
/// <param name="anyChangeOfNormalVersion">Indicates whether this equality caused any change of the normal version of some object.</param>
private void HandleEquality(Theorem equality, NormalizationHelper helper, Scheduler scheduler, ProofData proofData,
                            out bool isValid, out bool anyChangeOfNormalVersion)
{
    // Mark the equality to the helper
    helper.MarkProvedEquality(equality, out var isNew, out isValid, out var result);

    // If it turns out not to be new or valid, we're done
    if (!isNew || !isValid)
    {
        // No removed objects
        anyChangeOfNormalVersion = false;

        // We're done
        return;
    }

    // If we should construct proofs, mark the inference to the proof builder
    proofData?.ProofBuilder.AddImplication(proofData.InferenceData, equality, proofData.Assumptions);

    #region Handling new normalized theorems

    // Go through all of them
    foreach (var (originalTheorem, equalities, normalizedTheorem) in result.NormalizedNewTheorems)
    {
        // If we should construct proofs, mark the normalized theorem to the proof builder
        proofData?.ProofBuilder.AddImplication(ReformulatedTheorem, normalizedTheorem, assumptions: equalities.Concat(originalTheorem).ToArray());

        // Let the scheduler know about the new normalized theorem
        scheduler.ScheduleAfterProving(normalizedTheorem);
    }

    #endregion

    // Invalidate dismissed theorems
    result.DismissedTheorems.ForEach(scheduler.InvalidateTheorem);

    // Invalidate removed objects
    result.RemovedObjects.ForEach(scheduler.InvalidateObject);

    // Handle changed objects
    result.ChangedObjects.ForEach(changedObject =>
    {
        // First we will invalidate them
        scheduler.InvalidateObject(changedObject);

        // And now schedule for them again as if they were new, because the results of the schedules might now be different
        scheduler.ScheduleAfterDiscoveringObject(changedObject);
    });

    // Handle entirely new objects
    result.NewObjects.ForEach(newObject => HandleNewObject(newObject, helper, scheduler, proofData?.ProofBuilder));

    // Set whether there has been any change of a normal version, which is indicated by removing
    // an object or changing its normal version
    anyChangeOfNormalVersion = result.RemovedObjects.Any() || result.ChangedObjects.Any();
}
public int loadHelper(string name)
{
    try
    {
        helper = ReadFromBinaryFile<NormalizationHelper>(name);
        return 1;
    }
    catch
    {
        return 0;
    }
}
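ReadFromBinaryFile is a project helper that is not shown here. A common shape for such a generic deserializer, given only as a sketch (BinaryFormatter is an assumption, and it is obsolete in modern .NET):

using System.IO;
using System.Runtime.Serialization.Formatters.Binary;

static class BinaryFileSketch
{
    // Hypothetical generic deserializer matching the ReadFromBinaryFile<T> call above.
    public static T ReadFromBinaryFile<T>(string filePath)
    {
        using (Stream stream = File.OpenRead(filePath))
        {
            var formatter = new BinaryFormatter();
            return (T)formatter.Deserialize(stream); // cast the deserialized graph to T
        }
    }
}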
public IntegralCriterionMethodResult FindDecision(Model model)
{
    var result = new IntegralCriterionMethodResult("Мультипликативный критерий", "Значение мультипликативного критерия");

    // Normalize the criteria and determine their types
    foreach (Criterion crit in model.Criteria.Values)
    {
        Dictionary<TId, double> normalizedCrit = NormalizationHelper.NormalizeCriterionValues(model.Experiments, crit);
        result.AddNormalizedCriterion(normalizedCrit, crit.Id);
    }

    // Compute the value of the multiplicative criterion for each experiment
    var multiplicativeCriterion = new Dictionary<TId, double>();
    foreach (Experiment exp in model.Experiments.Values)
    {
        if (exp.IsActive)
        {
            double multiplicativeCriterionValue = 1;
            foreach (Criterion crit in model.Criteria.Values)
            {
                double normalizedCriterionValue = result.GetNormalizedCriterion(crit.Id)[exp.Id];

                // Raise the normalized value to the criterion's weight and multiply it in
                normalizedCriterionValue = Math.Pow(normalizedCriterionValue, crit.Weight);
                multiplicativeCriterionValue *= normalizedCriterionValue;
            }

            multiplicativeCriterion.Add(exp.Id, multiplicativeCriterionValue);
        }
    }

    // Sort the results in ascending order
    List<SortableDouble> sortedValues = multiplicativeCriterion.Select<KeyValuePair<TId, double>, SortableDouble>(
        kvp => new SortableDouble() { Direction = SortDirection.Ascending, Id = kvp.Key, Value = kvp.Value }
    ).ToList();
    sortedValues.Sort();

    // Fill in the results
    foreach (SortableDouble sortedValue in sortedValues)
    {
        result.SortedPoints.Add(sortedValue.Id);
        result.AdditionalData.Add(sortedValue.Id, sortedValue.Value);
    }

    return result;
}
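NormalizationHelper.NormalizeCriterionValues is shared by all of these integral-criterion methods, but its implementation is not shown. A common way to bring criterion values of the active experiments onto a comparable scale is min-max normalization; the following sketch illustrates that idea only (the formula is an assumption, and the TId, Experiment, and Criterion types are taken from the surrounding code):

using System.Collections.Generic;
using System.Linq;

static class CriterionNormalizationSketch
{
    // Hypothetical min-max normalization of one criterion over the active experiments.
    public static Dictionary<TId, double> NormalizeCriterionValues(Dictionary<TId, Experiment> experiments, Criterion crit)
    {
        var active = experiments.Values.Where(e => e.IsActive).ToList();
        double min = active.Min(e => e.CriterionValues[crit.Id]);
        double max = active.Max(e => e.CriterionValues[crit.Id]);
        double range = max - min == 0 ? 1 : max - min; // constant criteria all map to 0

        return active.ToDictionary(e => e.Id, e => (e.CriterionValues[crit.Id] - min) / range);
    }
}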
/// <summary>
/// Handles an inferred theorem by communicating it with the normalization helper and scheduler, and also handling proof
/// construction if the proof data is provided.
/// </summary>
/// <param name="theorem">The theorem to be handled.</param>
/// <param name="helper">The normalization helper that verifies and normalizes the theorem.</param>
/// <param name="scheduler">The scheduler of inference rules used for the theorem if it is correct.</param>
/// <param name="proofData">Either the data needed to mark the theorem's inference in case it's correct; or null, if we are not constructing proofs.</param>
/// <param name="isValid">Indicates whether the theorem has been found geometrically valid.</param>
private void HandleNonequality(Theorem theorem, NormalizationHelper helper, Scheduler scheduler, ProofData proofData, out bool isValid)
{
    // Mark the theorem to the helper
    helper.MarkProvedNonequality(theorem, out var isNew, out isValid, out var normalizedTheorem, out var equalities);

    // If it turns out not to be new or valid, we're done
    if (!isNew || !isValid)
        return;

    #region Handle proof construction

    // Mark the original theorem
    proofData?.ProofBuilder.AddImplication(proofData.InferenceData, theorem, proofData.Assumptions);

    // If any normalization happened
    if (equalities.Any())
    {
        // Mark the normalized theorem too
        proofData?.ProofBuilder.AddImplication(ReformulatedTheorem, normalizedTheorem, assumptions: equalities.Concat(theorem).ToArray());
    }

    #endregion

    // Let the scheduler know
    scheduler.ScheduleAfterProving(normalizedTheorem);

    #region Inference from symmetry

    // If the theorem uses an object that is not part of the original configuration,
    // then we will not do any symmetry inference. If it could prove a new theorem,
    // then this new theorem would be inferable conventionally
    if (!normalizedTheorem.GetInnerConfigurationObjects().All(helper.Configuration.AllObjects.Contains))
        return;

    // Otherwise try to infer new theorems using this one
    foreach (var inferredTheorem in normalizedTheorem.InferTheoremsFromSymmetry(helper.Configuration))
    {
        // If it can be done, make sure the proof builder knows it
        proofData?.ProofBuilder.AddImplication(InferableFromSymmetry, inferredTheorem, assumptions: new[] { normalizedTheorem });

        // Call this method to handle the new inferred theorem, without caring whether it is valid (it should be, logically)
        HandleNonequality(inferredTheorem, helper, scheduler, proofData, isValid: out var _);
    }

    #endregion
}
private static Dictionary<TId, Experiment> PrepareCriteria(Model model)
{
    var experiments = new Dictionary<TId, Experiment>();

    // Copy the active experiments.
    foreach (Experiment exp in model.Experiments.Values)
    {
        if (!exp.IsActive)
            continue;

        experiments.Add(exp.Id, new Experiment(exp.Id, exp.Number));
    }

    // Copy the criterion values of every active experiment, then normalize them per criterion.
    foreach (Criterion crit in model.Criteria.Values)
    {
        foreach (Experiment exp in model.Experiments.Values)
        {
            if (!exp.IsActive)
                continue;

            //if (crit.Type == CriterionType.Maximizing)
            //{
            //    double oldCriterionValue = exp.CriterionValues[crit.Id];
            //    if (oldCriterionValue == 0.0)
            //    {
            //        oldCriterionValue = 0.00001;
            //    }
            //    experiments[exp.Id].CriterionValues.Add(crit.Id, 1.0/oldCriterionValue);
            //}
            //else
            //{
            experiments[exp.Id].CriterionValues.Add(crit.Id, exp.CriterionValues[crit.Id]);
            //}
        }

        Dictionary<TId, double> normalizedCrit = NormalizationHelper.NormalizeCriterionValues(experiments, crit);
        //Normalization.NormalizeCriterion(experiments, crit);

        foreach (Experiment exp in experiments.Values)
        {
            exp.CriterionValues[crit.Id] = normalizedCrit[exp.Id];
        }
    }

    return experiments;
}
/// <summary>
/// Handles a new object by communicating it with the scheduler, finding its trivial theorems, and passing those to be handled
/// by the normalization helper and scheduler.
/// </summary>
/// <param name="newObject">The new object to be handled.</param>
/// <param name="helper">The normalization helper used later for the trivial theorems of the object.</param>
/// <param name="scheduler">The scheduler of inference rules used for the new object and later for its trivial theorems.</param>
/// <param name="builder">Either the builder to build theorem proofs; or null, if we are not constructing proofs.</param>
private void HandleNewObject(ConstructedConfigurationObject newObject, NormalizationHelper helper, Scheduler scheduler, TheoremProofBuilder builder)
{
    // Schedule after finding this object
    scheduler.ScheduleAfterDiscoveringObject(newObject);

    // Look for its trivial theorems
    foreach (var trivialTheorem in _producer.InferTrivialTheoremsFromObject(newObject))
    {
        // Prepare the proof data in case we need to construct proofs
        var proofData = builder != null ? new ProofData(builder, new TheoremInferenceData(TrivialTheorem), assumptions: Array.Empty<Theorem>()) : null;

        // Let the other method handle this theorem, while ignoring whether it is geometrically valid (it just should be)
        HandleNonequality(trivialTheorem, helper, scheduler, proofData, out var _);
    }
}
public void Softmax_Create_Test()
{
    var output = NormalizationHelper.CreateSoftmaxNormalization(givenInput);

    // The helper must return a new collection, not the input itself.
    Assert.IsTrue(!object.ReferenceEquals(output, givenInput));

    double sum = 0;
    for (int i = 0; i < givenInput.Count; i++)
    {
        double diff = Math.Abs(expectedOutput[i] - output[i]);
        Assert.IsTrue(diff < 1e-6);
        sum += output[i];
    }

    // A softmax distribution must sum to 1.
    Assert.IsTrue(StatisticsHelper.IsApproximatelyEqual(sum, 1D));
}
public void Softmax_InPlace_Test()
{
    // Copy the input so the in-place variant does not mutate the shared test data.
    var values = givenInput.ToArray();
    Assert.IsTrue(!object.ReferenceEquals(values, givenInput));

    NormalizationHelper.Softmax(values);

    double sum = 0;
    for (int i = 0; i < values.Length; i++)
    {
        double diff = Math.Abs(expectedOutput[i] - values[i]);
        Assert.IsTrue(diff < 1e-6);
        sum += values[i];
    }

    // A softmax distribution must sum to 1.
    Assert.IsTrue(StatisticsHelper.IsApproximatelyEqual(sum, 1D));
}
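The two tests above exercise an out-of-place and an in-place softmax; the implementations themselves are not shown. A minimal in-place sketch of the standard, numerically stabilized softmax (subtracting the maximum before exponentiating), assuming the helper implements the textbook definition values[i] = exp(values[i]) / sum_j exp(values[j]):

using System;
using System.Linq;

static class SoftmaxSketch
{
    // Minimal in-place softmax; a sketch, not the actual NormalizationHelper code.
    public static void Softmax(double[] values)
    {
        double max = values.Max(); // subtract the max for numerical stability
        double sum = 0;
        for (int i = 0; i < values.Length; i++)
        {
            values[i] = Math.Exp(values[i] - max);
            sum += values[i];
        }
        for (int i = 0; i < values.Length; i++)
        {
            values[i] /= sum;
        }
    }
}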
public IntegralCriterionMethodResult FindDecision(Model model)
{
    IntegralCriterionMethodResult result = new IntegralCriterionMethodResult("Метод минимакса", "Максимальное из значений локальных критериев");

    // Normalize the criteria
    foreach (Criterion criterion in model.Criteria.Values)
    {
        Dictionary<TId, double> normalizedCrit = NormalizationHelper.NormalizeCriterionValues(model.Experiments, criterion);
        result.AddNormalizedCriterion(normalizedCrit, criterion.Id);
    }

    // Find the maximum weighted local criterion value for each active experiment
    List<SortableDouble> sortedMaxLocalCriterionValues = new List<SortableDouble>(model.Experiments.CountActiveExperiments());
    foreach (Experiment experiment in model.Experiments.Values)
    {
        if (experiment.IsActive)
        {
            var normalizedCriteriaForExp = new List<double>();
            foreach (Criterion crit in model.Criteria.Values)
            {
                double normalizedCriterionValue = result.GetNormalizedCriterion(crit.Id)[experiment.Id];
                normalizedCriterionValue *= crit.Weight;
                normalizedCriteriaForExp.Add(normalizedCriterionValue);
            }

            double maxLocalCriterion = normalizedCriteriaForExp.Max();
            sortedMaxLocalCriterionValues.Add(new SortableDouble() { Direction = SortDirection.Ascending, Id = experiment.Id, Value = maxLocalCriterion });
        }
    }

    sortedMaxLocalCriterionValues.Sort();

    // Fill in the results
    foreach (SortableDouble sortedMaxLocalCriterionValue in sortedMaxLocalCriterionValues)
    {
        result.SortedPoints.Add(sortedMaxLocalCriterionValue.Id);
        result.AdditionalData.Add(sortedMaxLocalCriterionValue.Id, sortedMaxLocalCriterionValue.Value);
    }

    return result;
}
public static Mat FindPoints(Mat mat, double threshold)
{
    mat = NormalizationHelper.Normalization(mat);
    var result = new Mat(mat.Width, mat.Height);

    // Keep only local maxima that exceed the threshold.
    for (var x = 0; x < mat.Width; x++)
    {
        for (var y = 0; y < mat.Height; y++)
        {
            if (mat.GetAt(x, y) <= threshold || HasBetterNeighbour(mat, x, y))
            {
                continue;
            }

            result.Set(x, y, mat.GetAt(x, y));
        }
    }

    return result;
}
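HasBetterNeighbour is not shown. For illustration only, a plausible 8-neighbourhood check (the boundary handling and strictness of the comparison are assumptions) could look like this:

// Hypothetical sketch: true if any of the 8 neighbours holds a strictly larger value.
private static bool HasBetterNeighbour(Mat mat, int x, int y)
{
    for (int dx = -1; dx <= 1; dx++)
    {
        for (int dy = -1; dy <= 1; dy++)
        {
            if (dx == 0 && dy == 0) continue; // skip the point itself

            int nx = x + dx, ny = y + dy;
            if (nx < 0 || ny < 0 || nx >= mat.Width || ny >= mat.Height) continue; // stay in bounds

            if (mat.GetAt(nx, ny) > mat.GetAt(x, y)) return true;
        }
    }
    return false;
}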
internal void GetBestTagSequence(IList<FeatureVector> vectors, out int[] sysClasses, out double[] distribution)
{
    // Since we are working with logarithmic numbers, we want the weight of the root node to be zero.
    var beam = new BeamSearch<IdValuePair<double>>(0D);

    for (int beamDepth = 0; beamDepth < vectors.Count; beamDepth++)
    {
        Debug.Assert(beam.Level[beamDepth].Count > 0);

        foreach (BeamNode<IdValuePair<double>> node in beam.Level[beamDepth])
        {
            // Score every class for the current vector given this node's tag history.
            double[] probs_v_c = new double[classToClassId.Count];
            for (int c_i = 0; c_i < classToClassId.Count; c_i++)
            {
                probs_v_c[c_i] = CalculateProbability_v_c(vectors[beamDepth], c_i, node, beamDepth);
            }

            NormalizationHelper.NormalizeLogs(probs_v_c, Math.E);

            // Prune: identify the N classes with the highest probability.
            IList<int> topNClasses = SearchHelper.GetMaxNItems(topN, probs_v_c);
            for (int topN_i = 0; topN_i < topNClasses.Count; topN_i++)
            {
                int c_i = topNClasses[topN_i];
                double prob = probs_v_c[c_i];
                node.AddNextNode(new IdValuePair<double>(c_i, prob), Math.Log(prob) + node.Weight);
            }
        }

        beam.Prune(topK, beam_size);
    }

    // Repackage the sequence we just received in such a way that the consuming code will find it most digestible.
    var results = beam.GetBestSequence();
    sysClasses = new int[results.Length];
    distribution = new double[results.Length];
    for (int i = 0; i < results.Length; i++)
    {
        sysClasses[i] = results[i].Id;
        distribution[i] = results[i].Value;
    }
}
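Unlike the greedy variant earlier in this listing, this version keeps several candidate tag histories alive per position. SearchHelper.GetMaxNItems is used to keep only the N most probable classes per node; its implementation is not shown, so the following top-N selection is a sketch with an assumed signature:

using System.Collections.Generic;
using System.Linq;

static class TopNSketch
{
    // Hypothetical top-N selection: indices of the n largest values, highest first.
    public static IList<int> GetMaxNItems(int n, double[] values)
    {
        return Enumerable.Range(0, values.Length)
                         .OrderByDescending(i => values[i])
                         .Take(n)
                         .ToList();
    }
}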
/// <summary>
/// Initializes a new instance of the <see cref="ObjectIntroductionHelper"/> class.
/// </summary>
/// <param name="introducer">The introducer of new objects to which deciding based on available objects is delegated.</param>
/// <param name="helper">The normalization helper that needs to know about all objects and therefore needs to be communicated with.</param>
public ObjectIntroductionHelper(IObjectIntroducer introducer, NormalizationHelper helper)
{
    _introducer = introducer ?? throw new ArgumentNullException(nameof(introducer));
    _helper = helper ?? throw new ArgumentNullException(nameof(helper));
}
/// <summary>
/// Proves given theorems that are true in the configuration drawn in a given picture.
/// </summary>
/// <param name="provenTheorems">The theorems that hold in the configuration without the last object.</param>
/// <param name="theoremsToProve">The theorems that say something about the last object.</param>
/// <param name="picture">The picture where the configuration in which the theorems hold is drawn.</param>
/// <param name="shouldWeConstructProofs">Indicates whether we should construct proofs. This will affect the type of returned result.</param>
/// <returns>
/// Either the output as for <see cref="ProveTheoremsAndConstructProofs(TheoremMap, TheoremMap, ContextualPicture)"/>,
/// if we are constructing proofs, or the output as for <see cref="ProveTheorems(TheoremMap, TheoremMap, ContextualPicture)"/> otherwise.
/// </returns>
private dynamic ProveTheorems(TheoremMap provenTheorems, TheoremMap theoremsToProve, ContextualPicture picture, bool shouldWeConstructProofs)
{
    // Pull the configuration for comfort
    var configuration = picture.Pictures.Configuration;

    // Find the trivial theorems based on whether we should do it only
    // for the last object of the configuration
    var trivialTheorems = _settings.FindTrivialTheoremsOnlyForLastObject
        // If yes, do so
        ? _producer.InferTrivialTheoremsFromObject(configuration.ConstructedObjects.Last())
        // Otherwise do it for all objects
        : configuration.ConstructedObjects.SelectMany(_producer.InferTrivialTheoremsFromObject)
            // And enumerate the results
            .ToList();

    #region Proof builder initialization

    // Prepare a proof builder in case we are supposed to construct proofs
    var proofBuilder = shouldWeConstructProofs ? new TheoremProofBuilder() : null;

    // Mark trivial theorems to the proof builder in case we are supposed to construct proofs
    trivialTheorems.ForEach(theorem => proofBuilder?.AddImplication(TrivialTheorem, theorem, assumptions: Array.Empty<Theorem>()));

    // Mark assumed theorems to the proof builder in case we are supposed to construct proofs
    provenTheorems.AllObjects.ForEach(theorem => proofBuilder?.AddImplication(AssumedProven, theorem, assumptions: Array.Empty<Theorem>()));

    #endregion

    #region Theorems definable simpler

    // Prepare the list of theorems definable simpler
    var theoremsDefinableSimpler = new List<Theorem>();

    // If we are supposed to assume they are proven...
    if (_settings.AssumeThatSimplifiableTheoremsAreTrue)
    {
        // Go through unproven theorems except for trivial ones
        foreach (var theoremToProve in theoremsToProve.AllObjects.Except(trivialTheorems))
        {
            // Find the redundant objects
            var redundantObjects = theoremToProve.GetUnnecessaryObjects(configuration);

            // If there are none, then it cannot be defined simpler
            if (redundantObjects.IsEmpty())
                continue;

            // Otherwise add it to the list
            theoremsDefinableSimpler.Add(theoremToProve);

            // And make sure the proof builder knows it
            proofBuilder?.AddImplication(new DefinableSimplerInferenceData(redundantObjects), theoremToProve, assumptions: Array.Empty<Theorem>());
        }
    }

    #endregion

    #region Theorems inferable from symmetry

    // Prepare the set of theorems inferred from symmetry
    var theoremsInferredFromSymmetry = new List<Theorem>();

    // Go through the theorems that are already inferred, i.e. assumed proven, trivial and definable simpler ones
    foreach (var provedTheorem in provenTheorems.AllObjects.Concat(trivialTheorems).Concat(theoremsDefinableSimpler))
    {
        // Try to infer new theorems using this one
        foreach (var inferredTheorem in provedTheorem.InferTheoremsFromSymmetry(configuration))
        {
            // Add it to the list
            theoremsInferredFromSymmetry.Add(inferredTheorem);

            // Make sure the proof builder knows it
            proofBuilder?.AddImplication(InferableFromSymmetry, inferredTheorem, assumptions: new[] { provedTheorem });
        }
    }

    #endregion

    #region Normalization helper initialization

    // Initially we are going to assume that the proved theorems are the passed ones
    var provedTheorems = provenTheorems.AllObjects
        // And trivial ones
        .Concat(trivialTheorems)
        // And ones with redundant objects
        .Concat(theoremsDefinableSimpler)
        // And ones inferred from symmetry
        .Concat(theoremsInferredFromSymmetry)
        // Distinct ones
        .Distinct();

    // The theorems to prove will be the new ones except for the proved ones
    var currentTheoremsToBeProven = theoremsToProve.AllObjects.Except(provedTheorems).ToArray();

    // Prepare the cloned pictures that will be used to numerically verify new theorems
    var clonedPictures = picture.Pictures.Clone();

    // Prepare a normalization helper with all this information
    var normalizationHelper = new NormalizationHelper(_verifier, clonedPictures, provedTheorems, currentTheoremsToBeProven);

    #endregion

    #region Scheduler initialization

    // Prepare a scheduler
    var scheduler = new Scheduler(_manager);

    // Do the initial scheduling
    scheduler.PerformInitialScheduling(currentTheoremsToBeProven, configuration);

    #endregion

    // Prepare the object introduction helper
    var objectIntroductionHelper = new ObjectIntroductionHelper(_introducer, normalizationHelper);

    #region Inference loop

    // Do until break
    while (true)
    {
        // Ask the scheduler for the next inference data to be used
        var data = scheduler.NextScheduledData();

        #region Object introduction

        // If there is no data, we will try to introduce objects
        if (data == null)
        {
            // Call the introduction helper
            var (removedObjects, introducedObject) = objectIntroductionHelper.IntroduceObject(
                // With the theorems to prove obtained by excluding the proved ones
                theoremsToProve: theoremsToProve.AllObjects.Except(normalizationHelper.ProvedTheorems));

            // Invalidate removed objects
            removedObjects.ForEach(scheduler.InvalidateObject);

            // If there is something to be introduced
            if (introducedObject != null)
            {
                // Call the appropriate method to handle introduced objects
                HandleNewObject(introducedObject, normalizationHelper, scheduler, proofBuilder);

                // Ask the scheduler for the next inference data to be used
                data = scheduler.NextScheduledData();
            }
        }

        #endregion

        // If all theorems are proven or there is no data even after object introduction, we're done
        if (!normalizationHelper.AnythingLeftToProve || data == null)
        {
            // If we should construct proofs
            return shouldWeConstructProofs
                // Build them for the theorems to be proven
                ? (dynamic)proofBuilder.BuildProofs(theoremsToProve.AllObjects)
                // Otherwise just take the theorems to be proven that happen to be proven
                : theoremsToProve.AllObjects.Where(normalizationHelper.ProvedTheorems.Contains).ToReadOnlyHashSet();
        }

        #region Inference rule applier call

        // Try to apply the current scheduled data
        var applierResults = _applier.InferTheorems(new InferenceRuleApplierInput
        (
            // Pass the data provided by the scheduler
            inferenceRule: data.InferenceRule,
            premappedAssumption: data.PremappedAssumption,
            premappedConclusion: data.PremappedConclusion,
            premappedObject: data.PremappedObject,

            // Pass the methods that the normalization helper offers
            mappableTheoremsFactory: normalizationHelper.GetProvedTheoremOfType,
            mappableObjectsFactory: normalizationHelper.GetObjectsWithConstruction,
            equalObjectsFactory: normalizationHelper.GetEqualObjects,
            normalizationFunction: normalizationHelper.GetNormalVersionOfObjectOrNull
        ))
        // Enumerate results. This step is needed because the applier could iterate over the
        // collections of objects and theorems used by the normalization helper
        .ToArray();

        // Before handling results, prepare a variable that will indicate whether there has been any change of the
        // normal version of an object.
        var anyNormalVersionChange = false;

        // Handle every inferred theorem
        foreach (var (theorem, negativeAssumptions, possitiveAssumptions) in applierResults)
        {
            // If in some previous iteration there has been a change of the normal version of an object, then
            // it might happen that some other theorems inferred in this loop no longer contain only correct objects,
            // therefore we need to verify them. The reason why we don't have to worry about incorrect objects in other
            // cases is that the normalization helper keeps everything normalized and the inference rule applier provides
            // only normalized objects. However, if there is a change of normal versions and the applier has already been
            // called and the results enumerated, then we have to check it manually
            if (anyNormalVersionChange && normalizationHelper.DoesTheoremContainAnyIncorrectObject(theorem))
                continue;

            // We need to check negative assumptions. The inference should be accepted only if all of them are false
            if (negativeAssumptions.Any(negativeAssumption => _verifier.IsTrueInAllPictures(clonedPictures, negativeAssumption)))
                continue;

            // Prepare the proof data in case we need to construct proofs
            var proofData = shouldWeConstructProofs ? new ProofData(proofBuilder, new CustomInferenceData(data.InferenceRule), possitiveAssumptions) : null;

            // Prepare the variable indicating whether the theorem is geometrically valid
            bool isValid;

            // If this is an equality
            if (theorem.Type == EqualObjects)
            {
                // Call the appropriate method to handle it while finding out whether there has been any normal version change
                HandleEquality(theorem, normalizationHelper, scheduler, proofData, out isValid, out var anyNormalVersionChangeInThisIteration);

                // If yes, then set the outer loop variable indicating the same thing for the whole loop
                if (anyNormalVersionChangeInThisIteration)
                    anyNormalVersionChange = true;
            }
            // If this is a non-equality
            else
            {
                // Call the appropriate method to handle it
                HandleNonequality(theorem, normalizationHelper, scheduler, proofData, out isValid);
            }

            // If the theorem turns out not to be geometrically valid, trace it
            if (!isValid)
                _tracer.MarkInvalidInferrence(configuration, theorem, data.InferenceRule, negativeAssumptions, possitiveAssumptions);
        }

        #endregion
    }

    #endregion
}
/// <summary>
/// Program entry point.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    ErrorCalculation.Mode = ErrorCalculationMode.RMS;

    // Download the data that we will attempt to model.
    string filename = DownloadData(app.Args);

    // Define the format of the data file.
    // This area will change, depending on the columns and
    // format of the file that you are trying to model.
    var format = new CSVFormat('.', ' '); // decimal point and space separated
    IVersatileDataSource source = new CSVDataSource(filename, true, format);

    var data = new VersatileMLDataSet(source);
    data.NormHelper.Format = format;

    ColumnDefinition columnSSN = data.DefineSourceColumn("SSN", ColumnType.Continuous);
    ColumnDefinition columnDEV = data.DefineSourceColumn("DEV", ColumnType.Continuous);

    // Analyze the data, determine the min/max/mean/sd of every column.
    data.Analyze();

    // Use SSN & DEV to predict SSN. For time series it is okay to have
    // SSN both as an input and an output.
    data.DefineInput(columnSSN);
    data.DefineInput(columnDEV);
    data.DefineOutput(columnSSN);

    // Create a feedforward neural network as the model type. MLMethodFactory.TYPE_FEEDFORWARD.
    // You could also use other model types, such as:
    // MLMethodFactory.SVM: Support Vector Machine (SVM)
    // MLMethodFactory.TYPE_RBFNETWORK: RBF Neural Network
    // MLMethodFactory.TYPE_NEAT: NEAT Neural Network
    // MLMethodFactory.TYPE_PNN: Probabilistic Neural Network
    var model = new EncogModel(data);
    model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

    // Send any output to the console.
    model.Report = new ConsoleStatusReportable();

    // Now normalize the data. Encog will automatically determine the correct
    // normalization type based on the model you chose in the last step.
    data.Normalize();

    // Set the time-series window.
    data.LeadWindowSize = 1;
    data.LagWindowSize = WindowSize;

    // Hold back some data for a final validation.
    // Do not shuffle the data into a random ordering (never shuffle time series).
    // Use a seed of 1001 so that we always use the same holdback and
    // will get more consistent results.
    model.HoldBackValidation(0.3, false, 1001);

    // Choose whatever is the default training type for this model.
    model.SelectTrainingType(data);

    // Use a 5-fold cross-validated train. Return the best method found.
    // (Never shuffle time series.)
    var bestMethod = (IMLRegression)model.Crossvalidate(5, false);

    // Display the training and validation errors.
    Console.WriteLine(@"Training error: " + model.CalculateError(bestMethod, model.TrainingDataset));
    Console.WriteLine(@"Validation error: " + model.CalculateError(bestMethod, model.ValidationDataset));

    // Display our normalization parameters.
    NormalizationHelper helper = data.NormHelper;
    Console.WriteLine(helper.ToString());

    // Display the final model.
    Console.WriteLine(@"Final model: " + bestMethod);

    // Loop over the entire, original dataset and feed it through the
    // model. This also shows how you would process new data that was
    // not part of your training set. You do not need to retrain; simply
    // use the NormalizationHelper class. After you train, you can save
    // the NormalizationHelper to later normalize and denormalize your data.
    source.Close();
    var csv = new ReadCSV(filename, true, format);
    var line = new String[2];

    // Create a vector to hold each time slice as we build them.
    // These will be grouped together into windows.
    var slice = new double[2];
    var window = new VectorWindow(WindowSize + 1);

    IMLData input = helper.AllocateInputVector(WindowSize + 1);

    // Only display the first 100 rows.
    int stopAfter = 100;

    while (csv.Next() && stopAfter > 0)
    {
        var result = new StringBuilder();

        line[0] = csv.Get(2); // ssn
        line[1] = csv.Get(3); // dev
        helper.NormalizeInputVector(line, slice, false);

        // Enough data to build a full window?
        if (window.IsReady())
        {
            window.CopyWindow(((BasicMLData)input).Data, 0);
            String correct = csv.Get(2); // trying to predict SSN

            IMLData output = bestMethod.Compute(input);
            String predicted = helper.DenormalizeOutputVectorToString(output)[0];

            result.Append(string.Join(",", line));
            result.Append(" -> predicted: ");
            result.Append(predicted);
            result.Append(" (correct: ");
            result.Append(correct);
            result.Append(")");
            Console.WriteLine(result.ToString());
        }

        // Add the normalized slice to the window. We do this just after
        // checking whether the window is ready, so that the window is
        // always one behind the current row. This is because we are
        // trying to predict the next row.
        window.Add(slice);

        stopAfter--;
    }

    csv.Close();

    // Delete the data file and shut down.
    File.Delete(filename);
    EncogFramework.Instance.Shutdown();
}
public PredictionMachine(IMLRegression neuralNet, NormalizationHelper normalizationHelper, PredictionConfig config)
{
    NeuralNet = neuralNet;
    _normalizationHelper = normalizationHelper;
    Config = config;
}
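A class like this would typically wrap the normalize-compute-denormalize cycle seen in the Encog examples above. The following Predict method is a hypothetical sketch (the method name is an assumption; the helper calls match those used in the surrounding examples):

// Hypothetical sketch of how such a machine might use its NormalizationHelper.
public string Predict(string[] rawFields)
{
    // Allocate an input vector sized for the model and normalize the raw fields into it.
    IMLData input = _normalizationHelper.AllocateInputVector();
    _normalizationHelper.NormalizeInputVector(rawFields, ((BasicMLData)input).Data, false);

    // Run the model and map the output back to a human-readable value.
    IMLData output = NeuralNet.Compute(input);
    return _normalizationHelper.DenormalizeOutputVectorToString(output)[0];
}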
public ActionResult Saaty99([FromBody] Alternative[] array)
{
    var saaty = NormalizationHelper.Saaty99(array);
    return Ok(saaty);
}
/// <summary>
/// Program entry point.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    // Download the data that we will attempt to model.
    string irisFile = DownloadData(app.Args);

    // Define the format of the data file.
    // This area will change, depending on the columns and
    // format of the file that you are trying to model.
    IVersatileDataSource source = new CSVDataSource(irisFile, false, CSVFormat.DecimalPoint);

    var data = new VersatileMLDataSet(source);
    data.DefineSourceColumn("sepal-length", 0, ColumnType.Continuous);
    data.DefineSourceColumn("sepal-width", 1, ColumnType.Continuous);
    data.DefineSourceColumn("petal-length", 2, ColumnType.Continuous);
    data.DefineSourceColumn("petal-width", 3, ColumnType.Continuous);

    // Define the column that we are trying to predict.
    ColumnDefinition outputColumn = data.DefineSourceColumn("species", 4, ColumnType.Nominal);

    // Analyze the data, determine the min/max/mean/sd of every column.
    data.Analyze();

    // Map the prediction column to the output of the model, and all
    // other columns to the input.
    data.DefineSingleOutputOthersInput(outputColumn);

    // Create a feedforward neural network as the model type. MLMethodFactory.TYPE_FEEDFORWARD.
    // You could also use other model types, such as:
    // MLMethodFactory.SVM: Support Vector Machine (SVM)
    // MLMethodFactory.TYPE_RBFNETWORK: RBF Neural Network
    // MLMethodFactory.TYPE_NEAT: NEAT Neural Network
    // MLMethodFactory.TYPE_PNN: Probabilistic Neural Network
    var model = new EncogModel(data);
    model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

    // Send any output to the console.
    model.Report = new ConsoleStatusReportable();

    // Now normalize the data. Encog will automatically determine the correct normalization
    // type based on the model you chose in the last step.
    data.Normalize();

    // Hold back some data for a final validation.
    // Shuffle the data into a random ordering.
    // Use a seed of 1001 so that we always use the same holdback and will get more consistent results.
    model.HoldBackValidation(0.3, true, 1001);

    // Choose whatever is the default training type for this model.
    model.SelectTrainingType(data);

    // Use a 5-fold cross-validated train. Return the best method found.
    var bestMethod = (IMLRegression)model.Crossvalidate(5, true);

    // Display the training and validation errors.
    Console.WriteLine(@"Training error: " + model.CalculateError(bestMethod, model.TrainingDataset));
    Console.WriteLine(@"Validation error: " + model.CalculateError(bestMethod, model.ValidationDataset));

    // Display our normalization parameters.
    NormalizationHelper helper = data.NormHelper;
    Console.WriteLine(helper.ToString());

    // Display the final model.
    Console.WriteLine(@"Final model: " + bestMethod);

    // Loop over the entire, original dataset and feed it through the model.
    // This also shows how you would process new data that was not part of your
    // training set. You do not need to retrain; simply use the NormalizationHelper
    // class. After you train, you can save the NormalizationHelper to later
    // normalize and denormalize your data.
    source.Close();
    var csv = new ReadCSV(irisFile, false, CSVFormat.DecimalPoint);
    var line = new String[4];
    IMLData input = helper.AllocateInputVector();

    while (csv.Next())
    {
        var result = new StringBuilder();
        line[0] = csv.Get(0);
        line[1] = csv.Get(1);
        line[2] = csv.Get(2);
        line[3] = csv.Get(3);
        String correct = csv.Get(4);

        helper.NormalizeInputVector(line, ((BasicMLData)input).Data, false);
        IMLData output = bestMethod.Compute(input);
        String irisChosen = helper.DenormalizeOutputVectorToString(output)[0];

        result.Append(string.Join(",", line));
        result.Append(" -> predicted: ");
        result.Append(irisChosen);
        result.Append(" (correct: ");
        result.Append(correct);
        result.Append(")");
        Console.WriteLine(result.ToString());
    }

    csv.Close();

    // Delete the data file and shut down.
    File.Delete(irisFile);
    EncogFramework.Instance.Shutdown();
}
/// <inheritdoc/>
public void Init(NormalizationHelper normalizationHelper)
{
}
public ActionResult Geometry([FromBody] Alternative[] array)
{
    var geom = NormalizationHelper.GeometryMethod(array);
    return Ok(geom);
}
public IntegralCriterionMethodResult FindDecision(Model model)
{
    var result = new IntegralCriterionMethodResult("Аддитивный критерий", "Значение аддитивного критерия");

    // Normalize the criteria
    foreach (Criterion crit in model.Criteria.Values)
    {
        Dictionary<TId, double> normalizedCrit = NormalizationHelper.NormalizeCriterionValues(model.Experiments, crit);
        //Normalization.NormalizeCriterion(model.Experiments, crit);
        result.AddNormalizedCriterion(normalizedCrit, crit.Id);
    }

    // Compute the value of the additive criterion for each experiment
    var additiveCriterion = new Dictionary<TId, double>();
    foreach (Experiment exp in model.Experiments.Values)
    {
        if (exp.IsActive)
        {
            double additiveCriterionValue = 0;
            foreach (Criterion crit in model.Criteria.Values)
            {
                double normalizedCriterionValue = result.GetNormalizedCriterion(crit.Id)[exp.Id];
                normalizedCriterionValue *= crit.Weight;

                //switch (crit.Type)
                //{
                //    case CriterionType.Maximizing:
                //        additiveCriterionValue -= normalizedCriterionValue;
                //        break;
                //    case CriterionType.Minimizing:
                additiveCriterionValue += normalizedCriterionValue;
                //        break;
                //}
            }

            additiveCriterion.Add(exp.Id, additiveCriterionValue);
        }
    }

    // Sort the results in ascending order of the additive criterion value (lower is better)
    List<SortableDouble> sortedAdditiveCriterionValues = additiveCriterion.Select<KeyValuePair<TId, double>, SortableDouble>(
        kvp => new SortableDouble() { Direction = SortDirection.Ascending, Id = kvp.Key, Value = kvp.Value }
    ).ToList();
    sortedAdditiveCriterionValues.Sort();

    // Fill in the results
    foreach (SortableDouble sortedAdditiveCriterionValue in sortedAdditiveCriterionValues)
    {
        result.SortedPoints.Add(sortedAdditiveCriterionValue.Id);
        result.AdditionalData.Add(sortedAdditiveCriterionValue.Id, sortedAdditiveCriterionValue.Value);
    }

    return result;
}