Example #1
        private void FindStartPointViterbi(CRFGraph graph)
        {
            var request = new SolveInference(graph, NumberLabels);
            request.RequestInDefaultContext();
            var resultLabeling = request.Solution.Labeling;

            foreach (var item in graph.Nodes)
            {
                item.Data.TempAssign = resultLabeling[item.GraphId];
            }
        }
Example #2
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var weights    = weightCurrent.ToArray();
            var weightsSum = new double[weightCurrent.Length];
            int iteration  = 0;

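            // Confusion-matrix counters; the 0.001 offsets keep the MCC denominator from becoming zero.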
            double tp = 0.001, tn = 0.001, fp = 0.001, fn = 0.001;

            for (int i = 0; i < TrainingGraphs.Count; i++)
            {
                var graph = TrainingGraphs[i];
                SetWeightsCRF(weights, graph);

                //compute labeling with viterbi algorithm
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, null, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labeling = request.Solution.Labeling;
                //check nonequality

                iteration++;
                for (int k = 0; k < labeling.Length; k++)
                {
                    if (graph.Data.ReferenceLabeling[k] > 0)
                    {
                        if (labeling[k] > 0)
                        {
                            tp += 1;
                        }
                        else
                        {
                            fn += 1;
                        }
                    }

                    else
                    {
                        if (labeling[k] > 0)
                        {
                            fp += 1;
                        }
                        else
                        {
                            tn += 1;
                        }
                    }
                }

                var   loss               = LossFunctionIteration(labeling, graph.Data.ReferenceLabeling);
                int[] countsPred         = CountPred(graph, labeling);
                int[] countsRef          = CountPred(graph, graph.Data.ReferenceLabeling);
                int[] countsRefMinusPred = new int[countsPred.Length];
                for (int k = 0; k < countsPred.Length; k++)
                {
                    countsRefMinusPred[k] = countsRef[k] - countsPred[k];
                }
                var weightedScore = 0.0;
                for (int k = 0; k < weights.Length; k++)
                {
                    weightedScore += weights[k] * countsRefMinusPred[k];
                }
                double l2normsq = (countsRefMinusPred.Sum(entry => entry * entry));


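                // Margin-rescaled update: shift the weights along the count difference so that its weighted score matches the loss.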
                var deltaomegaFactor = (loss - weightedScore) / (l2normsq);
                var deltaomega       = new double[weights.Length];
                for (int k = 0; k < weights.Length; k++)
                {
                    if (l2normsq > 0)
                    {
                        deltaomega[k] = deltaomegaFactor * countsRefMinusPred[k];
                    }
                    weights[k]    += deltaomega[k];
                    weightsSum[k] += weights[k];
                }

                Log.Post("loss: " + Math.Round(loss, 5));
                Log.Post("weightedScore: " + Math.Round(weightedScore, 5));
                Log.Post("l2normsquare: " + Math.Round(l2normsq, 5));
            }
            var mcc = (tp * tn - fp * fn) / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));
            //mccMax = Math.Max(mccMax, mcc);

            var weightChanges = new double[weights.Length];

            for (int k = 0; k < weights.Length; k++)
            {
                weightChanges[k] = (weightsSum[k] / iteration) - weightCurrent[k];
            }
            weights = this.WeightObservationUnit.ApplyChangeVector(weightChanges, weightCurrent);
            return(weights);
        }
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var weights    = weightCurrent.ToArray();
            var weightsSum = new double[weightCurrent.Length];
            int iteration  = 0;

            double tp = 0.001, tn = 0.001, fp = 0.001, fn = 0.001;

            int[] countsRefMinusPred = new int[weightCurrent.Length];

            lastWeights = weights.ToArray();

            var totalChange          = random.NextDouble();

            //change weights
            for (int i = 0; i < weights.Length; i++)
            {
                var localChange = random.NextDouble();
                weights[i] += (random.NextDouble() * 0.2 - 0.1) * totalChange * localChange;
            }

            for (int i = 0; i < TrainingGraphs.Count; i++)
            {
                var graph = TrainingGraphs[i];
                //set scores according to weights
                SetWeightsCRF(weights, graph);

                //compute labeling with viterbi algorithm
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labeling = request.Solution.Labeling;
                //check nonequality

                iteration++;

                for (int k = 0; k < labeling.Length; k++)
                {
                    if (graph.Data.ReferenceLabeling[k] > 0)
                    {
                        if (labeling[k] > 0)
                        {
                            tp += 1;
                        }
                        else
                        {
                            fn += 1;
                        }
                    }

                    else
                    {
                        if (labeling[k] > 0)
                        {
                            fp += 1;
                        }
                        else
                        {
                            tn += 1;
                        }
                    }
                }

                int[] countsPred = CountPred(graph, labeling);
                int[] countsRef  = CountPred(graph, graph.Data.ReferenceLabeling);
                for (int k = 0; k < countsPred.Length; k++)
                {
                    countsRefMinusPred[k] += countsRef[k] - countsPred[k];
                }
            }

            var mcc = (tp * tn - fp * fn) / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));

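            // Keep the random perturbation only if the MCC did not drop below the best value seen so far; otherwise revert to the previous weights.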
            if (mcc < mccMax)
            {
                weights = lastWeights;
                Log.Post("Weight unchanged.");
            }
            else
            {
                Log.Post("Weight changed.");
            }

            mccMax = Math.Max(mccMax, mcc);

            lastMCC = mcc;
            return(weights);
        }
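These DoIteration variants share one margin-rescaled weight update: compute the count difference between reference and prediction, then shift the weights so that its weighted score matches the loss. Below is a minimal, self-contained sketch of that step; the class name OlmUpdateStep and the plain array signature are illustrative assumptions, while the actual variants obtain the counts via CountPred and the loss from the current labeling.
        // Sketch only: margin-rescaled update on plain count vectors.
        public static class OlmUpdateStep
        {
            // Moves the weights along (countsRef - countsPred) so that the weighted score of
            // that difference matches the given loss; no update if both count vectors agree.
            public static double[] Apply(double[] weights, int[] countsRef, int[] countsPred, double loss)
            {
                var updated = (double[])weights.Clone();
                var diff = new int[weights.Length];
                double weightedScore = 0.0;
                double l2normsq = 0.0;

                for (int k = 0; k < weights.Length; k++)
                {
                    diff[k] = countsRef[k] - countsPred[k];
                    weightedScore += weights[k] * diff[k];
                    l2normsq += (double)diff[k] * diff[k];
                }

                if (l2normsq <= 0.0)
                {
                    return updated; // prediction and reference counts coincide, nothing to correct
                }

                var factor = (loss - weightedScore) / l2normsq;
                for (int k = 0; k < weights.Length; k++)
                {
                    updated[k] += factor * diff[k];
                }
                return updated;
            }
        }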
Example #4
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var weights    = weightCurrent.ToArray();
            var weightsSum = new double[weightCurrent.Length];
            int iteration  = 0;

            double tp = 0.001, tn = 0.001, fp = 0.001, fn = 0.001;

            int[] countsRefMinusPred = new int[weightCurrent.Length];

            WeightObservationUnit_II.Update(weights);

            for (int i = 0; i < TrainingGraphs.Count; i++)
            {
                var graph = TrainingGraphs[i];
                //set scores according to weights
                SetWeightsCRF(weights, graph);

                //compute labeling with viterbi algorithm
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labeling = request.Solution.Labeling;
                //check nonequality

                iteration++;

                for (int k = 0; k < labeling.Length; k++)
                {
                    if (graph.Data.ReferenceLabeling[k] > 0)
                    {
                        if (labeling[k] > 0)
                        {
                            tp += 1;
                        }
                        else
                        {
                            fn += 1;
                        }
                    }

                    else
                    {
                        if (labeling[k] > 0)
                        {
                            fp += 1;
                        }
                        else
                        {
                            tn += 1;
                        }
                    }
                }

                int[] countsPred = CountPred(graph, labeling);
                int[] countsRef  = CountPred(graph, graph.Data.ReferenceLabeling);
                for (int k = 0; k < countsPred.Length; k++)
                {
                    countsRefMinusPred[k] += countsRef[k] - countsPred[k];
                }
            }

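            // MCC over all training graphs for this iteration; only the running maximum mccMax is updated here.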
            var mcc = (tp * tn - fp * fn) / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));

            mccMax = Math.Max(mccMax, mcc);

            return(weights);
        }
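For reference, a short sketch of the Matthews correlation coefficient tracked by these iterations; the method name ComputeMcc is an illustrative assumption, and the examples above inline the formula with small 0.001 offsets instead of the explicit zero guard used here.
        // Sketch only: MCC = (TP*TN - FP*FN) / sqrt((TP+FP)(TP+FN)(TN+FP)(TN+FN)).
        public static double ComputeMcc(double tp, double tn, double fp, double fn)
        {
            var denominator = Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));
            return denominator > 0.0 ? (tp * tn - fp * fn) / denominator : 0.0;
        }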
        /*
         *  The version of the project cycle agreed upon with Mr. Waack for testing the different OLM training variants
         */
        public void RunCycle(TrainingEvaluationCycleInputParameters inputParameters)
        {
            #region Step 0: Prepare the data

            // Cache frequently used variables for readability:
            var inputGraph = inputParameters.Graph;
            var graphList  = new List <GWGraph <CRFNodeData, CRFEdgeData, CRFGraphData> >();

            // Create the graphs
            for (int i = 0; i < inputParameters.NumberOfGraphInstances; i++)
            {
                var newGraph = inputGraph.Clone(nd => new CRFNodeData()
                {
                    X = nd.Data.X, Y = nd.Data.Y, Z = nd.Data.Z
                }, ed => new CRFEdgeData(), gd => new CRFGraphData());
                graphList.Add(newGraph);
            }

            // Create the required objects:
            seedingMethodPatchCreation = new SeedingMethodPatchCreation(inputParameters.NumberOfSeedsForPatchCreation, inputParameters.MaximumTotalPatchSize);

            #endregion


            #region Step 1: Create reference labelings.

            int[][] referenceLabelings = new int[inputParameters.NumberOfGraphInstances][];
            for (int i = 0; i < inputParameters.NumberOfGraphInstances; i++)
            {
                seedingMethodPatchCreation.CreatePatchAndSetAsReferenceLabel(graphList[i]);

                if (i == 0 && GraphVisalization == true)
                {
                    var graph3D = graphList[i].Wrap3D(nd => new Node3DWrap <CRFNodeData>(nd.Data)
                    {
                        ReferenceLabel = nd.Data.ReferenceLabel, X = nd.Data.X, Y = nd.Data.Y, Z = nd.Data.Z
                    }, (ed) => new Edge3DWrap <CRFEdgeData>(ed.Data)
                    {
                        Weight = 1.0
                    });
                    new ShowGraph3D(graph3D).Request();
                }
            }


            #endregion

            #region Step 2: Create observations (and scores)

            var createObservationsUnit = new CreateObservationsUnit(inputParameters.TransitionProbabilities);
            var isingModel             = new IsingModel(inputParameters.IsingConformityParameter, inputParameters.IsingCorrelationParameter);
            for (int i = 0; i < inputParameters.NumberOfGraphInstances; i++)
            {
                var graph = graphList[i];
                createObservationsUnit.CreateObservation(graph);
                //graph.Data.Observations = observation;

                // create the corresponding scores
                isingModel.CreateCRFScore(graph);

                if (i == 0)
                {
                    var graph3D = graph.Wrap3D();
                    new ShowGraph3D(graph3D).Request();
                }
            }
            #endregion

            #region Step 3: Split the data into evaluation and training
            // Ratio: 50/50
            int separation = inputParameters.NumberOfGraphInstances / 2;

            var testGraphs = new List <IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData> >
                                 (new IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData> [separation]);
            var evaluationGraphs = new List <GWGraph <CRFNodeData, CRFEdgeData, CRFGraphData> >
                                       (new GWGraph <CRFNodeData, CRFEdgeData, CRFGraphData> [inputParameters.NumberOfGraphInstances - separation]);

            for (int i = 0; i < separation; i++)
            {
                testGraphs[i] = graphList[i];
            }
            int k = 0;
            for (int j = separation; j < inputParameters.NumberOfGraphInstances; j++)
            {
                evaluationGraphs[k++] = graphList[j];
            }

            #endregion

            #region Step 4: Train and evaluate the different OLM variants

            // object for evaluation
            var evaluationResults = new Dictionary <OLMVariant, OLMEvaluationResult>();

            foreach (var trainingVariant in inputParameters.TrainingVariantsToTest)
            {
                evaluationResults.Add(trainingVariant, new OLMEvaluationResult());

                #region Step 4.1: Train the OLM variant
                {
                    var request = new OLMRequest(trainingVariant, testGraphs);
                    request.BasisMerkmale.AddRange(new IsingMerkmalNode(), new IsingMerkmalEdge());
                    //TODO: extract the loss function
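                    // Validation loss: fraction of nodes whose predicted label differs from the reference (normalized Hamming loss).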
                    request.LossFunctionValidation = (a, b) =>
                    {
                        var loss = 0.0;
                        for (int i = 0; i < a.Length; i++)
                        {
                            loss += a[i] != b[i] ? 1 : 0;
                        }
                        return(loss / a.Length);
                    };

                    request.Request();

                    var olmResult = request.Result;


                    // update Ising parameters in IsingModel
                    isingModel.ConformityParameter  = olmResult.ResultingWeights[0];
                    isingModel.CorrelationParameter = olmResult.ResultingWeights[1];

                    // create the corresponding scores for every graph (including evaluation)
                    foreach (var graph in graphList)
                    {
                        isingModel.CreateCRFScore(graph);
                    }
                }
                #endregion

                #region Step 4.2: Evaluate the OLM variant

                var keys    = new ComputeKeys();
                var results = new OLMEvaluationResult();
                results.ConformityParameter  = isingModel.ConformityParameter;
                results.CorrelationParameter = isingModel.CorrelationParameter;

                // 1) Start the Viterbi heuristic (request: SolveInference) and add the additional parameters
                for (int graph = 0; graph < evaluationGraphs.Count; graph++)
                {
                    var request2 = new SolveInference(evaluationGraphs[graph], inputParameters.NumberOfLabels,
                                                      inputParameters.BufferSizeViterbi);

                    request2.RequestInDefaultContext();

                    // 2) Evaluate the result of the request (request.Solution returns a labeling)
                    int[] predictionLabeling = request2.Solution.Labeling;

                    // 3) Evaluate the results of all evaluation graphs (TP, TN, FP, FN, MCC) and store them
                    // in a new object so they can be accessed in step 5.
                    var result = keys.computeEvalutionGraphResult(evaluationGraphs[graph], predictionLabeling);
                    // insert into the dictionary -> list
                    evaluationResults[trainingVariant].GraphResults.Add(result);
                }

                // Compute the average values
                foreach (OLMVariant variant in evaluationResults.Keys)
                {
                    results.ComputeValues(evaluationResults[trainingVariant]);
                }

                // debug output
                Log.Post("Average Values");
                Log.Post("Sensitivity: " + evaluationResults[trainingVariant].AverageSensitivity +
                         "\t Specificy: " + evaluationResults[trainingVariant].AverageSpecificity +
                         "\t MCC: " + evaluationResults[trainingVariant].AverageMCC +
                         //"\t Accuracy: " + evaluationResults[trainingVariant].AverageAccuracy +
                         "\t TotalTP: " + evaluationResults[trainingVariant].TotalTP + "\n");

                #endregion
            }

            #endregion

            #region Step 5: Present and save the results
            // output of the keys
            //outputKeys(evaluation, inputParameters, evaluationGraphs);

            // output of the labels
            //outputLabelingsScores(graphList, inputParameters);


            // TODO: Marlon
            // graphical output

            var olmPresentationRequest = new ShowOLMResult(evaluationResults.Values.ToList());
            //foreach (var variant in evaluationResults.Keys)
            //{

            //    //foreach (var graphresult in evaluationResults[variant].GraphResults)
            //    //{
            //    //    //var graph = graphresult.Graph;
            //    //}
            //}
            olmPresentationRequest.Request();
            #endregion
        }
Example #6
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var weights    = weightCurrent.ToArray();
            var weightsSum = new double[weightCurrent.Length];
            int iteration  = 0;

            if (AddRdmNode && globalIteration % 20 == 0)
            {
                for (int i = 0; i < weights.Length; i++)
                {
                    weights[i] += 0.2 * random.NextDouble() - 0.1;
                }
            }

            //double tp = 0.001, tn = CoreResidues, fp = 0.001, fn = 0.001;

            tp = 0; tn = 0; fp = 0; fn = 0;

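            // Remember the current weights together with their accumulated prediction counts as a memory point; the oldest points are evicted once MemoryPointsCount is exceeded.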
            var newPoint = new MemoryPoint(weights, new int[weights.Length], 0.0);

            MemoryPoints.Add(newPoint);
            while (MemoryPoints.Count > MemoryPointsCount)
            {
                MemoryPoints.RemoveAt(0);
            }

            ReferencePoint = new MemoryPoint(weights, new int[weights.Length], 1.0);
            for (int i = 0; i < TrainingGraphs.Count; i++)
            {
                var graph = TrainingGraphs[i];
                SetWeightsCRF(weights, graph);

                //compute labeling with viterbi algorithm
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, null, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labeling = request.Solution.Labeling;
                //check nonequality

                iteration++;

                int[] countsPred = CountPred(graph, labeling);
                int[] countsRef  = CountPred(graph, graph.Data.ReferenceLabeling);
                for (int k = 0; k < countsPred.Length; k++)
                {
                    newPoint.Counts[k]       += countsPred[k];
                    ReferencePoint.Counts[k] += countsRef[k];
                }
                TrackResults(labeling, graph.Data.ReferenceLabeling);
            }
            //WriterResults();
            //var sensitivity = tp / (tp + fn);
            //var specificity = tn / (tn + fp);
            var mcc = (tp * tn - fp * fn) / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));

            newPoint.Score = mcc;

            //Log.Post("New MCC: " + Math.Round(mcc, 5) + " Sens: " + Math.Round(sensitivity, 5) + " Spec: " + Math.Round(specificity, 5));

            //tn -= CoreResidues;
            //var sensitivity2 = tp / (tp + fn);
            //var specificity2 = tn / (tn + fp);
            //var mcc2 = (tp * tn - fp * fn) / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));

            //Log.Post("New MCC(2): " + Math.Round(mcc2, 5) + " Sens: " + Math.Round(sensitivity2, 5) + " Spec: " + Math.Round(specificity2, 5));

            if (globalIteration == 1)
            {
                MemoryPoints.Add(ReferencePoint);
            }

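            // Pairwise updates between all stored memory points: the scaled score difference acts as the loss and the count difference as the feature difference of a margin-rescaled update.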
            var deltaomega = new double[weights.Length];

            for (int k = 0; k < MemoryPoints.Count - 1; k++)
            {
                var pointOne = MemoryPoints[k];
                for (int l = k + 1; l < MemoryPoints.Count; l++)
                {
                    var   pointTwo           = MemoryPoints[l];
                    int[] countsRefMinusPred = new int[weights.Length];
                    for (int m = 0; m < weights.Length; m++)
                    {
                        countsRefMinusPred[m] = (pointOne.Counts[m] - pointTwo.Counts[m]);
                    }
                    var weightedScore = 0.0;
                    for (int m = 0; m < weights.Length; m++)
                    {
                        weightedScore += weights[m] * (countsRefMinusPred[m]);
                    }
                    double l2normsq = (countsRefMinusPred.Sum(entry => entry * entry));

                    var loss = 100 * (pointOne.Score - pointTwo.Score);

                    var deltaomegaFactor = (loss - weightedScore) / (l2normsq);
                    for (int m = 0; m < weights.Length; m++)
                    {
                        if (l2normsq > 0)
                        {
                            deltaomega[m] += deltaomegaFactor * countsRefMinusPred[m];
                        }
                    }
                }
            }

            //if (MemoryPoints.Count >= memoryPoints)
            {
                //normalize
                var normFactor = MemoryPoints.Count * (MemoryPoints.Count - 1) / 2;
                for (int m = 0; m < weights.Length; m++)
                {
                    deltaomega[m] /= normFactor;
                }

                for (int k = 0; k < weights.Length; k++)
                {
                    weights[k] += deltaomega[k];
                }
            }
            //else
            //{
            //    for (int k = 0; k < weights.Length; k++)
            //    {
            //        weights[k] = 0.02 * random.NextDouble() - 0.01;
            //    }
            //}
            //if (iteration == 1)
            //MemoryPoints.Remove(ReferencePoint);

            return(weights);
        }
Example #7
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var weights    = weightCurrent.ToArray();
            var weightsSum = new double[weightCurrent.Length];
            int iteration  = 0;

            double tp = 0.001, tn = 0.001, fp = 0.001, fn = 0.001;

            int[] countsRefMinusPred = new int[weightCurrent.Length];


            Log.Post("#Iteration: " + globalIteration);

            for (int i = 0; i < TrainingGraphs.Count; i++)
            {
                var graph = TrainingGraphs[i];
                //set scores according to weights
                SetWeightsCRF(weights, graph);

                //compute labeling with viterbi algorithm
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labeling = request.Solution.Labeling;
                //check nonequality

                iteration++;

                for (int k = 0; k < labeling.Length; k++)
                {
                    if (graph.Data.ReferenceLabeling[k] > 0)
                    {
                        if (labeling[k] > 0)
                        {
                            tp += 1;
                        }
                        else
                        {
                            fn += 1;
                        }
                    }

                    else
                    {
                        if (labeling[k] > 0)
                        {
                            fp += 1;
                        }
                        else
                        {
                            tn += 1;
                        }
                    }
                }

                int[] countsPred = CountPred(graph, labeling);
                int[] countsRef  = CountPred(graph, graph.Data.ReferenceLabeling);
                for (int k = 0; k < countsPred.Length; k++)
                {
                    countsRefMinusPred[k] += countsRef[k] - countsPred[k];
                }
            }

            var weightedScore        = 0.0;

            for (int k = 0; k < weights.Length; k++)
            {
                weightedScore += weights[k] * countsRefMinusPred[k];
            }
            double l2normsq = (countsRefMinusPred.Sum(entry => entry * entry));

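            // Loss for this round: false positives plus false negatives accumulated over all training graphs.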
            var loss = (fp + fn);

            var deltaomegaFactor = (loss - weightedScore) / l2normsq;
            var deltaomega       = new double[weights.Length];

            for (int k = 0; k < weights.Length; k++)
            {
                if (l2normsq > 0)
                {
                    deltaomega[k] = deltaomegaFactor * countsRefMinusPred[k];
                }
                weights[k]   += deltaomega[k];
                weightsSum[k] = weights[k] + deltaomega[k];
            }

            return(weights);
        }
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var    weights = weightCurrent.ToArray();
            var    weightsSum = new double[weightCurrent.Length];
            int    iteration = 0;
            double mcc = 0.0, mcc2 = 0.0;

            var newPoint = new Point(weights, new int[weights.Length], 0.0);


            {
                double tp = 0.001, tn = CoreResiduesTraining, fp = 0.001, fn = 0.001;

                MemoryPoints.Add(newPoint);
                if (MemoryPoints.Count > memoryPoints)
                {
                    MemoryPoints.RemoveAt(0);
                }

                ReferencePoint = new Point(weights, new int[weights.Length], 1.0);
                for (int i = 0; i < TrainingGraphs.Count; i++)
                {
                    var graph = TrainingGraphs[i];
                    //set scores according to weights
                    #region set weights
                    foreach (var node in graph.Nodes)
                    {
                        var scores = new double[Labels];
                        node.Data.Scores = scores;

                        for (int label = 0; label < Labels; label++)
                        {
                            for (int k = 0; k < weights.Length; k++)
                            {
                                scores[label] += weights[k] * BasisMerkmale[k].Score(node, label);
                            }
                        }
                    }
                    foreach (var edge in graph.Edges)
                    {
                        var scores = new double[Labels, Labels];
                        edge.Data.Scores = scores;

                        for (int label1 = 0; label1 < Labels; label1++)
                        {
                            for (int label2 = 0; label2 < Labels; label2++)
                            {
                                for (int k = 0; k < weights.Length; k++)
                                {
                                    scores[label1, label2] += weights[k] * BasisMerkmale[k].Score(edge, label1, label2);
                                }
                            }
                        }
                    }
                    #endregion

                    //compute labeling with viterbi algorithm
                    var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, null, Labels, BufferSizeInference);
                    request.RequestInDefaultContext();
                    int[] labeling = request.Solution.Labeling;
                    //check nonequality

                    iteration++;
                    for (int k = 0; k < labeling.Length; k++)
                    {
                        if (graph.Data.ReferenceLabeling[k] > 0)
                        {
                            if (labeling[k] > 0)
                            {
                                tp += 1;
                            }
                            else
                            {
                                fn += 1;
                            }
                        }
                        else
                        {
                            if (labeling[k] > 0)
                            {
                                fp += 1;
                            }
                            else
                            {
                                tn += 1;
                            }
                        }
                    }

                    int[] countsPred = CountPred(graph, labeling);
                    int[] countsRef  = CountPred(graph, graph.Data.ReferenceLabeling);
                    for (int k = 0; k < countsPred.Length; k++)
                    {
                        newPoint.Counts[k]       += countsPred[k];
                        ReferencePoint.Counts[k] += countsRef[k];
                    }
                    //TrackResults(labeling, graph.Data.ReferenceLabeling);
                }
                //WriterResults();
                var sensitivity = tp / (tp + fn);
                var specificity = tn / (tn + fp);
                mcc            = (tp * tn - fp * fn) / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));
                newPoint.Score = mcc;

                Log.Post("New MCC: " + Math.Round(mcc, 5) + " Sens: " + Math.Round(sensitivity, 5) + " Spec: " + Math.Round(specificity, 5));
            }
            //change Interval borders
            {
                var randomChanges = DivisorChange.RandomInstances(Intervals - 1);

                foreach (var change in randomChanges)
                {
                    var changesFor = random.Next(5);
                    if (changesFor == 0)
                    {
                        ValueMapRasa.ApplyChange(change);
                    }
                    else if (changesFor == 1)
                    {
                        ValueMapEMax01.ApplyChange(change);
                    }
                    else if (changesFor == 2)
                    {
                        ValueMapEMax11.ApplyChange(change);
                    }
                    else if (changesFor == 3)
                    {
                        ValueMapEDiff01.ApplyChange(change);
                    }
                    else if (changesFor == 4)
                    {
                        ValueMapEDiff11.ApplyChange(change);
                    }
                }


                double tp = 0.001, tn = CoreResiduesTraining, fp = 0.001, fn = 0.001;

                //var newPoint = new Point(weights, new int[weights.Length], 0.0);
                //MemoryPoints.Add(newPoint);
                //if (MemoryPoints.Count > memoryPoints)
                //    MemoryPoints.RemoveAt(0);

                //ReferencePoint = new Point(weights, new int[weights.Length], 1.0);
                for (int i = 0; i < TrainingGraphs.Count; i++)
                {
                    var graph = TrainingGraphs[i];
                    SetWeightsCRF(weights, graph);

                    //compute labeling with viterbi algorithm
                    var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, null, Labels, BufferSizeInference);
                    request.RequestInDefaultContext();
                    int[] labeling = request.Solution.Labeling;
                    //check nonequality

                    for (int k = 0; k < labeling.Length; k++)
                    {
                        if (graph.Data.ReferenceLabeling[k] > 0)
                        {
                            if (labeling[k] > 0)
                            {
                                tp += 1;
                            }
                            else
                            {
                                fn += 1;
                            }
                        }
                        else
                        {
                            if (labeling[k] > 0)
                            {
                                fp += 1;
                            }
                            else
                            {
                                tn += 1;
                            }
                        }
                    }

                    int[] countsPred = CountPred(graph, labeling);
                    int[] countsRef  = CountPred(graph, graph.Data.ReferenceLabeling);
                    //for (int k = 0; k < countsPred.Length; k++)
                    //{
                    //    newPoint.Counts[k] += countsPred[k];
                    //    //ReferencePoint.Counts[k] += countsRef[k];
                    //}
                    //TrackResults(labeling, graph.Data.ReferenceLabeling);
                }
                //WriterResults();
                var sensitivity = tp / (tp + fn);
                var specificity = tn / (tn + fp);
                mcc2 = (tp * tn - fp * fn) / Math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn));
                //newPoint.Score = mcc2;

                Log.Post("New MCC(IChanged): " + Math.Round(mcc, 5) + " Sens: " + Math.Round(sensitivity, 5) + " Spec: " + Math.Round(specificity, 5));
            }

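            // Keep the interval changes only if they improved the MCC; otherwise undo them and fall back to the memory-point update.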
            if (mcc2 > mcc)
            {
                ValueMapRasa.lastChanges.Clear();
                ValueMapEMax01.lastChanges.Clear();
                ValueMapEMax11.lastChanges.Clear();
                ValueMapEDiff01.lastChanges.Clear();
                ValueMapEDiff11.lastChanges.Clear();
                Log.Post("Intervals changed");

                MemoryPoints.RemoveAt(MemoryPoints.Count - 1);
            }
            else
            {
                ValueMapRasa.UndoLastChanges();
                ValueMapEMax01.UndoLastChanges();
                ValueMapEMax11.UndoLastChanges();
                ValueMapEDiff01.UndoLastChanges();
                ValueMapEDiff11.UndoLastChanges();

                if (MemoryPoints.Count == 1)
                {
                    MemoryPoints.Add(ReferencePoint);
                }

                var deltaomega = new double[weights.Length];
                for (int k = 0; k < MemoryPoints.Count - 1; k++)
                {
                    var pointOne = MemoryPoints[k];
                    for (int l = k + 1; l < MemoryPoints.Count; l++)
                    {
                        var   pointTwo           = MemoryPoints[l];
                        int[] countsRefMinusPred = new int[weights.Length];
                        for (int m = 0; m < weights.Length; m++)
                        {
                            countsRefMinusPred[m] = (pointOne.Counts[m] - pointTwo.Counts[m]);
                        }
                        var weightedScore = 0.0;
                        for (int m = 0; m < weights.Length; m++)
                        {
                            weightedScore += weights[m] * (countsRefMinusPred[m]);
                        }
                        double l2normsq = (countsRefMinusPred.Sum(entry => entry * entry));

                        var loss = 100 * (pointOne.Score - pointTwo.Score);

                        var deltaomegaFactor = (loss - weightedScore) / (l2normsq);
                        for (int m = 0; m < weights.Length; m++)
                        {
                            if (l2normsq > 0)
                            {
                                deltaomega[m] += deltaomegaFactor * countsRefMinusPred[m];
                            }
                        }
                    }
                }

                //if (MemoryPoints.Count >= memoryPoints)
                {
                    //normalize
                    var normFactor = MemoryPoints.Count * (MemoryPoints.Count - 1) / 2;
                    for (int m = 0; m < weights.Length; m++)
                    {
                        deltaomega[m] /= normFactor;
                    }

                    for (int k = 0; k < weights.Length; k++)
                    {
                        weights[k] += deltaomega[k];
                    }
                }
                //else
                //{
                //    for (int k = 0; k < weights.Length; k++)
                //    {
                //        weights[k] = 0.02 * random.NextDouble() - 0.01;
                //    }
                //}
                //if (iteration == 1)
                //MemoryPoints.Remove(ReferencePoint);
            }

            return(weights);
        }
Example #9
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var    weights = weightCurrent.ToArray();
            int    u       = TrainingGraphs.Count;
            var    vit     = new int[u][];
            var    mcmc    = new int[u][];
            double devges  = 0.0;
            // number of nodes
            double mx       = 0;
            var    refLabel = new int[u][];
            double devgesT  = 0;
            // sum of all nodes over all graphs
            double mu = 0;

            int[] countsMCMCMinusRef = new int[weightCurrent.Length];
            int[] countsRefMinusMCMC = new int[weightCurrent.Length];

            Log.Post("#Iteration: " + globalIteration);

            for (int g = 0; g < TrainingGraphs.Count; g++)
            {
                var graph = TrainingGraphs[g];
                mx  = graph.Nodes.Count();
                mu += mx;
                // labeling with Viterbi (MAP)
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labelingVit = request.Solution.Labeling;
                vit[g] = labelingVit;

                // labeling with MCMC based on MAP
                var requestMCMC = new GiveProbableLabelings(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>)
                {
                    StartingPoints = 1, PreRunLength = 100000, RunLength = 1
                };
                requestMCMC.RequestInDefaultContext();
                var   result       = requestMCMC.Result;
                int[] labelingMCMC = new int[labelingVit.Length];
                foreach (var item in result)
                {
                    labelingMCMC[item.Key.GraphId] = (int)item.Value;
                }
                mcmc[g] = labelingMCMC;

                // actual (reference) labeling
                int[] labeling = graph.Data.ReferenceLabeling;
                refLabel[g] = labeling;

                // compute the typical/mean error
                devges  += LossFunctionIteration(refLabel[g], mcmc[g]);
                devgesT += LossFunctionIteration(refLabel[g], vit[g]);

                // set scores according to weights
                SetWeightsCRF(weights, graph);

                if (debugOutputEnabled)
                {
                    printLabelings(vit[g], mcmc[g], refLabel[g], g);
                }

                // calculate equation 6.13 and 6.14
                int[] countsRef  = CountPred(graph, refLabel[g]);
                int[] countsMCMC = CountPred(graph, mcmc[g]);

                for (int k = 0; k < countsRef.Length; k++)
                {
                    countsMCMCMinusRef[k] += countsMCMC[k] - countsRef[k];
                    countsRefMinusMCMC[k] += countsRef[k] - countsMCMC[k];
                }
            }

            // mean (typical) error (summed Gibbs score)
            middev = devges / u;

            // actual error for this round (summed training score)
            realdev = devgesT / u;

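            // Loss: gap between the mean Viterbi error and the mean MCMC ("typical") error, scaled by the total number of nodes.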
            var loss = (realdev - middev) * mu;

            // Recompute scores?? Done this way in the script, but not necessary

            double l2norm = (countsRefMinusMCMC.Sum(entry => entry * entry));

            var deltaomegaFactor = 0.0;
            var deltaomega       = new double[weights.Length];
            var weightedScore    = 0.0;

            for (int k = 0; k < weights.Length; k++)
            {
                if (l2norm > 0)
                {
                    weightedScore   += weights[k] * countsMCMCMinusRef[k];
                    deltaomegaFactor = (loss + weightedScore) / l2norm;
                    deltaomega[k]    = deltaomegaFactor * countsRefMinusMCMC[k];
                }
                else
                {
                    // counts coincide (l2norm == 0): skip the update instead of dividing by zero
                    deltaomega[k] = 0;
                }
                weights[k] += deltaomega[k];
            }

            // debug output
            Log.Post("Loss: " + (int)loss + " Realdev: " + realdev + " Middev: " + middev);

            return(weights);
        }
Example #10
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var weights    = weightCurrent.ToArray();
            var weightsSum = new double[weightCurrent.Length];
            int iteration  = 0;

            double tp = 0.001, tn = 0.001, fp = 0.001, fn = 0.001;

            int[] countsRefMinusPred = new int[weightCurrent.Length];

            for (int i = 0; i < TrainingGraphs.Count; i++)
            {
                var graph = TrainingGraphs[i];
                //set scores according to weights
                SetWeightsCRF(weights, graph);

                //compute labeling with viterbi algorithm
                //BufferSizeInference = 2000;
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, null, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labeling = request.Solution.Labeling;
                //check nonequality

                iteration++;

                for (int k = 0; k < labeling.Length; k++)
                {
                    if (graph.Data.ReferenceLabeling[k] > 0)
                    {
                        if (labeling[k] > 0)
                        {
                            tp += 1;
                        }
                        else
                        {
                            fn += 1;
                        }
                    }

                    else
                    {
                        if (labeling[k] > 0)
                        {
                            fp += 1;
                        }
                        else
                        {
                            tn += 1;
                        }
                    }
                }

                //var loss = LossFunctionIteration(labeling, graph.Data.ReferenceLabeling);
                int[] countsPred = CountPred(graph, labeling);
                int[] countsRef  = CountPred(graph, graph.Data.ReferenceLabeling);
                for (int k = 0; k < countsPred.Length; k++)
                {
                    countsRefMinusPred[k] += countsRef[k] - countsPred[k];
                }
            }

            var weightedScore        = 0.0;

            for (int k = 0; k < weights.Length; k++)
            {
                weightedScore += weights[k] * countsRefMinusPred[k];
            }
            double l2normsq = (countsRefMinusPred.Sum(entry => entry * entry));

            //var loss = (mccMax - mcc) * lossFunctionFactorMCC;
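            // Loss used here: false positives plus false negatives accumulated over all training graphs.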
            var loss = (fp + fn);

            var deltaomegaFactor = (loss - weightedScore) / l2normsq;
            var deltaomega       = new double[weights.Length];

            for (int k = 0; k < weights.Length; k++)
            {
                if (l2normsq > 0)
                {
                    deltaomega[k] = deltaomegaFactor * countsRefMinusPred[k];
                }
                weights[k] += deltaomega[k];
                //weights[k] = 2 * (random.NextDouble() - 0.5);
                weightsSum[k] = weights[k] + deltaomega[k];
            }


            //var weightChanges = new double[weights.Length];
            //for (int k = 0; k < weights.Length; k++)
            //{
            //    weightChanges[k] = ((weights[k]) - weightCurrent[k]);
            //}
            //weights = this.WeightObservationUnit.ApplyChangeVector(weightChanges, weightCurrent);
            return(weights);
        }
Example #11
        public void Do(int weights, IEnumerable <IGWGraph <NodeData, EdgeData, GraphData> > graphs, int maxIterations, OLMRequest olmrequest)
        {
            Weights = weights;
            int    validationQuantils = 2;
            double quantilratio       = 1.0 / validationQuantils;
            var    quantiledGraphs    = new List <IGWGraph <NodeData, EdgeData, GraphData> > [validationQuantils];

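            // Two-fold cross-validation: each graph is randomly assigned to one quantile, and each quantile later serves once as the validation set.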
            for (int i = 0; i < validationQuantils; i++)
            {
                quantiledGraphs[i] = new List <IGWGraph <NodeData, EdgeData, GraphData> >();
            }

            //divide graphs in training / validation
            foreach (var graph in graphs)
            {
                var quantil = random.Next(validationQuantils);
                quantiledGraphs[quantil].Add(graph);
            }

            for (int quantilIteration = 0; quantilIteration < validationQuantils; quantilIteration++)
            {
                TrainingGraphs   = new List <IGWGraph <NodeData, EdgeData, GraphData> >();
                ValidationGraphs = new List <IGWGraph <NodeData, EdgeData, GraphData> >();


                //CoreResidues = 0;
                for (int quantil = 0; quantil < validationQuantils; quantil++)
                {
                    if (quantil == quantilIteration)
                    {
                        ValidationGraphs.AddRange(quantiledGraphs[quantil]);
                    }
                    else
                    {
                        TrainingGraphs.AddRange(quantiledGraphs[quantil]);
                    }
                }

                //foreach (var graph in ValidationGraphs)
                //{
                //    CoreResidues += graph.Data.CoreResidues;
                //}

                Iteration     = 0;
                MaxIterations = maxIterations;

                SetStartingWeights();

                this.WeightObservationUnit.Init(weightCurrent);
                var lossOpt     = double.MaxValue;
                var lossOptOld  = 0.0;
                var lossCurrent = 0.0;

                OLMTracker = new OLMTracking(weights, new int[] { 1, 3, 5, 8, 12, 20, 50 }, weightCurrent, Name + "_q" + quantilIteration + "_OLMTracking.txt");

                var interfaceValid    = 0;
                var noninterfaceValid = 0;

                foreach (var graph in ValidationGraphs)
                {
                    interfaceValid    += graph.Data.ReferenceLabeling.Sum();
                    noninterfaceValid += graph.Nodes.Count() - graph.Data.ReferenceLabeling.Sum();
                }

                var sitesValid = interfaceValid + noninterfaceValid;

                while (!CheckCancelCriteria())
                {
                    Iteration++;

                    var oldWVector = weightCurrent.ToArray();
                    weightCurrent = DoIteration(TrainingGraphs, weightCurrent, Iteration);

                    ResultingWeights = weightCurrent;

                    //for (int i = 1; i < 20; i++)
                    //{
                    //    for (int k = 1; k < 20; k++)
                    //    {
                    //        weightCurrent[0] = Math.Pow(-1.0, i) * ((int)(i / 2)) * 0.1;
                    //        weightCurrent[1] = Math.Pow(-1.0, k) * ((int)(k / 2)) * 0.1;

                    tp          = 0; tn = 0; fp = 0; fn = 0;
                    lossCurrent = 0.0;
                    foreach (var graph in ValidationGraphs)
                    {
                        SetWeightsCRF(weightCurrent, graph);

                        var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, null, Labels, BufferSizeInference);
                        request.RequestInDefaultContext();

                        var prediction = request.Solution.Labeling;
                        lossCurrent += LossFunctionValidation(graph.Data.ReferenceLabeling, prediction);

                        TrackResults(graph.Data.ReferenceLabeling, prediction);
                    }
                    WriterResults();
                    lossCurrent /= sitesValid;

                    if (lossCurrent < lossOpt)
                    {
                        lossOptOld = lossOpt;
                        lossOpt    = lossCurrent;
                        weightOpt  = weightCurrent;
                    }

                    OLMTracker.Track(weightCurrent, lossCurrent);
                    var iterationResult = new OLMIterationResult(weightCurrent.ToArray(), lossCurrent);
                    olmrequest.Result.ResultsHistory.IterationResultHistory.Add(iterationResult);

                    //    }
                    //}
                }
            }

            OLMTracker.WriteWeights();

            //return weightOpt;
        }
Example #12
        protected override double[] DoIteration(List <IGWGraph <NodeData, EdgeData, GraphData> > TrainingGraphs, double[] weightCurrent, int globalIteration)
        {
            var    weights     = weightCurrent.ToArray();
            int    u           = TrainingGraphs.Count;
            var    vit         = new int[u][];
            var    samplesMCMC = new int[NumberOfSamples][];
            double devges      = 0.0;
            // number of nodes
            double mx       = 0;
            var    refLabel = new int[u][];
            double devgesT  = 0;
            // sum of all nodes over all graphs
            double mu = 0;

            MCMCDeviations    = 0.0;
            refMCMCDeviations = 0.0;

            int[] countsRefMinusPred = new int[weightCurrent.Length];

            Log.Post("#Iteration: " + globalIteration);

            for (int g = 0; g < TrainingGraphs.Count; g++)
            {
                var graph = TrainingGraphs[g];
                mx  = graph.Nodes.Count();
                mu += mx;

                // labeling with Viterbi (MAP)
                var request = new SolveInference(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>, Labels, BufferSizeInference);
                request.RequestInDefaultContext();
                int[] labelingVit = request.Solution.Labeling;
                vit[g] = labelingVit;

                // labeling with MCMC based on MAP
                // TODO generate not 1 but k labelings for each graph
                for (int i = 0; i < NumberOfSamples; i++)
                {
                    var requestMCMC = new GiveProbableLabelings(graph as IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData>)
                    {
                        StartingPoints = 1, PreRunLength = 100000, RunLength = 1
                    };
                    requestMCMC.RequestInDefaultContext();
                    var   result       = requestMCMC.Result;
                    int[] labelingMCMC = new int[labelingVit.Length];
                    foreach (var item in result)
                    {
                        labelingMCMC[item.Key.GraphId] = (int)item.Value;
                    }
                    samplesMCMC[i] = labelingMCMC;
                }

                // TODO function to sum the deviations in mcmc labelings
                CalculateMCMCDeviations(samplesMCMC);

                // actual (reference) labeling
                int[] labeling = graph.Data.ReferenceLabeling;
                refLabel[g] = labeling;

                // TODO function to sum deviations from reflabel to MCMC labelings
                CalculateRefMCMCDeviations(samplesMCMC, labeling);

                // compute the actual error
                devgesT += LossFunctionIteration(refLabel[g], vit[g]);

                // set scores according to weights
                SetWeightsCRF(weights, graph);

                int[] countsRef  = CountPred(graph, refLabel[g]);
                int[] countsPred = CountPred(graph, vit[g]);

                for (int k = 0; k < countsRef.Length; k++)
                {
                    countsRefMinusPred[k] += countsRef[k] - countsPred[k];
                }
            }

            // mean (typical) error (summed Gibbs score)
            middev = devges / u;
            // actual error for this round (summed training score)
            realdev            = devgesT / u;
            MCMCDeviations    /= u;
            refMCMCDeviations /= u;

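            // Loss: mean Viterbi error per graph, scaled by the total number of nodes (mu).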
            var loss = realdev * mu;

            // Recompute scores?? Done this way in the script, but not necessary

            double l2norm = (countsRefMinusPred.Sum(entry => entry * entry));

            var deltaomega    = new double[weights.Length];
            var weightedScore = 0.0;

            for (int k = 0; k < weights.Length; k++)
            {
                weightedScore += weights[k] * countsRefMinusPred[k];
            }
            var deltaomegaFactor = (loss - weightedScore) / l2norm;

            for (int k = 0; k < weights.Length; k++)
            {
                if (l2norm > 0)
                {
                    deltaomega[k] = deltaomegaFactor * countsRefMinusPred[k];
                }
                else
                {
                    Log.Post("wiu wiu");
                    deltaomega[k] = 0;
                }
                weights[k] += deltaomega[k];
            }

            // debug output
            Log.Post("Loss: " + (int)loss + " refMCMCDeviations: " + refMCMCDeviations + " MCMCDeviations: " + MCMCDeviations);

            return(weights);
        }
        /*
         *  The version of the project cycle agreed upon with Mr. Waack for testing the different OLM training variants
         */
        public void RunCycle(TrainingEvaluationCycleInputParameters inputParameters)
        {
            #region Step 1: Prepare the data

            var graphList         = inputParameters.Graphs;
            int numberOfLabels    = inputParameters.NumberOfLabels;
            int numberOfIntervals = inputParameters.NumberOfIntervals;

            #endregion

            #region Step 2: Create observations (and scores)

            // var createObservationsUnit = new CreateObservationsUnit(inputParameters.Threshold);
            var createObservationsUnit = new CreateObservationsUnit(inputParameters.TransitionProbabilities);

            if (UseIsingModel)
            {
                Log.Post("Ising-Model");
            }
            else
            {
                Log.Post("Potts-Model with " + inputParameters.NumberOfIntervals + " Intervals");
            }

            var isingModel = new IsingModel(inputParameters.IsingConformityParameter, inputParameters.IsingCorrelationParameter);
            //var pottsModel = new PottsModel(inputParameters.PottsConformityParameters, inputParameters.IsingCorrelationParameter,
            //    inputParameters.AmplifierControlParameter, inputParameters.NumberOfLabels);
            var pottsModel = new PottsModelComplex(inputParameters.PottsConformityParameters, inputParameters.PottsCorrelationParameters,
                                                   inputParameters.AmplifierControlParameter, inputParameters.NumberOfLabels);
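            // model selection: IsingModel is parameterized by a single conformity and a single correlation parameter,
            // while PottsModelComplex takes arrays of conformity and correlation parameters together with an
            // amplifier control parameter and the number of labels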

            for (int i = 0; i < inputParameters.NumberOfGraphInstances; i++)
            {
                var graph = graphList[i];
                createObservationsUnit.CreateObservation(graph);
                //createObservationsUnit.CreateObservationThresholding(graph);

                // generate the corresponding scores
                if (UseIsingModel)
                {
                    isingModel.CreateCRFScore(graph);
                }

                else
                {
                    pottsModel.InitCRFScore(graph);
                }

                if (i == 0 && GraphVisualization)
                {
                    var graph3D = graph.Wrap3D();
                    new ShowGraph3D(graph3D).Request();
                }
            }
            #endregion

            #region Step 3: Split the data into evaluation and training sets
            // split ratio: 80 / 20
            int separation = inputParameters.NumberOfGraphInstances - inputParameters.NumberOfGraphInstances / 5;
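            // e.g. for 25 graph instances: separation = 25 - 25 / 5 = 20 training graphs,
            // leaving 5 graphs for evaluation (integer division)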
            // leave-one-out split:
            //int separation = inputParameters.NumberOfGraphInstances - 1;

            var trainingGraphs = new List <IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData> >
                                     (new IGWGraph <ICRFNodeData, ICRFEdgeData, ICRFGraphData> [separation]);
            var evaluationGraphs = new List <GWGraph <CRFNodeData, CRFEdgeData, CRFGraphData> >
                                       (new GWGraph <CRFNodeData, CRFEdgeData, CRFGraphData> [inputParameters.NumberOfGraphInstances - separation]);
            var randomizedGraphList = graphList.RandomizeOrder().ToList();

            for (int i = 0; i < separation; i++)
            {
                trainingGraphs[i] = randomizedGraphList[i];
                //trainingGraphs[i] = graphList[i];
            }
            int k = 0;
            for (int j = separation; j < inputParameters.NumberOfGraphInstances; j++, k++)
            {
                evaluationGraphs[k] = randomizedGraphList[j];
                //evaluationGraphs[i] = graphList[i];
            }

            Log.Post("Evaluation Graph ID: " + evaluationGraphs[0].Id);
            #endregion

            #region Step 4: Train and evaluate the different OLM variants

            // object for evaluation
            var evaluationResults = new Dictionary <OLMVariant, OLMEvaluationResult>();

            foreach (var trainingVariant in inputParameters.TrainingVariantsToTest)
            {
                evaluationResults.Add(trainingVariant, new OLMEvaluationResult());

                #region Step 4.1: Train the OLM variant
                {
                    var request = new OLMRequest(trainingVariant, trainingGraphs);
                    if (UseIsingModel)
                    {
                        request.BasisMerkmale.AddRange(new IsingMerkmalNode(), new IsingMerkmalEdge());
                    }
                    else
                    {
                        request.BasisMerkmale.AddRange(pottsModel.AddNodeFeatures(graphList, numberOfIntervals));
                        //request.BasisMerkmale.Add(new IsingMerkmalEdge());
                        request.BasisMerkmale.AddRange(pottsModel.AddEdgeFeatures(graphList, numberOfIntervals));
                    }

                    // loss function
                    request.LossFunctionIteration  = OLM.OLM.LossRatio;
                    request.LossFunctionValidation = OLM.OLM.LossRatio;

                    // execute training methods by calling OLMManager -> OLMBase
                    request.Request();

                    var olmResult = request.Result;

                    // write the trained weights back into the Ising/Potts model parameters
                    if (UseIsingModel)
                    {
                        isingModel.ConformityParameter  = olmResult.ResultingWeights[0];
                        isingModel.CorrelationParameter = olmResult.ResultingWeights[1];
                    }
                    else
                    {
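                        // layout of the trained weight vector:
                        //   [0 .. ConformityParameter.Length - 1]  -> node (conformity) weights
                        //   [ConformityParameter.Length .. end]    -> edge (correlation) weights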
                        int i = 0;
                        for (i = 0; i < pottsModel.ConformityParameter.Length; i++)
                        {
                            pottsModel.ConformityParameter[i] = olmResult.ResultingWeights[i];
                        }
                        //pottsModel.CorrelationParameter = olmResult.ResultingWeights[numberOfIntervals * 2];
                        for (int j = 0; j < pottsModel.CorrelationParameter.Length; j++)
                        {
                            pottsModel.CorrelationParameter[j] = olmResult.ResultingWeights[i++];
                        }
                    }

                    // regenerate the corresponding scores for every graph (including the evaluation graphs)
                    foreach (var graph in graphList)
                    {
                        if (UseIsingModel)
                        {
                            isingModel.CreateCRFScore(graph);
                        }
                        else
                        {
                            pottsModel.CreateCRFScore(graph, request.BasisMerkmale);
                        }
                    }
                }
                #endregion

                #region Step 4.2: Evaluate the OLM variant

                var keys    = new ComputeKeys();
                var results = new OLMEvaluationResult();
                if (UseIsingModel)
                {
                    results = new OLMEvaluationResult
                    {
                        ConformityParameter  = isingModel.ConformityParameter,
                        CorrelationParameter = isingModel.CorrelationParameter
                    };
                }
                else
                {
                    results = new OLMEvaluationResult
                    {
                        ConformityParameters = pottsModel.ConformityParameter,
                        //  CorrelationParameter = pottsModel.CorrelationParameter
                        CorrelationParameters = pottsModel.CorrelationParameter
                    };
                }

                if (UseIsingModel)
                {
                    Log.Post("Conformity: " + results.ConformityParameter + "\t Correlation: " + results.CorrelationParameter);
                }
                else
                {
                    for (int i = 0; i < results.ConformityParameters.Length; i++)
                    {
                        Log.Post("Conformity " + i + ": " + results.ConformityParameters[i] + "\t");
                    }
                    for (int j = 0; j < results.CorrelationParameters.Length; j++)
                    {
                        Log.Post("Correlation " + j + ": " + results.CorrelationParameters[j] + "\t");
                    }
                }

                // 1) run the Viterbi heuristic (request: SolveInference) with the additional parameters
                for (int graph = 0; graph < evaluationGraphs.Count; graph++)
                {
                    var request2 = new SolveInference(evaluationGraphs[graph], inputParameters.NumberOfLabels,
                                                      inputParameters.BufferSizeViterbi);

                    request2.RequestInDefaultContext();

                    // 2) evaluate the result of the request (request.Solution yields a labeling)
                    int[] predictionLabeling = request2.Solution.Labeling;

                    // 3) evaluate the results of all evaluation graphs (TP, TN, FP, FN, MCC) and cache them
                    // in a new object so that they can be accessed again in step 5
                    var result = keys.computeEvalutionGraphResult(evaluationGraphs[graph], predictionLabeling);
                    // add to the dictionary -> list
                    evaluationResults[trainingVariant].GraphResults.Add(result);
                }

                // compute the average values for the current training variant
                results.ComputeValues(evaluationResults[trainingVariant]);

                // debug output
                Log.Post("Average Values");
                Log.Post("Sensitivity: " + evaluationResults[trainingVariant].AverageSensitivity +
                         "\t Specificity: " + evaluationResults[trainingVariant].AverageSpecificity +
                         "\t MCC: " + evaluationResults[trainingVariant].AverageMCC +
                         //"\t Accuracy: " + evaluationResults[trainingVariant].AverageAccuracy +
                         "\t TotalTP: " + evaluationResults[trainingVariant].TotalTP +
                         "\t TotalFP: " + evaluationResults[trainingVariant].TotalFP +
                         "\t TotalTN: " + evaluationResults[trainingVariant].TotalTN +
                         "\t TotalFN: " + evaluationResults[trainingVariant].TotalFN);

                #endregion
            }

            #endregion
        }