void Start()
{
    // Find the OpenSee component on this object if none was assigned.
    if (openSee == null)
    {
        openSee = GetComponent<OpenSee>();
    }
    // Without an OpenSee component there is no tracking data to work with, so reset and bail out.
    if (openSee == null)
    {
        ResetInfo();
        return;
    }
    ResetInfo();
    expressions = new Dictionary<string, List<float[]>>();
    model       = new ThunderSVMModel();
    rnd         = new System.Random();
}
            //private bool newModel = false;

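            // Restores expression data, the SVM model, class labels and feature indices from a gzip-compressed, binary-serialized OpenSeeExpressionRepresentation.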
            public static void LoadSerialized(byte[] modelBytes, out Dictionary<string, List<float[]>> expressions, out SVMModel model, out string[] classLabels, out int[] indices, ref PointSelection pointSelection)
            {
                IFormatter   formatter    = new BinaryFormatter();
                MemoryStream memoryStream = new MemoryStream(modelBytes);

                memoryStream.Position = 0;
                OpenSeeExpressionRepresentation oser;

                using (GZipStream gzipStream = new GZipStream(memoryStream, CompressionMode.Decompress)) {
                    oser = formatter.Deserialize(gzipStream) as OpenSeeExpressionRepresentation;
                }
                expressions = oser.expressions;
                if (oser.thunderSVM)
                {
                    model = new ThunderSVMModel(oser.modelBytes);
                }
                else
                {
                    model = new SVMModel(oser.modelBytes);
                }
                classLabels = oser.classLabels;
                indices     = oser.indices;
                //pointSelection = oser.pointSelection;
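                // If no feature indices were stored, default to using every feature of the full input vector.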
                if (indices == null)
                {
                    indices = new int[1 + 1 + 3 + 4 + 3 + 3 * 66];
                    for (int i = 0; i < 1 + 1 + 3 + 4 + 3 + 3 * 66; i++)
                    {
                        indices[i] = i;
                    }
                }

                /*if (pointSelection == null)
                 *  pointSelection = new PointSelection();
                 * if (!oser.newModel)
                 *  pointSelection.features = true;*/
            }
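        // Trains an SVM on the collected expression samples, evaluates it on a held-out test split and records the confusion matrix, accuracy and any warnings. Returns false if training is not possible.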
        public bool TrainModel()
        {
            Debug.Log("---------------------");
            train = false;
            if (openSee == null)
            {
                ResetInfo();
                return false;
            }
            modelReady = false;
            if (model is SVMModel)
            {
                model = new ThunderSVMModel();
            }
            SelectPoints();
            List<string> keys             = new List<string>();
            List<string> accuracyWarnings = new List<string>();
            int          samples          = 0;

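            // Keep only expressions with enough valid samples; warn about sparsely sampled ones and skip those with too little data.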
            foreach (string key in expressions.Keys)
            {
                List<float[]> list = new List<float[]>(expressions[key]);
                list.RemoveAll(x => x.Length != colsFull);
                if (list != null && list.Count == maxSamples)
                {
                    Debug.Log("[Training info] Adding expression " + key + " to training data.");
                    keys.Add(key);
                    samples += list.Count;
                }
                else
                {
                    if (list != null && list.Count > 20)
                    {
                        Debug.Log("[Training warning] Expression " + key + " has little data and might be inaccurate. (" + list.Count + ")");
                        accuracyWarnings.Add("Expression " + key + " has little data and might be inaccurate. (" + list.Count + ")");
                        samples += list.Count;
                        keys.Add(key);
                    }
                    else
                    {
                        Debug.Log("[Training warning] Skipping expression " + key + " due to lack of collected data. (" + list.Count + ")");
                        accuracyWarnings.Add("Skipping expression " + key + " due to lack of collected data. (" + list.Count + ")");
                        continue;
                    }
                }
            }
            int classes = keys.Count;

            if (classes < 2 || classes > 10)
            {
                Debug.Log("[Training error] The number of expressions that can be used for training is " + classes + ", which is either below 2 or higher than 10.");
                accuracyWarnings.Add("[Training error] The number of expressions that can be used for training is " + classes + ", which is either below 2 or higher than 10.");
                return false;
            }
            keys.Sort();
            classLabels = keys.ToArray();

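            // Build per-class SVM weights from weightMap; classes without an entry default to a weight of 1.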
            float[] weights = null;
            if (weightMap != null)
            {
                weights = new float[classLabels.Length];
                for (int i = 0; i < classLabels.Length; i++)
                {
                    if (weightMap.ContainsKey(classLabels[i]))
                    {
                        weights[i] = weightMap[classLabels[i]];
                    }
                    else
                    {
                        weights[i] = 1f;
                    }
                    Debug.Log("[Training info] Adding weight " + weights[i] + " for " + classLabels[i] + ".");
                }
            }

            Debug.Log("[Training info] Preparing trainig data.");

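            // Split each class's samples roughly 3:1 into training and test data.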
            int train_split = maxSamples * 3 / 4;
            int test_split  = maxSamples - train_split;
            int rows_train  = classes * train_split;
            int rows_test   = classes * test_split;

            float[] X_train = new float[rows_train * cols];
            float[] X_test  = new float[rows_test * cols];
            float[] y_train = new float[rows_train];
            float[] y_test  = new float[rows_test];
            int     i_train = 0;
            int     i_test  = 0;

            System.Random rnd = new System.Random();
            for (int i = 0; i < classes; i++)
            {
                List<float[]> list = new List<float[]>(expressions[keys[i]]);
                list.RemoveAll(x => x.Length != colsFull);
                int local_train_split = list.Count * 3 / 4;
                test_split = list.Count - local_train_split;
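                // Fisher-Yates shuffle so the train/test split draws random samples from the collected data.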
                for (int j = list.Count; j > 1;)
                {
                    j--;
                    int     k   = rnd.Next(j + 1);
                    float[] tmp = list[k];
                    list[k] = list[j];
                    list[j] = tmp;
                }
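                // The multiplier for the "neutral" class below is 1, so its training split is currently left unchanged.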
                int train_split_current = train_split;
                if (classLabels[i] == "neutral")
                {
                    train_split_current *= 1;
                }
                for (int j = 0; j < train_split_current; j++)
                {
                    float factor = 1f;
                    float adder  = 0f;

                    /*if (j > train_split || j > list.Count) {
                     *  factor = UnityEngine.Random.Range(0.98f, 1.02f);
                     *  adder = UnityEngine.Random.Range(-0.02f, 0.02f);
                     * }*/
                    for (int k = 0; k < cols; k++)
                    {
                        float v = list[j % local_train_split][indices[k]] * factor + adder;
                        X_train[i_train * cols + k] = v;
                    }
                    y_train[i_train] = i;
                    i_train++;
                }
                for (int j = local_train_split; j < local_train_split + test_split; j++)
                {
                    for (int k = 0; k < cols; k++)
                    {
                        float v = list[j][indices[k]];
                        X_test[i_test * cols + k] = v;
                    }
                    y_test[i_test] = i;
                    i_test++;
                }
            }
            Debug.Log("[Training info] Preparation complete.");
            int probability = 0;

            if (enableProbabilityTraining)
            {
                probability = 1;
            }
            model.TrainModel(X_train, y_train, weights, i_train, cols, probability, C);
            confusionMatrix       = model.ConfusionMatrix(X_test, y_test, i_test, out accuracy);
            confusionMatrixString = SVMModel.FormatMatrix(confusionMatrix, classLabels);
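            // Warn about classes whose per-class error rate on the test split exceeds warningThreshold.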
            for (int label = 0; label < classes; label++)
            {
                int total = 0;
                for (int p = 0; p < classes; p++)
                {
                    total += confusionMatrix[label, p];
                }
                float error = 100f * (1f - ((float)confusionMatrix[label, label] / (float)total));
                if (error > warningThreshold)
                {
                    accuracyWarnings.Add("[Training warning] The expression \"" + classLabels[label] + "\" is misclassified with a chance of " + error.ToString("0.00") + "%.");
                    Debug.Log("[Training warning] The expression \"" + classLabels[label] + "\" is misclassified with a chance of " + error.ToString("0.00") + "%.");
                }
            }
            warnings   = accuracyWarnings.ToArray();
            modelReady = true;
            Debug.Log("[Training info] Trained model.");
            return true;
        }
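            // Serializes the expression data, trained model, class labels, feature indices and point selection into a gzip-compressed byte array.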
            public static byte[] ToSerialized(Dictionary<string, List<float[]>> expressions, SVMModel model, string[] classLabels, int[] indices, PointSelection pointSelection)
            {
                OpenSeeExpressionRepresentation oser = new OpenSeeExpressionRepresentation();

                oser.expressions    = expressions;
                oser.modelBytes     = model.SaveModel();
                oser.classLabels    = classLabels;
                oser.indices        = indices;
                oser.pointSelection = pointSelection;
                oser.thunderSVM     = true;
                //oser.newModel = true;

                IFormatter   formatter    = new BinaryFormatter();
                MemoryStream memoryStream = new MemoryStream();

                using (GZipStream gzipStream = new GZipStream(memoryStream, CompressionMode.Compress)) {
                    formatter.Serialize(gzipStream, oser);
                    gzipStream.Flush();
                }
                return memoryStream.ToArray();
            }