// Builds one linear model per output class so the multiclass problem can be
// solved as _outputSize independent one-vs-rest classifiers.
void CreateLinearMulticlass()
{
    for (int classIndex = 0; classIndex < _outputSize; classIndex++)
    {
        _myMulticlassModel.Add(MlDllWrapper.CreateLinearModel(_inputSize));
    }
}
// Trains one linear model per output class (one-vs-rest).
// trainningOuput is a flat, per-sample interleaved array
// (sample0: class0..classN-1, sample1: class0.., ...); this de-interleaves it
// into one expected-output array per class, then trains each model on the
// shared input set against its own class column.
void trainLinearMulticlass()
{
    print("Starting trainning");
    // Fix: the old code called Clear() on the list and then immediately
    // replaced it with a fresh instance — the Clear() was redundant.
    // Also build each per-class column directly into a pre-sized array
    // instead of going through a temporary List<double>.
    _outputTrainningMulticlass = new List<double[]>(_outputSize);
    int outputLength = trainningOuput.Length / _outputSize; // number of samples
    for (int i = 0; i < _outputSize; i++)
    {
        double[] perClass = new double[outputLength];
        for (int j = 0; j < outputLength; j++)
        {
            perClass[j] = trainningOuput[j * _outputSize + i];
        }
        _outputTrainningMulticlass.Add(perClass);
    }
    for (int i = 0; i < _outputTrainningMulticlass.Count; i++)
    {
        MlDllWrapper.trainLinearClass(_myMulticlassModel[i], trainningInput,
            trainningInput.Length, _inputSize, _outputTrainningMulticlass[i],
            _outputTrainningMulticlass[i].Length, 1, learningRate,
            isClassification, epochs);
    }
    print("Trainning Finished");
}
// Keyboard-driven test harness:
//   A — run prediction (regression variant when isClassification is false)
//   T — train the MLP on the loaded dataset
//   R — release the native model
void Update()
{
    if (Input.GetKeyDown(KeyCode.A))
    {
        if (isClassification)
        {
            predictMLPMulticlass();
        }
        else
        {
            predictMLPMulticlassRegression();
        }
    }

    if (Input.GetKeyDown(KeyCode.T))
    {
        print("Starting trainning");
        MlDllWrapper.trainMLPModelClass(MyModel, numberLayer,
            trainningInput.Length / npl[0], npl,
            trainningInput, trainningInput.Length, _inputSize,
            trainningOuput, trainningOuput.Length, _outputSize,
            epochs, learningRate, isClassification);
        print("Trainning Finished");
    }

    if (Input.GetKeyDown(KeyCode.R))
    {
        MlDllWrapper.DeleteLinearModel(MyModel);
    }
}
// Keyboard-driven test harness:
//   A — predict on the current test set
//   I — copy the user-selected image into Resources and load it as request input
//   T — single training pass + dataset evaluation
//   R — release the native model
//   O — restart the endless background-training coroutine
void Update()
{
    if (Input.GetKeyDown(KeyCode.A))
    {
        predictMLPMulticlass();
    }

    if (Input.GetKeyDown(KeyCode.I))
    {
        File.Copy(userRequestPathFile, @".\Assets\Resources\Dataset\requestImage.jpg", true);
        requestPicRead();
    }

    if (Input.GetKeyDown(KeyCode.T))
    {
        print("Starting trainning");
        MlDllWrapper.trainMLPModelClass(MyModel, numberLayer,
            trainningInput.Length / npl[0], npl,
            trainningInput, trainningInput.Length, _inputSize,
            trainningOuput, trainningOuput.Length, _outputSize,
            epochs, learningRate, isClassification);
        evaluateDataset();
        print("Trainning Finished");
    }

    if (Input.GetKeyDown(KeyCode.R))
    {
        MlDllWrapper.DeleteLinearModel(MyModel);
    }

    if (Input.GetKeyDown(KeyCode.O))
    {
        // Restart cleanly so two training coroutines never run at once.
        StopAllCoroutines();
        StartCoroutine(infiniteTrain());
    }
}
// Runs the MLP on the single requestInput sample and reports the class
// (output index) with the highest activation.
void predictMLPMulticlass()
{
    int bestModel = 0;
    // Fix: seed the argmax with -infinity. The old 0.0 seed silently reported
    // class 0 whenever every output was negative (tanh outputs lie in [-1, 1]).
    double bestValue = double.NegativeInfinity;
    IntPtr pointerToValues = MlDllWrapper.PredictMLPModel(MyModel, requestInput, _inputSize, numberLayer, npl, isClassification);
    string[] arrayValuesasString = Marshal.PtrToStringAnsi(pointerToValues).Split(';');
    for (int i = 0; i < _outputSize; i++)
    {
        // Fix: parse with the invariant culture. The native side formats with
        // '.' as decimal separator; the previous Replace('.', ',') +
        // Convert.ToDouble only worked on comma-decimal system locales
        // (on en-US, Convert.ToDouble("1,5") parses as 15).
        double currentValue = double.Parse(arrayValuesasString[i], System.Globalization.CultureInfo.InvariantCulture);
        print($"{currentValue} and model {i}");
        if (currentValue > bestValue)
        {
            bestModel = i;
            bestValue = currentValue;
        }
    }
    print($"The best value is {bestValue} with the model {bestModel}");
    // NOTE(review): pointerToValues is native memory that is never freed here;
    // DeleteLinearModel looks like the wrong deallocator for an MLP prediction
    // buffer — confirm which free function the DLL exposes for it.
}
// Thin wrapper: trains the single linear model on the loaded dataset.
// NOTE(review): the input size (2), output size (1) and classification flag
// (true) are hard-coded here instead of read from the class fields — confirm
// this is intentional for the 2D sphere demo.
void trainModel()
{
    print("Model start trainning");
    const int hardcodedInputSize = 2;
    const int hardcodedOutputSize = 1;
    MlDllWrapper.trainLinearClass(MyModel, trainningInput, trainningInput.Length,
        hardcodedInputSize, trainningOuput, trainningOuput.Length,
        hardcodedOutputSize, learningRate, true, epochs);
    print("Model finished train");
}
// Loads the CSV training set, validates its column counts against the npl
// topology, optionally randomizes the scene spheres, snapshots their XY
// positions as the test set, and allocates the native MLP model.
void Start()
{
    CsvReader.readCSVFile("TrainningData\\" + trainningFile, ref inputList, ref outputList);
    trainningInput = inputList.ToArray();
    trainningOuput = outputList.ToArray();

    // The network topology array drives everything else.
    _inputSize = npl[0];
    _outputSize = npl.Last();
    numberLayer = npl.Length;

    if (CsvReader.inputCount != _inputSize)
    {
        Debug.LogError(
            $"Input Length ({CsvReader.inputCount}) in CSV File don't match the npl input length ({_inputSize})");
    }
    if (CsvReader.outputCount != _outputSize)
    {
        Debug.LogError(
            $"Output Length ({CsvReader.outputCount}) in CSV File don't match the npl output length ({_outputSize})");
    }

    if (isRandomized)
    {
        randomizeSpheres(minRandom, maxRandom);
    }

    // The test set is the (x, y) position of every sphere in the scene.
    testDataSet = new List<double[]>();
    foreach (sphereExposer sphere in spheres)
    {
        Vector3 pos = sphere.myTransform.position;
        testDataSet.Add(new[] { (double)pos.x, pos.y });
    }

    MyModel = MlDllWrapper.CreateMLPModel(numberLayer, npl);
}
// Update is called once per frame
// Endless background training: runs one training call plus a dataset
// evaluation per frame, yielding between passes. Stopped externally via
// StopAllCoroutines() (see the KeyCode.O handler).
IEnumerator infiniteTrain()
{
    int realEpochs = 0;
    while (true)
    {
        MlDllWrapper.trainMLPModelClass(MyModel, numberLayer,
            trainningInput.Length / npl[0], npl,
            trainningInput, trainningInput.Length, _inputSize,
            trainningOuput, trainningOuput.Length, _outputSize,
            epochs, learningRate, isClassification);
        evaluateDataset();
        realEpochs++; // passes completed so far (handy in the debugger)
        yield return null;
    }
}
// Start is called before the first frame update.
// Loads the fixed CSV training set, validates its column counts against the
// npl topology, and allocates the native MLP model.
void Start()
{
    CsvReader.readCSVFile("TrainningData\\inputCsv.csv", ref inputList, ref outputList);
    trainningInput = inputList.ToArray();
    trainningOuput = outputList.ToArray();

    _inputSize = npl[0];
    _outputSize = npl.Last();
    numberLayer = npl.Length;

    if (CsvReader.inputCount != _inputSize)
    {
        Debug.LogError($"Input Length ({CsvReader.inputCount}) in CSV File don't match the npl input length ({_inputSize})");
    }
    if (CsvReader.outputCount != _outputSize)
    {
        Debug.LogError($"Output Length ({CsvReader.outputCount}) in CSV File don't match the npl output length ({_outputSize})");
    }

    MyModel = MlDllWrapper.CreateMLPModel(numberLayer, npl);
}
// For each test sample, evaluates every one-vs-rest linear model, picks the
// class with the highest score, and colors the matching sphere
// (class 1 → red, class 2 → green, anything else → blue).
void predictLinearMulticlass()
{
    for (int j = 0; j < testDataSet.Count; j++)
    {
        _multiclassResult.Clear();
        int bestModel = 0;
        // Fix: seed the argmax with -infinity. The old 0.0 seed silently
        // picked class 0 whenever every model scored negative.
        double bestValue = double.NegativeInfinity;
        for (int i = 0; i < _outputSize; i++)
        {
            double currentValue = MlDllWrapper.PredictLinearModel(_myMulticlassModel[i], testDataSet[j], _inputSize, isClassification);
            print($"Model numero {i} = {currentValue}");
            if (currentValue > bestValue)
            {
                bestModel = i;
                bestValue = currentValue;
            }
        }
        switch (bestModel)
        {
            case 1:
                spheres[j].ChangeMaterial(redMat);
                break;
            case 2:
                spheres[j].ChangeMaterial(greenMat);
                break;
            default:
                spheres[j].ChangeMaterial(blueMat);
                break;
        }
    }
}
// Regression mode: runs the MLP on each test sample and moves the matching
// sphere's Z to the first (and, for regression, only meaningful) output.
void predictMLPMulticlassRegression()
{
    for (int j = 0; j < testDataSet.Count; j++)
    {
        _multiclassResult.Clear();
        int bestModel = 0;
        // Fix: -infinity seed so the argmax is valid for negative outputs.
        double bestValue = double.NegativeInfinity;
        IntPtr pointerToValues = MlDllWrapper.PredictMLPModel(MyModel, testDataSet[j], _inputSize, numberLayer, npl, isClassification);
        string[] arrayValuesasString = Marshal.PtrToStringAnsi(pointerToValues).Split(';');
        double firstValue = 0.0;
        for (int i = 0; i < _outputSize; i++)
        {
            // Fix: invariant-culture parse of the dot-decimal native output;
            // the previous Replace('.', ',') + Convert.ToDouble only worked on
            // comma-decimal system locales.
            double currentValue = double.Parse(arrayValuesasString[i], System.Globalization.CultureInfo.InvariantCulture);
            if (i == 0)
            {
                firstValue = currentValue; // reuse below instead of re-parsing
            }
            print($"{currentValue} and model {i}");
            if (currentValue > bestValue)
            {
                bestModel = i;
                bestValue = currentValue;
            }
        }
        print(arrayValuesasString[0]);
        spheres[j].changeZ((float)firstValue);
        // NOTE(review): pointerToValues is native memory that is never freed;
        // confirm the DLL's matching deallocator for prediction buffers.
    }
}
// Classification mode: runs the MLP on each test sample and colors the sphere
// blue when the output is positive, red otherwise.
void predictMLPMulticlass()
{
    for (int j = 0; j < testDataSet.Count; j++)
    {
        _multiclassResult.Clear();
        IntPtr pointerToValues = MlDllWrapper.PredictMLPModel(MyModel, testDataSet[j], _inputSize, numberLayer, npl, isClassification);
        string[] arrayValuesasString = Marshal.PtrToStringAnsi(pointerToValues).Split(';');
        for (int i = 0; i < _outputSize; i++)
        {
            // Fix: invariant-culture parse of the dot-decimal native output;
            // the previous Replace('.', ',') + Convert.ToDouble only worked on
            // comma-decimal system locales.
            double currentValue = double.Parse(arrayValuesasString[i], System.Globalization.CultureInfo.InvariantCulture);
            // NOTE(review): when _outputSize > 1 the last output wins, because
            // the material is overwritten every iteration — presumably this
            // path is only used with a single output; confirm.
            if (currentValue > 0)
            {
                spheres[j].ChangeMaterial(blueMat);
            }
            else
            {
                spheres[j].ChangeMaterial(redMat);
            }
        }
        // NOTE(review): pointerToValues is native memory that is never freed;
        // confirm the DLL's matching deallocator for prediction buffers.
    }
}
// Predicts the class of training sample k: slices the k-th input row out of
// the flat trainningInput array, runs the MLP on it, and returns the index of
// the highest output (the predicted class).
int predictDatasetSample(int k)
{
    int bestModel = 0;
    // Fix: seed the argmax with -infinity. The old 0.0 seed silently returned
    // class 0 whenever every output was negative (tanh range is [-1, 1]).
    double bestValue = double.NegativeInfinity;

    // Copy the k-th sample (npl[0] consecutive values) into its own array.
    double[] sample = new double[npl[0]];
    Array.Copy(trainningInput, k * npl[0], sample, 0, npl[0]);

    IntPtr pointerToValues = MlDllWrapper.PredictMLPModel(MyModel, sample, _inputSize, numberLayer, npl, isClassification);
    string[] arrayValuesasString = Marshal.PtrToStringAnsi(pointerToValues).Split(';');
    for (int i = 0; i < _outputSize; i++)
    {
        // Fix: invariant-culture parse of the dot-decimal native output; the
        // previous Replace('.', ',') + Convert.ToDouble only worked on
        // comma-decimal system locales.
        double currentValue = double.Parse(arrayValuesasString[i], System.Globalization.CultureInfo.InvariantCulture);
        if (currentValue > bestValue)
        {
            bestModel = i;
            bestValue = currentValue;
        }
    }
    // NOTE(review): pointerToValues is native memory that is never freed;
    // confirm the DLL's matching deallocator for prediction buffers.
    return bestModel;
}