/// <summary>
///   Saves this model to disk using LibSVM's model format.
/// </summary>
///
/// <param name="stream">The stream where the file should be written.</param>
///
public void Save(Stream stream)
{
    StreamWriter writer = new StreamWriter(stream);

    writer.WriteLine("solver_type " + Solver.GetDescription().ToUpperInvariant());
    writer.WriteLine("nr_class " + Classes);

    writer.Write("label");
    for (int i = 0; i < Labels.Length; i++)
    {
        writer.Write(" " + Labels[i]);
    }
    writer.WriteLine();

    writer.WriteLine("nr_feature " + Dimension);
    writer.WriteLine("bias " + Bias.ToString("G17", CultureInfo.InvariantCulture));

    writer.WriteLine("w");
    for (int i = 0; i < Weights.Length; i++)
    {
        writer.WriteLine(Weights[i].ToString("G17", CultureInfo.InvariantCulture) + " ");
    }

    writer.Flush();
}
public void Save(Stream s)
{
    XmlWriter w = null;

    try
    {
        XmlWriterSettings settings = new XmlWriterSettings();
        settings.Indent = true;
        settings.ConformanceLevel = ConformanceLevel.Fragment;

        w = XmlWriter.Create(s, settings);

        w.WriteStartElement("ColorMap");
        w.WriteAttributeString("Type", Type.ToString());
        w.WriteAttributeString("Bias", Bias.ToString());
        w.WriteAttributeString("Contrast", Contrast.ToString());

        SaveCore(w);

        w.WriteEndElement();
    }
    finally
    {
        if (w != null)
        {
            w.Close();
        }
    }
}
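// The writer above emits a single <ColorMap> element whose Type, Bias and Contrast
// attributes are produced with plain ToString() (current-culture formatting).
// A minimal, hypothetical read-back sketch for that fragment; this parsing code is an
// assumption and not part of the snippet above. 'stream' is assumed to contain the
// fragment written by Save.
using (var reader = XmlReader.Create(stream, new XmlReaderSettings { ConformanceLevel = ConformanceLevel.Fragment }))
{
    reader.MoveToContent();                                           // position on the ColorMap element
    string type = reader.GetAttribute("Type");
    double bias = double.Parse(reader.GetAttribute("Bias"));          // assumes matching culture settings
    double contrast = double.Parse(reader.GetAttribute("Contrast"));
}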
/// <summary>
///   Reads temperature, Vcc, Bias and TxPower over I2C.
/// </summary>
void GetParas()
{
    // A0H: read SN
    // A2H
    double temp, vcc, txPower, bias;
    short cache = 0;
    ushort ucache = 0;

    List<byte> data = TranBase.MyI2C_ReadA2HByte(SerBuf, Port, 96, 10);

    // Temp: bytes 96, 97
    cache = DigitTransform(data[0], data[1]);
    temp = (double)cache / 256;
    Temp = temp;
    TestingPara.Temp = Temp.ToString();

    // Vcc: bytes 98, 99
    ucache = UDigitTransform(data[2], data[3]);
    vcc = (double)ucache / 10000; // V
    Vcc = vcc;
    TestingPara.Vcc = Vcc.ToString();

    // Bias: bytes 100, 101
    ucache = UDigitTransform(data[4], data[5]);
    bias = (double)ucache / 500;
    Bias = bias;
    TestingPara.Bias = Bias.ToString();

    // TxPower: bytes 102, 103
    ucache = UDigitTransform(data[6], data[7]);
    txPower = (double)ucache / 10000; // mW
    // Keep two decimal places
    TxPower = Math.Round(Math.Log10(txPower) * 10, 2);
    TestingPara.TxPower = TxPower.ToString();
}
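// The last assignment above converts optical power from mW to dBm (10 * log10(mW)) and
// rounds to two decimal places. A small standalone check of that formula; the 0.5 mW
// value is only an illustration:
double milliwatts = 0.5;
double dBm = Math.Round(Math.Log10(milliwatts) * 10, 2);
Console.WriteLine(dBm);   // -3.01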
private Task SetElectrometer(ExcelJob job)
{
    return Task.Run(() =>
    {
        if (el == null)
        {
            MessageBox.Show("No electrometer available!");
            return;
        }

        ////ZERO
        //if (!el.IsZeroed() && !alreadyZeroed)
        //{
        //    logger.Log("Zeroing electrometer...");
        //    await el.Zero();
        //}

        //SET RANGE
        if (el.GetRange() != Autodrive.Electrometers.Enums.Range.HIGH)
        {
            el.SetRange(Autodrive.Electrometers.Enums.Range.HIGH);
        }

        //SET BIAS
        Bias reqBias = Bias.UNKNOWN;
        var currentBias = this.el.GetBias();

        switch (job.Bias)
        {
            case -100:
            case -300:
                reqBias = Bias.NEG_100PERC; break;
            case -50:
            case -150:
                reqBias = Bias.NEG_50PERC; break;
            case 0:
                reqBias = Bias.ZERO; break;
            case 50:
            case 150:
                reqBias = Bias.POS_50PERC; break;
            case 100:
            case 300:
                reqBias = Bias.POS_100PERC; break;
        }

        if (reqBias != currentBias)
        {
            logger.Log($"Setting Bias {reqBias} + 10 sec delay");
            el.SetBias(reqBias);
            Thread.Sleep(10000);
        }

        //SET MODE
        if (el.GetMode() != MeasureMode.CHARGE)
        {
            el.SetMode(MeasureMode.CHARGE);
        }

        el.StopMeasurement();
        el.Reset();
    });
}
public override string ToString()
{
    string content = "Input vector length: <" + InputLength + ">" + Environment.NewLine;
    content += "Output classes: <" + Classes + ">" + Environment.NewLine;
    content += "Hidden layers: <" + HiddenLayers + ">" + Environment.NewLine;

    string hlpString = "";
    foreach (var i in NeuronPerLayer)
    {
        hlpString += i.ToString() + ",";
    }

    content += "Neurons in each layer: <" + hlpString + ">" + Environment.NewLine;
    content += "Bias: <" + Bias.ToString() + ">" + Environment.NewLine;
    content += "Seed: <" + Seed + ">" + Environment.NewLine;
    content += "Iterations: <" + Iterations + ">" + Environment.NewLine;
    content += "Learning factor: <" + LearningFactor + ">" + Environment.NewLine;

    return content;
}
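// The repeated concatenation above allocates a new string per append. A StringBuilder-based
// sketch of the same summary would be the more conventional choice (illustrative only, not the
// class's actual implementation; it also drops the trailing comma the loop above produces):
var sb = new System.Text.StringBuilder();
sb.AppendLine($"Input vector length: <{InputLength}>");
sb.AppendLine($"Output classes: <{Classes}>");
sb.AppendLine($"Hidden layers: <{HiddenLayers}>");
sb.AppendLine($"Neurons in each layer: <{string.Join(",", NeuronPerLayer)}>");
sb.AppendLine($"Bias: <{Bias}>");
sb.AppendLine($"Seed: <{Seed}>");
sb.AppendLine($"Iterations: <{Iterations}>");
sb.AppendLine($"Learning factor: <{LearningFactor}>");
return sb.ToString();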
/// <summary>
///   Saves this model to disk using LibSVM's model format.
/// </summary>
///
/// <param name="stream">The stream where the file should be written.</param>
///
public void Save(Stream stream)
{
    StreamWriter writer = new StreamWriter(stream);

    writer.WriteLine("solver_type " + Solver.GetDescription().ToUpperInvariant());
    writer.WriteLine("nr_class " + NumberOfClasses);

    writer.Write("label");
    for (int i = 0; i < Labels.Length; i++)
    {
        writer.Write(" " + Labels[i]);
    }
    writer.WriteLine();

    writer.WriteLine("nr_feature " + NumberOfInputs);
    writer.WriteLine("bias " + Bias.ToString("G17", System.Globalization.CultureInfo.InvariantCulture));

    if (this.Vectors == null)
    {
        writer.WriteLine("w");
        for (int i = 0; i < Weights.Length; i++)
        {
            writer.WriteLine(Weights[i].ToString("G17", System.Globalization.CultureInfo.InvariantCulture) + " ");
        }
    }
    else
    {
        writer.WriteLine("SV");
        for (int i = 0; i < Vectors.Length; i++)
        {
            string alpha = Weights[i].ToString("G17", System.Globalization.CultureInfo.InvariantCulture);
            string values = Sparse.FromDense(Vectors[i]).ToString();
            writer.WriteLine(alpha + " " + values);
        }
    }

    writer.Flush();
}
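// A minimal usage sketch: 'svm' stands for a trained instance of the class defining the
// Save method above, and the file name is illustrative:
using (var stream = new System.IO.FileStream("model.libsvm", System.IO.FileMode.Create))
{
    svm.Save(stream);   // writes solver_type, nr_class, label, nr_feature, bias, then "w" or "SV"
}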
public Perceptron Treinar(bool verbose = false)
{
    Console.WriteLine($"Treinando perceptron usando os {QuantTreinamento} primeiros exemplos");

    int epoca = 0, erroTotal;
    var paraTreinar = Exemplos.Take(QuantTreinamento).ToArray();
    var início = DateTime.Now;

    while (true)
    {
        erroTotal = 0;
        foreach (var exemplo in paraTreinar)
        {
            var erro = Sinal(exemplo.Saída) - Sinal(Testar(exemplo.Normalizados));
            if (verbose)
            {
                Console.WriteLine($"  Acertou: {(erro == 0 ? "Sim" : "Não")}: {exemplo.ToString(this)}");
            }

            Bias += Magnitude * erro;
            for (int i = 0; i < Entradas; i++)
            {
                Pesos[i] += Magnitude * erro * exemplo.Normalizados[i];
            }

            erroTotal += erro == 0 ? 0 : 1;
        }

        if (verbose)
        {
            Console.WriteLine($"Época {epoca}, Total de erros: {erroTotal}\nBias: {Bias.ToString("0.00")}\nPesos: {string.Join(", ", Pesos.Select(p => p.ToString("0.00")))}");
        }

        if (erroTotal == 0)
        {
            break;
        }
        else
        {
            epoca++;
        }
    }

    Console.WriteLine($"Treinamento completo! Levou {epoca} épocas e {(DateTime.Now - início).TotalSeconds} segundos.");
    return this;
}
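// The body of the foreach above is the classic perceptron rule: the bias and each weight move
// by Magnitude (the learning rate) times the signed error. An illustrative standalone version
// of one such update on plain arrays (names here are hypothetical, not the class's members):
static void PerceptronStep(double[] weights, ref double bias, double[] x, int target, double learningRate)
{
    double sum = bias;
    for (int i = 0; i < x.Length; i++) sum += weights[i] * x[i];
    int predicted = sum >= 0 ? 1 : -1;    // plays the role of Sinal(Testar(...))
    int error = target - predicted;       // 0 when correct, ±2 when wrong
    bias += learningRate * error;         // Bias += Magnitude * erro
    for (int i = 0; i < x.Length; i++)
        weights[i] += learningRate * error * x[i];   // Pesos[i] += Magnitude * erro * x[i]
}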
static int trainingSetNumber = 2; // Applies to the approximation variant
#endregion

public static void Main(String[] args)
{
    Random gen = new Random();

    if (variant == Variant.transformation)
    {
        double succesfulOutputsCount = 0;
        double totalOutputsCount = 4 * executionsCount;

        for (int counter = 1; counter <= executionsCount; counter++)
        {
            string fileName = variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "EpochsDiffrences.xml";
            StreamWriter sw = new StreamWriter(fileName);
            XmlSerializer xs = new XmlSerializer(typeof(List<double>));
            List<double> EpochsMSEs = new List<double>();

            double[][] testSamples = LoadTrainingDataFromFileTransformation();
            double[][] finalInputOutput = null;
            List<double[]> trainingSet = new List<double[]>();
            RefillTrainingSet(trainingSet, testSamples);

            Neuron[] hiddenLayer = null;
            Neuron[] outputLayer = null;
            InitalizeLayers(ref hiddenLayer, ref outputLayer);

            for (int i = 1; i <= epochsCount; i++)
            {
                double EpochMSE = 0;
                double IterationError = 0;
                EpochMSE = 0;

                for (int j = trainingSet.Count; j > 0; j--)
                {
                    IterationError = 0;
                    int randomIndex = gen.Next(j);
                    double[] inputs1 = trainingSet[randomIndex];
                    double[] inputs2 = new double[hiddenLayerCount];

                    foreach (Neuron n in hiddenLayer) { n.Inputs = inputs1; }
                    for (int k = 0; k < hiddenLayer.Length; k++) { inputs2[k] = hiddenLayer[k].Output(); }
                    foreach (Neuron n in outputLayer) { n.Inputs = inputs2; }

                    double[] outputsErrors = new double[4];
                    for (int k = 0; k < outputLayer.Length; k++)
                    {
                        outputsErrors[k] = (inputs1[k] - outputLayer[k].Output());
                        IterationError += Pow(outputsErrors[k], 2);
                    }
                    for (int k = 0; k < outputLayer.Length; k++)
                    {
                        outputLayer[k].Error = Sigm.FunctionDerivative(outputLayer[k].Output()) * (outputsErrors[k]);
                    }
                    for (int k = 0; k < hiddenLayer.Length; k++)
                    {
                        double value = 0;
                        for (int l = 0; l < hiddenLayer[k].Weights.Length; l++)
                        {
                            value += Sigm.FunctionDerivative(hiddenLayer[k].Output()) * outputLayer[l].Error * outputLayer[l].Weights[k];
                        }
                        hiddenLayer[k].Error = value;
                    }
                    for (int k = 0; k < outputLayer.Length; k++) { outputLayer[k].UpdateWeights(); }
                    for (int k = 0; k < hiddenLayer.Length; k++) { hiddenLayer[k].UpdateWeights(); }

                    trainingSet.RemoveAt(randomIndex);
                    EpochMSE += IterationError;
                }

                EpochMSE /= 4;
                RefillTrainingSet(trainingSet, testSamples);
                if (i % 20 == 1) { EpochsMSEs.Add(EpochMSE); }
            }

            for (int i = 0; i < 4; i++)
            {
                int maxIndex = 0;
                double[] inputs1 = trainingSet[i];
                double[] inputs2 = new double[hiddenLayerCount];

                foreach (Neuron n in hiddenLayer) { n.Inputs = inputs1; }
                for (int j = 0; j < hiddenLayer.Length; j++) { inputs2[j] = hiddenLayer[j].Output(); }
                foreach (Neuron n in outputLayer) { n.Inputs = inputs2; }

                for (int j = 0; j < outputLayer.Length; j++)
                {
                    if (outputLayer[j].Output() > outputLayer[maxIndex].Output()) { maxIndex = j; }
                }

                List<int> indexes = GetNumbers(4);
                indexes.Remove(maxIndex);

                for (int j = 0; j < 4; j++)
                {
                    WriteLine($"Input: {trainingSet[i][j]} Output: {outputLayer[j].Output()}");
                }
                WriteLine();

                if (outputLayer[indexes[0]].Output() < 0.5 && outputLayer[indexes[1]].Output() < 0.5 &&
                    outputLayer[indexes[2]].Output() < 0.5 && outputLayer[maxIndex].Output() > 0.5)
                {
                    succesfulOutputsCount++;
                }
            }

            WriteLine("================================================");
            ReadKey();
            xs.Serialize(sw, EpochsMSEs);
        }

        WriteLine($"Successful: {succesfulOutputsCount} Total: {totalOutputsCount}");

        XmlSerializer xs1 = new XmlSerializer(typeof(double[]));
        using (StreamWriter sw1 = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution_stats.xml"))
        {
            xs1.Serialize(sw1, new double[] { succesfulOutputsCount, totalOutputsCount });
        }
        ReadKey();
    }

    if (variant == Variant.aproximation)
    {
        for (int counter = 1; counter <= executionsCount; counter++)
        {
            StreamWriter sw = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "EpochsDiffrences.xml");
            XmlSerializer xs = new XmlSerializer(typeof(List<ApproximationData>));
            List<ApproximationData> toSerialize = new List<ApproximationData>();

            List<double> trainingDataInputs = new List<double>();
            List<double> trainingDataOutputs = new List<double>();
            List<double> testingDataInputs = new List<double>();
            List<double> testingDataOutputs = new List<double>();
            LoadTrainingDataFromFileAproximation(trainingDataInputs, trainingDataOutputs, testingDataInputs, testingDataOutputs);

            Neuron[] hiddenLayer = new Neuron[hiddenLayerCount];
            Neuron[] outputLayer = new Neuron[1];
            for (int i = 0; i < hiddenLayer.Length; i++)
            {
                hiddenLayer[i] = new Neuron(1, 1);
                hiddenLayer[i].RandomizeValues();
            }
            outputLayer[0] = new Neuron(hiddenLayerCount, 2);
            outputLayer[0].RandomizeValues();

            double TrainingMSE = 0;
            for (int i = 1; i <= epochsCount; i++)
            {
                List<int> numbers = GetNumbers(trainingDataInputs.Count);
                List<double> finalOutput = new List<double>();
                TrainingMSE = 0;

                for (int j = 0; j < trainingDataInputs.Count; j++)
                {
                    int randomIndex = gen.Next(numbers.Count);
                    numbers.RemoveAt(randomIndex);

                    double[] hiddenLayerInputs = new double[] { trainingDataInputs[randomIndex] };
                    double[] outputLayerInputs = new double[hiddenLayerCount];

                    foreach (Neuron n in hiddenLayer) { n.Inputs = hiddenLayerInputs; }
                    for (int k = 0; k < hiddenLayer.Length; k++) { outputLayerInputs[k] = hiddenLayer[k].Output(); }
                    outputLayer[0].Inputs = outputLayerInputs;

                    double diffrence = 0;
                    diffrence = trainingDataOutputs[randomIndex] - outputLayer[0].Output();
                    TrainingMSE += Pow(diffrence, 2);

                    outputLayer[0].Error = Linear.FunctionDerivative(outputLayer[0].Output()) * diffrence;
                    for (int k = 0; k < hiddenLayer.Length; k++)
                    {
                        hiddenLayer[k].Error = Sigm.FunctionDerivative(hiddenLayer[k].Output()) * outputLayer[0].Error * outputLayer[0].Weights[k];
                        hiddenLayer[k].UpdateWeights();
                    }
                    outputLayer[0].UpdateWeights();
                }
                TrainingMSE /= trainingDataInputs.Count;

                double TestingMSE = 0;
                for (int j = 0; j < testingDataInputs.Count; j++)
                {
                    double[] hiddenLayerInputs = new double[] { testingDataInputs[j] };
                    double[] outputLayerInputs = new double[hiddenLayerCount];

                    foreach (Neuron n in hiddenLayer) { n.Inputs = hiddenLayerInputs; }
                    for (int k = 0; k < hiddenLayer.Length; k++) { outputLayerInputs[k] = hiddenLayer[k].Output(); }
                    outputLayer[0].Inputs = outputLayerInputs;

                    TestingMSE += Pow(testingDataOutputs[j] - outputLayer[0].Output(), 2);
                    if (i == epochsCount) { finalOutput.Add(outputLayer[0].Output()); }
                }

                if (i == epochsCount)
                {
                    XmlSerializer xs1 = new XmlSerializer(typeof(List<double>));
                    using (StreamWriter sw1 = new StreamWriter(variant.ToString() + "_" + bias.ToString() + "_Execution" + counter.ToString() + "FinalOuput.xml"))
                    {
                        xs1.Serialize(sw1, finalOutput);
                    }
                }
                TestingMSE /= testingDataInputs.Count;

                ApproximationData approximationData;
                approximationData.MSETrening = TrainingMSE;
                approximationData.MSETest = TestingMSE;
                if (i % 20 == 1) { toSerialize.Add(approximationData); }
            }

            xs.Serialize(sw, toSerialize);
        }
    }
}