/// <summary>
/// Verifies that a MexicanHatFunction formats itself as the expected
/// "[MexicanHatFunction:width=...,center=...]" string.
/// </summary>
public void TestToString()
{
    double[] parameters = { 5, 0, 0, 0 };
    var function = new MexicanHatFunction(3, parameters, 0);

    // Run one evaluation first, mirroring normal usage; the string form
    // is checked afterwards.
    double[] input = { -1, 0, 1 };
    function.Evaluate(input);

    Assert.AreEqual("[MexicanHatFunction:width=5.00,center=0.00,0.00,0.00]", function.ToString());
}
/// <summary>
/// Verifies the Mexican-hat RBF output for the input (-1, 0, 1) against a
/// precomputed expected value.
/// </summary>
public void TestEvaluate()
{
    double[] parameters = { 5, 0, 0, 0 };
    var function = new MexicanHatFunction(3, parameters, 0);

    double[] input = { -1, 0, 1 };
    double actual = function.Evaluate(input);

    Assert.AreEqual(-0.36787944117144233, actual, AIFH.DefaultPrecision);
}
/// <summary>
/// Exercises the remaining MexicanHatFunction accessors: dimension count,
/// per-index center get/set, and the width property.
/// </summary>
public void TestOther()
{
    double[] parameters = { 5, 0, 0, 0 };
    var function = new MexicanHatFunction(3, parameters, 0);

    Assert.AreEqual(3, function.Dimensions);

    // Center is writable per dimension and reads back the stored value.
    function.SetCenter(0, 100);
    Assert.AreEqual(100, function.GetCenter(0), AIFH.DefaultPrecision);

    // Width round-trips through the property.
    function.Width = 5;
    Assert.AreEqual(5, function.Width, AIFH.DefaultPrecision);
}
// Builds and trains an N-dimensional Kohonen self-organizing map from the
// instance's training vectors. Epoch/sample event handlers publish the
// intermediate state into the netP result object while training runs.
// NOTE(review): the fields read here (trainVectors, size, dimension, netUP,
// GO, counter, ...) are declared elsewhere in this class — confirm their
// semantics against the full type.
void Solve()
{
    #region prepare and assign
    // Copy each raw training vector into the library's TrainingSample container.
    trainingSet.Clear();
    for (int i = 0; i < trainVectorCount; i++)
    {
        List <double> dl = new List <double>();
        for (int j = 0; j < trainVectorDimension; j++)
        {
            dl.Add(trainVectors[i][j]);
        }
        trainingSet.Add(new TrainingSample(dl.ToArray()));
    }

    // Learning radius for the neighborhood function: half of the largest
    // lattice dimension.
    int learningRadius = 0;
    for (int i = 0; i < dimension; i++)
    {
        if (size[i] > learningRadius)
        {
            learningRadius = size[i];
        }
    }
    learningRadius /= 2;

    // Gaussian neighborhood by default; replaced by a Mexican-hat function
    // when the 'neighborhood' flag is set (the Gaussian instance is then
    // discarded unused).
    INeighborhoodFunction neighborhoodFunction = new GaussianFunction(learningRadius, netUP.neighborDistance) as INeighborhoodFunction;
    if (neighborhood)
    {
        neighborhoodFunction = new MexicanHatFunction(learningRadius) as INeighborhoodFunction;
    }

    // Lattice topology: 'latticeTopology' == true selects hexagonal,
    // otherwise rectangular.
    LatticeTopology topology = LatticeTopology.Rectangular;
    if (latticeTopology)
    {
        topology = LatticeTopology.Hexagonal;
    }

    // Instantiate the network layers and the connector between them.
    KohonenLayer inputLayer = new KohonenLayer(trainVectorDimension);
    KohonenLayerND outputLayer = new KohonenLayerND(size, neighborhoodFunction, topology);
    KohonenConnectorND connector = new KohonenConnectorND(inputLayer, outputLayer, netUP.initialNodes);

    // Seed the connection weights from user-supplied nodes when provided,
    // otherwise initialize uniformly at random in [0, 1].
    if (netUP.initialNodes.Length != 0)
    {
        connector.Initializer = new GivenInput(netUP.initialNodes);
    }
    else
    {
        connector.Initializer = new RandomFunction(0.0, 1.0);
    }

    outputLayer.SetLearningRate(learningRate, 0.05d); // 0.05 is the second-rate argument — presumably the final learning rate; verify against the library.
    outputLayer.IsDimensionCircular = isDimensionCircular;
    network = new KohonenNetworkND(inputLayer, outputLayer);
    network.useRandomTrainingOrder = randomTrainingOrder;
    inputLayer.ParallelComputation = false;
    outputLayer.ParallelComputation = parallelComputing;
    #endregion

    #region delegates
    // At the start of every epoch: snapshot all trained weight vectors into
    // 'trainedVectors' and publish a fresh CrowNetSOMNDP result. Bails out
    // early when the network is gone or the GO flag has been cleared.
    network.BeginEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs args)
        {
            #region trainingCylce
            if (network == null || !GO)
            {
                return;
            }
            trainedVectors = new double[outputLayer.neuronCount, trainVectorDimension];
            for (int i = 0; i < outputLayer.neuronCount; i++)
            {
                // Look up each neuron by its ND address and copy its incoming
                // synapse weights (one row per neuron).
                IList <ISynapse> synapses = (network.OutputLayer as KohonenLayerND)[outputLayer.adressBook[i]].SourceSynapses;
                for (int j = 0; j < trainVectorDimension; j++)
                {
                    trainedVectors[i, j] = synapses[j].Weight;
                }
            }
            //make new net here
            netP = new CrowNetSOMNDP(size, isDimensionCircular, latticeTopology, neighborhood, trainedVectors, outputLayer.adressBook);
            counter++;
            #endregion
        });

    // After every sample: record the ND coordinates of the winning neuron.
    network.EndSampleEvent += new TrainingSampleEventHandler(
        delegate(object senderNetwork, TrainingSampleEventArgs args)
        {
            netP.winner = outputLayer.WinnerND.CoordinateND;
        });
    #endregion

    // Run the training loop for the configured number of cycles.
    network.Learn(trainingSet, cycles);
}
// Builds and trains a 2-D Kohonen self-organizing map ("som" network type)
// and mirrors the trained weights into row/column/hexagonal coordinate
// arrays plus a Grasshopper data tree (GH_Structure) for display.
// NOTE(review): fields such as pointsList, trainingVectorTree, rowX/Y/Z,
// hexagonalX/Y/Z, isWinner, GO, counter are declared elsewhere in this
// class — confirm their shapes/semantics against the full type.
void Solve()
{
    CrowNetP NetP = new CrowNetP();
    if (netUP.netType == "som")
    {
        #region self organizing maps
        #region prepare and assign
        trainingSet.Clear();

        // Training data comes either from 3-D points (vector dimension 3) or
        // from a tree of numbers whose branch length sets the dimension.
        int trainVectorDimension = 3;
        if (trainDataArePoints)
        {
            for (int i = 0; i < pointsList.Count; i++)
            {
                trainingSet.Add(new TrainingSample(new double[] { pointsList[i].Value.X, pointsList[i].Value.Y, pointsList[i].Value.Z }));
            }
        }
        else
        {
            // Dimension is taken from the first branch; assumes all branches
            // have the same length — TODO confirm upstream validation.
            trainVectorDimension = trainingVectorTree.Branches[0].Count;
            trainingSet = new TrainingSet(trainVectorDimension);
            for (int i = 0; i < trainingVectorTree.Branches.Count; i++)
            {
                double[] values = new double[trainVectorDimension];
                for (int j = 0; j < trainVectorDimension; j++)
                {
                    values[j] = trainingVectorTree.Branches[i][j].Value;
                }
                trainingSet.Add(new TrainingSample(values));
            }
        }

        // Learning radius: half the larger lattice side.
        int learningRadius = Math.Max(layerWidth, layerHeight) / 2;

        // Gaussian neighborhood by default; Mexican-hat when requested
        // (the Gaussian instance is then discarded unused).
        INeighborhoodFunction neighborhoodFunction = new GaussianFunction(learningRadius, netUP.neighborDistance) as INeighborhoodFunction;
        if (neighborhood)
        {
            neighborhoodFunction = new MexicanHatFunction(learningRadius) as INeighborhoodFunction;
        }

        // Lattice topology flag: true selects hexagonal, otherwise rectangular.
        LatticeTopology topology = LatticeTopology.Rectangular;
        if (latticeTopology)
        {
            topology = LatticeTopology.Hexagonal;
        }

        // Instantiate and wire the layers.
        KohonenLayer inputLayer = new KohonenLayer(trainVectorDimension);
        KohonenLayer outputLayer = new KohonenLayer(new Size(layerWidth, layerHeight), neighborhoodFunction, topology);
        KohonenConnector connector = new KohonenConnector(inputLayer, outputLayer);
        connector.Initializer = randomizer;

        outputLayer.SetLearningRate(learningRate, 0.05d); // 0.05 is the second-rate argument — presumably the final learning rate; verify against the library.
        outputLayer.IsRowCircular = isCircularRows;
        outputLayer.IsColumnCircular = isCircularColumns;
        network = new KohonenNetwork(inputLayer, outputLayer);
        network.useRandomTrainingOrder = opt.UseRandomTraining;
        #endregion

        #region delegates
        // At the start of every epoch: copy all neuron weights into the
        // row/column/hexagonal display arrays and the GH number tree, then
        // publish a fresh CrowNetP snapshot.
        network.BeginEpochEvent += new TrainingEpochEventHandler(
            delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                #region TrainingCycle
                if (network == null || !GO)
                {
                    return;
                }

                // iPrev tracks the previous column index, used to offset
                // odd rows for the hexagonal layout; starts wrapped around.
                int iPrev = layerWidth - 1;
                allValuesTree = new GH_Structure <GH_Number>();
                for (int i = 0; i < layerWidth; i++)
                {
                    for (int j = 0; j < layerHeight; j++)
                    {
                        IList <ISynapse> synapses = (network.OutputLayer as KohonenLayer)[i, j].SourceSynapses;
                        // First three weights are interpreted as XYZ for display.
                        double x = synapses[0].Weight;
                        double y = synapses[1].Weight;
                        double z = synapses[2].Weight;
                        // Full weight vector goes into the GH tree under path (i, j).
                        for (int k = 0; k < trainVectorDimension; k++)
                        {
                            allValuesTree.Append(new GH_Number(synapses[k].Weight), new GH_Path(i, j));
                        }
                        rowX[j][i] = x;
                        rowY[j][i] = y;
                        rowZ[j][i] = z;
                        columnX[i][j] = x;
                        columnY[i][j] = y;
                        columnZ[i][j] = z;
                        // Hexagonal grid: odd rows keep their column, even rows
                        // are shifted to the previous column.
                        if (j % 2 == 1)
                        {
                            hexagonalX[i][j] = x;
                            hexagonalY[i][j] = y;
                            hexagonalZ[i][j] = z;
                        }
                        else
                        {
                            hexagonalX[iPrev][j] = x;
                            hexagonalY[iPrev][j] = y;
                            hexagonalZ[iPrev][j] = z;
                        }
                    }
                    iPrev = i;
                }

                // Circular rows/columns: duplicate the first entry at the end
                // so closed polylines/surfaces can be drawn.
                if (isCircularRows)
                {
                    for (int i = 0; i < layerHeight; i++)
                    {
                        rowX[i][layerWidth] = rowX[i][0];
                        rowY[i][layerWidth] = rowY[i][0];
                        rowZ[i][layerWidth] = rowZ[i][0];
                    }
                }
                if (isCircularColumns)
                {
                    for (int i = 0; i < layerWidth; i++)
                    {
                        columnX[i][layerHeight] = columnX[i][0];
                        columnY[i][layerHeight] = columnY[i][0];
                        columnZ[i][layerHeight] = columnZ[i][0];
                        hexagonalX[i][layerHeight] = hexagonalX[i][0];
                        hexagonalY[i][layerHeight] = hexagonalY[i][0];
                        hexagonalZ[i][layerHeight] = hexagonalZ[i][0];
                    }
                }

                // Reset the winner flags for the next epoch.
                Array.Clear(isWinner, 0, layerHeight * layerWidth);
                #endregion

                NetP = new CrowNetP("som", layerWidth, layerHeight, isCircularRows, isCircularColumns, latticeTopology, neighborhood, isWinner, rowX, rowY, rowZ, columnX, columnY, columnZ, hexagonalX, hexagonalY, hexagonalZ, allValuesTree);
                counter++;
            });

        // After every sample: mark the winning neuron's lattice cell.
        network.EndSampleEvent += new TrainingSampleEventHandler(
            delegate(object senderNetwork, TrainingSampleEventArgs args)
            {
                isWinner[network.Winner.Coordinate.X, network.Winner.Coordinate.Y] = true;
            });
        #endregion
        #endregion
    }

    // Run the training loop for the configured number of cycles.
    // NOTE(review): reached even when netType != "som", in which case
    // 'network' may be stale or null — verify intended behavior.
    network.Learn(trainingSet, cycles);
}