Example #1
        public override void Create(int input, int layers, int neurons, int output)
        {
            IStochasticFunction function = new GaussianFunction();

            // Set up the network with the requested number of hidden layers.
            switch (layers)
            {
            case 1:
                DeepAccordNetwork = new DeepBeliefNetwork(function, input, neurons, output);     // Activation function, input, 1 hidden layer, output.
                break;

            case 2:
                DeepAccordNetwork = new DeepBeliefNetwork(function, input, neurons, neurons, output);     // Activation function, input, 2 hidden layers, output.
                break;

            case 3:
                DeepAccordNetwork = new DeepBeliefNetwork(function, input, neurons, neurons, neurons, output);     // Activation function, input, 3 hidden layers, output.
                break;

            case 4:
                DeepAccordNetwork = new DeepBeliefNetwork(function, input, neurons, neurons, neurons, neurons, output);     // Activation function, input, 4 hidden layers, output.
                break;

            case 5:
                DeepAccordNetwork = new DeepBeliefNetwork(function, input, neurons, neurons, neurons, neurons, neurons, output);     // Activation function, input, 5 hidden layers, output.
                break;
            }

            new GaussianWeights(DeepAccordNetwork, 0.1).Randomize();
            DeepAccordNetwork.UpdateVisibleWeights();
        }
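Nothing in Create pre-trains the stacked machines; a greedy layer-wise pre-training pass in the style of the DeepBeliefNetworkLearning usage in Example #14 could look like the sketch below (the PreTrain name and the trainingData parameter are assumptions, not part of the original code):

        // Sketch: layer-wise unsupervised pre-training of DeepAccordNetwork (trainingData is hypothetical).
        public void PreTrain(double[][] trainingData, int epochs)
        {
            var teacher = new DeepBeliefNetworkLearning(DeepAccordNetwork)
            {
                Algorithm  = (h, v, i) => new ContrastiveDivergenceLearning(h, v),
                LayerIndex = 0,
            };

            for (int layer = 0; layer < DeepAccordNetwork.Machines.Count; layer++)
            {
                teacher.LayerIndex = layer;
                double[][] layerData = teacher.GetLayerInput(trainingData);

                for (int epoch = 0; epoch < epochs; epoch++)
                {
                    teacher.RunEpoch(layerData);
                }
            }

            DeepAccordNetwork.UpdateVisibleWeights();
        }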
Example #2
 public Classifier(
     DeepBeliefNetwork network,
     ClassifierConfiguration configuration,
     IGuiLogger logger) : this(logger, configuration)
 {
     _network = network;
 }
 private void loadButton_Clicked(object sender, EventArgs e)
 {
     if (openFileDialog2.ShowDialog() == DialogResult.OK)
     {
         _network = DeepBeliefNetwork.Load(openFileDialog2.FileName);
     }
 }
Example #4
        public NeutralNetwork(double[][] input, double[][] output, double[][] testInput, double[][] testOutput)
        {
            var network = new DeepBeliefNetwork(28 * 28, new int[] { 1000, 10 });   // 28*28 = 784 inputs, layers of 1000 and 10 units.

            new GaussianWeights(network).Randomize();
            network.UpdateVisibleWeights();

            var teacher = new DeepNeuralNetworkLearning(network)
            {
                Algorithm  = (ann, i) => new ParallelResilientBackpropagationLearning(ann),
                LayerIndex = network.Machines.Count - 1,
            };
            var layerData = teacher.GetLayerInput(input);

            for (int i = 0; i < 5000; i++)
            {
                teacher.RunEpoch(layerData, output);
            }
            network.UpdateVisibleWeights();
            // Copy the first test sample and run it through the trained network.
            var inputArr = new double[28 * 28];

            for (int i = 0; i < 28 * 28; i++)
            {
                inputArr[i] = testInput[0][i];
            }

            var a = network.Compute(inputArr);

            // Print the index of the strongest output, i.e. the predicted class.
            Console.WriteLine(Array.IndexOf(a, a.Max()));
        }
 private void ReplaceModelWithUpdate(DeepBeliefNetwork updatedNetwork)
 {
     _modelMutex.WaitOne();
     network = updatedNetwork;
     _modelMutex.ReleaseMutex();
     IsReady             = true;
     IsCurrentlyLearning = false;
 }
Example #6
 static void Main(string[] args)
 {
     var network = new DeepBeliefNetwork(new BernoulliFunction(), 1024, 50, 10);
     var teacher = new BackPropagationLearning(network)
     {
         LearningRate = 0.1,
         Momentum     = 0.9
     };
 }
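The teacher above is created but never run; inside Main, after the teacher is set up, a basic training loop might look like the sketch below (the inputs/outputs arrays and their loader helpers are hypothetical, not part of the original example):

     // Hypothetical training data: rows of 1024 inputs and 10 expected outputs.
     double[][] inputs  = LoadInputs();    // placeholder helper, not part of the original example
     double[][] outputs = LoadOutputs();   // placeholder helper, not part of the original example

     for (int epoch = 0; epoch < 1000; epoch++)
     {
         // RunEpoch returns the summed squared error for the epoch.
         double error = teacher.RunEpoch(inputs, outputs);
         if (error < 0.01)
         {
             break;
         }
     }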
Example #7
        public static DeepBeliefNetwork CreateAndGetNetwork(int inputsCount)
        {
            DeepBeliefNetwork network = new DeepBeliefNetwork(inputsCount, hiddenNeurons, hiddenNeurons);

            new GaussianWeights(network, stdDev).Randomize();
            network.UpdateVisibleWeights();

            return(network);
        }
Example #8
        private static void Main(string[] args)
        {
            if (Environment.OSVersion.Version.Major >= 6)
            {
                SetProcessDPIAware();
            }

            var tuple = GetData();

            double[][] dataTrainX = new double[(int)(tuple.mfcc_inputs.Length * 0.66)][];
            double[][] dataTestX  = new double[(int)(tuple.mfcc_inputs.Length * 0.33)][];

            bool[] dataTrainY = new bool[(int)(tuple.classes.Length * 0.66)];
            bool[] dataTestY  = new bool[(int)(tuple.classes.Length * 0.33)];

            Console.WriteLine("Loaded datasets (x): " + tuple.mfcc_inputs.Length);
            Console.WriteLine("Loaded datasets (y): " + tuple.classes.Length);

            Console.WriteLine("MFCC dimension: " + tuple.mfcc_inputs[0].Length);

            Console.WriteLine("Coefficient 1: " + (int)(tuple.mfcc_inputs.Length * 0.66));
            Console.WriteLine("Coefficient 2: " + (int)(tuple.mfcc_inputs.Length * 0.33));

            // Split: the first 66% of the rows go to training, the following 33% to testing.
            Array.Copy(tuple.mfcc_inputs, 0, dataTrainX, 0, dataTrainX.Length);
            Array.Copy(tuple.mfcc_inputs, dataTrainX.Length, dataTestX, 0, dataTestX.Length);

            Array.Copy(tuple.classes, 0, dataTrainY, 0, dataTrainY.Length);
            Array.Copy(tuple.classes, dataTrainY.Length, dataTestY, 0, dataTestY.Length);

            Console.WriteLine("Training dimension: (" + dataTrainX.Length + ", " + dataTrainY.Length + ")");
            Console.WriteLine("Test dimension: (" + dataTestX.Length + ", " + dataTestY.Length + ")");

            DeepBeliefNetwork network = Train(ref dataTrainX, ref dataTrainY);

            double[][] predictionProba = Predict(network, dataTestX);

            double similarity = 0;

            for (int i = 0; i < predictionProba.Length; i++)
            {
                bool classPredicted = predictionProba[i][0] > 0.5;
                bool classReal      = dataTestY[i];
                similarity += Convert.ToInt32(classPredicted == classReal);
            }
            // Fraction of correct predictions, reported as a percentage.
            similarity /= predictionProba.Length;

            Console.WriteLine("Similarity: " + similarity * 100 + "%");

            while (true)
            {
                Console.ReadKey(true);
            }
        }
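Train and Predict are not shown in this example; Predict's signature can be read off the call site, and a plausible body following the per-row Compute loop in Example #13 is sketched below as an assumption, not the original code:

        // Sketch of the missing Predict helper: run each test row through the trained network.
        private static double[][] Predict(DeepBeliefNetwork network, double[][] inputs)
        {
            var outputs = new double[inputs.Length][];

            for (int i = 0; i < inputs.Length; i++)
            {
                outputs[i] = network.Compute(inputs[i]);
            }

            return outputs;
        }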
Example #9
 private void LoadNetworks()
 {
     for (var i = 0; i <= Constants.MaxEmptiesCount; i++)
     {
         var networkFile = Path.Combine(networkPath, "deeplearning-19.net");
         if (!File.Exists(networkFile))
         {
             continue;
         }
         networks[i] = DeepBeliefNetwork.Load(networkFile);
     }
 }
        public double Learn(double[][] learnData, int[] learnLabel, double[][] testData, int[] testLabel)
        {
            Log.Write(this.GetType(), "Begin Learning");

            int nInputs  = learnData[0].Length;
            int nOutputs = learnLabel.Distinct().Count();

            double[][] labelDoubles = DictionaryTools.IntToDoubles(nOutputs, learnLabel);

            int[] layers =
            {
                nOutputs * 2, nOutputs
            };

            _neuralNetwork = new DeepBeliefNetwork(nInputs, layers);
            new GaussianWeights(_neuralNetwork).Randomize();
            _neuralNetwork.UpdateVisibleWeights();

            BackPropagationLearning learning = new BackPropagationLearning(_neuralNetwork);

            List <double> errorList = new List <double>();
            int           counter   = 1;

            while (true)
            {
                double error    = learning.RunEpoch(learnData, labelDoubles);
                double tmpError = 0;
                if (errorList.Count > 0)
                {
                    tmpError = errorList.Last();
                }
                errorList.Add(error);

                if (counter % 10 == 0)
                {
                    Log.Write(this.GetType(), $"Iteration {counter} | Score {Score(testData, testLabel)} | Error {error}");
                }

                // Stop once the error changes by less than 0.01 between consecutive epochs.
                if (Math.Abs(errorList.Last() - tmpError) < 0.01)
                {
                    break;
                }

                counter++;
            }

            double scoreResult = Score(testData, testLabel);

            Log.Write(this.GetType(), $"Final Score {scoreResult}");
            Log.Write(this.GetType(), "End Learning");

            return(scoreResult);
        }
Example #11
        public NeutralNetwork()
        {
            _network = new DeepBeliefNetwork(784, new int[] { 1000, 10 });
            new GaussianWeights(_network).Randomize();
            _network.UpdateVisibleWeights();

            _teacher = new DeepNeuralNetworkLearning(_network)
            {
                Algorithm  = (ann, i) => new ParallelResilientBackpropagationLearning(ann),
                LayerIndex = _network.Machines.Count - 1,
            };
        }
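The constructor only wires up the teacher; a fine-tuning pass would follow the same GetLayerInput/RunEpoch pattern as Example #4. A minimal sketch (the method name and the inputs/outputs parameters are assumptions):

        // Sketch: supervised fine-tuning of the top layer, then push weights back to the full network.
        public void Train(double[][] inputs, double[][] outputs, int epochs = 5000)
        {
            double[][] layerData = _teacher.GetLayerInput(inputs);

            for (int i = 0; i < epochs; i++)
            {
                _teacher.RunEpoch(layerData, outputs);
            }

            _network.UpdateVisibleWeights();
        }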
Example #12
 // Initialize: load a saved DBN network state
 public static bool Initialization(string Path)
 {
     try
     {
         DBNetwork = DeepBeliefNetwork.Load(Path);
         Console.WriteLine("建置完成");
     }
     catch (Exception ex)
     {
         Debug.WriteLine(ex.ToString());
         return(false);
     }
     return(true);
 }
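The matching save operation uses the network's Save method; a minimal sketch of the counterpart to Initialization (the SaveNetwork name is an assumption):

 // Sketch: persist the current DBN so Initialization(Path) can reload it later.
 public static bool SaveNetwork(string Path)
 {
     try
     {
         DBNetwork.Save(Path);
         return true;
     }
     catch (Exception ex)
     {
         Debug.WriteLine(ex.ToString());
         return false;
     }
 }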
Example #13
        public double[][] Compute(double[][] i)
        {
            if (n == null)
            {
                // Lazily load the network from the stored file path p on first use.
                n = DeepBeliefNetwork.Load(p);
            }

            List <double[]> d = new List <double[]>();

            for (int j = 0; j < i.Length; j++)
            {
                d.Add(n.Compute(i[j]));
            }
            return(d.ToArray());
        }
Example #14
        public void Train(double[][] i, double[][] o = null, int outputLength = 10, int hiddenLayer = -1)
        {
            if (n == null)
            {
                if (File.Exists(p))
                {
                    n = DeepBeliefNetwork.Load(p);
                }
                else
                {
                    outputLength = (o == null) ? outputLength : o[0].Length;
                    hiddenLayer  = (hiddenLayer == -1) ? (int)Math.Log(i[0].Length, outputLength) : hiddenLayer;
                    List <int> layers = new List <int>();
                    for (int j = 0; j < hiddenLayer; j++)
                    {
                        layers.Add(i[0].Length);
                    }
                    layers.Add(outputLength);
                    n = new DeepBeliefNetwork(new BernoulliFunction(), i[0].Length, layers.ToArray());
                    new GaussianWeights(n).Randomize();
                }
            }

            dynamic t;   // teacher: unsupervised (DeepBeliefNetworkLearning) or supervised (DeepNeuralNetworkLearning)

            if (o == null)
            {
                t = new DeepBeliefNetworkLearning(n)
                {
                    Algorithm = (h, v, j) => new ContrastiveDivergenceLearning(h, v), LayerIndex = n.Machines.Count - 1,
                };
                while (true)   // note: no exit condition; this loop runs indefinitely
                {
                    e = t.RunEpoch(t.GetLayerInput(i));
                }
            }
            else
            {
                t = new DeepNeuralNetworkLearning(n)
                {
                    Algorithm = (ann, j) => new ParallelResilientBackpropagationLearning(ann), LayerIndex = n.Machines.Count - 1,
                };
                while (true)   // note: no exit condition; this loop runs indefinitely
                {
                    e = t.RunEpoch(t.GetLayerInput(i), o);
                }
            }
        }
        public void Initialize(int inputCount, int hiddenLayerNodes, double usLR = 0.8, double usM = 0.8, double usD = 0.9, double sLR = 0.2, double sM = 0.21)
        {
            IsReady          = false;
            HiddenLayerNodes = hiddenLayerNodes;
            unsupervisedLR   = usLR;
            unsupervisedM    = usM;
            unsupervisedD    = usD;
            supervisedLR     = sLR;
            supervisedM      = sM;

            network = new DeepBeliefNetwork(inputCount, HiddenLayerNodes, 1);
            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();

            CreateTeachers(network);
        }