Example #1
 public BackpropAlgorithm(ConvNet net) : base(net)
 {
     m_EpochCount          = DFT_EPOCH_COUNT;
     m_LearningRate        = DFT_LEARNING_RATE;
     m_Stop                = DTF_STOP_CRITERIA;
     m_BatchSize           = DFT_BATCH_SIZE;
     m_MaxBatchThreadCount = DFT_BATCH_THREAD_COUNT;
 }
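For orientation, a minimal construction sketch follows. Only the BackpropAlgorithm(ConvNet) constructor is taken from the code above; the BuildConvNet helper and the idea that the DFT_* defaults can later be adjusted are assumptions for illustration, not part of the library excerpt.

 // hypothetical usage sketch; BuildConvNet and the override step are assumptions
 ConvNet net = BuildConvNet();               // stand-in for however the network is assembled
 var algorithm = new BackpropAlgorithm(net); // applies the DFT_* defaults shown above
 // the defaults (epoch count, learning rate, batch size, stop criteria) would then be
 // adjusted through whatever public settings the class exposes before training starts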
Example #2
        private void init()
        {
            m_EpochCount   = DFT_EPOCH_COUNT;
            m_BatchSize    = DFT_BATCH_SIZE;
            m_LearningRate = DFT_LEARNING_RATE;
            m_Stop         = DTF_STOP_CRITERIA;
            m_EpochLength  = TrainingSample.Count;
            m_InputDim     = Result.InputDim;
            m_OutputDim    = Result[Result.LayerCount - 1].NeuronCount;

            m_Errors = new double[Result.LayerCount][];
            for (int i = 0; i < Result.LayerCount; i++)
            {
                var ncount = Result[i].NeuronCount;
                m_Errors[i] = new double[ncount];
            }

            m_Gradient = new double[Result.LayerCount][,];
            for (int i = 0; i < Result.LayerCount; i++)
            {
                var pcount = (i > 0) ? Result[i - 1].NeuronCount : m_InputDim;
                var lcount = Result[i].NeuronCount;
                m_Gradient[i] = new double[lcount, pcount + 1]; // take bias into account
            }

            // build a one-hot expected output vector for every class
            m_ExpectedOutputs = new Dictionary<Class, double[]>();
            var count = Classes.Count;

            if (count != OutputDim)
            {
                throw new MLException("Number of classes must be equal to dimension of output vector");
            }

            for (int i = 0; i < count; i++)
            {
                // find the class whose numeric value equals the output index i
                var cls = Classes.FirstOrDefault(p => (int)p.Value.Value == i).Value;
                if (cls == null)
                {
                    throw new MLException(string.Format("There is no class with value {0}. It is necessary to have a full set of classes with values from 0 to {1}", i, count - 1));
                }

                var output = new double[count];
                output[i] = 1.0D;
                m_ExpectedOutputs[cls] = output;
            }
        }
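The final loop builds a one-hot target vector per class: for the class whose numeric value is i, the expected network output has 1.0 at position i and 0.0 everywhere else. A stripped-down illustration of just that encoding, independent of the library types (OneHot is a hypothetical helper, not part of the code above):

        // minimal sketch of the one-hot encoding used above (hypothetical helper)
        static double[] OneHot(int classIndex, int classCount)
        {
            var output = new double[classCount]; // all elements default to 0.0
            output[classIndex] = 1.0D;           // mark the expected class
            return output;
        }

        // OneHot(2, 4) -> { 0.0, 0.0, 1.0, 0.0 }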
Example #3
        private void init()
        {
            m_EpochCount   = DFT_EPOCH_COUNT;
            m_BatchSize    = DFT_BATCH_SIZE;
            m_LearningRate = DFT_LEARNING_RATE;
            m_Stop         = DTF_STOP_CRITERIA;
            m_InputDim     = Net.InputDim;
            m_OutputDim    = Net[Net.LayerCount - 1].NeuronCount;

            m_Errors = new double[Net.LayerCount][];
            for (int i = 0; i < Net.LayerCount; i++)
            {
                var ncount = Net[i].NeuronCount;
                m_Errors[i] = new double[ncount];
            }

            m_Gradient = new double[Net.LayerCount][,];
            for (int i = 0; i < Net.LayerCount; i++)
            {
                var pcount = (i > 0) ? Net[i - 1].NeuronCount : m_InputDim;
                var lcount = Net[i].NeuronCount;
                m_Gradient[i] = new double[lcount, pcount + 1]; // take bias into account
            }
        }
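The gradient matrix allocated for layer i has one row per neuron and one column per input coming from the previous layer (or from the input vector for the first layer), plus one extra column for the bias term. With illustrative sizes, say InputDim = 3 and layers of 4 and 2 neurons, the loop above would allocate:

        // illustrative shapes only (InputDim = 3, layer sizes 4 and 2):
        // m_Gradient[0] -> double[4, 4]   4 neurons x (3 inputs  + 1 bias)
        // m_Gradient[1] -> double[2, 5]   2 neurons x (4 neurons + 1 bias)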
Example #4
 public override string ToString()
 {
     return(string.Format("Selection type: {0} \nStop criteria: {1}\nContest size: {2}\nPopulation size: {3}\n" +
                          "Generation count: {4}\nTime: {5}\nMin range: {6}\nMax range: {7}", SelectionMode.ToString(), StopCriteria.ToString(),
                          ContestSize, PopulationSize, GenerationCount, Time, MinRange, MaxRange));
 }
Example #5
 public StopDecision RunOnce()
 {
     Algorithm.RunOnce();
     return(StopCriteria.MakeDecision(this, Algorithm.Population));
 }
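RunOnce performs a single iteration of the wrapped Algorithm and then asks the StopCriteria for a decision. The excerpt does not show what StopDecision exposes, so the driver loop below is only a sketch; runner and ShouldStop are assumed names, not the library's API.

 // hypothetical driver loop; StopDecision's members are assumed, not shown in this excerpt
 StopDecision decision;
 do
 {
     decision = runner.RunOnce();  // runner: the object exposing RunOnce() above
 }
 while (!decision.ShouldStop);     // ShouldStop is an assumed flag on StopDecision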