/// <summary>
/// Computes the per-neuron output error against the expected targets and
/// reduces it to a single scalar cost for the requested cost function.
/// </summary>
/// <param name="expected">Target output values; must supply at least numberofOutputs entries.</param>
/// <param name="cType">Cost function to apply (MSE or SSE).</param>
/// <returns>The scalar cost, or 0 for an unsupported cost type.</returns>
public float CalcCostFunction(float[] expected, CostFunctionType cType)
{
    // Residual per output neuron; stored in 'error' (presumably reused by
    // backpropagation elsewhere — TODO confirm against the trainer).
    for (int i = 0; i < numberofOutputs; i++)
    {
        error[i] = outputs[i] - expected[i];
    }

    float sol = 0.0f;
    switch (cType)
    {
        case CostFunctionType.MSE:
            // Mean Squared Error: average of SQUARED residuals.
            // The original summed raw residuals, which lets positive and
            // negative errors cancel and is not MSE.
            float squaredSum = 0f;
            for (int i = 0; i < error.Length; i++)
            {
                squaredSum += error[i] * error[i];
            }
            // Guard against an empty error vector (avoids NaN from 0/0).
            sol = error.Length > 0 ? squaredSum / error.Length : 0f;
            break;

        case CostFunctionType.SSE:
            // Sum of Squared Errors: sum of SQUARED residuals
            // (same squaring fix as the MSE branch above).
            float sum = 0f;
            for (int i = 0; i < error.Length; i++)
            {
                sum += error[i] * error[i];
            }
            sol = sum;
            break;

        default:
            // Unsupported cost types fall through and return 0 (original behavior).
            break;
    }
    return sol;
}
/// <summary>
/// Bundles the settings that describe how a network is constructed and trained:
/// the cost function, the learning-rate decay strategy, and the input geometry.
/// </summary>
/// <param name="costFunctionType">Cost function the network optimizes.</param>
/// <param name="decayer">Strategy used to decay the learning rate over time.</param>
/// <param name="inputDimenision">Spatial dimension of the input.</param>
/// <param name="inputChannels">Number of channels in the input.</param>
public NetworkConfiguration(
    CostFunctionType costFunctionType,
    LearningRateDecayer decayer,
    int inputDimenision,
    int inputChannels)
{
    CostFunctionType = costFunctionType;
    Decayer = decayer;
    InputDimenision = inputDimenision;
    InputChannels = inputChannels;
}
/// <summary>
/// Factory that creates the cost-function implementation matching the given type.
/// </summary>
/// <param name="type">Cost function selector.</param>
/// <returns>A freshly constructed <see cref="ICostFunction"/> instance.</returns>
/// <exception cref="NotSupportedException">
/// Thrown when <paramref name="type"/> has no corresponding implementation.
/// </exception>
public static ICostFunction Produce(CostFunctionType type)
{
    switch (type)
    {
        case CostFunctionType.Quadratic:
            return new QuadraticCostFunction();
        case CostFunctionType.SoftMax:
            return new SoftMaxCostFunction();
        case CostFunctionType.CrossEntropy:
            return new CrossEntropyCostFunction();
        default:
            // NotSupportedException is more specific than the bare Exception
            // the original threw; callers catching Exception still work.
            throw new NotSupportedException("Cost function is not supported");
    }
}
/// <summary>
/// Resolves a cost function together with its derivative for the given type.
/// </summary>
/// <param name="type">Cost function selector.</param>
/// <returns>A (cost, derivative) delegate pair.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <paramref name="type"/> is not one of the supported cost functions.
/// </exception>
public static (CostFunction, CostFunctionPrime) GetCostFunctions(CostFunctionType type)
{
    if (type == CostFunctionType.Quadratic)
    {
        return (CostFunctions.QuadraticCost, CostFunctions.QuadraticCostPrime);
    }

    if (type == CostFunctionType.CrossEntropy)
    {
        return (CostFunctions.CrossEntropyCost, CostFunctions.CrossEntropyCostPrime);
    }

    if (type == CostFunctionType.LogLikelyhood)
    {
        // Log-likelihood deliberately shares the cross-entropy derivative here.
        return (CostFunctions.LogLikelyhoodCost, CostFunctions.CrossEntropyCostPrime);
    }

    throw new InvalidOperationException("Unsupported cost function");
}
/// <summary>
/// Constructs a fully configured network: stores the hyper-parameters, then
/// initializes the activation function, biases, weights, batch buffers, and
/// backpropagation state.
/// </summary>
/// <param name="layerLengths">Neuron count per layer; also fixes the layer count.</param>
/// <param name="learnRate">Gradient-descent learning rate.</param>
/// <param name="batchSize">Mini-batch size; values &lt;= 0 fall back to 1.</param>
/// <param name="dropoutKeepRate">Probability of keeping a neuron under dropout.</param>
/// <param name="weightDecayRate">L2-style weight decay coefficient — TODO confirm the exact regularization used downstream.</param>
/// <param name="activisionType">Activation function for hidden layers.</param>
/// <param name="activisionTypeOutput">Activation function for the output layer.</param>
/// <param name="costType">Cost function used during training.</param>
/// <param name="initializationType">Weight/bias initialization scheme.</param>
/// <param name="initializationSeed">Base RNG seed for bias/weight initialization.</param>
/// <param name="activisionCoeffitient">Coefficient forwarded to the activation function.</param>
/// <param name="dropoutSeed">RNG seed for dropout masks.</param>
public NeuralNetwork(
    int[] layerLengths,
    float learnRate,
    int batchSize,
    float dropoutKeepRate,
    float weightDecayRate,
    ActivisionFunctionType activisionType,
    ActivisionFunctionType activisionTypeOutput,
    CostFunctionType costType,
    InitializationType initializationType,
    int initializationSeed,
    float activisionCoeffitient,
    int dropoutSeed)
{
    m_layerLengths = layerLengths;
    m_layerCount = m_layerLengths.Length;
    m_learnRate = learnRate;
    m_activisionCoeffitient = activisionCoeffitient;
    m_dropoutKeepRate = dropoutKeepRate;
    m_weightDecayRate = weightDecayRate;
    m_initDropoutSeed = dropoutSeed;
    m_currentDropoutSeed = dropoutSeed;

    // Normalize the batch size BEFORE assigning it. The original code set
    // m_batchSize = 1 inside this branch and then unconditionally overwrote
    // it with the invalid value on the next line, so the fallback never
    // actually applied.
    if (batchSize <= 0)
    {
        Debug.Log("Info: batch size was <= 0 (" + batchSize + "). It was set to a default value of 1!");
        batchSize = 1;
    }
    m_batchSize = batchSize;

    m_activisionFunctionType = activisionType;
    m_activisionFunctionTypeOutput = activisionTypeOutput;
    m_costFunctionType = costType;
    m_initializationType = initializationType;

    SetActivisionFunction(activisionType);
    InitializeBiases(null, initializationSeed);

    // Advance the seed past the neurons consumed by bias initialization so
    // the weights draw from a different stream.
    for (int i = 1; i < m_layerCount; i++)
    {
        initializationSeed += m_layerLengths[i];
    }
    InitializeWeights(null, initializationSeed);

    InitializeBatch();
    InitializeBackPropagation();
}
/// <summary>
/// Returns a <see cref="LayerFactory"/> that, given its input layer, builds a
/// fully connected output layer with the requested neuron count, activation,
/// cost function, and initialization modes.
/// </summary>
/// <param name="neurons">Number of neurons in the output layer.</param>
/// <param name="activation">Activation function applied to the layer.</param>
/// <param name="cost">Cost function associated with the output layer.</param>
/// <param name="weightsMode">Weight initialization scheme (defaults to Glorot uniform).</param>
/// <param name="biasMode">Bias initialization scheme (defaults to zero).</param>
public static LayerFactory FullyConnected(
    int neurons,
    ActivationFunctionType activation,
    CostFunctionType cost,
    WeightsInitializationMode weightsMode = WeightsInitializationMode.GlorotUniform,
    BiasInitializationMode biasMode = BiasInitializationMode.Zero)
{
    // Defer construction: the factory captures the configuration and builds
    // the OutputLayer only once the preceding layer is known.
    return input => new OutputLayer(input, neurons, activation, cost, weightsMode, biasMode);
}