/// <summary>
/// Backpropagates "errors" to the previous layer for future use
/// </summary>
protected override void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevErrors, double[][,] errors)
{
    if (prevLayer == null)
    {
        throw new MLException("Prev layer is null");
    }

    for (int p = 0; p < m_InputDepth; p++)
    {
        // Offset of input channel p within each feature map's kernel parameters
        var kernelOffset = p * m_KernelParamCount;

        for (int y = 0; y < m_WindowHeight; y++)
        {
            for (int x = 0; x < m_WindowWidth; x++)
            {
                var weightIdx = x + y * m_WindowWidth + kernelOffset;

                // Accumulate gradient over all output feature maps
                var grad = 0.0D;
                for (int q = 0; q < m_OutputDepth; q++)
                {
                    grad += errors[q][0, 0] * m_Weights[weightIdx + q * m_FeatureMapParamCount]; // Kernel(q, p, y, x)
                }

                // Chain rule: scale by the previous layer's activation derivative (1 if no activation)
                var activation = prevValues[p][y, x];
                var derivative = (prevLayer.ActivationFunction == null)
                                   ? 1
                                   : prevLayer.ActivationFunction.DerivativeFromValue(activation);
                prevErrors[p][y, x] = grad * derivative;
            }
        }
    }
}
/// <summary>
/// Backpropagate "errors" to previous layer for future use
/// </summary>
/// <param name="prevLayer">Previous layer</param>
/// <param name="prevValues">Previous layer output values</param>
/// <param name="prevErrors">Buffer that receives the previous layer gradient "errors" (cleared, then accumulated into)</param>
/// <param name="errors">Current layer gradient "errors"</param>
/// <exception cref="MLException">Thrown when prevLayer is null</exception>
protected override void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevErrors, double[][,] errors)
{
    // Guard added for consistency with the other DoBackprop overrides:
    // prevLayer.ActivationFunction is dereferenced below
    if (prevLayer == null)
    {
        throw new MLException("Prev layer is null");
    }

    // Pooling routes errors many-to-one; only max positions receive gradient,
    // so the whole buffer must start from zero
    for (int i = 0; i < prevErrors.Length; i++)
    {
        Array.Clear(prevErrors[i], 0, prevErrors[i].Length);
    }

    var maxPos = MaxIndexPositions;

    // backpropagate "errors" to previous layer for future use:
    // each output error flows back only to the input cell that produced the max
    for (int q = 0; q < m_OutputDepth; q++)
    {
        for (int i = 0; i < m_OutputHeight; i++)
        {
            for (int j = 0; j < m_OutputWidth; j++)
            {
                var xmaxIdx = maxPos.Value[q][i, j, 0];
                var ymaxIdx = maxPos.Value[q][i, j, 1];
                var value = prevValues[q][ymaxIdx, xmaxIdx];
                var deriv = (prevLayer.ActivationFunction != null) ? prevLayer.ActivationFunction.DerivativeFromValue(value) : 1;
                // += because several output cells may share the same max position
                prevErrors[q][ymaxIdx, xmaxIdx] += errors[q][i, j] * deriv;
            }
        }
    }
}
/// <summary>
/// Backpropagate "errors" to previous layer for future use.
/// Entry point guarded by the training-mode flag; delegates to the layer-specific DoBackprop.
/// </summary>
/// <param name="prevLayer">Previous layer</param>
/// <param name="prevValues">Previous layer output values</param>
/// <param name="prevErrors">Buffer that receives the previous layer gradient "errors"</param>
/// <param name="errors">Current layer gradient "errors"</param>
/// <exception cref="MLException">Thrown when the layer is not in training mode</exception>
public void Backprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevErrors, double[][,] errors)
{
    if (!m_IsTraining)
    {
        throw new MLException("Backpropagation can not run in test mode");
    }

    DoBackprop(prevLayer, prevValues, prevErrors, errors);
}
/// <summary>
/// Backpropagate "errors" to previous layer for future use
/// </summary>
protected override void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevErrors, double[][,] errors)
{
    if (prevLayer == null)
    {
        throw new MLException("Prev layer is null");
    }

    // For each input cell (p, i, j), accumulate contributions from every output
    // cell (q, k, m) whose receptive window covers it, weighted by the kernel
    // value at the input cell's position (y, x) inside that window.
    for (int p = 0; p < m_InputDepth; p++)
    {
        for (int i = 0; i < m_InputHeight; i++)
        {
            for (int j = 0; j < m_InputWidth; j++)
            {
                var g = 0.0D;
                for (int q = 0; q < m_OutputDepth; q++)
                {
                    // y tracks i + padding - k*stride (the kernel row of input row i
                    // within output row k's window); start one stride high so the
                    // first in-loop decrement corresponds to k = 0
                    var y = i + m_PaddingHeight + m_StrideHeight;
                    for (int k = 0; k < m_OutputHeight; k++)
                    {
                        y -= m_StrideHeight;
                        if (y >= m_WindowHeight) { continue; } // window has not yet reached input row i
                        if (y < 0) { break; }                  // window moved past row i; y only shrinks for larger k
                        var x = j + m_PaddingWidth + m_StrideWidth;
                        for (int m = 0; m < m_OutputWidth; m++)
                        {
                            x -= m_StrideWidth;
                            if (x >= m_WindowWidth) { continue; } // window has not yet reached input col j
                            if (x < 0) { break; }                 // window moved past col j; x only shrinks for larger m
                            // Flattened weight index: kernel cell (y, x) of input channel p, feature map q
                            var idx = x + y * m_WindowWidth + p * m_KernelParamCount + q * m_FeatureMapParamCount;
                            g += errors[q][k, m] * m_Weights[idx]; // Kernel(q, p, y, x)
                        }
                    }
                }

                // Chain rule: scale by previous layer's activation derivative (1 if no activation)
                var value = prevValues[p][i, j];
                var deriv = (prevLayer.ActivationFunction != null) ? prevLayer.ActivationFunction.DerivativeFromValue(value) : 1;
                prevErrors[p][i, j] = g * deriv;
            }
        }
    }
}
/// <summary>
/// Backpropagate "errors" to previous layer for future use
/// </summary>
/// <param name="prevLayer">Previous layer</param>
/// <param name="prevValues">Previous layer output values</param>
/// <param name="prevError">Buffer that receives the previous layer gradient "errors"</param>
/// <param name="errors">Current layer gradient "errors"</param>
/// <exception cref="MLException">Thrown when prevLayer is null</exception>
protected override void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevError, double[][,] errors)
{
    // Guard added for consistency with the other DoBackprop overrides:
    // prevLayer.ActivationFunction is dereferenced below
    if (prevLayer == null)
    {
        throw new MLException("Prev layer is null");
    }

    // Element-wise backprop: input and output geometry match, so each error
    // is simply scaled by the previous layer's activation derivative
    for (int p = 0; p < m_OutputDepth; p++)
    {
        for (int i = 0; i < m_OutputHeight; i++)
        {
            for (int j = 0; j < m_OutputWidth; j++)
            {
                var value = prevValues[p][i, j];
                var deriv = (prevLayer.ActivationFunction != null) ? prevLayer.ActivationFunction.DerivativeFromValue(value) : 1;
                prevError[p][i, j] = errors[p][i, j] * deriv;
            }
        }
    }
}
/// <summary>
/// Backpropagates "errors" to the previous layer for future use
/// </summary>
protected override void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevError, double[][,] errors)
{
    if (prevLayer == null)
    {
        throw new MLException("Prev layer is null");
    }

    // NOTE(review): stride is m_InputDepth + 1 — the extra slot presumably
    // holds a per-output bias weight; confirm against the weight layout
    var weightStride = m_InputDepth + 1;

    for (int p = 0; p < m_InputDepth; p++)
    {
        // Accumulate gradient from all output neurons
        var grad = 0.0D;
        for (int q = 0; q < m_OutputDepth; q++)
        {
            grad += errors[q][0, 0] * m_Weights[p + q * weightStride]; // Kernel(q, p, 0, 0)
        }

        // Chain rule: scale by previous layer's activation derivative (1 if no activation)
        var activation = prevValues[p][0, 0];
        var derivative = (prevLayer.ActivationFunction == null)
                           ? 1
                           : prevLayer.ActivationFunction.DerivativeFromValue(activation);
        prevError[p][0, 0] = grad * derivative;
    }
}
/// <summary>
/// Backpropagate "errors" to previous layer for future use.
/// Not implemented yet: calling Backprop on this layer will always throw.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown</exception>
protected override void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevErrors, double[][,] errors)
{
    throw new NotImplementedException(); //TODO
}
/// <summary>
/// Backpropagate "errors" to previous layer for future use (core implementation).
/// Invoked by the public Backprop entry point after its training-mode check;
/// implementations write the previous layer's gradient into prevError in place.
/// </summary>
/// <param name="prevLayer">Previous layer</param>
/// <param name="prevValues">Previous layer output values</param>
/// <param name="prevError">Buffer that receives the previous layer gradient "errors"</param>
/// <param name="errors">Current layer gradient "errors"</param>
protected abstract void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevError, double[][,] errors);
/// <summary>
/// Backpropagate "errors" to previous layer for future use.
/// This layer does not support backpropagation; calling Backprop on it always throws.
/// </summary>
/// <exception cref="NotSupportedException">Always thrown</exception>
protected override void DoBackprop(DeepLayerBase prevLayer, double[][,] prevValues, double[][,] prevError, double[][,] errors)
{
    throw new NotSupportedException();
}