Accord.Neuro.Learning.ResilientBackpropagationLearning.CalculateGradient C# (CSharp) Method

CalculateGradient() private method

Calculates weight updates
private CalculateGradient ( double[] input ) : void
input double[] Network's input vector.
return void
        /// <summary>
        ///   Accumulates error-gradient terms (weight and threshold derivatives)
        ///   for every layer of the network, given the network's input vector.
        /// </summary>
        /// 
        /// <param name="input">Network's input vector.</param>
        /// 
        private void CalculateGradient(double[] input)
        {
            // First layer: weight derivatives are driven by the raw input vector.
            ActivationLayer firstLayer = network.Layers[0] as ActivationLayer;
            double[] errors = neuronErrors[0];
            double[][] layerDerivatives = weightsDerivatives[0];
            double[] thresholdDerivatives = thresholdsDerivatives[0];

            for (int n = 0; n < firstLayer.Neurons.Length; n++)
            {
                ActivationNeuron activationNeuron = firstLayer.Neurons[n] as ActivationNeuron;
                double[] derivatives = layerDerivatives[n];
                double error = errors[n];

                // Accumulate dE/dw = error * input for every incoming weight.
                for (int w = 0; w < activationNeuron.InputsCount; w++)
                {
                    derivatives[w] += error * input[w];
                }

                // The threshold (bias) derivative is simply the neuron's error.
                thresholdDerivatives[n] += error;
            }

            // Hidden/output layers: weight derivatives are driven by the
            // outputs of the previous layer's neurons.
            for (int l = 1; l < network.Layers.Length; l++)
            {
                ActivationLayer currentLayer = network.Layers[l] as ActivationLayer;
                ActivationLayer previousLayer = network.Layers[l - 1] as ActivationLayer;

                errors = neuronErrors[l];
                layerDerivatives = weightsDerivatives[l];
                thresholdDerivatives = thresholdsDerivatives[l];

                for (int n = 0; n < currentLayer.Neurons.Length; n++)
                {
                    double[] derivatives = layerDerivatives[n];
                    double error = errors[n];

                    // Accumulate dE/dw = error * output of the connected
                    // neuron from the previous layer.
                    for (int w = 0; w < previousLayer.Neurons.Length; w++)
                    {
                        derivatives[w] += error * previousLayer.Neurons[w].Output;
                    }

                    thresholdDerivatives[n] += error;
                }
            }
        }
    }