AdvancedOCR.ConvolutionWeights.PropogateUnitSecondDerivatives C# (CSharp) Method

PropogateUnitSecondDerivatives() protected method

protected PropogateUnitSecondDerivatives ( RectangularStep upstream, RectangularStep downstream, int weightX, int weightY, int mapNumber ) : void

Parameters:
    upstream     RectangularStep
    downstream   RectangularStep
    weightX      int
    weightY      int
    mapNumber    int

Returns: void
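
As a sketch of what the example below computes (using the notation from its comments: Aj is the weighted input sum of a downstream unit, Oi the output of an upstream unit, and W the shared convolution weight), the two accumulations correspond, under the Gauss-Newton diagonal approximation described in LeCun et al., to roughly:

    \frac{\partial^2 E}{\partial W^2} \;\approx\; \sum_{j} \frac{\partial^2 E}{\partial A_j^2}\, O_i^2
    \qquad\text{and}\qquad
    \frac{\partial^2 E}{\partial O_i^2} \;\mathrel{+}=\; W^2 \,\frac{\partial^2 E}{\partial A_j^2}

The first sum runs over every downstream position that uses the shared weight; the second term is accumulated into upstream.ErrorDerivative and, as the comments note, still has to be combined with the previous layer's activation derivative.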
        protected void PropogateUnitSecondDerivatives(RectangularStep upstream, RectangularStep downstream, int weightX, int weightY, int mapNumber)
        {
            double weight2ndDerivative = 0;

            int weightIndex = mapNumber * Width * Height + weightY * Width + weightX;
            double weight = Weight[weightIndex];

            int downstreamIndex = 0;
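            // Start at the upstream position that lines up with kernel element (weightX, weightY) for the
            // downstream unit at (0, 0); the index math assumes a valid (non-padded) convolution.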
            int upstreamIndex = (weightY * upstream.Width) + weightX;

            // The nested loops below implement the sum (sigma) in Equation 19 of "Gradient-Based Learning Applied to Document Recognition".
            for (int y = 0; y < downstream.Height; y++)
            {
                for (int x = 0; x < downstream.Width; x++)
                {
                    double upstreamInput = upstream.Output[upstreamIndex];
                    double downstreamError2ndDerivative = downstream.ErrorDerivative[downstreamIndex]; // (d^2)E/(dAj)^2, where Aj is the sum of inputs to this downstream unit.

                    // Here we calculate (d^2)E/(dWij)^2 by multiplying the second derivative of E with respect to Aj, the
                    // downstream unit's input sum, by the square of Oi, the upstream unit's output. Refer to Equation 25 in the paper.
                    // The accumulation over downstream positions is described by Equation 23.
                    weight2ndDerivative += downstreamError2ndDerivative * upstreamInput * upstreamInput;

                    // This implements the last sigma of Equation 27.
                    // It propagates error second derivatives back to the previous layer; they still need to be multiplied by the
                    // second derivative of the activation function at the previous layer.
                    upstream.ErrorDerivative[upstreamIndex] += weight * weight * downstreamError2ndDerivative;

                    downstreamIndex += 1;
                    upstreamIndex += 1;
                }
                upstreamIndex += Width - 1; // Equal to: upstream.Width - downstream.Width;
            }

            WeightStepSize[weightIndex] += weight2ndDerivative;
        }
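
The consumer of WeightStepSize is not shown on this page. As a minimal sketch, assuming a hypothetical helper inside the same class and hypothetical GlobalLearningRate and Mu fields, the accumulated curvature estimates are typically turned into per-weight step sizes in the stochastic diagonal Levenberg-Marquardt style described in the same paper:

        // Hypothetical sketch, not part of AdvancedOCR: converts the accumulated diagonal
        // second-derivative estimate into a per-weight learning rate and applies a gradient step.
        protected void ApplyGradientStep(int weightIndex, double gradient)
        {
            // Mu is an assumed safety constant that keeps the step size bounded when the
            // estimated curvature (WeightStepSize) is close to zero.
            double stepSize = GlobalLearningRate / (Mu + WeightStepSize[weightIndex]);
            Weight[weightIndex] -= stepSize * gradient;
        }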

Same methods

ConvolutionWeights::PropogateUnitSecondDerivatives ( RectangularStep downstream, int mapNumber ) : void
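
The body of this overload is not shown on this page. A hypothetical reconstruction, assuming the class holds a reference to the upstream step (named Upstream here purely for illustration) and that Width and Height are the kernel dimensions, would loop over every kernel element and delegate to the method documented above:

        // Hypothetical sketch, not the actual AdvancedOCR implementation: iterate over all
        // kernel positions of one feature map and accumulate per-weight second derivatives.
        protected void PropogateUnitSecondDerivatives(RectangularStep downstream, int mapNumber)
        {
            for (int weightY = 0; weightY < Height; weightY++)
            {
                for (int weightX = 0; weightX < Width; weightX++)
                {
                    // Upstream is an assumed field; the real class may obtain the upstream step differently.
                    PropogateUnitSecondDerivatives(Upstream, downstream, weightX, weightY, mapNumber);
                }
            }
        }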