Encog.Neural.Flat.Train.Prop.TrainFlatNetworkQPROP.UpdateWeight C# (CSharp) Method

UpdateWeight() public method

Update a weight.
public UpdateWeight ( double[] gradients, double[] lastGradient, int index ) : double
gradients double[] The current gradients, one entry per weight.
lastGradient double[] The gradients from the previous iteration.
index int The index of the weight to update.
return double The computed weight change (also stored in LastDelta).
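
How the step is computed (a summary inferred from the code below and the standard Quickprop formulation, not from Encog's own documentation): d is the previous weight change (LastDelta), s is the current slope built from the gradient plus a weight-decay term (Decay * w), and p is the previous slope. If the current slope is close to, or beyond, the previous slope scaled by Shrink, the step is capped at LearningRate * d; otherwise the quadratic (parabola-minimum) estimate d * s / (p - s) is used. A linear term -EPS * s is also folded in when the slope passes the sign check in the inline comments, and when the last step was zero only that linear term is used. The result is returned and stored in LastDelta for the next iteration.
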
        public override double UpdateWeight(double[] gradients,
                                            double[] lastGradient, int index)
        {
            double w = Network.Weights[index];
            double d = LastDelta[index];
            double s = -Gradients[index] + Decay*w;
            double p = -lastGradient[index];
            double nextStep = 0.0;

            // The step must always be in direction opposite to the slope.
            if (d < 0.0)
            {
                // If last step was negative...
                if (s > 0.0)
                {
                    // Add in linear term if current slope is still positive.
                    nextStep -= EPS*s;
                }
                // If current slope is close to or larger than prev slope...
                if (s >= (Shrink*p))
                {
                    // Take maximum size negative step.
                    nextStep += LearningRate*d;
                }
                else
                {
                    // Else, use quadratic estimate.
                    nextStep += d*s/(p - s);
                }
            }
            else if (d > 0.0)
            {
                // If last step was positive...
                if (s < 0.0)
                {
                    // Add in linear term if current slope is still negative.
                    nextStep -= EPS*s;
                }
                // If current slope is close to or more neg than prev slope...
                if (s <= (Shrink*p))
                {
                    // Take maximum size positive step.
                    nextStep += LearningRate*d;
                }
                else
                {
                    // Else, use quadratic estimate.
                    nextStep += d*s/(p - s);
                }
            }
            else
            {
                // Last step was zero, so use only linear term. 
                nextStep -= EPS * s;
            }

            // update global data arrays
            LastDelta[index] = nextStep;
            LastGradient[index] = gradients[index];

            return nextStep;
        }
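
Usage sketch (not taken from the Encog source): the snippet below only illustrates the calling contract of UpdateWeight. The helper class, its parameters, and the way the gradient arrays are obtained are assumptions made for this example; in normal use the propagation trainer's own iteration loop performs this step internally for every weight.

    using Encog.Neural.Flat;
    using Encog.Neural.Flat.Train.Prop;

    public static class QpropUsageSketch
    {
        // Hypothetical helper (illustration only): apply one QPROP step to
        // every weight. "qprop" is an already-constructed TrainFlatNetworkQPROP
        // instance; "gradients" and "lastGradients" are assumed to hold the
        // current and previous gradient arrays, one entry per weight.
        public static void ApplyStep(TrainFlatNetworkQPROP qprop,
                                     FlatNetwork network,
                                     double[] gradients,
                                     double[] lastGradients)
        {
            for (int i = 0; i < network.Weights.Length; i++)
            {
                // UpdateWeight returns the step (delta) for weight i;
                // the caller adds it to the weight itself.
                network.Weights[i] += qprop.UpdateWeight(gradients, lastGradients, i);
            }
        }
    }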