public double RunEpoch(T[][] observations, int[] outputs)
{
    double error = 0;

    if (stochastic)
    {
        // In stochastic mode, we compute the gradient at each training
        // point individually and use the average of those per-sample
        // gradients as an estimate of the true gradient.
        Array.Clear(gradient, 0, gradient.Length);

        int progress = 0;

#if SERIAL  // For each training point
        for (int i = 0; i < observations.Length; i++)
#else
        Parallel.For(0, observations.Length, ParallelOptions, i =>
#endif
        {
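            // Note: 'calculator' is a single shared instance. In the
            // parallel build (#if !SERIAL), reassigning its Inputs and
            // Outputs from multiple threads is a data race unless the
            // calculator is thread-safe or made thread-local.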
            calculator.Inputs = new[] { observations[i] };
            calculator.Outputs = new[] { outputs[i] };

            // Compute the estimated gradient at this single point
            double[] estimate = calculator.Gradient();

            lock (lockObj)
            {
                // Accumulate the per-sample gradient and its error
                for (int j = 0; j < estimate.Length; j++)
                    gradient[j] += estimate[j];
                error += calculator.LastError;
            }

            // Report progress as the percentage of points processed so far
            int current = Interlocked.Increment(ref progress);
            double percent = current / (double)observations.Length * 100.0;
            OnProgressChanged(new ProgressChangedEventArgs((int)percent, i));

            // Debug-only sanity check; note that in the parallel build this
            // reads 'gradient' without holding the lock, so it may observe
            // values another thread is still accumulating.
            Accord.Diagnostics.Debug.Assert(!gradient.HasNaN());
        }
#if !SERIAL
        );
#endif

        // Divide by the number of samples to obtain the average gradient,
        // keeping its scale comparable to the batch gradient computed below
        for (int i = 0; i < gradient.Length; i++)
            gradient[i] /= observations.Length;
    }
    else
    {
        // In batch mode, compute the true gradient over the
        // whole training set in a single pass
        calculator.Inputs = observations;
        calculator.Outputs = outputs;

        gradient = calculator.Gradient();
        error = calculator.LastError;
    }
    double[] parameters = Model.Function.Weights;

    // Do the Resilient Backpropagation parameter update
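    // Each weight k keeps its own step size weightsUpdates[k], adapted
    // using the sign product S of the current and previous gradients:
    //   S > 0: the gradient kept its direction, so grow the step size
    //          (up to deltaMax) and move against the gradient;
    //   S < 0: the sign flipped, meaning the last step jumped over a
    //          minimum, so shrink the step size (down to deltaMin) and
    //          skip this update, zeroing previousGradient so the next
    //          pass takes the plain-step branch below;
    //   S = 0: take a plain step with the current step size.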
    for (int k = 0; k < calculator.Parameters.Length; k++)
    {
        // Skip weights that have already saturated to infinity
        if (Double.IsInfinity(parameters[k]))
            continue;

        double S = previousGradient[k] * gradient[k];

        if (S > 0.0)
        {
            weightsUpdates[k] = Math.Min(weightsUpdates[k] * etaPlus, deltaMax);
            parameters[k] -= Math.Sign(gradient[k]) * weightsUpdates[k];
            previousGradient[k] = gradient[k];
        }
        else if (S < 0.0)
        {
            weightsUpdates[k] = Math.Max(weightsUpdates[k] * etaMinus, deltaMin);
            previousGradient[k] = 0.0;
        }
        else
        {
            parameters[k] -= Math.Sign(gradient[k]) * weightsUpdates[k];
            previousGradient[k] = gradient[k];
        }
    }
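
    // Design note: unlike the original RPROP, which backtracks (reverts
    // the previous update) when the gradient sign flips, this variant
    // simply skips the step after a sign change, which is the iRPROP-
    // behavior.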
    Accord.Diagnostics.Debug.Assert(!Model.Function.Weights.HasNaN());

    // Record this epoch's error with the convergence tracker and return it
    return convergence.NewValue = error;
}
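
// A minimal sketch of how an epoch method like the one above is typically
// driven, assuming access to an epoch-running delegate. This helper is
// illustrative and not part of the original class: the delegate, the
// relative tolerance, and the epoch cap are all assumptions made for the
// example.
private static double TrainUntilConverged(Func<double> runEpoch,
    double tolerance = 1e-5, int maxEpochs = 1000)
{
    // Run a first epoch to obtain a baseline error
    double previous = runEpoch();

    for (int epoch = 1; epoch < maxEpochs; epoch++)
    {
        double current = runEpoch();

        // Stop once the relative improvement in error becomes negligible
        if (Math.Abs(previous - current) <= tolerance * Math.Abs(previous))
            return current;

        previous = current;
    }

    return previous;
}

// Usage (hypothetical names):
//   TrainUntilConverged(() => teacher.RunEpoch(inputs, outputs));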