/// <summary>
/// This is based on this article:
/// http://www.codeproject.com/Articles/54575/An-Introduction-to-Encog-Neural-Networks-for-C
/// </summary>
/// <remarks>
/// Go here for documentation of encog:
/// http://www.heatonresearch.com/wiki
///
/// Download link:
/// https://github.com/encog/encog-dotnet-core/releases
/// </remarks>
private void btnXOR_Click(object sender, RoutedEventArgs e)
{
try
{
_trainingData = null;
_results = null;
BasicNetwork network = new BasicNetwork();
#region Create nodes
// Create the network's layers
//NOTE: Using ActivationSigmoid, because there are no negative values. If there were negative values, ActivationTANH would be the better fit (see the commented alternative below)
//http://www.heatonresearch.com/wiki/Activation_Function
//NOTE: ActivationSigmoid (0 to 1) and ActivationTANH (-1 to 1) are mathematically pure but slower. Cruder but faster approximations are ActivationElliott (0 to 1) and ActivationElliottSymmetric (-1 to 1)
//http://www.heatonresearch.com/wiki/Elliott_Activation_Function
network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2)); // input layer
network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6)); // hidden layer
network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1)); // output layer
network.Structure.FinalizeStructure();
// Randomize the links
network.Reset();
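//NOTE: If the data were in the range -1 to 1 instead of 0 to 1, the layers could be built with ActivationTANH instead. A sketch of what that would look like (not used here):
//network.AddLayer(new BasicLayer(new ActivationTANH(), true, 2));
//network.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
//network.AddLayer(new BasicLayer(new ActivationTANH(), true, 1));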
#endregion
#region Training data
// Neural networks must be trained before they are of any use. To train this neural network, we must provide training
// data. The training data is the truth table for the XOR operator, which has the following inputs:
double[][] xor_input = new[]
{
new[] { 0d, 0d },
new[] { 1d, 0d },
new[] { 0d, 1d },
new[] { 1d, 1d },
};
// And the expected outputs
double[][] xor_ideal_output = new[]
{
new[] { 0d },
new[] { 1d },
new[] { 1d },
new[] { 0d },
};
_trainingData = GetDrawDataFromTrainingData(xor_input, xor_ideal_output);
#endregion
#region Train network
INeuralDataSet trainingSet = new BasicNeuralDataSet(xor_input, xor_ideal_output);
// Resilient propagation (RPROP) is a good general-purpose training algorithm
//http://www.heatonresearch.com/wiki/Training
ITrain train = new ResilientPropagation(network, trainingSet);
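// Classic backpropagation would also work, it's just usually slower to converge. A sketch (the learning rate .7 and momentum .3 are placeholder values):
//ITrain train = new Backpropagation(network, trainingSet, .7, .3);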
List<double> log = new List<double>();
int trainingIteration = 1;
do
{
train.Iteration();
log.Add(train.Error);
trainingIteration++;
} while ((trainingIteration < 2000) && (train.Error > 0.001));
// Paste this into Excel and chart it to see the error trend
string logExcel = string.Join("\r\n", log);
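// The log could also be written straight to a file. A sketch (the file path is just an example):
//System.IO.File.WriteAllText(@"C:\temp\xor_error.csv", logExcel);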
#endregion
#region Test
//NOTE: I initially ran a bunch of random tests, but the network is deterministic - it always returns exactly the same result when given the same inputs, so the four truth table rows are enough
//var test = Enumerable.Range(0, 1000).
// Select(o => new { In1 = _rand.Next(2), In2 = _rand.Next(2) }).
var test = xor_input.
Select(o => new { In1 = Convert.ToInt32(o[0]), In2 = Convert.ToInt32(o[1]) }).
Select(o => new
{
o.In1,
o.In2,
Expected = XOR(o.In1, o.In2),
NN = CallNN(network, o.In1, o.In2),
}).
Select(o => new { o.In1, o.In2, o.Expected, o.NN, Error = Math.Abs(o.Expected - o.NN) }).
OrderByDescending(o => o.Error).
ToArray();
#endregion
#region Test intermediate values
// The network was only trained with inputs of 0 and 1. Let's see what it does with values in between
var intermediates = Enumerable.Range(0, 1000).
Select(o => new { In1 = _rand.NextDouble(), In2 = _rand.NextDouble() }).
Select(o => new
{
o.In1,
o.In2,
NN = CallNN(network, o.In1, o.In2),
}).
OrderBy(o => o.In1).
ThenBy(o => o.In2).
//OrderBy(o => o.NN).
ToArray();
#endregion
#region Serialize/Deserialize
// Serialize it
string weightDump = network.DumpWeights();
double[] dumpArray = weightDump.Split(',').
Select(o => double.Parse(o)).
ToArray();
//TODO: Shoot through the layers, and store in some custom structure that can be serialized, then walked through to rebuild on deserialize
//string[] layerDump = network.Structure.Layers.
// Select(o => o.ToString()).
// ToArray();
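// Encog also has built-in persistence, which avoids hand-rolling the weight dump. A sketch (the file name is just an example):
//EncogDirectoryPersistence.SaveObject(new FileInfo("network.eg"), network);
//BasicNetwork loaded = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo("network.eg"));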
// Create a clone
BasicNetwork clone = new BasicNetwork();
clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
clone.Structure.FinalizeStructure();
clone.DecodeFromArray(dumpArray);
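//NOTE: DecodeFromArray expects the array length to match the network's weight count, so a sanity check could be added first (assuming this Encog version exposes EncodedArrayLength):
//if (dumpArray.Length != clone.EncodedArrayLength()) throw new ApplicationException("Weight count mismatch");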
// Test the clone
string cloneDump = clone.DumpWeights();
bool isSame = weightDump == cloneDump;
var cloneTests = xor_input.
Select(o => new
{
Input = o,
NN = CallNN(clone, o[0], o[1]),
}).ToArray();
#endregion
#region Store results
double[] matchValues = new[] { 0d, 1d };
double matchRange = .03; //+- 3% of a target value is considered a match
_results = intermediates.
Select(o => Tuple.Create(new Point(o.In1, o.In2), o.NN, IsMatch(o.NN, matchValues, matchRange))).
ToArray();
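//NOTE: IsMatch is a helper defined elsewhere. Presumably it just checks whether the output is within matchRange of 0 or 1, something like (a sketch, not the actual implementation):
//matchValues.Any(o => Math.Abs(value - o) <= range)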
#endregion
}
catch (Exception ex)
{
MessageBox.Show(ex.ToString(), this.Title, MessageBoxButton.OK, MessageBoxImage.Error);
}
finally
{
RedrawResults();
}
}
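//NOTE: CallNN and XOR are helpers defined elsewhere. CallNN presumably just wraps network.Compute. A sketch of what it likely looks like (not the actual implementation):
//private static double CallNN(BasicNetwork network, double input1, double input2)
//{
//    var output = network.Compute(new BasicNeuralData(new[] { input1, input2 }));
//    return output[0];
//}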