public void PredictTest3()
{
    // Arrange: we will try to create a Hidden Markov Model which
    // can recognize (and predict) the following sequences:
    double[][] sequences =
    {
        new double[] { 1, 2, 3, 4, 5 },
        new double[] { 1, 2, 4, 3, 5 },
        new double[] { 1, 2, 5 },
    };

    // Creates a new left-to-right (forward) Hidden Markov Model
    // with 4 states for an output alphabet of six characters.
    var hmm = HiddenMarkovModel.CreateGeneric(new Forward(4), 6);

    // Try to fit the model to the data until the difference in
    // the average log-likelihood changes only by as little as 0.0001.
    // Iterations = 0 means there is no upper bound on iteration count;
    // learning stops on the tolerance criterion alone.
    var teacher = new BaumWelchLearning<GeneralDiscreteDistribution>(hmm)
    {
        Tolerance = 0.0001,
        Iterations = 0
    };

    // Act: run the learning algorithm on the model. The returned
    // log-likelihood is not needed by this test, so it is discarded.
    teacher.Run(sequences);

    // Now, we will try to predict the next
    // observations after a base sequence.
    double[] input = { 1, 2 }; // base sequence for prediction

    // Predict the next observation in sequence.
    Mixture<GeneralDiscreteDistribution> mixture = null;
    double prediction = hmm.Predict(input, out mixture);

    // Assert: all training sequences continue {1, 2} with 3, 4 or 5;
    // the model picks 5 as the single most likely next symbol here.
    Assert.AreEqual(5, prediction);

    // At this point, prediction probabilities should be
    // equilibrated around 3, 4 and 5, so the mixture mean is ~4.
    Assert.AreEqual(4, mixture.Mean, 0.1);

    double[] input2 = { 1 };

    // The only possible value after 1 must be 2.
    prediction = hmm.Predict(input2, out mixture);
    Assert.AreEqual(2, prediction);
}