public void PredictTest3()
{
    // We will try to create a Hidden Markov Model which
    // can recognize (and predict) the following sequences:
    int[][] sequences =
    {
        new[] { 1, 2, 3, 4, 5 },
        new[] { 1, 2, 4, 3, 5 },
        new[] { 1, 2, 5 },
    };

    // Creates a new left-to-right (forward) Hidden Markov Model
    // with 4 states for an output alphabet of six characters.
    HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 6);

    // Try to fit the model to the data until the difference in
    // the average log-likelihood changes only by as little as 0.0001.
    // Iterations = 0 means there is no hard cap on iteration count;
    // convergence is governed by the tolerance alone.
    BaumWelchLearning teacher = new BaumWelchLearning(hmm)
    {
        Tolerance = 0.0001,
        Iterations = 0
    };

    // Run the learning algorithm on the model
    double logLikelihood = teacher.Run(sequences);

    // Now, we will try to predict the next
    // observations after a base sequence
    int[] input = { 1, 2 }; // base sequence for prediction

    double[] logLikelihoods;

    // Predict the next observation in sequence
    int prediction = hmm.Predict(input, out logLikelihoods);

    // Convert the per-symbol log-likelihoods into probabilities
    var probs = Matrix.Exp(logLikelihoods);

    // At this point, prediction probabilities should be
    // equilibrated around 3, 4 and 5: every training sequence
    // starts with { 1, 2 } and then continues with 3, 4 or 5.
    // NOTE: Assert.AreEqual takes (expected, actual [, delta]) —
    // expected value goes first so failure messages read correctly.
    Assert.AreEqual(6, probs.Length);
    Assert.AreEqual(0.00, probs[0], 0.01);
    Assert.AreEqual(0.00, probs[1], 0.01);
    Assert.AreEqual(0.00, probs[2], 0.01);
    Assert.AreEqual(0.33, probs[3], 0.05);
    Assert.AreEqual(0.33, probs[4], 0.05);
    Assert.AreEqual(0.33, probs[5], 0.05);

    double[][] probabilities2;

    // Predict the next 2 observations in sequence; the result is
    // one log-likelihood vector (length 6) per predicted step.
    int[] prediction2 = hmm.Predict(input, 2, out probabilities2);

    Assert.AreEqual(2, probabilities2.Length);
    Assert.AreEqual(6, probabilities2[0].Length);
    Assert.AreEqual(6, probabilities2[1].Length);

    // The first step of the two-step prediction must agree with
    // the single-step prediction computed above.
    Assert.IsTrue(probabilities2[0].IsEqual(logLikelihoods));
}