public void LearnTest1()
{
    // Train a continuous-density hidden Markov model sequence classifier
    // to tell apart a univariate ramp-up sequence from its reversed copy.
    double[][] observations =
    {
        new double[] { 0, 1, 2, 3, 4 }, // label 0: ascending sequence
        new double[] { 4, 3, 2, 1, 0 }, // label 1: the same sequence reversed
    };

    // Class label for each observation sequence above
    int[] outputs = { 0, 1 };

    // Build a classifier holding 2 hidden Markov models, each with
    // 2 states (ergodic topology) and a Normal emission density.
    var classifier = new HiddenMarkovClassifier<NormalDistribution>(
        2, new Ergodic(2), new NormalDistribution());

    // Set up Baum-Welch learning for every model in the classifier: each
    // model trains until the log-likelihood change drops below 0.0001,
    // with no cap on the number of iterations (Iterations = 0).
    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,
        i => new BaumWelchLearning<NormalDistribution>(classifier.Models[i])
        {
            Tolerance = 0.0001,
            Iterations = 0
        });

    // Run training and capture the resulting log-likelihood.
    double logLikelihood = teacher.Run(observations, outputs);

    // Classify both training sequences, capturing the probability
    // that each one originated from the model it was assigned to.
    double likelihood1;
    int decision1 = classifier.Compute(observations[0], out likelihood1); // expected: 0

    double likelihood2;
    int decision2 = classifier.Compute(observations[1], out likelihood2); // expected: 1

    // The classifier should recover the original labels...
    Assert.AreEqual(0, decision1);
    Assert.AreEqual(1, decision2);

    // ...and reproduce the known regression values within tolerance.
    Assert.AreEqual(-13.271981026832929, logLikelihood, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);
    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(likelihood1));
    Assert.IsFalse(double.IsNaN(likelihood2));
}