public static HiddenMarkovClassifier<Independent> CreateModel1()
{
    // Build a continuous-density hidden Markov model classifier able to
    // tell a multivariate sequence apart from the same sequence reversed.

    // Training data: an ascending sequence (class 0) and the identical
    // observations in descending order (class 1).
    double[][][] observations =
    {
        new[]
        {
            // First sequence, label = 0: 0, 1, 2, 3, 4
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 2 },
            new double[] { 3 },
            new double[] { 4 },
        },
        new[]
        {
            // Second sequence, label = 1: 4, 3, 2, 1, 0
            new double[] { 4 },
            new double[] { 3 },
            new double[] { 2 },
            new double[] { 1 },
            new double[] { 0 },
        }
    };

    // One class label per training sequence.
    int[] classLabels = { 0, 1 };

    // Two ergodic 2-state HMMs whose emissions are an Independent
    // distribution wrapping a univariate Normal density.
    var emissions = new Independent(new NormalDistribution());
    var classifier = new HiddenMarkovClassifier<Independent>(2, new Ergodic(2), emissions);

    // Fit each class model with Baum-Welch until the log-likelihood
    // changes by less than 0.0001; Iterations = 0 means no iteration cap.
    var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,
        i => new BaumWelchLearning<Independent>(classifier.Models[i])
        {
            Tolerance = 0.0001,
            Iterations = 0
        });

    // Train the classifier and sanity-check the resulting log-likelihood
    // against the known-good value for this fixture.
    double logLikelihood = teacher.Run(observations, classLabels);
    Assert.AreEqual(-13.271981026832929d, logLikelihood, 1e-10);

    return classifier;
}