/// <summary>
///   Builds and trains a two-class hidden Markov model classifier over
///   two-dimensional observations (a discrete symbol paired with a
///   continuous value), using the Learn(...) API.
/// </summary>
/// <param name="sequences">
///   On return, the two training sequences. Each observation is a
///   double[2]: element 0 is a discrete symbol in {0, 1, 2}, element 1
///   is a continuous value.
/// </param>
/// <param name="labels">
///   On return, the class label for each training sequence ({ 0, 1 }).
/// </param>
/// <returns>The trained classifier (same instance returned by the teacher).</returns>
public static HiddenMarkovClassifier<Independent, double[]> CreateModel2_learn(out double[][][] sequences, out int[] labels)
{
    sequences = new double[][][]
    {
        new double[][]
        {
            // First sequence, label = 0 (trending upward)
            new double[] { 0, 1.1 },
            new double[] { 1, 2.5 },
            new double[] { 1, 3.4 },
            new double[] { 1, 4.7 },
            new double[] { 2, 5.8 },
        },
        new double[][]
        {
            // Second sequence, label = 1 (trending downward)
            new double[] { 2, 3.2 },
            new double[] { 2, 2.6 },
            new double[] { 1, 1.2 },
            new double[] { 1, 0.8 },
            new double[] { 0, 1.1 },
        }
    };

    labels = new[] { 0, 1 };

    // The emission density treats the two observation components as
    // independent: a 3-symbol discrete distribution for the first
    // component and a univariate Normal for the second.
    var comp1 = new GeneralDiscreteDistribution(3);
    var comp2 = new NormalDistribution(1);
    var density = new Independent(comp1, comp2);

    // Sequence classifier with 2 class models, each an ergodic
    // (fully-connected) HMM with 2 states and the Independent density above.
    var classifier = new HiddenMarkovClassifier<Independent, double[]>(
        2, new Ergodic(2), density);

    // Configure Baum-Welch learning for each class model: iterate until
    // the log-likelihood changes by less than 0.0001 (no iteration cap).
    var teacher = new HiddenMarkovClassifierLearning<Independent, double[]>(classifier)
    {
        Learner = modelIndex => new BaumWelchLearning<Independent, double[]>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0, // 0 means no limit on the number of iterations
        }
    };

    // Train and verify the teacher returns the very same classifier instance.
    var model = teacher.Learn(sequences, labels);
    Assert.AreSame(model, classifier);

    return classifier;
}