/// <summary>
///   Creates and trains a 3-class hidden Markov classifier over 4-dimensional
///   continuous observation sequences (words "hello", "car", "wardrobe"), using
///   an Independent&lt;NormalDistribution&gt; emission model and Baum-Welch learning.
/// </summary>
/// <param name="words">Outputs the training sequences (one jagged frame array per word sample).</param>
/// <param name="labels">Outputs the class label for each training sequence (0=hello, 1=car, 2=wardrobe).</param>
/// <param name="usePriors">If true, class priors are estimated empirically from the label frequencies.</param>
/// <returns>The trained classifier.</returns>
public static HiddenMarkovClassifier<Independent<NormalDistribution>> CreateModel4(out double[][][] words, out int[] labels, bool usePriors)
{
    double[][] hello =
    {
        new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 0.1, 1.1 },
    };

    double[][] car =
    {
        new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
        new double[] { 0.0, 0.0, 0.1, 0.0 },
        new double[] { 1.0, 0.0, 0.0, 0.0 },
    };

    double[][] wardrobe =
    {
        new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
        new double[] { 0.0, 0.1, 1.0, 0.0 },
        new double[] { 0.1, 0.0, 1.0, 0.1 },
    };

    // A second, slightly perturbed sample of "wardrobe" so class 2 has
    // two training sequences (exercises multi-sample per-class learning).
    double[][] wardrobe2 =
    {
        new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
        new double[] { 0.2, 0.0, 1.0, 0.1 }, // wardrobe word.
        new double[] { 0.0, 0.1, 1.0, 0.0 },
        new double[] { 0.1, 0.0, 1.0, 0.2 },
    };

    words = new double[][][] { hello, car, wardrobe, wardrobe2 };

    // Must stay in sync with numberOfWords below: labels range over 0..2.
    labels = new[] { 0, 1, 2, 2 };

    // Initial emission density: four independent standard normals, one per
    // observation dimension. Baum-Welch re-estimates these during learning.
    var initial = new Independent<NormalDistribution>
    (
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1)
    );

    int numberOfWords = 3;  // number of distinct classes (must equal labels.Max() + 1)
    int numberOfStates = 5; // hidden states per class model

    var classifier = new HiddenMarkovClassifier<Independent<NormalDistribution>>
    (
        classes: numberOfWords,
        topology: new Forward(numberOfStates),
        initial: initial
    );

    var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(classifier,
        modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 100,

            // Regularize the normal fits so zero-variance dimensions in the
            // tiny training set do not produce degenerate distributions.
            FittingOptions = new IndependentOptions()
            {
                InnerOption = new NormalOptions() { Regularization = 1e-5 }
            }
        }
    );

    if (usePriors)
        teacher.Empirical = true;

    double logLikelihood = teacher.Run(words, labels);

    // Compare with a tolerance instead of exact double equality: the training
    // result is deterministic but bit-exact equality is fragile across
    // runtimes/JIT/FP settings.
    // NOTE(review): the expected value appears to correspond to the default
    // (non-empirical) training path — confirm it also holds when usePriors is true.
    Assert.AreEqual(208.38345600145777d, logLikelihood, 1e-8);

    return classifier;
}