/// <summary>
///   Verifies that supplying <see cref="NormalOptions.Regularization"/> through
///   <c>FittingOptions</c> lets Baum-Welch learn from a degenerate (near-constant)
///   data set, and that omitting regularization (or setting it to zero) makes the
///   same training run fail with an exception.
/// </summary>
public void FittingOptionsTest()
{
    // Create a degenerate problem: the sequences are almost all ones, so the
    // sample covariance of the fitted Normal densities is (near) singular.
    // Without regularization, fitting such a distribution should fail.
    double[][] sequences = new double[][]
    {
        new double[] { 1,1,1,1,1,0,1,1,1,1 },
        new double[] { 1,1,1,1,0,1,1,1,1,1 },
        new double[] { 1,1,1,1,1,1,1,1,1,1 },
        new double[] { 1,1,1,1,1,1 },
        new double[] { 1,1,1,1,1,1,1 },
        new double[] { 1,1,1,1,1,1,1,1,1,1 },
        new double[] { 1,1,1,1,1,1,1,1,1,1 },
    };

    // Case 1: with a positive regularization constant, training must succeed.
    // Creates a continuous hidden Markov Model with two states organized in an
    // ergodic topology and an underlying multivariate Normal density.
    var density = new MultivariateNormalDistribution(1);
    var model = new HiddenMarkovModel<MultivariateNormalDistribution>(new Ergodic(2), density);

    // Configure the learning algorithm, passing fitting options for the
    // Normal distribution so the covariance stays positive-definite.
    var teacher = new BaumWelchLearning<MultivariateNormalDistribution>(model)
    {
        Tolerance = 0.0001,
        Iterations = 0,

        // Configure options for fitting the normal distribution
        FittingOptions = new NormalOptions() { Regularization = 0.0001, }
    };

    // Fit the model. No exceptions will be thrown.
    double logLikelihood = teacher.Run(sequences);

    Assert.AreEqual(5.3782215178437722, logLikelihood, 1e-15);
    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.AreEqual(0.0001, (teacher.FittingOptions as NormalOptions).Regularization);

    // Case 2: without any fitting options, the degenerate covariance must
    // cause training to throw. The broad catch is intentional: this test only
    // asserts that *some* exception is raised by the unregularized fit.
    bool thrown;

    thrown = false;
    density = new MultivariateNormalDistribution(1);
    model = new HiddenMarkovModel<MultivariateNormalDistribution>(new Ergodic(2), density);
    teacher = new BaumWelchLearning<MultivariateNormalDistribution>(model) { Tolerance = 0.0001, Iterations = 0, };
    Assert.IsNull(teacher.FittingOptions);
    try { teacher.Run(sequences); }
    catch { thrown = true; }
    Assert.IsTrue(thrown);

    // Case 3: fitting options present but with Regularization = 0 behave the
    // same as no regularization at all — training must still throw.
    thrown = false;
    density = new MultivariateNormalDistribution(1);
    model = new HiddenMarkovModel<MultivariateNormalDistribution>(new Ergodic(2), density);
    teacher = new BaumWelchLearning<MultivariateNormalDistribution>(model)
    {
        Tolerance = 0.0001,
        Iterations = 0,
        FittingOptions = new NormalOptions() { Regularization = 0 }
    };
    Assert.IsNotNull(teacher.FittingOptions);
    try { teacher.Run(sequences); }
    catch { thrown = true; }
    Assert.IsTrue(thrown);
}