/// <summary>
///   Verifies that a 4-class polynomial-kernel multiclass SVM trained on a small,
///   separable dataset achieves zero training error, and that the machine reports
///   the expected number of kernel evaluations per call for both the Elimination
///   and Voting computation strategies.
/// </summary>
public void ComputeTest2()
{
    // Eight 5-dimensional samples, two per class (perfectly separable).
    double[][] input =
    {
        new double[] { 1, 4, 2, 0, 1 },
        new double[] { 1, 3, 2, 0, 1 },
        new double[] { 3, 0, 1, 1, 1 },
        new double[] { 3, 0, 1, 0, 1 },
        new double[] { 0, 5, 5, 5, 5 },
        new double[] { 1, 5, 5, 5, 5 },
        new double[] { 1, 0, 0, 0, 0 },
        new double[] { 1, 0, 0, 0, 0 },
    };
    int[] output =
    {
        0, 0,
        1, 1,
        2, 2,
        3, 3,
    };
    IKernel kernel = new Polynomial(2);
    int classes = 4;
    int inputs = 5;

    // Create the Multi-class Support Vector Machine using the selected Kernel
    var msvm = new MulticlassSupportVectorMachine(inputs, kernel, classes);

    // Create the learning algorithm using the machine and the training data
    var ml = new MulticlassSupportVectorLearning(msvm, input, output);

    // Configure the learning algorithm: one SMO per pairwise (i, j) sub-problem.
    ml.Algorithm = (svm, classInputs, classOutputs, i, j) =>
    {
        var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs)
        {
            Complexity = 1
        };
        return smo;
    };

    // No kernel function evaluations should have happened before training.
    Assert.AreEqual(0, msvm.GetLastKernelEvaluations());

#if DEBUG
    // Force sequential execution in debug builds so evaluation counts
    // are deterministic and easy to step through.
    msvm.ParallelOptions.MaxDegreeOfParallelism = 1;
    ml.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Executes the training algorithm
    double error = ml.Run();

    // The dataset is separable, so the training error must be exactly zero.
    // NOTE: Assert.AreEqual takes (expected, actual) — keep expected first
    // so failure messages read correctly.
    Assert.AreEqual(0, error);
    Assert.AreEqual(6, msvm.GetLastKernelEvaluations());

    // Expected kernel-evaluation counts per sample under Elimination.
    int[] evals = new int[input.Length];
    int[] evalexp = { 8, 8, 7, 7, 7, 7, 6, 6 };

    // Each iteration writes only evals[i], so concurrent writes are safe.
    Parallel.For(0, input.Length, i =>
    {
        double[] data = input[i];
        double[] responses;

        int num = msvm.Compute(data, MulticlassComputeMethod.Elimination, out responses);
        Assert.AreEqual(output[i], num);

        evals[i] = msvm.GetLastKernelEvaluations();
    });

    for (int i = 0; i < evals.Length; i++)
        Assert.AreEqual(evalexp[i], evals[i]);

    // Voting always evaluates every pairwise machine, so the evaluation
    // count equals the number of unique support vectors for every sample.
    Parallel.For(0, input.Length, i =>
    {
        double[] data = input[i];
        double[] responses;

        int num = msvm.Compute(data, MulticlassComputeMethod.Voting, out responses);
        Assert.AreEqual(output[i], num);

        evals[i] = msvm.GetLastKernelEvaluations();
    });

    for (int i = 0; i < evals.Length; i++)
        Assert.AreEqual(msvm.SupportVectorUniqueCount, evals[i]);
}