public void multiclass_calibration_generic_kernel()
{
    // Verifies that a multi-class SVM built over the generic IKernel interface
    // can be trained with SMO and then calibrated with Platt's probabilistic
    // output calibration, producing the expected scores, log-likelihoods and
    // class probabilities.

    // Let's say we have the following data to be classified
    // into three possible classes. Those are the samples:
    //
    double[][] inputs =
    {
        //              input                 output
        new double[] { 0, 1, 1, 0 }, //         0
        new double[] { 0, 1, 0, 0 }, //         0
        new double[] { 0, 0, 1, 0 }, //         0
        new double[] { 0, 1, 1, 0 }, //         0
        new double[] { 0, 1, 0, 0 }, //         0
        new double[] { 1, 0, 0, 1 }, //         1
        new double[] { 0, 0, 0, 1 }, //         1
        new double[] { 0, 0, 0, 1 }, //         1
        new double[] { 1, 0, 1, 1 }, //         2
        new double[] { 1, 1, 0, 1 }, //         2
        new double[] { 0, 1, 1, 1 }, //         2
        new double[] { 1, 1, 1, 1 }, //         2
    };

    int[] outputs = // those are the class labels
    {
        0, 0, 0, 0, 0,
        1, 1, 1,
        2, 2, 2, 2,
    };

    // Create the multi-class learning algorithm for the machine
    var teacher = new MulticlassSupportVectorLearning<IKernel>()
    {
        // Configure the learning algorithm to use SMO to train the
        // underlying SVMs in each of the binary class subproblems.
        Learner = (param) => new SequentialMinimalOptimization<IKernel>()
        {
            // Use an explicitly-chosen Gaussian kernel rather than
            // estimating kernel parameters from the data, so the test
            // results are deterministic.
            UseKernelEstimation = false,
            Kernel = Gaussian.FromGamma(0.5)
        }
    };

    // Learn a machine
    var machine = teacher.Learn(inputs, outputs);

    // Create the calibration algorithm for the learned machine, so its
    // raw decision scores can be converted into class probabilities.
    var calibration = new MulticlassSupportVectorLearning<IKernel>(machine)
    {
        // Configure the calibration to apply Platt's sigmoid fitting
        // (probabilistic output calibration) to each trained binary SVM.
        Learner = (param) => new ProbabilisticOutputCalibration<IKernel>(param.Model)
    };

    // Run single-threaded so the calibration results are reproducible.
    calibration.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Calibrate the machine (does not change the decision boundaries,
    // only the probability mapping of the outputs).
    calibration.Learn(inputs, outputs);

    // Obtain class predictions for each sample
    int[] predicted = machine.Decide(inputs);

    // Get class scores for each sample
    double[] scores = machine.Score(inputs);

    // Get log-likelihoods (the winning-class entries are expected to
    // match the scores above)
    double[][] logl = machine.LogLikelihoods(inputs);

    // Get probability for each sample
    double[][] prob = machine.Probabilities(inputs);

    // Compute classification error
    double error = new ZeroOneLoss(outputs).Loss(predicted);
    double loss = new CategoryCrossEntropyLoss(outputs).Loss(prob);

    //string str = logl.ToCSharp();

    double[] expectedScores =
    {
        1.87436400885238, 1.81168086449304, 1.74038320983522,
        1.87436400885238, 1.81168086449304, 1.55446926953952,
        1.67016543853596, 1.67016543853596, 1.83135194001403,
        1.83135194001403, 1.59836868669125, 2.0618816310294
    };

    double[][] expectedLogL =
    {
        new double[] { 1.87436400885238, -1.87436400885238, -1.7463646841257 },
        new double[] { 1.81168086449304, -1.81168086449304, -1.73142460658826 },
        new double[] { 1.74038320983522, -1.58848669816072, -1.74038320983522 },
        new double[] { 1.87436400885238, -1.87436400885238, -1.7463646841257 },
        new double[] { 1.81168086449304, -1.81168086449304, -1.73142460658826 },
        new double[] { -1.55446926953952, 1.55446926953952, -0.573599079216229 },
        new double[] { -0.368823000428743, 1.67016543853596, -1.67016543853596 },
        new double[] { -0.368823000428743, 1.67016543853596, -1.67016543853596 },
        new double[] { -1.83135194001403, -1.20039293330558, 1.83135194001403 },
        new double[] { -1.83135194001403, -1.20039293330558, 1.83135194001403 },
        new double[] { -0.894598978116595, -1.59836868669125, 1.59836868669125 },
        new double[] { -1.87336852014759, -2.0618816310294, 2.0618816310294 }
    };

    double[][] expectedProbs =
    {
        new double[] { 0.95209908906855, 0.0224197237689656, 0.0254811871624848 },
        new double[] { 0.947314032745205, 0.0252864560196241, 0.0273995112351714 },
        new double[] { 0.937543314993345, 0.0335955309754816, 0.028861154031173 },
        new double[] { 0.95209908906855, 0.0224197237689656, 0.0254811871624848 },
        new double[] { 0.947314032745205, 0.0252864560196241, 0.0273995112351714 },
        new double[] { 0.0383670466237636, 0.859316640577158, 0.102316312799079 },
        new double[] { 0.111669460983068, 0.857937888238824, 0.0303926507781076 },
        new double[] { 0.111669460983068, 0.857937888238824, 0.0303926507781076 },
        new double[] { 0.0238971617859334, 0.0449126146360623, 0.931190223578004 },
        new double[] { 0.0238971617859334, 0.0449126146360623, 0.931190223578004 },
        new double[] { 0.0735735561383806, 0.0363980776342206, 0.890028366227399 },
        new double[] { 0.0188668069460003, 0.0156252941482294, 0.96550789890577 }
    };

    // Must be exactly the same as test above. Floating-point results are
    // compared within a 1e-10 tolerance rather than with exact equality,
    // matching the tolerance already used by the IsEqual checks below.
    Assert.AreEqual(0, error, 1e-10);
    Assert.AreEqual(0.5, ((Gaussian)machine[0].Value.Kernel).Gamma);
    Assert.AreEqual(0.5, ((Gaussian)machine[1].Value.Kernel).Gamma);
    Assert.AreEqual(0.5, ((Gaussian)machine[2].Value.Kernel).Gamma);
    Assert.AreEqual(1.0231652126930515, loss, 1e-10);
    Assert.IsTrue(predicted.IsEqual(outputs));
    Assert.IsTrue(expectedScores.IsEqual(scores, 1e-10));
    Assert.IsTrue(expectedLogL.IsEqual(logl, 1e-10));
    Assert.IsTrue(expectedProbs.IsEqual(prob, 1e-10));
}