Accord.Tests.Statistics.PartialLeastSquaresAnalysisTest.NipalsComputeTest2_new_method C# (CSharp) Method

NipalsComputeTest2_new_method() public method

public void NipalsComputeTest2_new_method ( )
Returns: void
        /// <summary>
        ///   Round-trip and cross-algorithm test of Partial Least Squares using
        ///   the NIPALS algorithm on Fisher's iris measurements. Trains on the
        ///   first 25 samples of each of the three classes, verifies that both
        ///   the predictor and dependent data can be reconstructed from the
        ///   extracted factors, classifies the 25 held-out samples per class via
        ///   the derived regression, and finally checks that NIPALS and SIMPLS
        ///   agree (up to sign) on the leading factors.
        /// </summary>
        public void NipalsComputeTest2_new_method()
        {
            // Example data from Chiang, Y.Q., Zhuang, Y.M and Yang, J.Y, "Optimal Fisher
            //   discriminant analysis using the rank decomposition", Pattern Recognition,
            //   25 (1992), 101--111, as given by Yi Cao in his excellent PLS tutorial.

            // Each class: 50 samples x 4 features. ToJagged() converts the 2-D
            // array literal into the double[][] form the analysis API expects.
            double[][] x1 = new double[,]
            { 
                // Class 1
                { 5.1, 3.5, 1.4, 0.2 }, { 4.9, 3.0, 1.4, 0.2 }, { 4.7, 3.2, 1.3, 0.2 }, { 4.6, 3.1, 1.5, 0.2 },
                { 5.0, 3.6, 1.4, 0.2 }, { 5.4, 3.9, 1.7, 0.4 }, { 4.6, 3.4, 1.4, 0.3 }, { 5.0, 3.4, 1.5, 0.2 },
                { 4.4, 2.9, 1.4, 0.2 }, { 4.9, 3.1, 1.5, 0.1 }, { 5.4, 3.7, 1.5, 0.2 }, { 4.8, 3.4, 1.6, 0.2 },
                { 4.8, 3.0, 1.4, 0.1 }, { 4.3, 3.0, 1.1, 0.1 }, { 5.8, 4.0, 1.2, 0.2 }, { 5.7, 4.4, 1.5, 0.4 },
                { 5.4, 3.9, 1.3, 0.4 }, { 5.1, 3.5, 1.4, 0.3 }, { 5.7, 3.8, 1.7, 0.3 }, { 5.1, 3.8, 1.5, 0.3 }, 
                { 5.4, 3.4, 1.7, 0.2 }, { 5.1, 3.7, 1.5, 0.4 }, { 4.6, 3.6, 1.0, 0.2 }, { 5.1, 3.3, 1.7, 0.5 }, 
                { 4.8, 3.4, 1.9, 0.2 }, { 5.0, 3.0, 1.6, 0.2 }, { 5.0, 3.4, 1.6, 0.4 }, { 5.2, 3.5, 1.5, 0.2 }, 
                { 5.2, 3.4, 1.4, 0.2 }, { 4.7, 3.2, 1.6, 0.2 }, { 4.8, 3.1, 1.6, 0.2 }, { 5.4, 3.4, 1.5, 0.4 }, 
                { 5.2, 4.1, 1.5, 0.1 }, { 5.5, 4.2, 1.4, 0.2 }, { 4.9, 3.1, 1.5, 0.2 }, { 5.0, 3.2, 1.2, 0.2 }, 
                { 5.5, 3.5, 1.3, 0.2 }, { 4.9, 3.6, 1.4, 0.1 }, { 4.4, 3.0, 1.3, 0.2 }, { 5.1, 3.4, 1.5, 0.2 }, 
                { 5.0, 3.5, 1.3, 0.3 }, { 4.5, 2.3, 1.3, 0.3 }, { 4.4, 3.2, 1.3, 0.2 }, { 5.0, 3.5, 1.6, 0.6 }, 
                { 5.1, 3.8, 1.9, 0.4 }, { 4.8, 3.0, 1.4, 0.3 }, { 5.1, 3.8, 1.6, 0.2 }, { 4.6, 3.2, 1.4, 0.2 }, 
                { 5.3, 3.7, 1.5, 0.2 }, { 5.0, 3.3, 1.4, 0.2 }
           }.ToJagged();

            double[][] x2 = new double[,]
            {
                // Class 2
                {7.0, 3.2, 4.7, 1.4 }, { 6.4, 3.2, 4.5, 1.5 }, { 6.9, 3.1, 4.9, 1.5 }, { 5.5, 2.3, 4.0, 1.3 },
                {6.5, 2.8, 4.6, 1.5 }, { 5.7, 2.8, 4.5, 1.3 }, { 6.3, 3.3, 4.7, 1.6 }, { 4.9, 2.4, 3.3, 1.0 },
                {6.6, 2.9, 4.6, 1.3 }, { 5.2, 2.7, 3.9, 1.4 }, { 5.0, 2.0, 3.5, 1.0 }, { 5.9, 3.0, 4.2, 1.5 },
                {6.0, 2.2, 4.0, 1.0 }, { 6.1, 2.9, 4.7 ,1.4 }, { 5.6, 2.9, 3.9, 1.3 }, { 6.7, 3.1, 4.4, 1.4 },
                {5.6, 3.0, 4.5, 1.5 }, { 5.8, 2.7, 4.1, 1.0 }, { 6.2, 2.2, 4.5, 1.5 }, { 5.6, 2.5, 3.9, 1.1 },
                {5.9, 3.2, 4.8, 1.8 }, { 6.1, 2.8, 4.0, 1.3 }, { 6.3, 2.5, 4.9, 1.5 }, { 6.1, 2.8, 4.7, 1.2 },
                {6.4, 2.9, 4.3, 1.3 }, { 6.6, 3.0, 4.4, 1.4 }, { 6.8, 2.8, 4.8, 1.4 }, { 6.7, 3.0, 5.0, 1.7 },
                {6.0, 2.9, 4.5, 1.5 }, { 5.7, 2.6, 3.5, 1.0 }, { 5.5, 2.4, 3.8, 1.1 }, { 5.5, 2.4, 3.7, 1.0 },
                {5.8, 2.7, 3.9, 1.2 }, { 6.0, 2.7, 5.1, 1.6 }, { 5.4, 3.0, 4.5, 1.5 }, { 6.0, 3.4, 4.5, 1.6 },
                {6.7, 3.1, 4.7, 1.5 }, { 6.3, 2.3, 4.4, 1.3 }, { 5.6, 3.0, 4.1, 1.3 }, { 5.5, 2.5, 5.0, 1.3 },
                {5.5, 2.6, 4.4, 1.2 }, { 6.1, 3.0, 4.6, 1.4 }, { 5.8, 2.6, 4.0, 1.2 }, { 5.0, 2.3, 3.3, 1.0 },
                {5.6, 2.7, 4.2, 1.3 }, { 5.7, 3.0, 4.2, 1.2 }, { 5.7, 2.9, 4.2, 1.3 }, { 6.2, 2.9, 4.3, 1.3 },
                {5.1, 2.5, 3.0, 1.1 }, { 5.7, 2.8, 4.1, 1.3 }
            }.ToJagged();

            double[][] x3 = new double[,]
            {
                // Class 3
                { 6.3, 3.3, 6.0, 2.5}, { 5.8, 2.7, 5.1, 1.9 }, { 7.1, 3.0, 5.9, 2.1 }, { 6.3, 2.9, 5.6, 1.8 },
                { 6.5, 3.0, 5.8, 2.2}, { 7.6, 3.0, 6.6, 2.1 }, { 4.9, 2.5, 4.5, 1.7 }, { 7.3, 2.9, 6.3, 1.8 }, 
                { 6.7, 2.5, 5.8, 1.8}, { 7.2, 3.6, 6.1, 2.5 }, { 6.5, 3.2, 5.1, 2.0 }, { 6.4, 2.7, 5.3, 1.9 },
                { 6.8, 3.0, 5.5, 2.1}, { 5.7, 2.5, 5.0, 2.0 }, { 5.8, 2.8, 5.1, 2.4 }, { 6.4, 3.2, 5.3, 2.3 },
                { 6.5, 3.0, 5.5, 1.8}, { 7.7, 3.8, 6.7, 2.2 }, { 7.7, 2.6, 6.9, 2.3 }, { 6.0, 2.2, 5.0, 1.5 },
                { 6.9, 3.2, 5.7, 2.3}, { 5.6, 2.8, 4.9, 2.0 }, { 7.7, 2.8, 6.7, 2.0 }, { 6.3, 2.7, 4.9, 1.8 },
                { 6.7, 3.3, 5.7, 2.1}, { 7.2, 3.2, 6.0, 1.8 }, { 6.2, 2.8, 4.8, 1.8 }, { 6.1, 3.0, 4.9, 1.8 },
                { 6.4, 2.8, 5.6, 2.1}, { 7.2, 3.0, 5.8, 1.6 }, { 7.4, 2.8, 6.1, 1.9 }, { 7.9, 3.8, 6.4, 2.0 },
                { 6.4, 2.8, 5.6, 2.2}, { 6.3, 2.8, 5.1, 1.5 }, { 6.1, 2.6, 5.6, 1.4 }, { 7.7, 3.0, 6.1, 2.3 },
                { 6.3 ,3.4, 5.6, 2.4}, { 6.4, 3.1, 5.5, 1.8 }, { 6.0, 3.0, 4.8, 1.8 }, { 6.9, 3.1, 5.4, 2.1 },
                { 6.7, 3.1, 5.6, 2.4}, { 6.9, 3.1, 5.1, 2.3 }, { 5.8, 2.7, 5.1, 1.9 }, { 6.8, 3.2, 5.9, 2.3 },
                { 6.7, 3.3, 5.7, 2.5}, { 6.7, 3.0, 5.2, 2.3 }, { 6.3, 2.5, 5.0, 1.9 }, { 6.5, 3.0, 5.2, 2.0 },
                { 6.2, 3.4, 5.4, 2.3}, { 5.9, 3.0, 5.1, 1.8 }
            }.ToJagged();

            // Split data set into training (1:25) and testing (26:50)
            var idxTrain = Matrix.Indices(0, 25);
            var idxTest = Matrix.Indices(25, 50);

            // Training inputs: 75 rows total (25 per class), stacked in class order.
            double[][] a = x1.Submatrix(idxTrain);
            double[][] b = x2.Submatrix(idxTrain);
            double[][] c = x3.Submatrix(idxTrain);
            double[][] inputs = Matrix.Stack<double>(new double[][][] { a, b, c });


            // Training outputs: one-hot class indicators, each row repeated 25
            // times to match the stacked inputs above.
            double[][] outputs = Matrix.Expand(
                new double[,] 
                {
                    { 1, 0, 0 }, // repeat 25 times
                    { 0, 1, 0 }, // repeat 25 times
                    { 0, 0, 1 }, // repeat 25 times
                },
                new int[] { 25, 25, 25 }).ToJagged();


            // Standardize (z-score) the data and extract factors with NIPALS.
            var target = new PartialLeastSquaresAnalysis(
                AnalysisMethod.Standardize, PartialLeastSquaresAlgorithm.NIPALS);

            target.Learn(inputs, outputs);


            // Per-column statistics used by the standardization, needed below
            // to map the factor reconstruction back to the original units.
            double[] xmean = target.Predictors.Means;
            double[] xstdd = target.Predictors.StandardDeviations;

            // Test X
            // Reconstruct the standardized predictors from scores and loadings
            // (t * p') and undo the z-score transform; the result should match
            // the original inputs almost exactly.
            double[,] t = target.Predictors.Result;
            double[,] p = target.Predictors.FactorMatrix.ToMatrix();
            double[,] tp = t.Multiply(p.Transpose());
            for (int i = 0; i < tp.GetLength(0); i++)
                for (int j = 0; j < tp.GetLength(1); j++)
                    tp[i, j] = tp[i, j] * xstdd[j] + xmean[j];
            Assert.IsTrue(inputs.IsEqual(tp, 0.01));

            // Test Y
            // Same reconstruction for the dependent variables (u * q'). The
            // class-indicator matrix is only approximated by the factors, hence
            // the much looser 0.45 tolerance.
            double[] ymean = target.Dependents.Means;
            double[] ystdd = target.Dependents.StandardDeviations;
            double[,] u = target.Dependents.Result;
            double[,] q = target.Dependents.FactorMatrix.ToMatrix();
            double[,] uq = u.Multiply(q.Transpose());
            for (int i = 0; i < uq.GetLength(0); i++)
                for (int j = 0; j < uq.GetLength(1); j++)
                    uq[i, j] = uq[i, j] * ystdd[j] + ymean[j];

            Assert.IsTrue(Matrix.IsEqual(outputs, uq, 0.45));


            // Held-out samples, stacked in the same class order as training:
            // rows 0-24 belong to class 0, 25-49 to class 1, 50-74 to class 2.
            a = x1.Submatrix(idxTest);
            b = x2.Submatrix(idxTest);
            c = x3.Submatrix(idxTest);
            double[][] test = Matrix.Stack(new double[][][] { a, b, c });

            // test regression for classification
            var regression = target.CreateRegression();

            double[][] Y = regression.Compute(test);

            // The index of the largest output component is the predicted class;
            // spot-check two samples from each of the three class ranges.
            int cl;
            Matrix.Max(Y[0], out cl);
            Assert.AreEqual(0, cl);

            Matrix.Max(Y[11], out cl);
            Assert.AreEqual(0, cl);

            Matrix.Max(Y[29], out cl);
            Assert.AreEqual(1, cl);

            Matrix.Max(Y[30], out cl);
            Assert.AreEqual(1, cl);

            Matrix.Max(Y[52], out cl);
            Assert.AreEqual(2, cl);

            Matrix.Max(Y[70], out cl);
            Assert.AreEqual(2, cl);


            // Re-run the same analysis with the SIMPLS algorithm to check that
            // the two algorithms agree on the leading factors.
            var target2 = new PartialLeastSquaresAnalysis()
            {
                Method = AnalysisMethod.Standardize,
                Algorithm = PartialLeastSquaresAlgorithm.SIMPLS
            };

            target2.Learn(inputs, outputs);

            // First columns should be equal
            // (compared by absolute value: factor signs are arbitrary and may
            // differ between the two algorithms)
            Assert.IsTrue(Matrix.IsEqual(
                target.Predictors.Result.GetColumn(0).Abs(),
                target2.Predictors.Result.GetColumn(0).Abs(), atol: 0.00001));

            Assert.IsTrue(Matrix.IsEqual(
                target.Predictors.FactorMatrix.GetColumn(0).Abs(),
                target2.Predictors.FactorMatrix.GetColumn(0).Abs(), atol: 0.00001));

            // Others are approximations
            Assert.IsTrue(Matrix.IsEqual(
                target.Predictors.Result.GetColumn(1).Abs(),
                target2.Predictors.Result.GetColumn(1).Abs(), atol: 0.001));

            Assert.IsTrue(Matrix.IsEqual(
                target.Predictors.FactorMatrix.GetColumn(1).Abs(),
                target2.Predictors.FactorMatrix.GetColumn(1).Abs(), atol: 0.01));

            // Explained variance proportion should be similar
            // (only the first two proportions are compared for the predictors)
            Assert.IsTrue(Matrix.IsEqual(
                target.Predictors.FactorProportions.Submatrix(2),
                target2.Predictors.FactorProportions.Submatrix(2), atol: 0.05));

            Assert.IsTrue(Matrix.IsEqual(
                target.Dependents.FactorProportions,
                target2.Dependents.FactorProportions, atol: 0.8));

        }