/// <summary>
/// Runs the trained BP-sLDA network over a test set in mini-batches and writes the
/// per-sample output scores (one tab-separated line per sample) to a score file.
/// </summary>
/// <param name="TestData">Test samples, one per column (nInput x nTest sparse matrix).</param>
/// <param name="paramModel">Trained model parameters (layer sizes and weights).</param>
/// <param name="BatchSize_normal">Batch size for every batch except possibly the last,
/// which holds the remaining nTest - (nBatch - 1) * BatchSize_normal samples.</param>
/// <param name="ScoreFileName">Output path for the score file; its directory is created if missing.</param>
public static void PredictingOutput_BP_sLDA(SparseMatrix TestData, paramModel_t paramModel, int BatchSize_normal, string ScoreFileName)
{
    Console.WriteLine("----------------------------------------------------");
    int nTest = TestData.nCols;
    int nBatch = (int)Math.Ceiling(((float)nTest) / ((float)BatchSize_normal));
    // Preallocate one workspace for full-size batches and one for the (possibly smaller)
    // final batch, so no per-batch allocation of DNNRun_t is needed inside the loop.
    DNNRun_t DNNRun_NormalBatch = new DNNRun_t(paramModel.nHid, BatchSize_normal, paramModel.nHidLayer, paramModel.nOutput);
    DNNRun_t DNNRun_EndBatch = new DNNRun_t(paramModel.nHid, nTest - (nBatch - 1) * BatchSize_normal, paramModel.nHidLayer, paramModel.nOutput);
    int[] IdxSample_Tot = new int[nTest];
    for (int Idx = 0; Idx < nTest; Idx++)
    {
        IdxSample_Tot[Idx] = Idx;
    }
    // Ensure the target directory exists before opening the writer.
    (new FileInfo(ScoreFileName)).Directory.Create();
    // FIX: wrap the writer in "using" so the file handle is released even if an
    // exception is thrown mid-loop (the original leaked the handle on failure).
    using (StreamWriter ScoreFile = new StreamWriter(ScoreFileName))
    {
        // ---- Test in a batch-wise manner over the test data ----
        for (int IdxBatch = 0; IdxBatch < nBatch; IdxBatch++)
        {
            // Select the batch size and the matching preallocated workspace.
            int BatchSize;
            DNNRun_t DNNRun;
            if (IdxBatch < nBatch - 1)
            {
                BatchSize = BatchSize_normal;
                DNNRun = DNNRun_NormalBatch;
            }
            else
            {
                // Last batch: whatever samples remain.
                BatchSize = nTest - IdxBatch * BatchSize_normal;
                DNNRun = DNNRun_EndBatch;
            }
            // Extract the columns of this batch from the test data.
            SparseMatrix Xt = new SparseMatrix(paramModel.nInput, BatchSize);
            SparseMatrix Dt = new SparseMatrix(paramModel.nOutput, BatchSize);
            int[] IdxSample = new int[BatchSize];
            Array.Copy(IdxSample_Tot, IdxBatch * BatchSize_normal, IdxSample, 0, BatchSize);
            TestData.GetColumns(Xt, IdxSample);
            // Forward activation (inference only: no sampling / no training flag).
            LDA_Learn.ForwardActivation_LDA(Xt, DNNRun, paramModel, false);
            // Write one tab-separated score line per sample in this batch.
            // NOTE(review): DenseMatrixValue is indexed by IdxCol while iterating y.nCols,
            // which assumes a column-major dense layout — confirm against DNNRun_t.
            for (int IdxCol = 0; IdxCol < DNNRun.y.nCols; IdxCol++)
            {
                ScoreFile.WriteLine(String.Join("\t", DNNRun.y.DenseMatrixValue[IdxCol].VectorValue));
            }
            Console.Write(" Testing: Bat#{0}/{1}\r", (IdxBatch + 1), nBatch);
        }
        Console.WriteLine("----------------------------------------------------");
    }
}
}