/// <summary>
/// Runs forward activation of the BP-LDA model over <paramref name="InputData"/> in mini-batches
/// and dumps, for each sample, the theta vector of its last effective hidden layer as one
/// tab-separated line in <paramref name="FeatureFileName"/>.
/// </summary>
/// <param name="InputData">Sparse input matrix; each column is one sample (nInput x nTest).</param>
/// <param name="paramModel">Trained model parameters (nHid, nHidLayer, nInput, nOutput).</param>
/// <param name="BatchSize_normal">Size of every batch except possibly the last one.</param>
/// <param name="FeatureFileName">Output path; overwritten if it exists.</param>
/// <param name="DataName">Label used only in the console progress message.</param>
public static void DumpingFeature_BP_LDA(SparseMatrix InputData, paramModel_t paramModel, int BatchSize_normal, string FeatureFileName, string DataName)
{
    Console.WriteLine("----------------------------------------------------");
    int nTest = InputData.nCols;
    int nBatch = (int)Math.Ceiling(((float)nTest) / ((float)BatchSize_normal));
    // Preallocate one workspace for full-size batches and one for the (possibly smaller)
    // final batch, so no DNNRun_t allocation happens inside the loop.
    DNNRun_t DNNRun_NormalBatch = new DNNRun_t(paramModel.nHid, BatchSize_normal, paramModel.nHidLayer, paramModel.nOutput);
    DNNRun_t DNNRun_EndBatch = new DNNRun_t(paramModel.nHid, nTest - (nBatch - 1) * BatchSize_normal, paramModel.nHidLayer, paramModel.nOutput);
    DNNRun_t DNNRun = null;
    Console.Write(" Dumping feature ({0}): Bat#{1}/{2}\r", DataName, 0, nBatch);
    // Identity index array 0..nTest-1; sliced per batch below to select columns.
    int[] IdxSample_Tot = new int[nTest];
    for (int Idx = 0; Idx < nTest; Idx++)
    {
        IdxSample_Tot[Idx] = Idx;
    }
    // using guarantees the writer is flushed and closed even if forward activation or a
    // write throws (the original leaked the StreamWriter on the exception path).
    using (StreamWriter FeatureFile = new StreamWriter(FeatureFileName))
    {
        for (int IdxBatch = 0; IdxBatch < nBatch; IdxBatch++)
        {
            // Extract the batch: all batches are BatchSize_normal except the last,
            // which holds the remaining nTest - IdxBatch * BatchSize_normal samples.
            int BatchSize = 0;
            if (IdxBatch < nBatch - 1)
            {
                BatchSize = BatchSize_normal;
                DNNRun = DNNRun_NormalBatch;
            }
            else
            {
                BatchSize = nTest - IdxBatch * BatchSize_normal;
                DNNRun = DNNRun_EndBatch;
            }
            SparseMatrix Xt = new SparseMatrix(paramModel.nInput, BatchSize);
            int[] IdxSample = new int[BatchSize];
            Array.Copy(IdxSample_Tot, IdxBatch * BatchSize_normal, IdxSample, 0, BatchSize);
            InputData.GetColumns(Xt, IdxSample);
            // Forward activation (inference only; training flag = false).
            LDA_Learn.ForwardActivation_LDA(Xt, DNNRun, paramModel, false);
            // Dump the feature into file: theta of each sample's last effective hidden
            // layer (nHidLayerEffective may vary per sample), tab-separated, one per line.
            for (int Idx = 0; Idx < BatchSize; Idx++)
            {
                FeatureFile.WriteLine(String.Join("\t", DNNRun.theta_pool[DNNRun.nHidLayerEffective[Idx] - 1].DenseMatrixValue[Idx].VectorValue));
            }
            Console.Write(" Dumping feature ({0}): Bat#{1}/{2}\r", DataName, (IdxBatch + 1), nBatch);
        }
    }
    Console.Write("\n");
    Console.WriteLine("----------------------------------------------------");
}