private bool TryProcessRow(_SqlMetaDataSet columns, object[] buffer, int[] map, TdsParserStateObject stateObj)
{
    // Scratch holder reused for every column; cleared after each value is copied out.
    SqlBuffer data = new SqlBuffer();

    for (int column = 0; column < columns.Length; column++)
    {
        _SqlMetaData md = columns[column];
        Debug.Assert(md != null, "_SqlMetaData should not be null for column " + column.ToString(CultureInfo.InvariantCulture));

        bool isNull;
        ulong length;

        // Read this column's header; a false return means more network data is needed.
        if (!TryProcessColumnHeader(md, stateObj, column, out isNull, out length))
        {
            return false;
        }

        if (isNull)
        {
            // NULL column: materialize the appropriate typed null into the scratch buffer.
            GetNullSqlValue(data, md);
        }
        else
        {
            // Non-PLP values use the exact wire length. PLP (partially-length-prefixed)
            // values may have reported only the first chunk's length, so cap the read at
            // 2GB; anything larger must be consumed in sequential-read mode instead.
            int bytesToRead = md.metaType.IsPlp ? (Int32.MaxValue) : (int)length;
            if (!TryReadSqlValue(data, md, bytesToRead, stateObj))
            {
                return false;
            }
        }

        // Place the decoded value into the caller's row buffer at its mapped slot.
        buffer[map[column]] = data.SqlValue;

        // Leftover PLP length after a non-null read means the value exceeded the 2GB cap.
        if (!isNull && stateObj._longlen != 0)
        {
            throw new SqlTruncateException(Res.GetString(Res.SqlMisc_TruncationMaxDataMessage));
        }

        data.Clear();
    }

    return true;
}