/// <summary>
/// Computes, for every position of the input, the optimal LZ-11 compression step
/// (match length and displacement) that minimizes the total compressed size from that
/// position to the end of the input, via a backwards dynamic-programming pass.
/// Flag bytes are not taken into account (see the note at the end of the method).
/// </summary>
/// <param name="indata">Pointer to the start of the uncompressed input data.</param>
/// <param name="inLength">The length of the input data, in bytes.</param>
/// <param name="lengths">Out: for each input position, the optimal number of bytes to
/// compress in one step; a value of 1 means 'store this byte uncompressed'.</param>
/// <param name="disps">Out: for each input position, the displacement of the longest
/// match found at that position.</param>
/// <exception cref="InvalidOperationException">If a computed displacement reaches before
/// the start of the input (internal invariant violation).</exception>
private static unsafe void GetOptimalCompressionLengths(byte* indata, int inLength, out int[] lengths, out int[] disps)
{
    lengths = new int[inLength];
    disps = new int[inLength];
    // minLengths[i] = minimal compressed size (excluding flag bytes) of input[i .. inLength).
    // Filled back-to-front, so minLengths[i + j] is always available when position i is handled.
    int[] minLengths = new int[inLength];
    for (int i = inLength - 1; i >= 0; i--)
    {
        // first get the compression length when the next byte is not compressed
        // (an uncompressed byte costs exactly 1 byte of output)
        minLengths[i] = int.MaxValue;
        lengths[i] = 1;
        if (i + 1 >= inLength)
            minLengths[i] = 1;
        else
            minLengths[i] = 1 + minLengths[i + 1];
        // then the optimal compressed length.
        // LZ-11 displacements are at most 0x1000, and a match cannot start before the input.
        int oldLength = Math.Min(0x1000, i);
        // get the appropriate disp while at it. Takes at most O(n) time if oldLength is considered O(n) and 0x10110 constant.
        // however since a lot of files will not be larger than 0x10110, this will often take ~O(n^2) time.
        // be sure to bound the input length with 0x10110, as that's the maximum length for LZ-11 compressed blocks.
        int maxLen = GetOccurrenceLength(indata + i, Math.Min(inLength - i, 0x10110),
            indata + i - oldLength, oldLength, out disps[i]);
        if (disps[i] > i)
            throw new InvalidOperationException("disp is too large");
        // Try every usable match length; LZ-11 matches must be at least 3 bytes long,
        // so when maxLen < 3 the uncompressed-byte option above stands.
        for (int j = 3; j <= maxLen; j++)
        {
            // A compressed block costs 2, 3 or 4 bytes of output depending on the
            // length it encodes (<= 0x10, <= 0x110, <= 0x10110 respectively).
            int blocklen;
            if (j > 0x110)
                blocklen = 4;
            else if (j > 0x10)
                blocklen = 3;
            else
                blocklen = 2;
            // Cost of compressing j bytes here, plus the optimal cost of the remainder.
            int newCompLen;
            if (i + j >= inLength)
                newCompLen = blocklen;
            else
                newCompLen = blocklen + minLengths[i + j];
            if (newCompLen < minLengths[i])
            {
                lengths[i] = j;
                minLengths[i] = newCompLen;
            }
        }
    }
    // we could optimize this further to also optimize it with regard to the flag-bytes, but that would require 8 times
    // more space and time (one for each position in the block) for only a potentially tiny increase in compression ratio.
}