/// <summary>
/// Initializes a stream over the contents of <paramref name="file"/>, handling decryption,
/// decompression and (for patch entries) application of the patch to a base file.
/// </summary>
/// <param name="file">The MPQ file whose data this stream exposes.</param>
/// <param name="baseStream">
/// For patch files, an optional stream over the base file to be patched; when null, the base file
/// is resolved from the archive. NOTE(review): this constructor disposes <paramref name="baseStream"/>
/// in all cases (see the finally block), including a caller-supplied one — confirm that ownership
/// transfer is intended by callers.
/// </param>
/// <exception cref="FileNotFoundException">The base file of a patch could not be resolved.</exception>
/// <exception cref="InvalidDataException">A bogus encrypted patch entry could not be validated.</exception>
/// <exception cref="SeedNotFoundException">The file is encrypted but no decryption seed is known.</exception>
internal MpqFileStream(MpqFile file, Stream baseStream = null)
{
    try
    {
        PatchInfoHeader? patchInfoHeader; // Used to differentiate between regular files and patch files. Also contains the patch header.
        // Store bits of information as local variables, in order to adjust them later.
        bool singleUnit = (file.Flags & MpqFileFlags.SingleBlock) != 0;
        bool compressed = file.IsCompressed;
        uint compressedSize = (uint)file.CompressedSize;

        this.file = file;
        this.offset = file.Offset;

        // Process the patch information header first.
        if (file.IsPatch)
        {
            // Resolving the base file this early may be a waste if the patch ever happens to be a COPY patch… Anyway, it allows for checking the base file's integrity.
            // But seriously, what's the point in COPY patches anyway? Aren't those just like regular MPQ files, only with added (useless) weight?
            if ((baseStream = baseStream ?? file.Archive.ResolveBaseFileInternal(file)) == null)
                throw new FileNotFoundException(string.Format(ErrorMessages.GetString("PatchBaseFileNotFound"), file.Name));
            patchInfoHeader = ReadPatchInfoHeader(file.Archive, file.Offset);
            // The real file data starts after the patch info header; skip past it.
            offset += patchInfoHeader.Value.HeaderLength;
            length = patchInfoHeader.Value.PatchLength; // No matter what crap may be written in the block table, it seems that this field is always right (I had to update the decompression method just for that…)
            if (patchInfoHeader.Value.PatchLength <= file.CompressedSize)
            {
                // As it seems, there are some bogus entries in the block table of MPQ patch archives. (Only for patch files though.)
                // If you browse the list of DBC files, I'd say there are about 10% of them which have a bogus block table entry.
                // So, for detecting them, we'll use the same method as in StormLib: we'll try to read the patch header to know whether the patch is compressed or not.
                // By the way, we cannot detect whether the patch is compressed or not if it is encrypted.
                if (file.IsEncrypted) throw new InvalidDataException(ErrorMessages.GetString("PatchInfoHeaderInvalidData"));
                // Try to read the patch header in the data following the information header and adjust the compressed size depending on the result:
                // Since we are “sure” of the uncompressed size (given in the patch header), there is no point in compression if the compressed data isn't even one byte less.
                // Thus, we can mostly safely decrease the compressed size by 1, which, by the way, is necessary to make decompression work in UpdateBuffer()…
                compressedSize = patchInfoHeader.Value.PatchLength - ((compressed = !TestPatchHeader(file.Archive, offset)) ? (uint)1 : 0);
                // It appears that the single unit flag is also lying on some patch entries. Files reported as blocky (such as some of the Cataclysm mp3) are in fact single unit…
                // Forcing this single unit flag to true when the file is compressed seems to be a good solution. Also, we may (or not :p) save a bit of memory by using blocks for uncompressed files.
                singleUnit = compressed;
            }
        }
        else
        {
            // Regular (non-patch) file: length comes straight from the block table entry.
            patchInfoHeader = null;
            length = checked((uint)file.Size);
        }

        // Set up the stream the same way for both patches and regular files…
        if (file.IsEncrypted)
        {
            if (file.Seed == 0) throw new SeedNotFoundException(file.BlockIndex);
            else this.seed = file.Seed;
            // Position-encrypted files mix the block offset and uncompressed length into the seed.
            if ((file.Flags & MpqFileFlags.PositionEncrypted) != 0)
                this.seed = (this.seed + (uint)file.Offset) ^ (uint)this.length;
        }

        // fileHeader is the block-offset table: fileHeader[i] is the start of block i relative to
        // this.offset, with one extra trailing entry marking the end of the last block.
        if (singleUnit)
            // A single-unit file is one big block: [0, compressedSize].
            this.fileHeader = new uint[] { 0, compressedSize };
        else if (compressed)
            // Compressed blocky files store their offset table at the start of the data; read (and decrypt) it.
            this.fileHeader = ReadBlockOffsets(file.Archive, this.seed, this.offset, (int)((length + file.Archive.BlockSize - 1) / file.Archive.BlockSize + 1));
        else
        {
            // Uncompressed blocky files have no stored table: synthesize evenly-spaced offsets,
            // clamping the final entry to the actual file length.
            this.fileHeader = new uint[(int)(length + file.Archive.BlockSize - 1) / file.Archive.BlockSize + 1];
            this.fileHeader[0] = 0;
            for (int i = 1; i < this.fileHeader.Length; i++)
            {
                this.fileHeader[i] = this.fileHeader[i - 1] + (uint)file.Archive.BlockSize;
                if (this.fileHeader[i] > length) this.fileHeader[i] = (uint)this.length;
            }
        }

        // Treat the files smaller than the block size as single unit. (But only now that we've read the file header.)
        singleUnit |= length <= file.Archive.BlockSize;
        this.blockBuffer = new byte[singleUnit ? length : (uint)file.Archive.BlockSize];
        if (compressed) this.compressedBuffer = new byte[singleUnit ? compressedSize : (uint)file.Archive.BlockSize];
        // The last block is usually shorter than BlockSize; a zero remainder means it is exactly full.
        this.lastBlockLength = this.length > 0 ? this.length % (uint)this.blockBuffer.Length : 0;
        if (this.lastBlockLength == 0) this.lastBlockLength = (uint)this.blockBuffer.Length;
        this.currentBlock = -1; // Forces UpdateBuffer() to load block 0.
        UpdateBuffer();

        // If we finished initializing a stream to patch data, all there is left is to apply the patch.
        if (patchInfoHeader != null)
        {
            // The patching methods will read from this stream instance (whose constructor has yet to finish…!) and return the patched data.
            this.blockBuffer = ApplyPatch(patchInfoHeader.Value, baseStream);
            // Once the patch has been applied, transform this stream into a mere memory stream. (The same as with single unit files, in fact.)
            this.compressedBuffer = null;
            this.fileHeader = new uint[] { 0, (uint)this.blockBuffer.Length };
            this.position = 0;
            this.currentBlock = 0;
            this.readBufferOffset = 0;
            this.length = (uint)this.blockBuffer.Length;
        }
    }
    // The base stream is only needed during construction (patch application); dispose it whether we
    // resolved it ourselves or it was supplied by the caller (see NOTE on the parameter above).
    finally { if (baseStream != null) baseStream.Dispose(); }
}