/// <summary>
/// Background worker loop that repeatedly picks one queued download item in the
/// Decoding or ParityDecoding state and decodes its cached blocks. For items whose
/// Depth has not yet reached the seed's final depth, the decoded payload is the next
/// <c>Index</c> (which re-enters the Downloading state); at the final depth the
/// payload is written to the target file and the item is marked Completed.
/// Polls every 3 seconds and exits when <c>DecodeState</c> becomes <c>ManagerState.Stop</c>.
/// </summary>
private void DecodeThread()
{
    for (;;)
    {
        Thread.Sleep(1000 * 3);
        if (this.DecodeState == ManagerState.Stop) return;

        DownloadItem item = null;

        try
        {
            lock (_thisLock)
            {
                if (_settings.DownloadItems.Count > 0)
                {
                    // Pick the next decodable item not already claimed by a worker.
                    // NOTE: with LINQ-to-Objects' stable sort, the *second* OrderBy is
                    // the primary key — items already in the Decoding state win, then
                    // items whose Depth has not yet reached the seed's final depth.
                    item = _settings.DownloadItems
                        .Where(n => !_workingSeeds.Contains(n.Seed))
                        .Where(n => n.State == DownloadState.Decoding || n.State == DownloadState.ParityDecoding)
                        .Where(n => n.Priority != 0) // Priority 0 presumably means paused — TODO confirm
                        .OrderBy(n => (n.Depth != n.Seed.Metadata.Depth) ? 0 : 1)
                        .OrderBy(n => (n.State == DownloadState.Decoding) ? 0 : 1)
                        .FirstOrDefault();

                    if (item != null)
                    {
                        // Claim the item so other decode workers skip it.
                        _workingSeeds.Add(item.Seed);
                    }
                }
            }
        }
        catch (Exception)
        {
            // NOTE(review): any failure while selecting an item silently terminates
            // this worker thread — consider logging before returning.
            return;
        }

        if (item == null) continue;

        try
        {
            if ((item.Depth == 1 && !_cacheManager.Contains(item.Seed.Metadata.Key))
                || (item.Depth > 1 && !item.Index.Groups.All(n => _existManager.GetCount(n) >= n.InformationLength)))
            {
                // Not all blocks required for decoding are available yet —
                // hand the item back to the download stage.
                item.State = DownloadState.Downloading;
            }
            else
            {
                var keys = new KeyCollection();
                var compressionAlgorithm = CompressionAlgorithm.None;
                var cryptoAlgorithm = CryptoAlgorithm.None;
                byte[] cryptoKey = null;

                if (item.Depth == 1)
                {
                    // Depth 1: a single key taken straight from the seed metadata.
                    keys.Add(item.Seed.Metadata.Key);

                    compressionAlgorithm = item.Seed.Metadata.CompressionAlgorithm;
                    cryptoAlgorithm = item.Seed.Metadata.CryptoAlgorithm;
                    cryptoKey = item.Seed.Metadata.CryptoKey;
                }
                else
                {
                    // Deeper levels: reconstruct the original keys of each group
                    // via parity decoding.
                    item.State = DownloadState.ParityDecoding;

                    item.DecodeOffset = 0;
                    item.DecodeLength = item.Index.Groups.Sum(n => n.Length);

                    try
                    {
                        foreach (var group in item.Index.Groups.ToArray())
                        {
                            using (var tokenSource = new CancellationTokenSource())
                            {
                                var task = _cacheManager.ParityDecoding(group, tokenSource.Token);

                                // Poll for completion; cancel if the manager is stopping
                                // or the item was removed from the queue.
                                while (!task.IsCompleted)
                                {
                                    if (this.DecodeState == ManagerState.Stop || !_settings.DownloadItems.Contains(item)) tokenSource.Cancel();

                                    Thread.Sleep(1000);
                                }

                                keys.AddRange(task.Result);
                            }

                            item.DecodeOffset += group.Length;
                        }
                    }
                    catch (Exception)
                    {
                        // Parity decoding failed or was cancelled — leave the item
                        // for a later pass.
                        continue;
                    }

                    compressionAlgorithm = item.Index.CompressionAlgorithm;
                    cryptoAlgorithm = item.Index.CryptoAlgorithm;
                    cryptoKey = item.Index.CryptoKey;
                }

                item.State = DownloadState.Decoding;

                if (item.Depth < item.Seed.Metadata.Depth)
                {
                    // Intermediate depth: the decoded payload is the next Index,
                    // written to a temp file under the work directory.
                    string fileName = null;
                    bool largeFlag = false;

                    try
                    {
                        item.DecodeOffset = 0;
                        item.DecodeLength = keys.Sum(n => (long)_cacheManager.GetLength(n));

                        using (var stream = DownloadManager.GetUniqueFileStream(Path.Combine(_workDirectory, "index")))
                        using (ProgressStream decodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) =>
                        {
                            // Abort on shutdown, item removal, or when the output
                            // grows past the size declared by the seed.
                            isStop = (this.DecodeState == ManagerState.Stop || !_settings.DownloadItems.Contains(item));

                            if (!isStop && (stream.Length > item.Seed.Length))
                            {
                                isStop = true;
                                largeFlag = true;
                            }

                            item.DecodeOffset = writeSize;
                        }, 1024 * 1024, true))
                        {
                            fileName = stream.Name;

                            _cacheManager.Decoding(decodingProgressStream, compressionAlgorithm, cryptoAlgorithm, cryptoKey, keys);
                        }
                    }
                    catch (StopIoException)
                    {
                        if (File.Exists(fileName))
                        {
                            File.Delete(fileName);
                        }

                        // An oversized payload is a hard error; a plain stop is retried.
                        if (largeFlag)
                        {
                            throw new Exception("size too large.");
                        }

                        continue;
                    }
                    catch (Exception)
                    {
                        if (File.Exists(fileName))
                        {
                            File.Delete(fileName);
                        }

                        throw;
                    }

                    Index index;

                    using (var stream = new UnbufferedFileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferManager))
                    {
                        index = Index.Import(stream, _bufferManager);
                    }

                    File.Delete(fileName);

                    lock (_thisLock)
                    {
                        if (!_settings.DownloadItems.Contains(item)) continue;

                        item.DecodeOffset = 0;
                        item.DecodeLength = 0;

                        this.UncheckState(item.Index);

                        item.Index = index;

                        this.CheckState(item.Index);

                        // Pin every key of the new index in the cache until the
                        // item finishes (unlocked again on completion).
                        foreach (var group in item.Index.Groups)
                        {
                            foreach (var key in group.Keys)
                            {
                                _cacheManager.Lock(key);
                            }
                        }

                        item.Indexes.Add(index);

                        item.Depth++;

                        item.State = DownloadState.Downloading;
                    }
                }
                else
                {
                    // Final depth: decode straight into the download directory.
                    item.State = DownloadState.Decoding;

                    string fileName = null;
                    bool largeFlag = false;
                    string downloadDirectory;

                    if (item.Path == null)
                    {
                        downloadDirectory = this.BaseDirectory;
                    }
                    else
                    {
                        if (Path.IsPathRooted(item.Path))
                        {
                            downloadDirectory = item.Path;
                        }
                        else
                        {
                            downloadDirectory = Path.Combine(this.BaseDirectory, item.Path);
                        }
                    }

                    Directory.CreateDirectory(downloadDirectory);

                    try
                    {
                        item.DecodeOffset = 0;
                        item.DecodeLength = keys.Sum(n => (long)_cacheManager.GetLength(n));

                        using (var stream = DownloadManager.GetUniqueFileStream(Path.Combine(downloadDirectory, string.Format("{0}.tmp", DownloadManager.GetNormalizedPath(item.Seed.Name)))))
                        using (ProgressStream decodingProgressStream = new ProgressStream(stream, (object sender, long readSize, long writeSize, out bool isStop) =>
                        {
                            isStop = (this.DecodeState == ManagerState.Stop || !_settings.DownloadItems.Contains(item));

                            if (!isStop && (stream.Length > item.Seed.Length))
                            {
                                isStop = true;
                                largeFlag = true;
                            }

                            item.DecodeOffset = writeSize;
                        }, 1024 * 1024, true))
                        {
                            fileName = stream.Name;

                            _cacheManager.Decoding(decodingProgressStream, compressionAlgorithm, cryptoAlgorithm, cryptoKey, keys);

                            // The finished file must match the seed's declared size exactly.
                            if (stream.Length != item.Seed.Length) throw new Exception("Stream.Length != Seed.Length");
                        }
                    }
                    catch (StopIoException)
                    {
                        if (File.Exists(fileName))
                        {
                            File.Delete(fileName);
                        }

                        if (largeFlag)
                        {
                            throw new Exception("size too large.");
                        }

                        continue;
                    }
                    catch (Exception)
                    {
                        if (File.Exists(fileName))
                        {
                            File.Delete(fileName);
                        }

                        throw;
                    }

                    // Promote the .tmp file to its final (collision-free) name.
                    File.Move(fileName, DownloadManager.GetUniqueFilePath(Path.Combine(downloadDirectory, DownloadManager.GetNormalizedPath(item.Seed.Name))));

                    lock (_thisLock)
                    {
                        if (!_settings.DownloadItems.Contains(item)) continue;

                        item.DecodeOffset = 0;
                        item.DecodeLength = 0;

                        {
                            // Record which cached keys this seed actually used
                            // (only the information blocks of each group).
                            var usingKeys = new HashSet<Key>();

                            foreach (var index in item.Indexes)
                            {
                                foreach (var group in index.Groups)
                                {
                                    usingKeys.UnionWith(group.Keys
                                        .Where(n => _cacheManager.Contains(n))
                                        .Reverse()
                                        .Take(group.InformationLength));
                                }
                            }

                            _cacheManager.SetSeed(item.Seed.Clone(), usingKeys.ToArray());
                        }

                        _settings.DownloadedSeeds.Add(item.Seed.Clone());

                        // Release every key pinned during the download.
                        _cacheManager.Unlock(item.Seed.Metadata.Key);

                        foreach (var index in item.Indexes)
                        {
                            foreach (var group in index.Groups)
                            {
                                foreach (var key in group.Keys)
                                {
                                    _cacheManager.Unlock(key);
                                }
                            }
                        }

                        item.Indexes.Clear();

                        item.State = DownloadState.Completed;
                    }
                }
            }
        }
        catch (Exception e)
        {
            // Check: read back every block belonging to this item — presumably so
            // the cache manager can detect (and drop) corrupted entries — TODO confirm.
            {
                var list = new List<Key>();

                list.Add(item.Seed.Metadata.Key);

                foreach (var index in item.Indexes)
                {
                    foreach (var group in index.Groups)
                    {
                        foreach (var key in group.Keys)
                        {
                            list.Add(key);
                        }
                    }
                }

                foreach (var key in list)
                {
                    if (this.DecodeState == ManagerState.Stop) return;
                    if (!_cacheManager.Contains(key)) continue;

                    var buffer = new ArraySegment<byte>();

                    try
                    {
                        buffer = _cacheManager[key];
                    }
                    catch (Exception)
                    {
                        // Read failures are expected here for corrupt blocks.
                    }
                    finally
                    {
                        if (buffer.Array != null)
                        {
                            _bufferManager.ReturnBuffer(buffer.Array);
                        }
                    }
                }
            }

            item.State = DownloadState.Error;

            Log.Error(e);
        }
        finally
        {
            // Always release the claim so the item can be retried.
            _workingSeeds.Remove(item.Seed);
        }
    }
}