public void ScanPath(string path)
{
List<string> files = FileOperations.I.GetFilesRecursive(path, null, new Action<string>((s) =>
{
DupChecker.ProgressText="Retreiving files in: " + s;
}));
Dictionary<string, List<string>> filesWithSameSize = new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase);
DupChecker.ProgressText="Checking for same file size. This may take a while"; // to minimize work load looking for duplicate files
this.DupChecker.ProgressMax = files.Count;
this.DupChecker.ProgressIndex = 0;
foreach (string file in files)
{
if (this.DupChecker.Cancel)
{
DupChecker.ProgressText="Operation Canceled.";
DupChecker.ProgressIndex = this.DupChecker.ProgressMax;
return;
}
bool add = true;
FileInfo fi = new FileInfo(file);
if (Settings.Default.DupChecker_MinSize != 0)
{
if (fi.Length < Settings.Default.DupChecker_MinSize * 1000) add = false;
}
if (Settings.Default.DupChecker_MaxSize != 0)
{
if (fi.Length > Settings.Default.DupChecker_MaxSize * 1000) add = false;
}
if (Settings.Default.DupChecker_FileExtensions != "*.*")
{
string[] exts = Settings.Default.DupChecker_FileExtensions.Split(';');
if (!exts.Contains("*" + fi.Extension.ToLower())) add = false;
}
var strKeytoAdd=string.Empty;
Trace.WriteLine("Inside Scan Path Method");
if ((Settings.Default.DuplicateFilterFileSizeCriteara && fi.Length > 0) || !Settings.Default.DuplicateFilterFileSizeCriteara) // do not include 0 length files.
{
if (add)
{
if (!Settings.Default.DuplicateFilterFileSizeCriteara)
{
strKeytoAdd = fi.Name;
}
else
strKeytoAdd = fi.Length.ToString();
Trace.WriteLine("key to Add for path :"+fi.FullName);
if (filesWithSameSize.ContainsKey(strKeytoAdd))
{
filesWithSameSize[strKeytoAdd].Add(fi.FullName);
}
else
{
filesWithSameSize.Add(strKeytoAdd, new List<string>() { fi.FullName });
}
}
}
this.DupChecker.ProgressIndex++;
}
this.DupChecker.ProgressIndex = 0;
DupChecker.ProgressText = "Please wait while hashing files. This may take a while";
// get all the files we need to hash
List<string> files_to_hash = new List<string>();
Dictionary<string, List<string>> FileswithSameNameSize = new Dictionary<string, List<string>>();
foreach (string filesize in filesWithSameSize.Keys)
{
if (this.DupChecker.Cancel)
{
DupChecker.ProgressText="Operation Canceled";
DupChecker.ProgressIndex = this.DupChecker.ProgressMax;
return;
}
if (filesWithSameSize[filesize].Count > 1)
{
Trace.WriteLine("File With Same Size Count >2 :" + filesize);
files_to_hash.AddRange(filesWithSameSize[filesize].ToArray());
FileswithSameNameSize.Add(filesize, filesWithSameSize[filesize]);
}
this.DupChecker.ProgressIndex++;
}
List<string> hashed_files = new List<string>();
this.DupChecker.ProgressMax = files_to_hash.Count;
this.DupChecker.ProgressIndex = 0;
Dictionary<string, List<string>> files_with_same_hash = new Dictionary<string, List<string>>();
if (Settings.Default.DuplicateFilterFileSizeCriteara)
{
foreach (string filename in files_to_hash)
{
try
{
if (this.DupChecker.Cancel)
{
DupChecker.ProgressText = "Operation Canceled";
DupChecker.ProgressIndex = this.DupChecker.ProgressMax;
return;
}
string hash = FileOperations.I.HashFile(filename);
DupChecker.ProgressText = "Hashing: " + filename + " > " + hash;
Trace.WriteLine("Hashing: " + filename + " > " + hash);
hashed_files.Add(filename + "|" + hash);
}
catch (Exception ex)
{
Debug.WriteLine(ex.Message);
}
this.DupChecker.ProgressIndex++;
}
DupChecker.ProgressText = "Finalizing ...";
this.DupChecker.ProgressMax = hashed_files.Count;
this.DupChecker.ProgressIndex = 0;
foreach (string hashedfile in hashed_files)
{
if (this.DupChecker.Cancel)
{
DupChecker.ProgressText = "Operation Canceled";
DupChecker.ProgressIndex = this.DupChecker.ProgressMax;
return;
}
string[] tmp = hashedfile.Split('|');
string file = tmp[0];
string hash = tmp[1];
if (files_with_same_hash.ContainsKey(hash))
{
if (!files_with_same_hash[hash].Contains(file))
{
files_with_same_hash[hash].Add(file);
}
}
else
{
files_with_same_hash.Add(hash, new List<string>());
files_with_same_hash[hash].Add(file);
}
this.DupChecker.ProgressIndex++;
}
List<string> teremove = new List<string>();
foreach (string key in files_with_same_hash.Keys)
{
if (this.DupChecker.Cancel)
{
DupChecker.ProgressText = "Operation Canceled";
DupChecker.ProgressIndex = this.DupChecker.ProgressMax;
return;
}
if (files_with_same_hash[key].Count == 1) teremove.Add(key);
}
foreach (string key in teremove)
{
files_with_same_hash.Remove(key);
}
}
else
{
files_with_same_hash = FileswithSameNameSize;
}
DupChecker.ProgressText = "Adding to collection for previewing.";
App.Current.Dispatcher.Invoke((Action)delegate // <--- HERE
{
this.DupChecker.ProgressMax = files_with_same_hash.Count;
this.DupChecker.ProgressIndex = 0;
foreach (string entry in files_with_same_hash.Keys)
{
DupChecker.ProgressText = "Adding Files to collection " + entry;
if (this.DupChecker.Cancel)
{
DupChecker.ProgressText = "Operation Canceled";
DupChecker.ProgressIndex = this.DupChecker.ProgressMax;
return;
}
for (int i = 0; i < files_with_same_hash[entry].ToArray().Length; i++)
{
if (this.DupChecker.Cancel)
{
DupChecker.ProgressText = "Operation Canceled";
DupChecker.ProgressIndex = this.DupChecker.ProgressMax;
return;
}
string file_entries = files_with_same_hash[entry][i];
FileInfo fi = new FileInfo(file_entries);
Model_DuplicateChecker e = new Model_DuplicateChecker();
e.Hash = entry;
e.Selected = i != 0;
e.FileDetails = new Model_DuplicateChecker_FileDetails()
{
Filename = fi.Name,
Fullfilepath = fi.FullName,
ParentDirectory = fi.Directory.FullName
};
DupChecker.DupplicateCollection.Add(e);
}
}
});
DupChecker.ProgressText ="Done.";
// clear some memory
files.Clear();
filesWithSameSize.Clear();
files_to_hash.Clear();
hashed_files.Clear();
files_with_same_hash.Clear();
if (DupChecker.DupplicateCollection.Count > 0)
DupChecker.ProgressText = "Done. Select file(s) to be removed, then click on remove duplicates button.";
else
DupChecker.ProgressText = "Done. No duplicates found.";
}