/// <summary>
/// BackgroundWorker DoWork handler: spins up one PageManager per configured
/// worker thread, then waits until the search completes (or the user cancels),
/// timing the whole run.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="e">DoWork event arguments (cancellation is observed via
/// <c>CancellationPending</c> rather than <paramref name="e"/>).</param>
private void Work(object sender, DoWorkEventArgs e)
{
    _working = true;

    // Name the thread once for easier debugging; a thread's Name can only
    // be assigned a single time, hence the null guard.
    if (Thread.CurrentThread.Name == null)
    {
        Thread.CurrentThread.Name = "Search Manager: " + _searchCriteria.SearchText;
    }

    var stopwatch = System.Diagnostics.Stopwatch.StartNew();

    // Set up the page managers, each running on an async thread.
    _pageManagers = new PageManager[_threadCount];
    for (int i = 0; i < _pageManagers.Length; i++) // Length, not LINQ Count()
    {
        // Keep track of the highest page we've attempted to scrape
        // (page numbers are not zero-based).
        _pageNumber = i + 1;

        // PageManagers internally fire off an async worker which each
        // call the ResultReturned / WorkerFinished event handlers.
        _pageManagers[i] = new PageManager(
            _pageNumber,
            _searchCriteria,
            Scraper.LoadSearchPage, // inject method for testing here
            ResultReturned,
            WorkerFinished);
        _pageManagers[i].RunWorkerAsync();
    }

    // Wait for the workers to finish; _working is presumably cleared by
    // WorkerFinished — TODO confirm. Sleep briefly each pass so this wait
    // does not busy-spin a CPU core (the original tight loop did).
    while (_working)
    {
        // User can cancel a search through the UI.
        if (CancellationPending)
        {
            HaltAllOngoingWork();
        }

        Thread.Sleep(50);
    }

    stopwatch.Stop();
    Debug.WriteLine($"Search time : {stopwatch.ElapsedMilliseconds} ms{Environment.NewLine}");
}