/// <summary>
/// Downloads the file at <paramref name="desiredURL"/> to <paramref name="filePath"/>,
/// skipping the download when the local copy is fresh (created less than 7 days ago)
/// or when the server's Last-Modified date matches the local file's last-write date.
/// Retries up to 4 times with exponential backoff between attempts.
/// </summary>
/// <param name="desiredURL">URL of the file to fetch.</param>
/// <param name="filePath">Destination path on disk.</param>
/// <param name="cancelled">Set by the copy helper when the user cancels; a partially
/// downloaded file is deleted and the method returns false.</param>
/// <param name="workOffline">When true and the file already exists locally, skips the
/// server date-stamp check entirely.</param>
/// <returns>True if the file is present and up to date (downloaded or skipped); false
/// on cancellation or after exhausting all retries.</returns>
/// <exception cref="ApplicationException">Thrown when the destination is not writable;
/// the caller is expected to treat this as fatal.</exception>
public static bool GetFileByURL(string desiredURL, string filePath, ref bool cancelled, bool workOffline = false)
{
    bool preExists = false;
    if (File.Exists(filePath))
    {
        // Don't keep harassing the server if the file is less than one week old.
        // Creation time is (re)stamped to "now" on every successful check/download below,
        // so it acts as a "last verified" timestamp.
        TimeSpan elapsed = (DateTime.Now - File.GetCreationTime(filePath));
        preExists = true;
        if (elapsed.TotalDays < 7)
        {
            _log.DebugFormat("File {0} is less than 7 days old, skipping", Path.GetFileName(filePath));
            System.Threading.Thread.Sleep(100); //give the disk a chance to catch up
            return true;
        }
    }
    int retries = 0;
    while ((!File.Exists(filePath) || preExists) && retries < 4)
    {
        try
        {
            if (workOffline && File.Exists(filePath))
            {
                _log.DebugFormat("Working Offline! - Skipping file stamp check");
                return true;
            }
            _lastQuery = DateTime.Now;
            System.Threading.Thread.Sleep(250); //just a little pre-nap so we don't hammer the server
            HttpWebRequest request = (HttpWebRequest)WebRequest.Create(desiredURL);
            request.KeepAlive = false; //We're only doing this once
            request.Credentials = CredentialCache.DefaultCredentials;
            request.Timeout = Settings.TimeOutMs;
            using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
            {
                long expectedLength = response.ContentLength;
                if (File.Exists(filePath))
                {
                    // Date-only comparison (not time): treat same-day stamps as "unchanged".
                    string srcDate = response.LastModified.ToShortDateString();
                    string localDate = File.GetLastWriteTime(filePath).ToShortDateString();
                    if (localDate == srcDate)
                    {
                        _log.DebugFormat("File {0} already exists, and date stamps match, skipping",
                            Path.GetFileName(filePath));
                        // Refresh the "last verified" creation-time stamp so the 7-day
                        // fast path above applies on subsequent runs.
                        FileUtilities.TryChangeCreationTime(filePath, DateTime.Now);
                        return true;
                    }
                }
                // using blocks guarantee the streams are closed even if the copy throws,
                // so a failed attempt doesn't leave the destination file locked for the retry.
                using (Stream downloadStream = response.GetResponseStream())
                using (FileStream output = new FileStream(filePath, FileMode.Create))
                {
                    Utilities.CopyToWithProgress(downloadStream, expectedLength, output, ref cancelled);
                }
                response.Close();
                request.Abort();
                if (cancelled)
                {
                    //try to clean up after ourselves
                    FileUtilities.TryDelete(filePath);
                    _log.DebugFormat("Cancelling... Attempting to delete partially downloaded file {0}", Path.GetFileName(filePath));
                    return false;
                }
                // Mirror the server's Last-Modified so the date-stamp check above
                // short-circuits next time.
                FileUtilities.TryChangeLastWriteTime(filePath, response.LastModified);
                _log.DebugFormat("Download of {0} was successful", Path.GetFileName(filePath));
                if (Settings.ShowFilePaths)
                {
                    _log.InfoFormat("Downloaded File {0} saved to {1}", Path.GetFileName(filePath), filePath);
                }
            }
            return true;
        }
        catch (UnauthorizedAccessException cantWriteEx)
        {
            string msg = "The importer couldn't save the file, please run this application as administrator, or set the output directory.";
            _log.Error(msg);
            _log.Fatal("The importer cannot continue. Exiting...");
            // Preserve the original exception (and its stack trace) as the inner exception.
            throw new ApplicationException(msg, cantWriteEx);
        }
        catch (Exception ex)
        {
            _log.Error("Error downloading file, retrying", ex);
            //This is to avoid the server blocking too many connection requests.
            //NOTE: TotalMilliseconds (full elapsed time), not Milliseconds (the 0-999
            //component of the TimeSpan), otherwise the backoff math is wrong for any
            //elapsed time over one second.
            double elapsedMs = (DateTime.Now - _lastQuery).TotalMilliseconds;
            if (elapsedMs < WaitTimeMs)
            {
                // Exponential backoff: 1x, 2x, 4x, 8x WaitTimeMs, minus time already waited.
                int nap = (int)(Math.Pow(2, retries) * WaitTimeMs - elapsedMs);
                if (nap > 0)
                {
                    _log.DebugFormat("Sleeping for {0}ms before starting download", nap);
                    System.Threading.Thread.Sleep(nap);
                }
            }
        }
        retries++;
    }
    _log.FatalFormat("Could not download file at {0} to location {1} after {2} retries. Please re-run the program to try again. If this problem persists, you can try manually downloading the file and copying it to the above path.", desiredURL, filePath, retries);
    return false;
}