FileWatcher issue - c#

OK, so this time I created a service with a file watcher to process files as they are created.
It seems that my service crashes when the number of files being processed reaches about 1000 (I'm receiving loads of messages).
Here is my logic: a file comes in, the file watcher reads the text, sends it by email, inserts it into the DB, and moves the original message to a folder.
On service start, I process pending messages first before starting to watch (I'm talking about over 1000 pending text files), and my service needs about a second to work on each file.
All goes OK, but when the total number of incoming files reaches 1000, it simply crashes.
Sometimes the service stops processing pending files and only starts looking for new files.
I have "InternalBufferSize = 64000", the recommended maximum.
Please help me with my code (I know it should be multi-threaded for better handling, but I'm not that expert):
protected override void OnStart(string[] args)
{
using(TREEEntities TEX = new TREEEntities())
{
var mp= TEX.TREE_settings.FirstOrDefault(x=>x.SET_key =="MSGDump");
MsgsPath = mp.SET_value;
var dc = TEX.TREE_settings.FirstOrDefault(x => x.SET_key == "DupCash");
DupCash = Convert.ToInt16(dc.SET_value);
}
if (Directory.Exists(MsgsPath))
{
if (!Directory.Exists(MsgsPath+"\\Archive"))
{
Directory.CreateDirectory(MsgsPath+"\\Archive");
}
if (!Directory.Exists(MsgsPath + "\\Duplicates"))
{
Directory.CreateDirectory(MsgsPath + "\\Duplicates");
}
if (!Directory.Exists(MsgsPath + "\\Unsent"))
{
Directory.CreateDirectory(MsgsPath + "\\Unsent");
}
}
else
{
Directory.CreateDirectory(MsgsPath);
Directory.CreateDirectory(MsgsPath + "\\Archive");
Directory.CreateDirectory(MsgsPath + "\\Duplicates");
Directory.CreateDirectory(MsgsPath + "\\Unsent");
}
processPending();//<--- process pending files after last service stop
fileSystemWatcher1.Path = MsgsPath;//<--- path to be watched
fileSystemWatcher1.EnableRaisingEvents = true;
fileSystemWatcher1.InternalBufferSize = 64000;
addToLog(DateTime.Now, "Service Started", 0, "Service", "Info");
addToLog(DateTime.Now, "File Watcher Started", 0, "Service", "Info");
//dupList.Clear();//<--- clear duplicates validation list
}
protected override void OnStop()
{
fileSystemWatcher1.EnableRaisingEvents = false;
addToLog(DateTime.Now, "File Watcher Stopped", 0, "Service", "Alert");
addToLog(DateTime.Now, "Service Stopped", 0, "Service", "Alert");
}
private void fileSystemWatcher1_Created(object sender, FileSystemEventArgs e)
{
try
{
//---------read from file------------
Thread.Sleep(200);//<---give the file some time to get released
string block;
using (StreamReader sr = File.OpenText(MsgsPath + "\\" + e.Name))
{
block = sr.ReadToEnd();
}
PRT = block.Substring(block.Length - 6, 6);//<--- get the printer name
seq = Convert.ToInt16(block.Substring(block.Length - 20, 20).Substring(0, 4));//<--- get the sequence number
switch (PRT)//<----track sequence number from the 3 printers
{
case "64261B"://<---prt1
int seqPlus1=0;
if(seqPrt1 == 9999)//<---ignore sequence change from 9999 to 1
{ seqPlus1 = 1; }
else { seqPlus1 = seqPrt1 + 1; }
if (seq != seqPlus1 && seqPrt1 != 0)//<---"0" to avoid first service start
{
int x = seq - seqPrt1 - 1;
for (int i = 1; i <= x; i++)
{
addToMissing(PRT, seqPlus1);
addToLog(DateTime.Now, "Missing Sequence Number On Printer: " + PRT + " - " + seqPlus1, seqPlus1, "Service", "Missing");
seqPlus1++;
}
seqPrt1 = seq;
}
else { seqPrt1 = seq; }
break;
case "24E9AA"://<---prt2
int seqPlus2=0;
if(seqPrt2 == 9999)
{ seqPlus2 = 1; }
if (seq != seqPlus2 && seqPrt2 != 0)
{
int x = seq - seqPrt2 - 1;
for (int i = 1; i <= x; i++)
{
addToMissing(PRT, seqPlus2);
addToLog(DateTime.Now, "Missing Sequence Number On Printer: " + PRT + " - " + seqPlus2, seqPlus2, "Service", "Missing");
seqPlus2++;
}
seqPrt2 = seq;
}
else { seqPrt2 = seq; }
break;
case "642602"://<---prt3
int seqPlus3=0;
if(seqPrt3 == 9999)
{ seqPlus3 = 1; }
if (seq != seqPlus3 && seqPrt3 != 0)
{
int x = seq - seqPrt3 - 1;
for (int i = 1; i <= x; i++)
{
addToMissing(PRT, seqPlus3);
addToLog(DateTime.Now, "Missing Sequence Number On Printer: " + PRT + " - " + seqPlus3, seqPlus3, "Service", "Missing");
seqPlus3++;
}
seqPrt3 = seq;
}
else { seqPrt3 = seq; }
break;
}
block = block.Remove(block.Length - 52);//<--- trim the sequence number and unwanted info
string[] Alladd;
List<string> sent = new List<string>();
if (!dupList.Contains(block)) //<--- if msg not found in duplicates validation list
{
//--------extract values--------------
if (block.Substring(0, 3) == "\r\nQ") //<--- if the msg. contains a priority code
{
Alladd = block.Substring(0, block.IndexOf(".")).Replace("\r\n", " ").Substring(4).Split(' ').Distinct().Where(x => !string.IsNullOrEmpty(x)).ToArray(); ;
}
else//<--- if no priority code
{
Alladd = block.Substring(0, block.IndexOf(".")).Replace("\r\n", " ").Substring(1).Split(' ').Distinct().Where(x => !string.IsNullOrEmpty(x)).ToArray(); ;
}
string From = block.Substring(block.IndexOf('.') + 1).Substring(0, 7);
string Msg = block.Substring(block.IndexOf('.') + 1);
Msg = Msg.Substring(Msg.IndexOf('\n') + 1);
//--------add msg content to the DB group table--------
using (TREEEntities TE1 = new TREEEntities())
{
TREE_group tg = new TREE_group()
{
GROUP_original = block,
GROUP_sent = Msg,
GROUP_dateTime = DateTime.Now,
GROUP_from = From,
GROUP_seq = seq,
GROUP_prt = PRT,
};
TE1.AddToTREE_group(tg);
TE1.SaveChanges();
GID = tg.GROUP_ID;
}
//--------validate addresses---------------
foreach (string TB in Alladd)
{
string email = "";
string typeB = "";
TREEEntities TE = new TREEEntities();
var q1 = from x in TE.TREE_users where x.USR_TypeB == TB && x.USR_flag == "act" select new { x.USR_email, x.USR_TypeB };
foreach (var itm in q1)
{
email = itm.USR_email;
typeB = itm.USR_TypeB;
}
//-------send mail if the user exist----
if (TB == typeB)
{
if (typeB == "BAHMVGF")
{
addToFtl(block);
}
try
{
sendMail SM = new sendMail();
SM.SendMail(Msg, "Message from: " + From, email);
//---save record in DB----
addToMsg(typeB, email,"sent","act",1,GID,seq);
sent.Add(typeB);
}
catch (Exception x)
{
addToMsg(typeB, email, "Failed", "act", 1, GID, seq);
addToLog(DateTime.Now, "Send message failed: " + x.Message, GID, "Service", "Warning");
}
}
//-------if no user exist----
else
{
if (TB == "BAHMVGF")
{
addToFtl(block);
}
addToMsg(TB, "No email", "Failed", "act", 1, GID, seq);
addToLog(DateTime.Now, "Send message failed, unknown Type-B address: " + TB, GID, "Service", "Warning");
}
}
if (sent.Count < Alladd.Count())//<--- if there is unsent addresses
{
StringBuilder b = new StringBuilder(block);
foreach (string add in sent)
{
b.Replace(add, "");//<--- remove address that has been sent from the original message and write new msg. to unsent folder
}
if (!Directory.Exists(MsgsPath + "\\Unsent"))
{
Directory.CreateDirectory(MsgsPath + "\\Unsent");
}
using (StreamWriter w = File.AppendText(MsgsPath + "\\Unsent\\" + e.Name))
{
w.WriteLine(b);
}
}
sent.Clear();
//---add to dupList to validate the next messages-------------
if (dupList.Count > DupCash)
{
dupList.RemoveAt(0);
}
dupList.Add(block);
//---move msg to archive folder-----------------
if (!Directory.Exists(MsgsPath + "\\Archive"))
{
Directory.CreateDirectory(MsgsPath + "\\Archive");
}
File.Move(MsgsPath + "\\" + e.Name, MsgsPath + "\\Archive\\" + e.Name);
}
else //<--- if message is a duplicate
{
addToLog(DateTime.Now, "Duplicated message, message not sent", seq, "Service", "Info");
//---move msg to duplicates folder-----------------
if (!Directory.Exists(MsgsPath + "\\Duplicates"))
{
Directory.CreateDirectory(MsgsPath + "\\Duplicates");
}
File.Move(MsgsPath + "\\" + e.Name, MsgsPath + "\\Duplicates\\" + e.Name);
}
}
catch (Exception x)
{
addToLog(DateTime.Now, "Error: " + x.Message, seq, "Service", "Alert");
if (!Directory.Exists(MsgsPath + "\\Unsent"))
{
Directory.CreateDirectory(MsgsPath + "\\Unsent");
}
//---move msg to Unsent folder-----------------
File.Move(MsgsPath + "\\" + e.Name, MsgsPath + "\\Unsent\\" + e.Name);
}
}

I wanted to add this as a comment but it exceeded the allowed number of characters so here goes.
The first thing I noticed in your code is that you are doing all the file handling inside the Created event handler. This is not good practice; you should always let your FileSystemWatcher event handlers do the minimum work possible, as doing otherwise can cause a buffer overflow, which is probably what you are facing.
Instead, it is better to delegate the work to a separate thread. You can add the events to a queue and let a background thread handle them. You can filter the files in the event handler so that your queue does not get filled with garbage.
Using Sleep inside the event handler is also considered bad practice, as you'll be blocking the FileSystemWatcher event.
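To make that concrete, here is a minimal sketch of the queue-plus-worker idea (the field names, StartWorker, WaitUntilReleased and ProcessFile are illustrative, not from your code; it assumes System.Collections.Concurrent, System.IO and System.Threading are referenced):
// The Created handler only enqueues the path; it returns immediately.
private readonly BlockingCollection<string> pendingFiles = new BlockingCollection<string>();

private void fileSystemWatcher1_Created(object sender, FileSystemEventArgs e)
{
    pendingFiles.Add(e.FullPath);
}

// Called once from OnStart(); a single background thread drains the queue.
private void StartWorker()
{
    var worker = new Thread(() =>
    {
        foreach (string path in pendingFiles.GetConsumingEnumerable())
        {
            WaitUntilReleased(path);   // retry here instead of Thread.Sleep in the handler
            ProcessFile(path);         // read, mail, insert into DB, move to Archive
        }
    });
    worker.IsBackground = true;
    worker.Start();
}

// Poll until the writer has released the file (or give up after ~10 seconds).
private static void WaitUntilReleased(string path)
{
    for (int attempt = 0; attempt < 50; attempt++)
    {
        try
        {
            using (File.Open(path, FileMode.Open, FileAccess.Read, FileShare.None))
                return;
        }
        catch (IOException)
        {
            Thread.Sleep(200); // blocks only the worker thread, never the watcher
        }
    }
}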
The maximum buffer size allowed is 64 KB; it is not a recommended buffer size unless you are dealing with long paths. Increasing the buffer size is expensive because it comes from non-paged memory that cannot be swapped out to disk, so keep the buffer as small as possible. To avoid a buffer overflow, use the NotifyFilter and IncludeSubdirectories properties to filter out unwanted change notifications.
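For this service that could look something like the following (assuming only new .txt files dropped directly into MsgsPath matter, so the Archive/Duplicates/Unsent subfolders never generate notifications):
fileSystemWatcher1.Path = MsgsPath;
fileSystemWatcher1.Filter = "*.txt";                       // ignore any other file types
fileSystemWatcher1.NotifyFilter = NotifyFilters.FileName;  // only file-name changes (new/renamed files)
fileSystemWatcher1.IncludeSubdirectories = false;          // don't watch Archive/Duplicates/Unsent
fileSystemWatcher1.InternalBufferSize = 8192;              // keep the buffer small; the queue absorbs bursts
fileSystemWatcher1.EnableRaisingEvents = true;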
And finally, I would suggest reading the FileSystemWatcher MSDN article and looking at several examples online before attempting to write more code, as the Windows watcher is somewhat delicate and prone to errors.

Related

Why does naming files with a counter give them strange numbers?

The first saved file names on the hard disk are: 201701311645---0, then 201701311645---1, then 201701311645---20, then 201701311645---21, then 201701311645---40 and 201701311645---41.
But I want them to be saved as: 201701311645---0, then 201701311645---1, then 201701311645---2, then 201701311645---3, then 201701311645---4 and 201701311645---5.
At the top I added a counter variable:
private int countFilesNames = 0;
Then in the DoWork event I also reset the counter to 0 once, so if I start the BackgroundWorker over again it will start from 0.
private void bgwDownloader_DoWork(object sender, DoWorkEventArgs e)
{
Int32 fileNr = 0;
countFilesNames = 0;
if (this.SupportsProgress) { calculateFilesSize(); }
if (!Directory.Exists(this.LocalDirectory)) { Directory.CreateDirectory(this.LocalDirectory); }
while (fileNr < this.Files.Count && !bgwDownloader.CancellationPending)
{
m_fileNr = fileNr;
downloadFile(fileNr);
if (bgwDownloader.CancellationPending)
{
fireEventFromBgw(Event.DeletingFilesAfterCancel);
cleanUpFiles(this.DeleteCompletedFilesAfterCancel ? 0 : m_fileNr, this.DeleteCompletedFilesAfterCancel ? m_fileNr + 1 : 1);
}
else
{
fileNr += 1;
}
}
}
Then in the downloadFile method
private void downloadFile(Int32 fileNr)
{
FileStream writer = null;
m_currentFileSize = 0;
fireEventFromBgw(Event.FileDownloadAttempting);
FileInfo file = this.Files[fileNr];
Int64 size = 0;
Byte[] readBytes = new Byte[this.PackageSize];
Int32 currentPackageSize;
System.Diagnostics.Stopwatch speedTimer = new System.Diagnostics.Stopwatch();
Int32 readings = 0;
Exception exc = null;
try
{
writer = new FileStream(this.LocalDirectory + "\\" + file.Name +
"---" + countFilesNames + ".png", System.IO.FileMode.Create);
}
catch(Exception err)
{
string ggg = err.ToString();
}
HttpWebRequest webReq;
HttpWebResponse webResp = null;
try
{
webReq = (HttpWebRequest)System.Net.WebRequest.Create(this.Files[fileNr].Path);
webResp = (HttpWebResponse)webReq.GetResponse();
size = webResp.ContentLength;
}
catch (Exception ex) { exc = ex; }
m_currentFileSize = size;
fireEventFromBgw(Event.FileDownloadStarted);
if (exc != null)
{
bgwDownloader.ReportProgress((Int32)InvokeType.FileDownloadFailedRaiser, exc);
}
else
{
m_currentFileProgress = 0;
while (m_currentFileProgress < size && !bgwDownloader.CancellationPending)
{
while (this.IsPaused) { System.Threading.Thread.Sleep(100); }
speedTimer.Start();
currentPackageSize = webResp.GetResponseStream().Read(readBytes, 0, this.PackageSize);
m_currentFileProgress += currentPackageSize;
m_totalProgress += currentPackageSize;
fireEventFromBgw(Event.ProgressChanged);
writer.Write(readBytes, 0, currentPackageSize);
readings += 1;
if (readings >= this.StopWatchCyclesAmount)
{
m_currentSpeed = (Int32)(this.PackageSize * StopWatchCyclesAmount * 1000 / (speedTimer.ElapsedMilliseconds + 1));
speedTimer.Reset();
readings = 0;
}
}
speedTimer.Stop();
writer.Close();
webResp.Close();
if (!bgwDownloader.CancellationPending) { fireEventFromBgw(Event.FileDownloadSucceeded); }
}
fireEventFromBgw(Event.FileDownloadStopped);
countFilesNames += 1;
}
I build the file name:
writer = new FileStream(this.LocalDirectory + "\\" + file.Name +
"---" + countFilesNames + ".png", System.IO.FileMode.Create);
Then I move the counter forward by 1:
countFilesNames += 1;
But I'm getting different file names than the ones I wanted.
Maybe there is a better way to give the file names some identity? The problem is that if I don't give the file names some identity, the files will be overwritten all the time. The file names are the same but the content is not, so I need to give each file a different name.
Why don't you increment the counter only where the file is actually written (since the variable doesn't look like it is accessed anywhere else), rather than further down:
writer = new FileStream(this.LocalDirectory + "\\" + file.Name +
"---" + countFilesNames++ + ".png", System.IO.FileMode.Create);
This way the counter won't be incremented on errors.
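If the counter still misbehaves (for example because downloadFile can end up running more than once concurrently), a couple of alternative ways to give each file a unique identity, offered purely as suggestions rather than as the poster's code:
// Option 1: keep the counter, but make the increment explicit and atomic.
int n = Interlocked.Increment(ref countFilesNames);
writer = new FileStream(Path.Combine(this.LocalDirectory, file.Name + "---" + n + ".png"),
    FileMode.Create);

// Option 2: drop the counter and let the name carry its own identity.
string unique = DateTime.Now.ToString("yyyyMMddHHmmssfff") + "-" + Guid.NewGuid().ToString("N");
writer = new FileStream(Path.Combine(this.LocalDirectory, file.Name + "---" + unique + ".png"),
    FileMode.Create);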

Parallel.ForEach(...) System Out of Memory exception

I've looked at Parallel.ForEach - System Out of Memory Exception regarding this issue but not much of a solution was given. I'm very new to using Parallel.ForEach, so I'm trying to figure out what's going on.
Diagnostic Tools caps out at 1023, repeating (I understand this is an x86 vs. x64 architecture restriction, but I wanted to offer the program in both formats), and I don't feel that any program should ever hit that threshold. When I compile the program as x64, I sit around 1.1-1.4 GB with MaxDegree. For the sake of testing, I am running GC.Collect() at the end of each Parallel.ForEach iteration (I understand this isn't good practice; I'm just trying to troubleshoot at this point).
Here's what I'm seeing: [screenshot omitted]
Now if I try to use a Partitioner method, such as:
var checkforfinished = Parallel.ForEach<ListViewItem>(Partitioner.Create(0,lstBackupUsers.Items.Count), lstBackupUsers.Items.Cast<ListViewItem>(), opts, name =>
Then I get an error of:
"No overload for method 'ForEach' takes 4 arguments"
That's fine, I modify my Parallel.ForEach statement so it looks like this:
var checkforfinished = Parallel.ForEach(Partitioner.Create(0,lstBackupUsers.Items.Count), lstBackupUsers.Items, opts, name => (I removed my casts)
and then the ForEach call won't accept the statement, because it wants me to tell it explicitly how to address the ListView's Items collection.
I am so confused about what to do.
Do I create a Partitioner, and if I do, how do I make my Parallel.ForEach call understand how to address the ListView?
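For reference, the range overload takes only the partitioner, the options and a body; that body receives an index range (a Tuple<int, int>) rather than a ListViewItem, so you index back into a snapshot of the items yourself. A rough sketch along those lines (the snapshot is taken on the UI thread before the parallel loop starts, and any control updates inside the loop still have to be marshalled back with Invoke):
// Snapshot the ListView items once, on the UI thread, before going parallel.
ListViewItem[] items = lstBackupUsers.Items.Cast<ListViewItem>().ToArray();
var opts = new ParallelOptions { MaxDegreeOfParallelism = 2 };

Parallel.ForEach(
    Partitioner.Create(0, items.Length),  // source of index ranges, not of items
    opts,
    range =>                               // range is a Tuple<int, int>
    {
        for (int i = range.Item1; i < range.Item2; i++)
        {
            ListViewItem item = items[i];
            string user = item.SubItems[0].Text;
            // ... per-user backup work goes here ...
        }
    });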
update 1
I want to try to give as many details as possible because this is just rough. I'm sure it's easy; I'm just overcomplicating it to the nth degree.
I have my Parallel.ForEach(...) running in a BackgroundWorker function. My DoWork handler is over 300 lines (I'm not an expert in C#, I'm just putting things together for a program at work).
Here are bullet points of its basic structure
User clicks a "Start backup" button as seen in the screenshot
Button begins a separate function that checks to see which method the user selected to grab a list of usernames from (LDAP or flat text file)
That function then sends off a bgw_dowork() request
Inside the DoWork handler, in summary, it does the following:
Checks preliminary conditions (bgW.CancellationPending, for example)
Moves on to grabbing some settings from ConfigurationManager.AppSettings
Begins the "complex" Parallel.ForEach call, which reads the list view rows and, for each row, performs a very long list of commands to complete an operation for one user
The entire program, and bgW_DoWork especially, heavily uses Google's v3 Drive API to log in as a user and grab the files recorded by other functions that prepare the user's directory for backup (separate functions which log in as a user and record their files, file IDs, directories and subdirectories). bgW_DoWork then performs a chunk of the actual download work and calls off to the other functions to finish moving the files after they have been downloaded.
The actual "code" I use is (and I promise it's not pretty...)
private void bgW_DoWork(object sender, DoWorkEventArgs e)
{
{
try
{
txtFile.ReadOnly = true;
btnStart.Text = "Cancel Backup";
var appSettings = ConfigurationManager.AppSettings;
string checkreplace = ConfigurationManager.AppSettings["checkreplace"];
string userfile = txtFile.Text;
int counter = 0;
int arraycount = 0;
if (bgW.CancellationPending)
{
e.Cancel = true;
stripLabel.Text = "Operation was canceled!";
}
else
{
for (int z = 0; z >= counter; z++)
{
if (bgW.CancellationPending)
{
e.Cancel = true;
stripLabel.Text = "Operation was canceled!";
break;
}
else
{
double totalresource = int.Parse(ConfigurationManager.AppSettings["multithread"]);
totalresource = (totalresource / 100);
//var opts = new ParallelOptions { MaxDegreeOfParallelism = Convert.ToInt32(Math.Ceiling((Environment.ProcessorCount * totalresource) * 1.0)) };
var opts = new ParallelOptions { MaxDegreeOfParallelism = 2 };
var part = Partitioner.Create(1, 100);
//foreach (ListViewItem name in lstBackupUsers.Items)
var checkforfinished = Parallel.ForEach(lstBackupUsers.Items.Cast<ListViewItem>(), name =>
{
try
{
string names = name.SubItems[0].Text;
lstBackupUsers.Items[arraycount].Selected = true;
lstBackupUsers.Items[arraycount].BackColor = Color.CornflowerBlue;
arraycount++;
stripLabel.Text = "";
Console.WriteLine("Selecting user: " + names.ToString());
txtLog.Text += "Selecting user: " + names.ToString() + Environment.NewLine;
txtCurrentUser.Text = names.ToString();
// Define parameters of request.
string user = names.ToString();
// Check if directory exists, create if not.
string savelocation = ConfigurationManager.AppSettings["savelocation"] + user + "\\";
if (File.Exists(savelocation + ".deltalog.tok"))
File.Delete(savelocation + ".deltalog.tok");
FileInfo testdir = new FileInfo(savelocation);
testdir.Directory.Create();
string savedStartPageToken = "";
var start = CreateService.BuildService(user).Changes.GetStartPageToken().Execute();
// This token is set by Google, it defines changes made and
// increments the token value automatically.
// The following reads the current token file (if it exists)
if (File.Exists(savelocation + ".currenttoken.tok"))
{
StreamReader curtokenfile = new StreamReader(savelocation + ".currenttoken.tok");
savedStartPageToken = curtokenfile.ReadLine().ToString();
curtokenfile.Dispose();
}
else
{
// Token record didn't exist. Create a generic file, start at "1st" token
// In reality, I have no idea what token to start at, but 1 seems to be safe.
Console.Write("Creating new token file.\n");
//txtLog.Text += ("Creating new token file.\n" + Environment.NewLine);
StreamWriter sw = new StreamWriter(savelocation + ".currenttoken.tok");
sw.Write(1);
sw.Dispose();
savedStartPageToken = "1";
}
string pageToken = savedStartPageToken;
int gtoken = int.Parse(start.StartPageTokenValue);
int mytoken = int.Parse(savedStartPageToken);
txtPrevToken.Text = pageToken.ToString();
txtCurrentToken.Text = gtoken.ToString();
if (gtoken <= 10)
{
Console.WriteLine("Nothing to save!\n");
//txtLog.Text += ("User has nothing to save!" + Environment.NewLine);
}
else
{
if (pageToken == start.StartPageTokenValue)
{
Console.WriteLine("No file changes found for " + user + "\n");
//txtLog.Text += ("No file changes found! Please wait while I tidy up." + Environment.NewLine);
}
else
{
// .deltalog.tok is where we will place our records for changed files
Console.WriteLine("Changes detected. Making notes while we go through these.");
lblProgresslbl.Text = "Scanning Drive directory.";
// Damnit Google, why did you change how the change fields work?
if (savedStartPageToken == "1")
{
statusStripLabel1.Text = "Recording folder list ...";
txtLog.Text = "Recording folder list ..." + Environment.NewLine;
exfunctions.RecordFolderList(savedStartPageToken, pageToken, user, savelocation);
statusStripLabel1.Text = "Recording new/changed files ... This may take a bit!";
txtLog.Text += Environment.NewLine + "Recording new/changed list for: " + user;
exfunctions.ChangesFileList(savedStartPageToken, pageToken, user, savelocation);
}
else
{
//proUserclass = proUser;
statusStripLabel1.Text = "Recording new/changed files ... This may take a bit!";
txtLog.Text += Environment.NewLine + "Recording new/changed list for: " + user + Environment.NewLine;
exfunctions.ChangesFileList(savedStartPageToken, pageToken, user, savelocation);
}
// Get all our files for the user. Max page size is 1k
// after that, we have to use Google's next page token
// to let us get more files.
StreamWriter logFile = new StreamWriter(savelocation + ".recent.log");
string[] deltafiles = File.ReadAllLines(savelocation + ".deltalog.tok");
int totalfiles = deltafiles.Count();
int cnttototal = 0;
Console.WriteLine("\nFiles to backup:\n");
if (deltafiles == null)
{
return;
}
else
{
double damn = ((gtoken - double.Parse(txtPrevToken.Text)));
damn = Math.Round((damn / totalfiles));
if (damn <= 0)
damn = 1;
foreach (var file in deltafiles)
{
try
{
if (bgW.CancellationPending)
{
stripLabel.Text = "Backup canceled!";
e.Cancel = true;
break;
}
DateTime dt = DateTime.Now;
string[] foldervalues = File.ReadAllLines(savelocation + "folderlog.txt");
cnttototal++;
bgW.ReportProgress(cnttototal);
proUser.Maximum = int.Parse(txtCurrentToken.Text);
stripLabel.Text = "File " + cnttototal + " of " + totalfiles;
double? mathisfun;
mathisfun = ((100 * cnttototal) / totalfiles);
if (mathisfun <= 0)
mathisfun = 1;
double mathToken = double.Parse(txtPrevToken.Text);
mathToken = Math.Round((damn + mathToken));
// Bring our token up to date for next run
txtPrevToken.Text = mathToken.ToString();
File.WriteAllText(savelocation + ".currenttoken.tok", mathToken.ToString());
int proval = int.Parse(txtPrevToken.Text);
int nowval = int.Parse(txtCurrentToken.Text);
if (proval >= nowval)
proval = nowval;
proUser.Value = (proval);
lblProgresslbl.Text = ("Current progress: " + mathisfun.ToString() + "% completed.");
// Our file is a CSV. Column 1 = file ID, Column 2 = File name
var values = file.Split(',');
string fileId = values[0];
string fileName = values[1];
string mimetype = values[2];
mimetype = mimetype.Replace(",", "_");
string folder = values[3];
string ext = null;
int folderfilelen = foldervalues.Count();
fileName = GetSafeFilename(fileName);
Console.WriteLine("Filename: " + values[1]);
logFile.WriteLine("ID: " + values[0] + " - Filename: " + values[1]);
logFile.Flush();
// Things get sloppy here. The reason we're checking MimeTypes
// is because we have to export the files from Google's format
// to a format that is readable by a desktop computer program
// So for example, the google-apps.spreadsheet will become an MS Excel file.
switch (mimetype)
{
(switch statement here removed due to body length issues for this post.)
}
if (ext.Contains(".doc") || ext.Contains(".xls"))
{
string whatami = null;
if (ext.Contains(".xls"))
{
whatami = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
}
else if (ext.Contains(".doc"))
{
whatami = "application/vnd.openxmlformats-officedocument.wordprocessingml.document";
}
else if (ext.Contains(".ppt"))
{
whatami = "application/vnd.openxmlformats-officedocument.presentationml.presentation";
}
if (fileName.Contains(".mov") || ext == ".ggl" || fileName.Contains(".mp4"))
{
txtLog.Text += Environment.NewLine + "Skipping file.";
return;
}
var requestfileid = CreateService.BuildService(user).Files.Export(fileId, whatami);
statusStripLabel1.Text = (savelocation + fileName + ext);
txtCurrentUser.Text = user;
string dest1 = Path.Combine(savelocation, fileName + ext);
var stream1 = new System.IO.FileStream(dest1, FileMode.OpenOrCreate, FileAccess.ReadWrite);
scrolltobtm();
requestfileid.MediaDownloader.ProgressChanged +=
(IDownloadProgress progress) =>
{
switch (progress.Status)
{
case DownloadStatus.Downloading:
{
Console.WriteLine(progress.BytesDownloaded);
logFile.WriteLine("Downloading: " + progress.BytesDownloaded);
txtLog.Text += ("Downloading ... " + progress.BytesDownloaded + Environment.NewLine);
scrolltobtm();
logFile.Flush();
break;
}
case DownloadStatus.Completed:
{
Console.WriteLine("Download complete.");
logFile.WriteLine("[" + user + "] Download complete for: " + requestfileid.ToString());
txtLog.Text += ("[" + user + "] Download complete for: " + fileName + Environment.NewLine);
logFile.Flush();
break;
}
case DownloadStatus.Failed:
{
Console.WriteLine("Download failed.");
logFile.WriteLine("Download failed.");
logFile.Flush();
break;
}
}
};
scrolltobtm();
GC.Collect();
GC.WaitForPendingFinalizers();
requestfileid.Download(stream1);
stream1.Close();
stream1.Dispose();
}
else
{
scrolltobtm();
var requestfileid = CreateService.BuildService(user).Files.Get(fileId);
//Generate the name of the file, and create it as such on the local filesystem.
statusStripLabel1.Text = (savelocation + fileName + ext);
string dest1 = Path.Combine(savelocation, fileName + ext);
var stream1 = new System.IO.FileStream(dest1, FileMode.OpenOrCreate, FileAccess.ReadWrite);
requestfileid.MediaDownloader.ProgressChanged +=
(IDownloadProgress progress) =>
{
switch (progress.Status)
{
case DownloadStatus.Downloading:
{
Console.WriteLine(progress.BytesDownloaded);
logFile.WriteLine("Downloading: " + progress.BytesDownloaded);
txtLog.Text += ("Downloading ... " + progress.BytesDownloaded + Environment.NewLine);
scrolltobtm();
logFile.Flush();
break;
}
case DownloadStatus.Completed:
{
Console.WriteLine("Download complete.");
logFile.WriteLine("Download complete for: " + requestfileid.ToString());
txtLog.Text += (Environment.NewLine + "[" + user + "] Download complete for: " + fileName + Environment.NewLine);
logFile.Flush();
break;
}
case DownloadStatus.Failed:
{
Console.WriteLine("Download failed.");
logFile.WriteLine("Download failed.");
logFile.Flush();
break;
}
}
};
scrolltobtm();
GC.Collect();
GC.WaitForPendingFinalizers();
requestfileid.Download(stream1);
stream1.Close();
stream1.Dispose();
}
}
catch (Google.GoogleApiException ex)
{
Console.Write("\nInfo: ---> " + ex.Message.ToString() + "\n");
}
}
}
exfunctions.MoveFiles(savelocation);
Console.WriteLine("\n\n\tBackup completed for selected user!");
txtLog.Text += ("\n\nBackup completed for selected user.\n\n");
statusStripLabel1.Text = "";
//logFile.Close();
//logFile.Dispose();
}
}
}
catch (Google.GoogleApiException ex)
{
Console.WriteLine("Info: " + ex.Message.ToString());
}
}
);
if (checkforfinished.IsCompleted == true)
{
MessageBox.Show("Parallel.ForEach() Finished!");
Console.WriteLine("Parallel.ForEach() Finished!");
}
else
{
MessageBox.Show("Parallel.ForEach() not completed!");
Console.WriteLine("Parallel.ForEach() not completed!");
}
}
}
}
}
catch (Google.GoogleApiException ex)
{
Console.WriteLine("Info: " + ex.Message.ToString());
}
}
}
You can see where I initiate the Parallel.ForEach(...) and then see what it is in charge of doing. It's a lot, and I understand it's not pretty, so I appreciate constructive criticism.
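One observation, offered as a guess rather than a diagnosis of the OutOfMemoryException itself: the parallel body updates controls such as lstBackupUsers, txtLog, proUser and the status labels directly from worker threads, and txtLog.Text += ... rebuilds the whole text box contents on every append. Updates like that normally need to be marshalled back to the UI thread, for example:
// Illustrative only: update controls via the UI thread instead of from the parallel body.
this.Invoke((MethodInvoker)delegate
{
    txtLog.AppendText("Selecting user: " + names + Environment.NewLine); // append, don't rebuild .Text
    txtCurrentUser.Text = names;
    stripLabel.Text = "";
});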

Copying a file that is a virus caused my program to terminate

I have made a tool that will copy files from a source to a destination. However during the copy, the software came across a virus that was flagged by the anti-virus software (Symantec).
The anti-virus then caused my software to close down and quarantined the program as a "dropper".
Is there any way I can gracefully handle this scenario, rather than my program being shut down completely?
I appreciate that the action was the result of the anti-virus, but is there anything I can do to help the situation? For example, Robocopy does not just terminate when it comes across a virus.
Here is my copy code:
void CopyFileExactly(CopyParameterBundle cpb, bool overwrite)
{
string CTP = "", CFP = "";
CFP = cpb.SourcePath;
if (cpb.RenameFile)
CTP = cpb.DestPath ;
else
CTP = cpb.DestPath;
//Check firstly if the file to copy exists
if (!File.Exists(CFP))
{
throw new FileNotFoundException();
}
//Check if destination file exists
//If it does, make it not read only so we can update MAC times
if (File.Exists(CTP))
{
var target = GetFile(CTP);//new FileInfo(CTP);
if (target.IsReadOnly)
target.IsReadOnly = false;
}
var origin = GetFile(CFP);//new FileInfo(CFP);
GetFile(CTP).Directory.Create();
//(new FileInfo(CTP)).Directory.Create();
origin.CopyTo(CTP, (overwrite ? true : false));
if (!File.Exists(CTP))
{
throw new FileNotFoundException("Destination file not found!");
}
var destination = GetFile(CTP);//new FileInfo(CTP);
if (destination.IsReadOnly)
{
destination.IsReadOnly = false;
destination.CreationTime = origin.CreationTime;
destination.LastWriteTime = origin.LastWriteTime;
destination.LastAccessTime = origin.LastAccessTime;
destination.IsReadOnly = true;
}
else
{
destination.CreationTime = origin.CreationTime;
destination.LastWriteTime = origin.LastWriteTime;
destination.LastAccessTime = origin.LastAccessTime;
}
if (performMD5Check)
{
var md5Check = compareFileMD5(CFP, CTP);
cpb.srcMD5Hash = md5Check.Item2;
cpb.dstMD5Hash = md5Check.Item3;
if (!md5Check.Item1)
throw new MD5MismatchException("MD5 Hashes do NOT match!");
}
}
The calling code:
void BeginCopy(int DegreeOfParallelism, int retryCount, int retryDelay)
{
object _lock;
//Setup cancellation token
po.CancellationToken = cts.Token;
//Set max number of threads
po.MaxDegreeOfParallelism = DegreeOfParallelism;
//Exceptio logging queue
var exceptions = new ConcurrentQueue<Exception>();
var completeItems = new ConcurrentQueue<CopyParameterBundle>();
var erroredItems = new ConcurrentQueue<CopyParameterBundle>();
//Logger logger = new Logger(sLogPath);
//logger.Write("Starting copy");
Task.Factory.StartNew(() =>
{
Parallel.ForEach(CopyParameters,
po,
(i, loopState, localSum) =>
{
localSum = retryCount;
do
{
try
{
//Go off and attempt to copy the file
DoWork(i);
//Incrememt total count by 1 if successfull
i.copyResults.TransferTime = DateTime.Now;
i.copyResults.TransferComplete = true;
completeItems.Enqueue(i);
//logger.Write("Copied file from: " + i.SourcePath + "\\" + i.SourceFile + " => " + i.DestPath + "\\" + i.SourceFile);
break;
}
catch (Exception ex)
{
//this.richTextBox1.AppendText("[-] Exception on: " + i.SourcePath + "\\" + i.SourceFile + " => " + ex.Message.ToString() + System.Environment.NewLine);
//Exception was thrown when attempting to copy file
if (localSum == 0)
{
//Given up attempting to copy. Log exception in exception queue
exceptions.Enqueue(ex);
this.SetErrorText(exceptions.Count());
//Write the error to the screen
this.Invoke((MethodInvoker)delegate
{
this.richTextBox1.AppendText("[-] Exception on: " + i.SourcePath + "\\" + i.SourceFile + " => " + ex.Message.ToString() + System.Environment.NewLine);
i.copyResults.TransferComplete = false;
i.copyResults.TransferTime = DateTime.Now;
i.copyResults.exceptionMsg = ex;
erroredItems.Enqueue(i);
//logger.Write("ERROR COPYING FILE FROM : " + i.SourcePath + "\\" + i.SourceFile + " => " + i.DestPath + "\\" + i.SourceFile + " => " + ex.Message.ToString() + " => " + ex.Source);
});
}
//Sleep for specified time before trying again
Thread.Sleep(retryDelay);
localSum--;
}
//Attempt to Repeat X times
} while (localSum >= 0);
//Check cancellation token
po.CancellationToken.ThrowIfCancellationRequested();
Interlocked.Increment(ref TotalProcessed);
this.SetProcessedText(TotalProcessed);
//Update Progress Bar
this.Invoke((MethodInvoker)delegate
{
this.progressBar1.Value = (TotalProcessed);
});
});
//aTimer.Stop();
this.Invoke((MethodInvoker)delegate
{
this.label9.Text = "Process: Writing Log";
});
WriteLog(sLogPath, completeItems, erroredItems);
this.Invoke((MethodInvoker)delegate
{
this.label9.Text = "Process: Done!";
});
if (exceptions.Count == 0)
MessageBox.Show("Done!");
else
MessageBox.Show("Done with errors!");
EnableDisableButton(this.button2, true);
EnableDisableButton(this.button4, false);
});
}
What most likely happened is that the anti-virus already knew about the virus file, so when it detected a change in the file system (the file being moved), it terminated your program: moving a virus to a different location on the computer could cause problems (since it's a virus), so your tool was flagged as a "dropper", which is basically a type of program designed to install a virus.
Edit: I forgot to mention that to solve the problem you will most likely need to license your program.
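If it helps, the only thing the copier itself can do is survive the cases where the anti-virus merely blocks access to the infected file instead of killing the process. Something along these lines (LogAndSkip is a hypothetical logging helper), with the caveat that no amount of exception handling helps once the AV terminates or quarantines the copier itself:
try
{
    CopyFileExactly(cpb, overwrite);
}
catch (UnauthorizedAccessException ex)
{
    LogAndSkip(cpb.SourcePath, ex); // AV denied access to the file; record it and move on
}
catch (IOException ex)
{
    LogAndSkip(cpb.SourcePath, ex); // file locked or removed mid-copy; record it and move on
}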

System.Net.WebException while downloading file

I have a list of mp3s which I am downloading. After some files are downloaded (not all of them, around 5-7), I get a WebException. I did a stack trace and this is the result.
Exception thrown: 'System.Net.WebException' in System.dll
Debug message: The operation has timed out
InnerEx: at System.Net.HttpWebRequest.GetResponse()
at iBlock.Main._InetGetHTMLSearch(String sArtist) in C:\Users\...\Main.cs:line 590
My _InetGetHTMLSearch looks like this
private void _InetGetHTMLSearch(string sArtist)
{
aLinks.Clear();
if (AudioDumpQuery == string.Empty)
{
//return string.Empty;
}
string[] sStringArray;
string sResearchURL = "http://www.audiodump.biz/music.html?" + AudioDumpQuery + sArtist.Replace(" ", "+");
string aRet;
HttpWebRequest webReq = (HttpWebRequest)HttpWebRequest.Create(sResearchURL);
webReq.UserAgent = "Mozilla / 5.0(Macintosh; Intel Mac OS X 10_9_3) AppleWebKit / 537.75.14(KHTML, like Gecko) Version / 7.0.3 Safari / 7046A194A";
webReq.Referer = "http://www.audiodump.com/";
webReq.Timeout = 5000;
try
{
webReq.CookieContainer = new CookieContainer();
webReq.Method = "GET";
using (WebResponse response = webReq.GetResponse())
{
using (Stream stream = response.GetResponseStream())
{
StreamReader reader = new StreamReader(stream);
aRet = reader.ReadToEnd();
//Console.WriteLine(aRet);
string[] aTable = _StringBetween(aRet, "<BR><table", "table><BR>", RegexOptions.Singleline);
if (aTable != null)
{
string[] aInfos = _StringBetween(aTable[0], ". <a href=\"", "<a href=\"");
if (aInfos != null)
{
for (int i = 0; i < aInfos.Length; i++)
{
//do some magic here
}
}
else
{
//debug
}
}
else
{
//debug 2
}
}
response.Dispose();
}
}
catch (Exception ex)
{
Console.WriteLine("Debug message: " + ex.Message + "InnerEx: " + ex.StackTrace);
aLinks.Clear();
return;
//throw exception
}
}
What this method does is simple: a search for the given sArtist on audiodump.com.
I have a timer which runs very fast, every 10ms.
private void MainTimer_Tick(object sender, EventArgs e)
{
_DoDownload(DoubleDimList[i][y], ref mp3ToPlay);
if (muted) Mute(0);
if (Downloading)
{
StatusLabel.Text = "Downloading: " + DoubleDimList[i][y];
}
}
Now this timer handles the download in the background in a global scope.
The _DoDownload method, which basically starts the entire process, looks like this:
private void _DoDownload(string dArtist, ref string dPath)
{
if (!Contain && skip <= 3 && !Downloading)
{
try
{
_InetGetHTMLSearch(dArtist);
if (aLinks.Count < 1)
{
//skip and return
Console.WriteLine("Skipping: " + dArtist);
IniWriteValue(_playlists[i], "Track " + y, dArtist + " -iBlockSkip");
y++;
return;
}
string path = mp3Path + "\\" + dArtist + ".mp3";
if (DownloadOne(aLinks[0], path, false))
{
hTimmer.Start();
Downloading = true;
}
}
catch (Exception Ex)
{
MessageBox.Show("Download start error: " + Ex.Message);
}
}
else if (Downloading)
{
try {
int actualBytes = strm.Read(barr, 0, arrSize);
fs.Write(barr, 0, actualBytes);
bytesCounter += actualBytes;
double percent = 0d;
if (fileLength > 0)
percent =
100.0d * bytesCounter /
(preloadedLength + fileLength);
label1.Text = Math.Round(percent) + "%";
if (Math.Round(percent) >= 100)
{
string path = mp3Path + "\\" + dArtist + ".mp3";
label1.Text = "";
dPath = path;
aLinks.Clear();
hTimmer.Stop();
hTimmer.Reset();
fs.Flush();
fs.Close();
lastArtistName = "N/A";
Downloading = false;
y++;
if (y >= DoubleDimList[i].Count)
{
i++;
}
}
if (Math.Round(percent) <= 1)
{
if (hTimmer.ElapsedMilliseconds >= 3000)
{
string path = mp3Path + "\\" + dArtist + ".mp3";
hTimmer.Stop();
hTimmer.Reset();
fs.Flush();
fs.Close();
System.IO.File.Delete(path);
Contain = false;
skip += 1;
Downloading = false;
}
} }
catch(Exception Ex)
{
MessageBox.Show("Downloading error: " + Ex.Message);
}
}
}
Now, once the exception is thrown it messes up the entire project. As you can see in the last method, if _InetGetHTMLSearch doesn't update the search (returns nothing), I skip it and move to the next search. However, the exception is then thrown on every following search. I tried setting new cookies for every search but that still didn't work.
Any suggestions on how to avoid this issue?
P.S. I have to say that if I change the timer's Interval to 500 ms it will download more mp3s before the exception is thrown, but still not all of them.
Edit: The issue here is obvious: the request times out, but even if I set the timeout to Timeout.Infinite it will just hang there forever.
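One thing worth checking, offered as a guess since DownloadOne isn't shown: the download stream strm is read in the timer tick but never closed when a download completes or is abandoned, and an HttpWebResponse whose stream is never closed keeps its connection checked out of the pool (the default limit is two per host), which can eventually make every later GetResponse() time out. A sketch of the clean-up that would go in both the completion and the abort branches:
// When a download finishes or is abandoned, release the HTTP connection as well.
fs.Flush();
fs.Close();
strm.Close();                  // close the response stream so the connection returns to the pool
// webResp.Close();            // and/or close the HttpWebResponse it came from, if you keep a reference

// Optionally raise the per-host connection limit (default is 2):
System.Net.ServicePointManager.DefaultConnectionLimit = 10;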

Oracle Connection and TCP Client/Server Connection at same time

I am writing a program that will live on a "Super PC" in my lab at work. Its job is to constantly query our customers' databases, proactively looking for common errors that we encounter.
It accomplishes this by using an adjustable timer and simply running down the list of queries and databases and interpreting the results. (Queries and database connections are added using a configuration UI.)
This program has a TCP client/server connection with another app that I have written, which lives on my team members' personal machines. Messages get sent from the server (query) program to the client program, alerting my team to errors found in the databases.
The problem I keep encountering is that occasionally a message gets sent through the socket at the exact same time a DB connection is made or a query is run, and it causes the server program to crash with no explanation.
The method that runs the queries is always called on its own thread, and the server connection is also made on its own thread.
In addition, I have placed all of the DB methods and client/server methods in try/catch blocks that are set up to catch all exceptions and display the stack trace, but for some reason when it crashes it's not hitting any of the catch blocks.
The runQueries method is called off a C# .NET System.Timers.Timer, which is supposed to launch a new thread on every tick.
Below are my client/server methods and query methods.
private void serverMeth()
{
try
{
/*conbox.Text = conbox.Text + "The server is running at port 8001..." + Environment.NewLine;
conbox.Text = conbox.Text + "The local End point is :" + myList.LocalEndpoint + Environment.NewLine;
conbox.Text = conbox.Text + "Waiting for a connection....." + Environment.NewLine;*/
msglist.Add("The server is running at port 8001...");
msglist.Add("The local end point is: " + myList.LocalEndpoint);
msglist.Add("Waiting for a connection...");
writeToLog("Listening for connection at "+myList.LocalEndpoint);
/* Start Listeneting at the specified port */
while (true)
{
gatherErrors();
myList.Start();
//Socket s = myList.AcceptSocket();
s = myList.AcceptSocket();
//conbox.Text = conbox.Text + "Connection accepted from " + s.RemoteEndPoint + Environment.NewLine;
msglist.Add("Connection accepted from " + s.RemoteEndPoint);
writeToLog("Connection Established #" + myList.LocalEndpoint);
byte[] b = new byte[20000];
int k = s.Receive(b);
//conbox.Text = conbox.Text + "Recieved..." + Environment.NewLine;
msglist.Add("Recieved...");
writeToLog("Message Recieved from Command Center");
//for (int i = 0; i < k; i++)
//{ conbox.Text = conbox.Text + Convert.ToChar(b[i]); }
string message = ""; //allows it to store the entire message in one line
string dt = DateTime.Now.ToString("MM-dd-yyyy hh:mm:ss");
string eid = "TEST ID";
for (int i = 0; i < k; i++)
{ message = message + Convert.ToChar(b[i]); }
if (message.Contains(uniquekey))
{
writeToLog("Message contains correct passcode");
message = message.Replace(uniquekey, "");
String[] splits = message.Split(',');
message = message.Replace("," + splits[1], "");
writeToLog("Connection from " + splits[1]);
msglist.Add(message); //this takes the completed message and adds it.
//DO IGNORE STUF HERE
if (!message.Equals(""))
{
//Message contains error key
Error erro = null;
for (int i = 0; i < errorss.Count; i++)
{
if (errorss[i].getKey().Equals(message)) {
erro = errorss[i];
}
}
Stage st = null;
if (erro != null)
{
for (int i = 0; i < stages.Count; i++)
{
if (stages[i].errContainCheck(erro))
{
st = stages[i];
}
}
}
if (st != null)
{
st.addIgnore(erro);
writeToLog("Error: " + erro.getKey() + "ignored");
}
}
//conbox.Text = conbox.Text + Environment.NewLine;
msglist.Add(" ");
string msg = "";
string log = "Error(s): ";
for (int i = 0; i < errorss.Count; i++)
{
msg += errorss[i].getTime() + "|"+errorss[i].getMsg()+"|" + errorss[i].getSite()+"|"+errorss[i].getKey();
msg += ",";
log += errorss[i].getKey()+",";
}
log+= "sent to Command Center";
ASCIIEncoding asen = new ASCIIEncoding();
s.Send(asen.GetBytes(msg));
//conbox.Text = conbox.Text + "\nSent Acknowledgement" + Environment.NewLine;
msglist.Add("\nSent Acknowledgement");
writeToLog(log);
}
else
{
message = "Unauthorized access detected. Disregarding message.";
//.Add(message); //this takes the completed message and adds it.
//conbox.Text = conbox.Text + Environment.NewLine;
msglist.Add(" ");
writeToLog("Passcode mismatch");
ASCIIEncoding asen = new ASCIIEncoding();
s.Send(asen.GetBytes("Access Denied. Unique key mismatch."));
//conbox.Text = conbox.Text + "\nSent Acknowledgement" + Environment.NewLine;
msglist.Add("\nSent Denial Acknowledgement");
}
/* clean up */
s.Close();
myList.Stop();
if (quit == true)
{
break;
}
}
}
catch (Exception err)
{
//conbox.Text = conbox.Text + "Error..... " + err.StackTrace + Environment.NewLine;
msglist.Add("Error..... " + err.StackTrace);
writeToLog("ERROR: "+err.StackTrace);
}
}
private void sasDownload()
{
int selItemList;
selItemList = saerrlist.SelectedIndex;
saerrlist.Items.Clear(); //clears the list before redownload
saerrgrid.Rows.Clear();
try
{
TcpClient tcpclnt = new TcpClient();
//clibox.Text = clibox.Text + "Connecting....." + Environment.NewLine;
tcpclnt.Connect(staralertip.Text, 8001);
// use the ipaddress as in the server program
//clibox.Text = clibox.Text + "Connected" + Environment.NewLine;
//clibox.Text = clibox.Text + "Enter the string to be transmitted : " + Environment.NewLine;
String str = "982jsdf293jsadd02jkdas20dka2";
Stream stm = tcpclnt.GetStream();
if (ackClick)
{
str += ackKey;
ackClick = false;
}
String name = "User not found";
for (int i = 0; i < users.Count(); i++)
{
if(users[i].sys_id.Equals(SNLists.loggedinuser)){
name = users[i].name;
break;
}
}
str += "," + name;
ASCIIEncoding asen = new ASCIIEncoding();
byte[] ba = asen.GetBytes(str);
//clibox.Text = clibox.Text + "Transmitting....." + Environment.NewLine;
stm.Write(ba, 0, ba.Length);
byte[] bb = new byte[20000];
int k = stm.Read(bb, 0, 20000);
string incmsg = "";
for (int i = 0; i < k; i++)
incmsg = incmsg + Convert.ToChar(bb[i]);
string tempmsg = "";
foreach (char c in incmsg)
{
if (c != ',')
{
tempmsg = tempmsg + c;
}
else if (c == ',')
{
saerrlist.Items.Add(tempmsg);
saerrgrid.Rows.Add(tempmsg.Split('|')[0], tempmsg.Split('|')[1], tempmsg.Split('|')[2], tempmsg.Split('|')[3]);
tempmsg = "";
}
}
saerrlist.Items.Add(tempmsg);
//saerrgrid.Rows.Add(tempmsg.Split('|')[0], tempmsg.Split('|')[1], tempmsg.Split('|')[2]);
//MessageBox.Show(incmsg);
tcpclnt.Close();
}
catch (Exception err)
{
//MessageBox.Show("Error..... " + err.StackTrace, "STAR Command Center: Connectivity Error");
staralertTimer.Enabled = false;
MessageBox.Show("Error downloading recent errors from STAR Alert System.\n\nPlease confirm STAR Alert is running " +
"and hit \"Refresh\" in the STAR Alert tab." + "\n" + err, "STAR Command Center: Connectivity Error");
}
saerrlist.SelectedIndex = selItemList;
}
public void runQueries()
{
OracleConnection con = new OracleConnection();
int siteNum = -1;
try
{
writeToLog("Call to runQueries");
List<List<Error>> results = new List<List<Error>>();
for (int i = 0; i < ppreset.getSites().Count(); i++)
{
siteNum = i;
Site s = ppreset.getSites()[i];
if (s.getStatus().Equals("ACTIVE"))
{
//OracleConnection con = new OracleConnection();
String it = "User Id=" + s.getLogin() + ";Password=" + s.getPassword() + ";Data Source=" + s.getDbName();
con.ConnectionString = it;
//con.OpenAsync();
con.Open();
writeToLog("Connection opened for site: " + s.getSite());
List<Error> subResults = new List<Error>();
for (int j = 0; j < s.getQueries().Count(); j++)
{
Query q = s.getQueries()[j];
string sql = q.getCode();
List<string> columns = getColumns(sql);
List<List<String>> mast = new List<List<String>>();
for (int m = 0; m < columns.Count(); m++)
{
mast.Add(new List<String>());
}
OracleCommand cmd = new OracleCommand(sql, con);
cmd.CommandType = CommandType.Text;
OracleDataReader dr = cmd.ExecuteReader();
writeToLog("Execute SQL, Begin Reading results...");
// dr.Read();
//List<String> results = new List<String>();
if (dr.HasRows)
{
//MessageBox.Show("has rows");
while (dr.Read())
{
//string result = "";
for (int p = 0; p < columns.Count(); p++)
{
if (columns.Count() != 0)
{
mast[p].Add(dr[columns[p]].ToString());
}
}
//results.Add(result);
}
}
subResults.AddRange(ruleCheck(mast, q.getRules(), s.getQueries()[j], ppreset.getSites()[i].getSite()));
cmd.Dispose();
writeToLog("Done reading");
}
results.Add(subResults);
// con.Dispose();
con.Close();
writeToLog("Connection Closed for site: " + s.getSite());
}
}
//we now have all of the error strings gathered from running all queries on all sites in the given preset
//lets send them to a method that will change the status' of the modules and makem RAVVEEEEE
//update(results);
if (errors == null && results.Count != 0)
{
//MessageBox.Show("errors = results");
errors = results;
writeToLog("First error found...errors = results");
for (int i = 0; i < results.Count; i++)
{
for (int j = 0; j < results[i].Count; j++)
{
if (foncig.lallerdisc == true)
{
sendLyncMsg(results[i][j].getMsg());
writeToLog("Lync Msg sent");
}
}
}
}
else
{
for (int i = 0; i < results.Count; i++)
{
for (int j = 0; j < results[i].Count; j++)
{
errors[i].Add(results[i][j]);
writeToLog("Error: " + results[i][j].getKey() + " added");
// MessageBox.Show("Error added to errors");
//////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
//LYNC STUFF CAN GO HERE///////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////
/////FOR EACH ERROR ADDED TO MASTER LIST SEND LYNC///////////////////////////
//////////////////////////////////////////////////////////////////////////////
if (foncig.lallerdisc == true)
{
sendLyncMsg(results[i][j].getMsg());
writeToLog("Lync msg sent");
}
}
}
}
}
catch (Exception e)
{//MessageBox.Show("Err: " + e);
badConn[siteNum] += "x";
writeToLog("Connection error strike added to " + ppreset.getSites()[siteNum].getSite());
con.Close();
checkForBadConn();
writeToLog( e.StackTrace);
}
//foncig.errors = errors;
//here we will check all of the errors in results against all of the errors in errors
//if the error is not in errors add it
//errors = results;
}
Query timer initialization
queryTimer = new System.Timers.Timer(120000);
queryTimer.Elapsed += queryTimer_Tick;
queryTimer.Enabled = true;
Tick method for query timer
private void queryTimer_Tick(object sender, EventArgs e)
{
GC.Collect();
try
{
runQueries();
}
catch (OracleException err)
{
// MessageBox.Show("Err: " + err);
}
}
How the server thread is started
server = new Thread(serverMeth);
server.Start();
Does anyone have any idea why this might be happening? Both programs work as described and do what they are supposed to; it's just that as more and more clients begin connecting to the server, it becomes more and more likely that a TCP connection and a DB connection will occur at the same time.
Update 12:20 9/15/14
So I'm trying to upload a picture of the log from the last crash... but I don't have enough rep points... derp. Anyway,
this time the last line in the log is from a log purge (which deletes log entries older than 24 hours).
However, the program will only crash when I have the client program set to auto-refresh, i.e. the client app has a button to request messages from the server, or the user can select auto-refresh, which puts the request function on a timer.
My only other thought is that multiple threads are trying to write to the log file at the same time; however, my writeToLog method uses writeLinesAsync(), so that shouldn't be a problem?
This issue turned out to be caused by multiple threads trying to write to the log file at the same time.
I resolved it by following #user469104's advice and creating an internal lock object to lock the writeToLog methods.
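For reference, a minimal sketch of that fix (logFilePath is illustrative; the point is that every thread funnels through the same lock object before touching the file):
private static readonly object logLock = new object();

private void writeToLog(string message)
{
    // Only one thread at a time may append to the log file.
    lock (logLock)
    {
        File.AppendAllText(logFilePath,
            DateTime.Now.ToString("MM-dd-yyyy HH:mm:ss") + " " + message + Environment.NewLine);
    }
}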
Both client and server apps have been running for multiple days now without breaking.
Thanks Everyone!
