My requirement is to read about 300 text files, each around 40-70 MB, line by line and perform some checks on every line. Since the files are huge, I thought of using the TPL, i.e. tasks. Without tasks the job takes around 7 minutes, but with tasks it takes even longer and I don't know why. I would be very thankful if someone could review my code and tell me where I am going wrong with the tasks. Any help would be appreciated. Below is my code:
private void browse_Click(object sender, EventArgs e)
{
try
{
string newFileName1 = "";
string newFileName2 = "";
week = textBox2.Text;
if (week == null || week == "")
{
MessageBox.Show("Week cannot be null.");
return;
}
DialogResult result = folderBrowserDialog1.ShowDialog();
if (result == DialogResult.OK)
{
DateTime starttime = DateTime.Now;
string folderPath = Path.GetDirectoryName(folderBrowserDialog1.SelectedPath);
string folderName = Path.GetFileName(folderPath);
DirectoryInfo dInfo = new DirectoryInfo(folderPath);
foreach (DirectoryInfo folder in dInfo.GetDirectories())
{
newFileName1 = "Files_with_dates_mismatching_the_respective_week_" + folder.Name + ".txt";
newFileName2 = "Files_with_wrong_date_format_" + folder.Name + ".txt";
if (File.Exists(folderPath + "/" + newFileName1))
{
File.Delete(folderPath + "/" + newFileName1);
}
if (File.Exists(folderPath + "/" + newFileName2))
{
File.Delete(folderPath + "/" + newFileName2);
}
FileInfo[] folderFiles = folder.GetFiles();
if (folderFiles.Length != 0)
{
List<Task> tasks = new List<Task>();
foreach (var file in folderFiles)
{
var task = Task.Factory.StartNew(() =>
{
bool taskResult = ReadFile(file.FullName, folderPath, folderName, week);
return taskResult;
});
tasks.Add(task);
}
Task.WaitAll(tasks.ToArray());
DateTime stoptime = DateTime.Now;
TimeSpan totaltime = stoptime.Subtract(starttime);
label6.Text = Convert.ToString(totaltime);
textBox1.Text = folderPath;
DialogResult result2 = MessageBox.Show("Read the files successfully.", "Important message", MessageBoxButtons.OK, MessageBoxIcon.Information);
}
}
}
}
catch (Exception)
{
throw;
}
}
public bool ReadFile(string file, string folderPath, string folderName, string week)
{
int LineCount = 0;
string fileName = Path.GetFileNameWithoutExtension(file);
using (FileStream fs = File.Open(file, FileMode.Open))
using (BufferedStream bs = new BufferedStream(fs))
using (StreamReader sr = new StreamReader(bs))
{
for (int i = 0; i < 2; i++)
{
sr.ReadLine();
}
string oline;
while ((oline = sr.ReadLine()) != null)
{
LineCount = ++LineCount;
string[] eachLine = oline.Split(';');
string date = eachLine[30].Substring(1).Substring(0, 10);
DateTime dt;
bool valid = DateTime.TryParseExact(date, "dd/MM/yyyy", CultureInfo.InvariantCulture, DateTimeStyles.None, out dt);
if (!valid)
{
Filecount = ++Filecount;
StreamWriter sw = new StreamWriter(folderPath + "/" + "Files_with_wrong_date_format_" + folderName + ".txt", true);
sw.WriteLine(fileName + " " + "--" + " " + "Line number :" + " " + LineCount);
sw.Close();
}
else
{
DateTime Date = DateTime.ParseExact(date, "d/M/yyyy", CultureInfo.InvariantCulture);
int calculatedWeek = new GregorianCalendar(GregorianCalendarTypes.Localized).GetWeekOfYear(Date, CalendarWeekRule.FirstFourDayWeek, DayOfWeek.Saturday);
if (calculatedWeek == Convert.ToInt32(week))
{
}
else
{
Filecount = ++Filecount;
StreamWriter sw = new StreamWriter(folderPath + "/" + "Files_with_dates_mismatching_the_respective_week_" + folderName + ".txt", true);
sw.WriteLine(fileName + " " + "--" + " " + "Line number :" + " " + LineCount);
sw.Close();
}
}
}
}
return true;
}
You are using Task.WaitAll(tasks.ToArray()), and WaitAll blocks the calling thread synchronously until every task completes. Try await instead: await Task.WhenAll(...) waits asynchronously for the tasks to complete without blocking the UI thread.
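A minimal sketch of that suggestion, assuming the same ReadFile(file, folderPath, folderName, week) method and the same folder-selection code from the question; the variable names are only illustrative:
// Hypothetical async version of the per-folder loop.
private async void browse_Click(object sender, EventArgs e)
{
    // ... week validation and folder selection as in the original ...
    var tasks = new List<Task<bool>>();
    foreach (var file in folderFiles)
    {
        string path = file.FullName; // per-iteration copy for the closure
        tasks.Add(Task.Run(() => ReadFile(path, folderPath, folderName, week)));
    }
    // WhenAll returns a task; awaiting it frees the UI thread instead of
    // blocking it the way Task.WaitAll does.
    bool[] results = await Task.WhenAll(tasks);
    MessageBox.Show("Read the files successfully.");
}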
I have been trying to organize some code that is a mess. The first and biggest problem at this point is that one of my StreamWriters or my StreamReader is being left open. Using this link, I am trying to reorganize my code, but I am not sure where I should close it:
My code is:
public static void ProcessFile(string[] ProcessFile, int id_customer, string directoryinprocess)
{
StreamWriter Writer = null, Writer2 = null, Writer3 = null;
foreach (string filename in ProcessFile)
{
// Used for the output name of the file
var dir = Path.GetDirectoryName(filename);
var fileName = Path.GetFileNameWithoutExtension(filename);
var ext = Path.GetExtension(filename);
var folderbefore = Path.GetFullPath(Path.Combine(dir, @"..\"));
int rowCount = 0;
string path_body_out = "";
string outputname = folderbefore + "output_temp\\" + fileName;
if (filename.Contains("RO_"))
{
Writer = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_hd_intermediate" + ext) { AutoFlush = true };
Writer2 = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_body_out" + ext) { AutoFlush = true };
path_body_out = dir + "\\" + "output_temp\\" + fileName + "_hd_intermediate" + ext;
} // end of if
else
{
Writer3 = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_out" + ext) { AutoFlush = true };
} // end of else
using (StreamReader Reader = new StreamReader(@filename))
{
while (!Reader.EndOfStream)
{
string inputLine = string.Empty;
inputLine = Reader.ReadLine();
rowCount++;
if (filename.Contains("RO_"))
{
if (rowCount <= 4)
{
Writer.WriteLine(inputLine);
}
if (rowCount >= 5)
{
Writer2.WriteLine(inputLine);
}
}
else
{
{ Writer3.WriteLine(inputLine); }
}
} // end of the while
} // end of using Stremreader
if (path_body_out.Contains("_hd_intermediate"))
{
ManipulateHeaderFilesTypeRo(dir, path_body_out);
}
else
{ }
} // end of the foreach
string[] extensions = { "_fv", "_body", "_out" };
string[] fileEntriesout = System.IO.Directory.EnumerateFiles(directoryinprocess, "*.csv", System.IO.SearchOption.AllDirectories)
.Where(file => extensions.Any(ex => Path.GetFileNameWithoutExtension(file).EndsWith(ex)))
.ToArray();
foreach (string filenameout in fileEntriesout)
{
string destinytablename = null;
if (filenameout.Contains("_hd_intermediate_fv"))
{ destinytablename = "TBL_DATA_TYPE_RO_HEADER"; }
else if (filenameout.Contains("_body_out"))
{ destinytablename = "TBL_DATA_TYPE_RO_BODY"; }
else
{ destinytablename = "TBL_DATA_TYPE_LOAD"; }
string id_file = Get_id_file(filenameout, id_customer);
DataTable csvFileData = GetDataTabletFromCSVFile(filenameout, id_file);
InsertDataIntoSQLServerUsingSQLBulkCopy(csvFileData, destinytablename);
} // end of the foreach
//} // end of the foreach
} // end of ProcessFile
Question:
How should I close the part:
if (filename.Contains("RO_"))
{
Writer = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_hd_intermediate" + ext) { AutoFlush = true };
Writer2 = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_body_out" + ext) { AutoFlush = true };
path_body_out = dir + "\\" + "output_temp\\" + fileName + "_hd_intermediate" + ext;
} // end of if
else
{
Writer3 = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_out" + ext) { AutoFlush = true };
} // end of else
using (StreamReader Reader = new StreamReader(@filename))
{
while (!Reader.EndOfStream)
{
string inputLine = string.Empty;
inputLine = Reader.ReadLine();
rowCount++;
if (filename.Contains("RO_"))
{
if (rowCount <= 4)
{
Writer.WriteLine(inputLine);
}
if (rowCount >= 5)
{
Writer2.WriteLine(inputLine);
}
}
else
{
{ Writer3.WriteLine(inputLine); }
Should I close here?
if (filename.Contains("RO_"))
{
Writer = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_hd_intermediate" + ext) { AutoFlush = true };
Writer2 = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_body_out" + ext) { AutoFlush = true };
path_body_out = dir + "\\" + "output_temp\\" + fileName + "_hd_intermediate" + ext;
} // end of if
else
{
Writer3 = new StreamWriter(dir + "\\" + "output_temp\\" + fileName + "_out" + ext) { AutoFlush = true };
} // end of else
Or here?
if (filename.Contains("RO_"))
{
if (rowCount <= 4)
{
Writer.WriteLine(inputLine);
}
if (rowCount >= 5)
{
Writer2.WriteLine(inputLine);
}
}
else
{
{ Writer3.WriteLine(inputLine); }
}
If you can't reorganize this code so that every StreamWriter instance can be wrapped in a using(), then perhaps you can do something like this:
StreamWriter Writer = null, Writer2 = null, Writer3 = null;
try
{
// your existing code
}
catch
{
// Handle
}
finally
{
if (Writer != null)
Writer.Close();
if (Writer2 != null)
Writer2.Close();
if (Writer3 != null)
Writer3.Close();
}
This ensures that no matter what error(s) happen within the try that your writers will be closed.
In my opinion, conditionally instantiating objects is a smell and you should work on having different implementations based on filename.Contains("RO_"). You could use the strategy pattern and have different file processor interface implementations, choosing the correct one based on the filename. Each implementation would only know how to write to the locations it needs. This would allow you to correctly use a using() around each writer.
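A minimal sketch of that suggestion with a hypothetical IFileProcessor interface (the interface and class names are illustrative, not part of the original code), so each writer can live inside its own using():
using System.IO;
// Hypothetical strategy interface: each implementation owns its writers
// and can wrap them in using() because it knows exactly which files it needs.
public interface IFileProcessor
{
    void Process(string inputFile, string outputFolder);
}
public class RoFileProcessor : IFileProcessor
{
    public void Process(string inputFile, string outputFolder)
    {
        string name = Path.GetFileNameWithoutExtension(inputFile);
        string ext = Path.GetExtension(inputFile);
        using (var header = new StreamWriter(Path.Combine(outputFolder, name + "_hd_intermediate" + ext)))
        using (var body = new StreamWriter(Path.Combine(outputFolder, name + "_body_out" + ext)))
        using (var reader = new StreamReader(inputFile))
        {
            int row = 0;
            string line;
            while ((line = reader.ReadLine()) != null)
            {
                row++;
                (row <= 4 ? header : body).WriteLine(line); // first 4 rows go to the header file
            }
        }
    }
}
public class DefaultFileProcessor : IFileProcessor
{
    public void Process(string inputFile, string outputFolder)
    {
        string name = Path.GetFileNameWithoutExtension(inputFile);
        string ext = Path.GetExtension(inputFile);
        using (var writer = new StreamWriter(Path.Combine(outputFolder, name + "_out" + ext)))
        using (var reader = new StreamReader(inputFile))
        {
            string line;
            while ((line = reader.ReadLine()) != null)
                writer.WriteLine(line);
        }
    }
}
// Choosing the strategy based on the file name:
// IFileProcessor processor = filename.Contains("RO_")
//     ? (IFileProcessor)new RoFileProcessor()
//     : new DefaultFileProcessor();
// processor.Process(filename, outputFolder);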
Normally, if you are using disposable objects, I would say use a using block. However, since you are conditionally instantiating the disposable objects, I think a try-finally block is your best bet.
Declare the disposable variables and initialize them to null outside of the try block.
Assign the instances you want inside of the try block. Take care not to reassign these references anywhere inside the try block once you have created a disposable object.
Also inside the try block, do everything you need to do with the disposable objects.
After the try block, add a finally block (a catch block is optional, but the finally block is what makes this pattern work). Inside the finally block, check whether each of the variables holding a disposable object is non-null; if it is, close it and set it back to null.
StreamWriter writer = null;
try {
if (condA) {
writer = new StreamWriter("filePath1");
} else if (condB) {
writer = new StreamWriter("filePath2");
} else {
writer = new StreamWriter("filePath3");
}
// do things with writer
} catch (Exception ex) {
} finally {
if (writer != null) {
writer.Close();
writer = null;
}
}
So, I am trying to create a file at a specific path, but the code I have doesn't let me create the folders.
This is the code I have:
public void LogFiles()
{
string data = string.Format("LogCarga-{0:yyyy-MM-dd_hh-mm-ss}.txt", DateTime.Now);
for (int linhas = 0; linhas < dataGridView1.Rows.Count; linhas++)
{
if (dataGridView1.Rows[linhas].Cells[8].Value.ToString().Trim() != "M")
{
var pathWithEnv = #"%USERPROFILE%\AppData\Local\Cargas - Amostras\_logs\";
var filePath = Environment.ExpandEnvironmentVariables(pathWithEnv);
using (FileStream fs = new FileStream(filePath, FileMode.OpenOrCreate))
{
using (StreamWriter writer = File.AppendText(filePath + data))
{
string carga = dataGridView1.Rows[linhas].Cells[0].Value.ToString();
string referencia = dataGridView1.Rows[linhas].Cells[1].Value.ToString();
string quantidade = dataGridView1.Rows[linhas].Cells[2].Value.ToString();
string dataemissao = dataGridView1.Rows[linhas].Cells[3].Value.ToString();
string linha = dataGridView1.Rows[linhas].Cells[4].Value.ToString();
string marca = dataGridView1.Rows[linhas].Cells[5].Value.ToString().Trim();
string descricaoweb = dataGridView1.Rows[linhas].Cells[6].Value.ToString().Trim();
string codprod = dataGridView1.Rows[linhas].Cells[7].Value.ToString().Trim();
string tipoemb = dataGridView1.Rows[linhas].Cells[8].Value.ToString().Trim();
string nomepc = System.Environment.MachineName;
writer.WriteLine(carga + ", " + referencia + ", " + quantidade + ", " + dataemissao + ", " + linha + ", " + marca + ", " + descricaoweb + ", " + codprod + ", "
+ tipoemb + ", " + nomepc);
}
}
}
}
}
The %USERPROFILE%\AppData\Local\ part is the universal path, and I want to automatically create the \Cargas - Amostras\_logs\ part.
Do you have any idea how to do it?
The simplest solution is to replace
using (FileStream fs = new FileStream(filePath, FileMode.OpenOrCreate))
with
System.IO.Directory.CreateDirectory(filePath)
That will create the directory if it does not exist or do nothing if it does.
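A minimal sketch of how that change fits into LogFiles from the question (the grid-reading loop is omitted; only the path handling is shown):
// Sketch: ensure the log directory exists, then append to the log file.
// Directory.CreateDirectory also creates any missing intermediate folders in the path.
string data = string.Format("LogCarga-{0:yyyy-MM-dd_hh-mm-ss}.txt", DateTime.Now);
var pathWithEnv = @"%USERPROFILE%\AppData\Local\Cargas - Amostras\_logs\";
var folderPath = Environment.ExpandEnvironmentVariables(pathWithEnv);
System.IO.Directory.CreateDirectory(folderPath);   // no-op if it already exists
using (StreamWriter writer = File.AppendText(Path.Combine(folderPath, data)))
{
    writer.WriteLine("...");   // write the row values here, as in the original loop
}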
You need two checks: one for the first folder, and then one for the second directory.
var pathWithEnv = #"%USERPROFILE%\AppData\Local\Cargas - Amostras\";
if (System.IO.Directory.Exists(pathWithEnv))
{
pathWithEnv = System.IO.Path.Combine(pathWithEnv, @"_logs\");
if (System.IO.Directory.Exists(pathWithEnv))
{
//Do what you want to do, both directories are found.
}
else
{
System.IO.Directory.CreateDirectory(pathWithEnv);
//Do what you want to do, both directories are available.
}
}
else
{
System.IO.Directory.CreateDirectory(pathWithEnv);
pathWithEnv = System.IO.Path.Combine(pathWithEnv, @"_logs\");
if (System.IO.Directory.Exists(pathWithEnv))
{
//Do what you want to do, both directories are available now.
}
else
{
System.IO.Directory.CreateDirectory(pathWithEnv);
//Do what you want to do, both directories are created.
}
}
I'm developing an application that reads a folder of XML files; for each file it does some checks and copies it to a new folder based on some criteria.
But memory usage keeps growing once it reaches the foreach loop, and I believe that should not happen, because the variables do not grow on each iteration; they are only overwritten.
Here is my code:
using System;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Windows.Forms;
using System.Xml;
namespace XMLOrganizer
{
public partial class Form1 : Form
{
string selectedFolder;
public Form1()
{
InitializeComponent();
comboBox1.DropDownStyle = ComboBoxStyle.DropDownList;
comboBox1.SelectedIndex = 0;
}
private void button1_Click(object sender, EventArgs e)
{
folderBrowserDialog1.ShowDialog();
selectedFolder = folderBrowserDialog1.SelectedPath;
organizeBtn.Enabled = true;
}
private void organizeBtn_Click(object sender, EventArgs e)
{
if (comboBox1.SelectedIndex == -1)
{
MessageBox.Show("Selecione o tipo de nota", "Erro!", MessageBoxButtons.OK, MessageBoxIcon.Exclamation,
MessageBoxDefaultButton.Button1);
return;
}
if (comboBox1.SelectedIndex != 2)
{
OrganizeXml(label2, selectedFolder, comboBox1);
}
//ORGANIZAR LOTES
else
{
string folder = selectedFolder;
label2.Text = "Arquivos sendo processados, aguarde...";
label2.Refresh();
string[] files = Directory.GetFiles(folder, "*.xml", SearchOption.AllDirectories);
int atualFile = 1, totalXML = files.Length;
foreach (string file in files)
{
XmlDocument xmlDocument = new XmlDocument();
xmlDocument.Load(file);
XmlNodeList enviNFe = xmlDocument.GetElementsByTagName("enviNFe");
string versao = ((XmlElement)enviNFe[0]).Attributes["versao"].Value;
XmlNodeList NFe = ((XmlElement)enviNFe[0]).GetElementsByTagName("NFe");
Directory.CreateDirectory(selectedFolder + @"\NOTAS");
label2.Text = "Processando arquivo " + atualFile + " de " + totalXML;
string notaXML;
foreach (XmlElement nota in NFe)
{
notaXML = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><nfeProc versao=\"" + versao + "\" xmlns=\"http://www.portalfiscal.inf.br/nfe\">" + nota.OuterXml + "</nfeProc>";
XmlNodeList infNFe = nota.GetElementsByTagName("infNFe");
string chave = infNFe[0].Attributes["Id"].Value.Replace("NFe", "");
File.WriteAllText(selectedFolder + "\\NOTAS\\" + chave + ".xml", notaXML);
}
}
OrganizeXml(label2, selectedFolder + "\\NOTAS", comboBox1);
}
}
private static void OrganizeXml(Label label2, string selectedFolder, ComboBox comboBox1)
{
string folderMove = String.Empty;
string folder = selectedFolder;
label2.Text = "Arquivos sendo processados, aguarde...";
label2.Refresh();
string[] files = Directory.GetFiles(folder, "*.xml", SearchOption.AllDirectories);
int i = 1, arquivos = files.Length;
Directory.CreateDirectory(folder + #"\ORGANIZADO");
if (comboBox1.SelectedIndex != 2)
{
Directory.CreateDirectory(folder + #"\ORGANIZADO\OUTROS");
Directory.CreateDirectory(folder + #"\ORGANIZADO\LOTES");
}
foreach (string file in files)
{
XmlDocument xmlDocument = new XmlDocument();
try
{
xmlDocument.Load(file);
if (xmlDocument.DocumentElement.Name != "nfeProc")
{
XmlNodeList NFe = xmlDocument.GetElementsByTagName("NFe");
var nota = ((XmlElement) NFe[0]);
if (nota != null)
{
XmlNodeList infNFe = ((XmlElement) NFe[0]).GetElementsByTagName("infNFe");
string chave = infNFe[0].Attributes["Id"].Value.Replace("NFe", "");
string versao = infNFe[0].Attributes["versao"].Value;
string notaXML = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><nfeProc versao=\"" + versao +
"\" xmlns=\"http://www.portalfiscal.inf.br/nfe\">" + nota.OuterXml +
"</nfeProc>";
string dirNote = Path.GetDirectoryName(file);
File.WriteAllText(dirNote + "\\fix_" + chave + ".xml", notaXML);
}
}
//
//
//
}
catch (XmlException)
{
XmlDocument doc = new XmlDocument();
string arquivo = ReadFileToString(file);
arquivo = RemoveSpecialCharacters(arquivo);
if (arquivo == "")
{
File.Move(file, folder + @"\ORGANIZADO\OUTROS\corrupt_" + Path.GetFileName(file));
continue;
}
try
{
doc.LoadXml(arquivo);
doc.PreserveWhitespace = true;
doc.Save(file);
}
catch (XmlException)
{
File.Move(file, folder + @"\ORGANIZADO\OUTROS\corrupt_" + Path.GetFileName(file));
files = files.Where(f => f != file).ToArray();
}
}
}
foreach (string file in files)
{
string arquivoLoad = file;
XmlDocument xmlDocument = new XmlDocument();
xmlDocument.Load(arquivoLoad);
XmlNodeList NFe = xmlDocument.GetElementsByTagName("NFe");
XmlNodeList enviNFe = xmlDocument.GetElementsByTagName("enviNFe");
if (NFe.Count == 0)
{
if (File.Exists(folder + @"\ORGANIZADO\OUTROS\no_NFe_" + Path.GetFileName(arquivoLoad)))
{
Random rnd = new Random();
File.Copy(arquivoLoad,
folder + @"\ORGANIZADO\OUTROS\no_NFe_" + rnd.Next(1, 5000) + Path.GetFileName(arquivoLoad));
}
else
{
File.Copy(arquivoLoad, folder + @"\ORGANIZADO\OUTROS\no_NFe_" + Path.GetFileName(arquivoLoad));
}
continue;
}
XmlNodeList infNFe = ((XmlElement)NFe[0]).GetElementsByTagName("infNFe");
string chave = infNFe[0].Attributes["Id"].Value.Replace("NFe", "");
if (xmlDocument.DocumentElement.Name != "nfeProc")
{
File.Move(arquivoLoad, folder + @"\ORGANIZADO\OUTROS\no_nfeProc_" + Path.GetFileName(arquivoLoad));
arquivoLoad = Path.GetDirectoryName(file) + "\\fix_" + chave + ".xml";
}
if (enviNFe.Count > 0)
{
if (File.Exists(folder + @"\ORGANIZADO\LOTES\" + Path.GetFileName(arquivoLoad)))
{
Random rnd = new Random();
File.Copy(arquivoLoad, folder + @"\ORGANIZADO\LOTES\" + rnd.Next(1, 5000) + Path.GetFileName(arquivoLoad));
}
else
{
File.Copy(arquivoLoad, folder + @"\ORGANIZADO\LOTES\" + Path.GetFileName(arquivoLoad));
}
continue;
}
//XmlNodeList infNFe = ((XmlElement)NFe[0]).GetElementsByTagName("infNFe");
XmlNodeList ide = ((XmlElement)infNFe[0]).GetElementsByTagName("ide");
string tpNF = ((XmlElement)ide[0]).GetElementsByTagName("tpNF")[0].InnerText;
//if (tpNF == "0") continue;
XmlNodeList emit = ((XmlElement)infNFe[0]).GetElementsByTagName("emit");
string emitInfoCod;
if (((XmlElement)emit[0]).GetElementsByTagName("CNPJ").Count > 0)
{
emitInfoCod = ((XmlElement)emit[0]).GetElementsByTagName("CNPJ")[0].InnerText;
}
else if (((XmlElement)emit[0]).GetElementsByTagName("CPF").Count > 0)
{
emitInfoCod = ((XmlElement)emit[0]).GetElementsByTagName("CPF")[0].InnerText;
}
else
{
emitInfoCod = "0";
}
string ide_dEmi = (((XmlElement)ide[0]).GetElementsByTagName("dEmi").Count > 0)
? ((XmlElement)ide[0]).GetElementsByTagName("dEmi")[0].InnerText
: ((XmlElement)ide[0]).GetElementsByTagName("dhEmi")[0].InnerText;
string[] data = ide_dEmi.Split('-');
string folderName = data[0] + "\\" + data[1];
string organizeStyle = String.Empty;
if (comboBox1.SelectedIndex == 0 || comboBox1.SelectedIndex == 2)
{
organizeStyle = folder + @"\ORGANIZADO\" + emitInfoCod + @"\" + folderName;
}
else
{
organizeStyle = folder + @"\ORGANIZADO\" + folderName + @"\" + emitInfoCod;
}
if (!Directory.Exists(organizeStyle))
{
Directory.CreateDirectory(organizeStyle);
}
folderMove = organizeStyle + "\\";
if (!File.Exists(folderMove + chave + ".xml"))
{
File.Copy(arquivoLoad, folderMove + chave + ".xml");
}
label2.Text = "Arquivos sendo processados, aguarde... (" + i + " / " + arquivos + ")";
label2.Refresh();
i++;
}
label2.Text = "Notas organizadas com sucesso!";
label2.Refresh();
}
public static string ReadFileToString(string filePath)
{
using (StreamReader streamReader = new StreamReader(filePath))
{
string text = streamReader.ReadToEnd();
streamReader.Close();
return text;
}
}
public static string RemoveSpecialCharacters(string str)
{
return Regex.Replace(str, @"[^\u0000-\u007F]", string.Empty);
}
private void exitBtn_Click(object sender, EventArgs e)
{
Application.Exit();
}
}
}
How can I determine what's going on?
The value of a variable of a reference type is not the object itself at all; it's just the memory address where the object lives.
So when you do the following:
while (true)
{
var myVariable = new MyReferenceType();
}
The only memory you are really reusing is the variable itself (think of a 32- or 64-bit pointer). But on every iteration you are allocating, somewhere in the address space, enough memory to hold the new object you've just created, and that memory is definitely not the memory that held the previous object.
This is essentially why your memory usage is growing. The "old" objects from previous iterations, having no live references, will eventually be collected by the GC, but that might never happen if the GC decides it has enough memory available to avoid a collection.
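A minimal diagnostic sketch of one way to confirm this, based on the question's loop (GC.Collect here is purely for observation, not something to leave in production code):
// Compare managed heap size before and after forcing a full collection.
// If the number drops sharply after GC.Collect, the per-iteration objects were
// collectible garbage all along and the growth you saw was just deferred collection.
long before = GC.GetTotalMemory(false);
foreach (string file in files)
{
    var xmlDocument = new XmlDocument();   // new allocation every iteration
    xmlDocument.Load(file);
    // ... process ...
}
GC.Collect();
GC.WaitForPendingFinalizers();
long after = GC.GetTotalMemory(true);
Console.WriteLine("Before: " + before + " bytes, after full GC: " + after + " bytes");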
I am reading around 300 txt files using tasks. My requirement is to read each file, pick the date from every line, work out which week it falls in and then compare that with the folder name, which is actually the week number (like 41, 42), to see whether they match. If they don't, I write out the file name and the line number. As the number and size of the files are huge, I am trying to use tasks so that I can speed up the process. Below is my code.
Any help would be appreciated. Thanks in advance.
private void browse_Click(object sender, EventArgs e)
{
try
{
string newFileName = "";
DialogResult result = folderBrowserDialog1.ShowDialog();
if (result == DialogResult.OK)
{
DateTime starttime = DateTime.Now;
string folderPath = Path.GetDirectoryName(folderBrowserDialog1.SelectedPath);
DirectoryInfo dInfo = new DirectoryInfo(folderPath);
string[] Allfiles = Directory.EnumerateFiles(folderPath, "*.txt", SearchOption.AllDirectories).ToArray();
foreach (DirectoryInfo folder in dInfo.GetDirectories())
{
newFileName = "Files_with_duplicate_TGMKT_Names_in_child_Folder_" + folder.Name + ".txt";
if (File.Exists(folderPath + "/" + newFileName))
{
File.Delete(folderPath + "/" + newFileName);
}
FileInfo[] folderFiles = folder.GetFiles();
if (folderFiles.Length != 0)
{
List<Task> tasks = new List<Task>();
foreach (var file in folderFiles)
{
var task = Task.Factory.StartNew(() =>
{
bool taskResult = ReadFile(file.FullName,folderPath,newFileName);
return taskResult;
});
tasks.Add(task);
}
Task.WaitAll(tasks.ToArray());
DateTime stoptime = DateTime.Now;
TimeSpan totaltime = stoptime.Subtract(starttime);
label6.Text = Convert.ToString(totaltime);
textBox1.Text = folderPath;
DialogResult result2 = MessageBox.Show("Read the files successfully.", "Important message", MessageBoxButtons.OK, MessageBoxIcon.Information);
}
}
}
}
catch (Exception)
{
throw;
}
}
public bool ReadFile(string file , string folderPath , string newFileName)
{
int LineCount = 0;
string fileName = Path.GetFileNameWithoutExtension(file);
using (FileStream fs = File.Open(file, FileMode.Open))
using (BufferedStream bs = new BufferedStream(fs))
using (StreamReader sr = new StreamReader(bs))
{
for (int i = 0; i < 2; i++)
{
sr.ReadLine();
}
string oline;
while ((oline = sr.ReadLine()) != null)
{
LineCount = ++LineCount;
string[] eachLine = oline.Split(';');
string date = eachLine[30].Substring(1).Substring(0, 10);
DateTime Date = DateTime.ParseExact(date, "d", CultureInfo.InvariantCulture);
int week = new GregorianCalendar(GregorianCalendarTypes.Localized).GetWeekOfYear(Date, CalendarWeekRule.FirstFourDayWeek, DayOfWeek.Saturday);
if (Convert.ToString(week) == folderName)
{
}
else
{
using (StreamWriter sw = new StreamWriter(folderPath + "/" + newFileName, true))
{
Filecount = ++Filecount;
sw.WriteLine(fileName + " " + "--" + " " + "Line number :" + " " + LineCount);
}
}
}
}
return true;
}
You call MessageBox.Show inside ReadFile, which means every file shows a message. That way you will get 300 message boxes.
Try putting the message after the WaitAll call, outside the ReadFile method.
if (files.Length != 0)
{
List<Task> tasks = new List<Task>();
foreach (var file in files)
{
var task = Task.Factory.StartNew(() =>
{
bool taskResult = ReadFile(file);
return taskResult;
});
tasks.Add(task);
}
Task.WaitAll(tasks.ToArray());
// Message here
DialogResult result2 = MessageBox.Show("Read the files successfully.", "Important message", MessageBoxButtons.OK, MessageBoxIcon.Information);
}
I'm writing code in C# that watches a folder; when a file gets created, the code performs some actions and writes the results to a log file.
I'm seeing very strange behaviour: when a file gets created in the watched folder, the function that handles the change is executed twice, even though there is only one change event.
Initially I used FileSystemWatcher, but after looking it up I saw that it has many stability issues, so I switched to MyFileSystemWatcher, which is a much more stable implementation. I'm still getting duplicates in my log file, though, and I have no idea why the code that is in charge of handling the change runs twice. Here is the code sample:
protected void Folder_Watch(string path)
{
if (!Directory.Exists(path))
{
try
{
System.IO.Directory.CreateDirectory(path);
}
catch (Exception ex)
{
File.AppendAllText(logPath + "\\SSHErrorLog.log", "[]*******" + DateTime.Now + " Error: " + ex.Message.ToString() + Environment.NewLine);
}
}
MyFileSystemWatcher m_Watcher = new MyFileSystemWatcher(path);
//m_Watcher.Path = path;
m_Watcher.Filter = "*.*";
m_Watcher.NotifyFilter = NotifyFilters.FileName;
m_Watcher.Created += new FileSystemEventHandler(OnChanged);
m_Watcher.EnableRaisingEvents = true;
}
Here is the OnChanged function:
private void OnChanged(object source, FileSystemEventArgs e)
{
File.AppendAllText(logPath + "\\SSHConnectionLog.log", "[]*******" + DateTime.Now + " OnChanged function: " + Environment.NewLine);
// Decrypt the file.
DecryptFile(keyPath + "\\id_rsa_Encrypted", keyPath + "\\id_rsa", sSecretKey);
// Remove the Key from memory.
//PKey = new PrivateKeyFile(keyPath + "\\id_rsa");
keyResult.Text = "RSA keys Were Generated at:" + keyPath;
//ScpClient client = new ScpClient("remnux", "adi", PKey);
Chilkat.SFtp client = new Chilkat.SFtp();
string[] tempPath = e.FullPath.Split('\\');
string fullPathNew = string.Empty;
for (int i = 0; i < tempPath.Length - 1; i++)
{
fullPathNew += tempPath[i];
}
if (Directory.Exists(fullPathNew))
{
sshConnect(client);
File_Upload(e.FullPath, client);
}
else
{
try
{
sshConnect(client);
System.IO.Directory.CreateDirectory(fullPathNew);
File_Upload(e.FullPath, client);
}
catch (Exception ex)
{
File.AppendAllText(logPath + "\\SSHErrorLog.log", "[]*******" + DateTime.Now + " Error in OnChanged function: " + ex.Message.ToString() + Environment.NewLine);
}
}
}
Any help would be very much appreciated!
I handled it in the OnChanged function: I added a time and file-name check to filter out the duplicate hits.
private void OnChanged(object source, FileSystemEventArgs e)
{
string[] temp = new string[3];
string[] tempNow = new string[3];
string[] tempSeconds = new string[2];
string[] tempNowSeconds = new string[2];
int temp1 = 0;
int temp2 = 0;
if(string.IsNullOrEmpty(changeName))
{
changeName = e.Name;
}
if (string.IsNullOrEmpty(changeTime))
{
changeTime = DateTime.Now.ToString();
temp = this.changeTime.Split(':');
tempNow = DateTime.Now.ToString().Split(':');
tempSeconds = temp[2].Split(' ');
tempNowSeconds = temp[2].Split(' ');
temp1 = Convert.ToInt16(tempSeconds[0]);
temp2 = Convert.ToInt16(tempNowSeconds[0]);
// Decrypt the file.
DecryptFile(keyPath + "\\id_rsa_Encrypted", keyPath + "\\id_rsa", sSecretKey);
// Remove the Key from memory.
PKey = new PrivateKeyFile(keyPath + "\\id_rsa");
keyResult.Text = "RSA keys Were Generated at:" + keyPath;
ScpClient client = new ScpClient("remnux", "adi", PKey);
string[] tempPath = e.FullPath.Split('\\');
string fullPathNew = string.Empty;
for (int i = 0; i < tempPath.Length - 1; i++)
{
fullPathNew += tempPath[i];
}
if (Directory.Exists(fullPathNew))
{
sshConnect(client);
File_Upload(e.FullPath, client);
}
else
{
try
{
sshConnect(client);
System.IO.Directory.CreateDirectory(fullPathNew);
File_Upload(e.FullPath, client);
}
catch (Exception ex)
{
File.AppendAllText(logPath + "\\SSHErrorLog.log", "[]*******" + DateTime.Now + " Error in OnChanged function: " + ex.Message.ToString() + Environment.NewLine);
}
}
}
if (!this.changeTime.Equals(DateTime.Now.ToString()))
{
temp = this.changeTime.Split(':');
tempNow = DateTime.Now.ToString().Split(':');
tempSeconds = temp[2].Split(' ');
tempNowSeconds = temp[2].Split(' ');
temp1 = Convert.ToInt16(tempSeconds[0]);
temp2 = Convert.ToInt16(tempNowSeconds[0]);
if (temp[2] != tempNow[2])
{
if ((temp1 < temp2 + 10 || temp1 > temp2 +40) && e.Name != changeName)
{
// Decrypt the file.
DecryptFile(keyPath + "\\id_rsa_Encrypted", keyPath + "\\id_rsa", sSecretKey);
// Remove the Key from memory.
PKey = new PrivateKeyFile(keyPath + "\\id_rsa");
keyResult.Text = "RSA keys Were Generated at:" + keyPath;
ScpClient client = new ScpClient("remnux", "adi", PKey);
string[] tempPath = e.FullPath.Split('\\');
string fullPathNew = string.Empty;
for (int i = 0; i < tempPath.Length - 1; i++)
{
fullPathNew += tempPath[i];
}
if (Directory.Exists(fullPathNew))
{
sshConnect(client);
File_Upload(e.FullPath, client);
}
else
{
try
{
sshConnect(client);
System.IO.Directory.CreateDirectory(fullPathNew);
File_Upload(e.FullPath, client);
}
catch (Exception ex)
{
File.AppendAllText(logPath + "\\SSHErrorLog.log", "[]*******" + DateTime.Now + " Error in OnChanged function(second if): " + ex.Message.ToString() + Environment.NewLine);
}
}
}
}
}
}
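A simpler, minimal sketch of the same idea (deduplicating rapid-fire events by file path and time), under the assumption that events for the same path arriving within a short window can be treated as one; the window length and the field names are arbitrary:
// Hypothetical debounce helper: remember when each path was last handled and
// ignore repeat events for the same path that arrive within a short window.
private readonly Dictionary<string, DateTime> lastHandled = new Dictionary<string, DateTime>();
private static readonly TimeSpan DebounceWindow = TimeSpan.FromSeconds(2);
private void OnChanged(object source, FileSystemEventArgs e)
{
    lock (lastHandled)
    {
        DateTime last;
        if (lastHandled.TryGetValue(e.FullPath, out last) &&
            DateTime.Now - last < DebounceWindow)
        {
            return;   // duplicate event for the same file, ignore it
        }
        lastHandled[e.FullPath] = DateTime.Now;
    }
    // ... decrypt the key, connect over SSH and upload the file, as in the original ...
}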