Parallel.ForEach: no threads at the end - C#

I'm testing an application that is supposed to compile many projects/files.
I've got a ConcurrentBag that should be worked through with Parallel.ForEach:
private readonly ConcurrentBag<string> m_files;
My call to Parallel.ForEach looks like this:
Parallel.ForEach(m_files, new ParallelOptions
{
MaxDegreeOfParallelism = MaxProcesses,
}, currFile => ProcessSingle(currFile.ToString()));
MaxProcesses is set to LogicalCpu * 2.
When I compile 140 projects, Parallel starts fewer and fewer threads towards the end; for the last 4 projects only one thread is still running. That's not nice, but okay.
Now my problem:
When I compile about 14000+ projects (it's COBOL source ;-) and a really big system), the last modules won't be compiled, because Parallel.ForEach isn't starting new threads for them. There is no worker thread alive at this point, but there are still 140 items in the ConcurrentBag.
Does anybody have an idea how to solve this?
Edit: The problem only occurs when I actually run the compiler. Without running the compiler (for faster testing) it works fine...
Edit:
The ConcurrentBag is already filled completely when I start the Parallel.ForEach.
For more detail, here is the code of ProcessSingle:
private void ProcessSingle(string item)
{
Monitor.Enter(lockingObj);
if (m_files.TryTake(out item))
{
if (CompilingModules <= 0)
{
OnQueueStarted(new EventArgs());
}
CompilingModules++;
Monitor.Exit(lockingObj);
OnQueueItemStateChanged(new ItemQueueEventArgs(item, null, ItemQueueType.Done, ItemQueueObject.String));
OnQueueItemStateChanged(new ItemQueueEventArgs(item, null, ItemQueueType.Dequeued, ItemQueueObject.String));
using (CobolCompiler compiler = new CobolCompiler())
{
compiler.OutputDataReceived += (sender, e) => OnOutputDataReceived(e);
compiler.Compile(item);
Thread.Sleep(2000);
if (compiler.LinkFailure)
{
if (ObjWithoutDll.ContainsKey(item))
{
if (ObjWithoutDll[item] <= 2)
{
m_files.Add(item);
OnQueueItemStateChanged(new ItemQueueEventArgs(item, null, ItemQueueType.Enqueued, ItemQueueObject.String));
ObjWithoutDll[item]++;
}
else
{
OnQueueItemStateChanged(new ItemQueueEventArgs(item, null, ItemQueueType.LinkError, ItemQueueObject.String));
ObjWithoutDll.Remove(item);
}
}
else
{
ObjWithoutDll.Add(item, 0);
m_files.Add(item);
OnQueueItemStateChanged(new ItemQueueEventArgs(item, null, ItemQueueType.Enqueued, ItemQueueObject.String));
}
}
else
{
if (compiler.DllExisting)
{
ObjWithoutDll.Remove(item);
}
OnQueueItemStateChanged(compiler.DllExisting ? new ItemQueueEventArgs(item, null, ItemQueueType.Done, ItemQueueObject.String) : new ItemQueueEventArgs(item, null, ItemQueueType.Failed, ItemQueueObject.String));
}
}
Monitor.Enter(lockingObj);
CompiledModules++;
if (CompiledModules % 300 == 0)
{
Thread.Sleep(60000);
}
CompilingModules--;
if (CompilingModules <= 0 && m_files.Count <= 0)
{
try
{
Process prReschk = new Process();
FileInfo batch = new FileInfo(@"batches\reschkdlg.cmd");
if (!batch.Exists)
{
Assembly _assembly = Assembly.GetExecutingAssembly();
StreamReader _textStreamReader = new StreamReader(_assembly.GetManifestResourceStream(@"Batches\reschkdlg.cmd"));
}
if (!File.Exists(Config.Instance.WorkingDir + @"reschkdlg.exe"))
{
File.Copy(Config.Instance.VersionExeDirectory + @"reschkdlg.exe", Config.Instance.WorkingDir + @"reschkdlg.exe");
}
prReschk.StartInfo.FileName = @"cmd.exe";
prReschk.StartInfo.Arguments = @"/c " + batch.FullName + " " + Config.Instance.Version.Replace(".", "") + " " + @"*" + " " + Config.Instance.WorkingDir;
prReschk.StartInfo.CreateNoWindow = true;
prReschk.StartInfo.UseShellExecute = false;
prReschk.Start();
prReschk.Close();
prReschk.Dispose();
}
catch
{
}
OnQueueFinished(new EventArgs());
}
}
Monitor.Exit(lockingObj);
}
Here is the code snippet of the CobolCompiler class:
public void Compile(string file)
{
file = file.ToLower();
Process prCompile = new Process();
Dir = Directory.CreateDirectory(c.WorkingDir + random.Next() + "\\");
try
{
// First clean up the folder
CleanUpFolder(true, file);
// First set lock and copy all sources
Monitor.Enter(lockingObj);
if (filesToCopy == null)
{
CopySource(Dir.FullName);
}
Monitor.Exit(lockingObj);
FileInfo batch = new FileInfo(@"batches\compile.cmd");
if (!batch.Exists)
{
Assembly _assembly = Assembly.GetExecutingAssembly();
StreamReader _textStreamReader = new StreamReader(_assembly.GetManifestResourceStream(@"Batches\compile.cmd"));
_textStreamReader.Dispose();
}
prCompile.StartInfo.FileName = @"cmd.exe";
prCompile.StartInfo.Arguments = @"/c " + batch.FullName + " " + c.Version.Replace(".", "") + " " + file.Remove(file.LastIndexOf('.')) + " " + Dir.FullName + " " + Dir.FullName.Remove(Dir.FullName.IndexOf(@"\"));
prCompile.StartInfo.CreateNoWindow = true;
prCompile.StartInfo.UseShellExecute = false;
prCompile.StartInfo.RedirectStandardOutput = true;
prCompile.StartInfo.RedirectStandardError = true;
prCompile.StartInfo.WorkingDirectory = Assembly.GetExecutingAssembly().Location.Remove(Assembly.GetExecutingAssembly().Location.LastIndexOf("\\") + 1);
prCompile.EnableRaisingEvents = true;
prCompile.OutputDataReceived += prCompile_OutputDataReceived;
prCompile.ErrorDataReceived += prCompile_OutputDataReceived;
prCompile.Start();
prCompile.BeginErrorReadLine();
prCompile.BeginOutputReadLine();
prCompile.WaitForExit();
prCompile.Close();
prCompile.Dispose();
CleanUpFolder(false, file);
if (File.Exists(Config.Instance.WorkingDir + file.Remove(file.LastIndexOf('.')) + ".dll") || File.Exists(Config.Instance.WorkingDir + file.Remove(file.LastIndexOf('.')) + ".exe"))
{
dllExisting = true;
linkFailure = false;
}
else
{
if (File.Exists(Config.Instance.WorkingDir + file.Remove(file.LastIndexOf('.')) + ".obj"))
{
linkFailure = true;
}
dllExisting = false;
}
}
catch (ThreadAbortException)
{
if (prCompile != null)
{
// On Error kill process
prCompile.Kill();
prCompile.Dispose();
}
}
catch (Win32Exception)
{
}
catch (Exception)
{
dllExisting = false;
}
while (true)
{
try
{
if (Directory.Exists(Dir.FullName))
{
Directory.Delete(Dir.FullName, true);
break;
}
else
{
break;
}
}
catch
{
}
}
}
private void CopySource(string Destination)
{
filesToCopy = new StringCollection();
foreach (string strFile in Directory.GetFiles(c.WorkingDir))
{
string tmpStrFile = strFile.ToLower();
foreach (string Extension in c.Extensions)
{
if (tmpStrFile.Contains(Extension))
{
filesToCopy.Add(tmpStrFile);
}
}
}
if (filesToCopy.Count > 0)
{
foreach (string strFile in filesToCopy)
{
File.Copy(strFile, Destination + strFile.Remove(0, strFile.LastIndexOf("\\")));
}
}
}
private void CleanUpFolder(bool PreCleanup, string Filename)
{
//Copy all files from compilationfolder to working directory
if (!PreCleanup)
{
foreach (string strFile in Directory.GetFiles(Dir.FullName, Filename.Remove(Filename.LastIndexOf(".") + 1) + "*"))
{
FileInfo fileToMove = new FileInfo(strFile);
if (fileToMove.Name.ToLower().Contains(Filename.Remove(Filename.LastIndexOf("."))))
{
File.Copy(strFile, c.WorkingDir + fileToMove.Name, true);
}
}
}
//Delete useless files
foreach (string filename in Directory.GetFiles(Config.Instance.WorkingDir, Filename.Remove(Filename.LastIndexOf("."))+".*"))
{
bool foundExt = c.Extensions.Contains(filename.Remove(0, filename.LastIndexOf(".") + 1));
if (PreCleanup)
{
// Only delete files which won't be compiled
if(!foundExt)
{
File.Delete(filename);
}
}
else
{
if (!Config.Instance.SaveLspFile && filename.Contains(".lsp"))
{
File.Delete(filename);
}
if (!Config.Instance.SaveLstFile && filename.Contains(".lst"))
{
File.Delete(filename);
}
}
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (!disposed)
{
if (disposing)
{
Dir = null;
}
disposed = true;
}
}
~CobolCompiler()
{
Dispose (false);
}
I just tried sleeping two seconds after every compile, but that doesn't change anything.
While compilation is in progress the CPU is at 100%. The application grows to 270 MB of RAM; at the start it's only 35 MB.
Don't be afraid: I have to copy all sources to temp folders because the compiler can't compile multiple files at the same time in the same working directory.
Edit:
I already fixed the problem of having no threads left while there were still items.
In ProcessSingle I add the item I just tried to compile back to the bag when it could not be linked to a DLL.
So I started with 14000 items and added items back to the ConcurrentBag (when they failed to link) while Parallel.ForEach was processing it. So I end up with 14000 runs of ForEach and xxx modules which still have to be compiled again. :-(
I didn't see that. Running prReschk without WaitForExit is intended, because checking resources for more than 14000 items takes a long time and should not obstruct a new compilation.
But the problem of fewer threads at the end of the ConcurrentBag still exists :( It is only noticeable with a large number of cycles.
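A ConcurrentBag enumerator is a snapshot, so items re-added to m_files while the Parallel.ForEach is running are never seen by that same call. A minimal sketch of one way around this, reusing the names from the question (m_files, ProcessSingle, MaxProcesses), is simply to repeat the loop until the bag really is empty:
// Re-run the Parallel.ForEach until no failed items remain in the bag.
// Each pass snapshots the current contents; anything re-added for retry
// during that pass is picked up by the next pass of the outer loop.
while (!m_files.IsEmpty)
{
    var currentPass = m_files.ToArray();

    Parallel.ForEach(currentPass, new ParallelOptions
    {
        MaxDegreeOfParallelism = MaxProcesses,
    }, currFile => ProcessSingle(currFile));
}
This keeps the retry logic inside ProcessSingle untouched; an alternative would be a BlockingCollection consumed via GetConsumingEnumerable, but that requires deciding when to call CompleteAdding.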

The Parallel.ForEach method will use the .NET ThreadPool to allocate threads. The actual number of threads that run in parallel is governed by the ThreadPool, depending on the load on the system's CPUs. So you may have specified MaxDegreeOfParallelism, but this is only a maximum; the ThreadPool may decide to allocate fewer threads than that.
Based on the evidence you've given in your question, it sounds to me like the compilation process is using up system resources and not cleaning up afterwards. This would explain why 140 compilations end in a gradual decline in the number of threads being allocated - the ThreadPool is not allocating new threads because it thinks the CPU is heavily loaded.
I would look more closely at how the compilation process is terminated. Does the ProcessSingle method return before the compilation has fully completed? Is there a memory leak in the compilation process?
As an experiment, I would be interested to know if it behaved differently if you added the following line after calling ProcessSingle:
System.Threading.Thread.Sleep(2000);
This will pause the thread for two seconds before passing control back to the ThreadPool to allocate the next task. If it improves your application's behaviour then it strongly suggests my theory is right.

If CopySource throws, then lockingObj is left locked and no further progress can be made. Use lock (lockingObj) instead, which uses a finally block to release the lock.
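A minimal sketch of what that looks like around the copy step in Compile (same fields as in the question); the lock statement compiles down to Monitor.Enter/Exit wrapped in try/finally, so the lock is released even when CopySource throws:
// Equivalent to Monitor.Enter(lockingObj); try { ... } finally { Monitor.Exit(lockingObj); }
lock (lockingObj)
{
    if (filesToCopy == null)
    {
        CopySource(Dir.FullName);
    }
}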

Related

C# Loading .net Executable from Memory

I am streaming a DLL to a client through WebClient, and I am trying to load my .NET executable directly into memory and execute it by loading it as an Assembly. This executable is a Windows Forms application that creates DirectX overlays and loads a kernel driver as a Windows service.
Unhandled Exception: System.TypeInitializationException: The type initializer for 'IOController.Main' threw an exception. ---> System.IndexOutOfRangeException: Index was outside the bounds of the array.
at IOController.Main..cctor()
--- End of inner exception stack trace ---
at IOController.Main.Dispose(Boolean disposing)
at System.ComponentModel.Component.Finalize()
I believe the executable is executed but crashes before Application.Run or while executing Application.Run().
Update 1:
Anything posted in IOController Main is what would be called if my Main function actually got executed. It would stop at the while loop which checks for a process. My window doesn't get initialized until after the loop, so no event handlers would fire either.
The application being loaded:
public static class Program
{
[STAThread]
static void Main()
{
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
Start();
}
public static void Start()
{
//MessageBox.Show("Hi");
Application.Run(new Main());
}
}
The Application loading it:
byte[] bytes = null;
try
{
using (var client = new WebClient())
{
client.Headers.Add("User-Agent", User_Agent);
bytes = client.DownloadData(uri);
}
}
catch (Exception ex)
{
Console.WriteLine("Download Failed:");
Console.WriteLine(ex.ToString());
SoftEnd();
}
Console.WriteLine("Bytes:" + bytes.LongLength);
var assembly = Assembly.Load(bytes);
var programType = assembly.GetTypes().FirstOrDefault(c => c.Name == "Program");
var method = programType.GetMethod("Start", BindingFlags.Public | BindingFlags.Static);
method.Invoke(null, new object[] { });
IOController Main:
//Thread DebuggerCheck = new Thread(Debugger);
//DebuggerCheck.Start();
if (!Directory.Exists(User_Path))
{
Directory.CreateDirectory(User_Path);
File.SetAttributes(User_Path, FileAttributes.System | FileAttributes.Hidden);
}
else
{
File.SetAttributes(User_Path, FileAttributes.System | FileAttributes.Hidden);
}
if (!Directory.Exists(HWID_Path))
{
Directory.CreateDirectory(HWID_Path);
File.SetAttributes(HWID_Path, FileAttributes.System | FileAttributes.Hidden);
}
else
{
File.SetAttributes(HWID_Path, FileAttributes.System | FileAttributes.Hidden);
}
WebClient webClient = new WebClient();
if (!File.Exists(HWID_Path + "KasperAV.sys"))
{
webClient.Headers.Add("User-Agent", Authenticate.User_Agent);
webClient.DownloadFile(Authenticate.Auth_Server + "Request=Download&Username=" + Username + "&Password=" + Password + "&HWID=" + FingerPrint.Value() + "&File=KasperAV.sys", HWID_Path + "KasperAV.sys");
File.SetAttributes(HWID_Path + "KasperAV.sys", FileAttributes.System | FileAttributes.Hidden);
}
else
File.SetAttributes(HWID_Path + "KasperAV.sys", FileAttributes.System | FileAttributes.Hidden);
if (IOControl.LoadDriver())
{
//Console.WriteLine("Driver Loaded");
//Filter Windows Name for Illegal Chars
WindowsName = WindowsName.Replace("*", "");
WindowsName = WindowsName.Replace("'", "");
WindowsName = WindowsName.Replace(";", "");
//Filter Machine Name for Illegal Chars
MachineName = MachineName.Replace("*", "");
MachineName = MachineName.Replace("'", "");
MachineName = MachineName.Replace(";", "");
//Check hash from Server
bool Now_Started = false;
if (!Now_Started)
{
Console.WriteLine("We advise you when exiting the cheat, use your 'End' button on your keyboard!");
Console.WriteLine("It will properly cleanup any leftover data that may cause a ban on other games.");
if (IsGameRunning()) //if the game is running
{
Now_Started = false;
}
else
{
Now_Started = true;
}
//Skip this
if (Now_Started)
{
Console.WriteLine("Waiting for H1Z1");
while (true)
{
if (IsGameRunning())
{
break;
}
Thread.Sleep(1000);
}
Console.WriteLine("::Game Launched - Waiting 30 seconds for the memory to load.");
int i = 0;
while (i < 30000)
{
Thread.Sleep(1);
Console.Write("\rElapsed: {0}ms \\ 30000ms", i++);
i++;
}
}
}
}
else
{
Console.WriteLine("Failed to Load Bypass, attempt a full reboot!");
Thread.Sleep(5000);
Exit();
}
The problem: I was calling GetCommandlineArguments when I wasn't passing any arguments (since I switched to loading the assembly rather than executing it).
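A minimal sketch of the kind of guard that avoids the IndexOutOfRangeException in the type initializer (Environment.GetCommandLineArgs and the index used here are assumptions; the original call isn't shown in the question):
// When the exe is loaded via Assembly.Load and invoked in-process,
// Environment.GetCommandLineArgs() returns the *host's* command line,
// so the argument the static constructor expected may simply not be there.
string[] args = Environment.GetCommandLineArgs();
string firstArgument = args.Length > 1 ? args[1] : null;

if (firstArgument == null)
{
    // Fall back to a default instead of letting the type initializer throw.
}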

Winforms background worker gets stuck

I am trying to build a simple program that joins CSV files into one distinct file, but my background worker seems to have a mind of its own and my code gets stuck every time.
Here is my code for joining the file using the background worker:
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
try
{
if (string.IsNullOrEmpty(saveFilePath))
{
this.Invoke(new MethodInvoker(delegate
{
btnBrowseSave.PerformClick();
}));
}
if (!string.IsNullOrEmpty(saveFilePath))
{
if (dragEventArgs != null)
files = (string[])dragEventArgs.Data.GetData(DataFormats.FileDrop);
int filesCount = 0, rowsCount = 0;
foreach (string file in files)
{
filesCount++;
int fileTotalLines = File.ReadAllLines(file).Length;
this.Invoke(new MethodInvoker(delegate
{
lblFileName.Text = "Loading file: " + file.Substring(file.LastIndexOf("\\") + 1);
lblTotalFiles.Text = "File " + filesCount + " of " + files.Length;
}));
using (StreamReader reader = new StreamReader(file))
{
using (StreamWriter writer = new StreamWriter(saveFilePath))
{
while (!reader.EndOfStream)
{
try
{
while (stopPosition > rowsCount)
{
reader.ReadLine();
rowsCount++;
}
string email = reader.ReadLine().Trim();
if (!string.IsNullOrEmpty(email) && !dicEmails.ContainsKey(email))
{
dicEmails.Add(email, null);
writer.WriteLine(email);
}
rowsCount++;
stopPosition++;
backgroundWorker.ReportProgress((rowsCount * 100 / fileTotalLines), (int)ProgressType.Row);
if (backgroundWorker.CancellationPending)
return;
}
catch (Exception ex)
{
hadReadErrors = true;
}
}
}
}
backgroundWorker.ReportProgress(0, (int)ProgressType.Row);
backgroundWorker.ReportProgress((filesCount * 100 / files.Length), (int)ProgressType.File);
}
}
}
catch (Exception ex)
{
hadReadErrors = true;
MessageBox.Show(ex.Message);
}
finally
{
backgroundWorker.Dispose();
}
}
private void backgroundWorker_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
try
{
switch ((int)e.UserState)
{
case (int)ProgressType.Row:
lblFileProgress.Text = e.ProgressPercentage + "%";
fileProgressBar.Value = e.ProgressPercentage;
break;
case (int)ProgressType.File:
lblTotalProgress.Text = e.ProgressPercentage + "%";
totalProgressBar.Value = e.ProgressPercentage;
break;
}
}
catch (Exception ex) { }
}
When I run in debug mode and step through with the debugger I don't see any problems, but when I let the code run by itself it gets stuck and crashes.
Can someone please help me and tell me what I am missing here?
Here is the exception:
Managed Debugging Assistant 'ContextSwitchDeadlock' has detected a problem in
'C:\Users\Develop\Desktop\ExcelBuilder\ExcelBuilder\bin\Debug\ExcelBuilder.vshost.exe'.
Additional information: The CLR has been unable to transition from COM context 0x90fb78
to COM context 0x90fc30 for 60 seconds. The thread that owns the destination
context/apartment is most likely either doing a non pumping wait or processing a very
long running operation without pumping Windows messages. This situation generally has
a negative performance impact and may even lead to the application becoming non
responsive or memory usage accumulating continually over time. To avoid this problem,
all single threaded apartment (STA) threads should use pumping wait primitives
(such as CoWaitForMultipleHandles) and routinely pump messages during long running operations.
I made a small example of your program, trying to guess what it must do (https://github.com/anderson-rancan/stackoverflow_28798348; drag and drop 4 files, lorem?.csv, onto the groupbox), and there are a few things you should consider:
never try/catch an exception you cannot deal with (https://msdn.microsoft.com/en-us/library/ms182137.aspx)
when using a BackgroundWorker on a form, use the "sender" parameter to reference it; it's a thread-safe reference inside your method (https://msdn.microsoft.com/en-us/library/cc221403(v=vs.95).aspx)
maybe you are updating your form too fast; change your Invoke calls to BeginInvoke and do the updates asynchronously (https://msdn.microsoft.com/en-us/library/0b1bf3y3(v=vs.110).aspx)
Just by fixing those points it was possible to run it, like this:
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
BackgroundWorker bckw = (BackgroundWorker)sender; // Recommended way, thread safe
try
{
if (string.IsNullOrEmpty(saveFilePath))
{
this.Invoke(new MethodInvoker(delegate
{
btnBrowseSave.PerformClick();
}));
}
if (!string.IsNullOrEmpty(saveFilePath))
{
if (dragEventArgs != null)
files = (string[])dragEventArgs.Data.GetData(DataFormats.FileDrop);
int filesCount = 0, rowsCount = 0;
foreach (string file in files)
{
filesCount++;
double fileTotalLines = File.ReadAllLines(file).Length;
this.BeginInvoke(new MethodInvoker(delegate
{
lblFileName.Text = "Loading file: " + file.Substring(file.LastIndexOf("\\") + 1);
lblTotalFiles.Text = "File " + filesCount + " of " + files.Length;
})); // Invoke async, way too fast this...
using (StreamReader reader = new StreamReader(file))
{
using (StreamWriter writer = new StreamWriter(saveFilePath))
{
while (!reader.EndOfStream)
{
try
{
while (stopPosition > rowsCount)
{
reader.ReadLine();
rowsCount++;
} // why are you using that? it won't get TRUE
string email = reader.ReadLine().Trim();
if (!string.IsNullOrEmpty(email) && !dicEmails.ContainsKey(email))
{
dicEmails.Add(email, null);
writer.WriteLine(email);
}
rowsCount++;
stopPosition++;
bckw.ReportProgress((int)Math.Round(rowsCount * 100 / fileTotalLines, 0), (int)ProgressType.Row);
if (bckw.CancellationPending)
return;
}
catch (Exception ex)
{
hadReadErrors = true;
throw; // Throw it again, or you won't know the Exception
}
}
}
}
bckw.ReportProgress(0, (int)ProgressType.Row);
bckw.ReportProgress((filesCount * 100 / files.Length), (int)ProgressType.File);
}
}
}
catch (Exception ex)
{
hadReadErrors = true;
MessageBox.Show(ex.Message);
}
finally
{
bckw.Dispose();
}
}
private void backgroundWorker_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
//try
//{
switch ((int)e.UserState)
{
case (int)ProgressType.Row:
lblFileProgress.Text = e.ProgressPercentage + "%";
if (e.ProgressPercentage <= fileProgressBar.Maximum)
fileProgressBar.Value = e.ProgressPercentage;
break;
case (int)ProgressType.File:
lblTotalProgress.Text = e.ProgressPercentage + "%";
totalProgressBar.Value = e.ProgressPercentage;
break;
}
//}
//catch (Exception ex) { } // Don't catch everything
}
Finally, may I suggest another approach?
You're reading each file twice: once to get the number of lines, and again to read each line. Try to do it just once; you'll get a better result.
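A minimal sketch of reading each file only once (bckw, saveFilePath, dicEmails and ProgressType follow the names in the code above; the append flag is an assumption so that one output file survives multiple input files):
// One read: the array gives both the line count (for progress) and the lines.
string[] lines = File.ReadAllLines(file);

using (var writer = new StreamWriter(saveFilePath, append: true))
{
    for (int row = 0; row < lines.Length; row++)
    {
        string email = lines[row].Trim();
        if (!string.IsNullOrEmpty(email) && !dicEmails.ContainsKey(email))
        {
            dicEmails.Add(email, null);
            writer.WriteLine(email);
        }

        bckw.ReportProgress((row + 1) * 100 / lines.Length, (int)ProgressType.Row);
        if (bckw.CancellationPending)
            return;
    }
}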

Background Worker Locking Main Thread - Windows Forms C#

I have a background worker that I use to create files in the background.
I had it working so that the files were created and the UI was still responsive.
I made some changes and now I can't figure out why the background worker is locking my main thread.
Here are my background worker methods. I don't have a progress changed event.
private void filecreator_bgw_DoWork(object sender, DoWorkEventArgs e)
{
if (filecreator_bgw.CancellationPending == true)
{
e.Cancel = true;
}
else
{
myManager.createFiles((SelectedFileTypes) e.Argument);
}
}
private void filecreator_bgw_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
if (e.Cancelled == true)
{
//status_label.Text = "Canceled!";
}
else if (e.Error != null)
{
//status_label.Text = "Error: " + e.Error.Message;
}
else
{
// Check the file manager object to see if the files were created successfully
status_label.Text = "COMPLETE";
file_statusLabel.Text = "Files Created: " + DateTime.Now.ToShortTimeString();
System.Threading.Thread.Sleep(5000);
status_label.Text = "Click Create Files to Begin";
createfiles_button.Enabled = true;
}
}
Here is the method to create the files.
public void createFiles(SelectedFileTypes x)
{
if (string.IsNullOrEmpty(Filename) || (x.isCSV == false && x.isTAB == false && x.isXML == false))
{
filesCreated = false;
return;
}
// Declare the streams and xml objects used to write to the output files
XDocument xmlFile;
StreamWriter swCSV;
StreamWriter swTAB;
CSVFilename = Path.GetDirectoryName(Filename) + Path.DirectorySeparatorChar.ToString() +
Path.GetFileNameWithoutExtension(Filename) + "CSV_TEST.csv";
swCSV = new StreamWriter(CSVFilename);
TABFilename = Path.GetDirectoryName(Filename) + Path.DirectorySeparatorChar.ToString() +
Path.GetFileNameWithoutExtension(Filename) + "TAB_TEST.csv";
swTAB = new StreamWriter(TABFilename);
XMLFilename = Path.GetDirectoryName(Filename) + Path.DirectorySeparatorChar.ToString() +
Path.GetFileNameWithoutExtension(Filename) + "XML_TEST.csv";
xmlFile = new XDocument(
new XDeclaration("1.0", "utf-8", "yes"),
new XComment("Crosswalk"));
xmlFile.Add(new XElement("ACCOUNTS"));
// String array for use when creating xml nodes
string[] splits;
// String used to read in a line from the input file
string line = "";
// Use a try and catch block, if any errors are caught, return false
try
{
// Read each line in the file and write to the output files
using (StreamReader sr = new StreamReader(Filename))
{
int i = 0;
while ((line = sr.ReadLine()) != null)
{
if (x.isCSV)
{
swCSV.WriteLine(line.Replace(delim, ","));
}
if (x.isTAB)
{
swTAB.WriteLine(line.Replace(delim, "\t"));
}
if (x.isXML)
{
if (i <= 0)
{
i++;
continue;
}
splits = line.Split(new string[] { delim }, StringSplitOptions.RemoveEmptyEntries);
xmlFile.Root.Add(
new XElement("ACCOUNTS",
from s in header
select new XElement(s, splits[Array.IndexOf(header, header.Where(z => z.Equals(s, StringComparison.InvariantCultureIgnoreCase)).FirstOrDefault())])
)
);
}
}
// Dispose of all objects
swCSV.Close();
swCSV.Dispose();
swTAB.Close();
swTAB.Dispose();
if (x.isXML)
{
//xmlFile.Save(Path.GetFullPath(Filename) + Path.GetFileNameWithoutExtension(Filename) + "_TEST.xml");
xmlFile.Save(XMLFilename);
}
}
}
catch (Exception)
{
filesCreated = false;
return;
}
// Return true if file creation was successfull
filesCreated = true;
}
In the do work method, I build a simple struct to determine what output file types should be made and then I pass it to the method. If I comment out that call to create the files, the UI still does not respond.
In the create files method, I build out the files based on the input file that I am transforming. I do use a LINQ statement to help build out XML tags, but the arrays holding the tags values are small, 3-5 elements depending on the file chosen.
Is there a simple solution, or should I redesign the method? If I have to redesign, what should I keep in mind to avoid locking the main thread?
Thanks
Here is how I call the RunWorkerAsync method:
private void createfiles_button_Click(object sender, EventArgs e)
{
SelectedFileTypes selVal = new SelectedFileTypes();
foreach (var structVal in outputformats_checkedListBox.CheckedItems)
{
if (structVal.ToString().Equals("CSV", StringComparison.InvariantCultureIgnoreCase))
selVal.isCSV = true;
if (structVal.ToString().Equals("TAB", StringComparison.InvariantCultureIgnoreCase))
selVal.isTAB = true;
if (structVal.ToString().Equals("XML", StringComparison.InvariantCultureIgnoreCase))
selVal.isXML = true;
}
// Call the FileManager object's create files method
createfiles_button.Enabled = false;
filecreator_bgw.RunWorkerAsync(selVal);
}
UPDATE:
I updated the call to start the worker and then the call to create the files using the argument passed into the worker.
You cannot interact with most UI controls directly from a BackgroundWorker. You need to access outputformats_checkedListBox.CheckedItems from the UI thread and pass the resulting SelectedFileTypes object into the BackgroundWorker as a parameter.
Also, please note that your cancellation logic really doesn't do much. In order for it to work well, you need to check CancellationPending throughout the process, not just when starting.
Here is a rough example of how you should start the worker:
private void StartWorker()
{
SelectedFileTypes selVal = new SelectedFileTypes();
foreach (var structVal in outputformats_checkedListBox.CheckedItems)
{
if (structVal.ToString().Equals("CSV", StringComparison.InvariantCultureIgnoreCase))
selVal.isCSV = true;
if (structVal.ToString().Equals("TAB", StringComparison.InvariantCultureIgnoreCase))
selVal.isTAB = true;
if (structVal.ToString().Equals("XML", StringComparison.InvariantCultureIgnoreCase))
selVal.isXML = true;
}
filecreator_bgw.RunWorkerAsync(selVal);
}
private void filecreator_bgw_DoWork(object sender, DoWorkEventArgs e)
{
SelectedFileTypes selVal = (SelectedFileTypes)e.Argument;
myManager.createFiles(selVal);
}

Save a file and name it at the "Same" time (NAudio), improving the Accuracy

I want to make a wave file, write data to it, and have the filename be the time it began.
I know how to write to the wave file using WaveFileWriter, and I know how to name it with the current time; the problem, however, is naming the file as close as possible to the moment the audio is actually written.
Currently I have something like this:
private void SaveReceivedAudio(string recieve)
{
try
{
using (var folderBrowser = new FolderBrowserDialog())
{
DialogResult result = folderBrowser.ShowDialog();
if (result == DialogResult.OK)
{
path = Path.GetFullPath(folderBrowser.SelectedPath) + (Path.DirectorySeparatorChar);
waveWriter = new NAudio.Wave.WaveFileWriter(path + recieve + "- " + DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss-fff") + ".wav", new WaveFormat(48000, 16, 2));
waveWriterYour = new WaveFileWriter(path + Environment.UserName + " - " + DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss-fff") + ".wav", new WaveFormat(48000, 16, 2));
Record = true;
Recording.ForeColor = System.Drawing.Color.Green;
Recording.Text = "Recording";
}
else
{
Record = false;
Recording.ForeColor = System.Drawing.Color.Red;
Recording.Text = "Not Recording";
}
}
}
catch (Exception e)
{
MessageBox.Show("SaveReceivedAudio: " + e.Message);
}
}
So as you can see, I click a button and choose a directory; the 2 files in this example are created (and therefore timestamped) at that moment, and then Record is set to true.
However, this isn't accurate at all; it's probably fairly close, but it all depends on how it's executed.
Now, while I am saving, another thread loops over the current audio data, which looks like this:
private void RecordingQueueThread()
{
byte[] AudioData;
byte[] AudioData2;
while (connect)
{
try
{
if (Queue.TryTake(out AudioData, 300))
{
if (AudioData.Length == 0)
break;
else
{
if (Record)
{
waveWriter.Write(AudioData, 0, AudioData.Length);
waveWriter.Flush();
TestTimer += AudioData.Length;
}
}
}
if (RecQueue.TryTake(out AudioData2, 300))
{
if (AudioData.Length == 0)
break;
else
{
if (Record)
{
waveWriterYour.Write(AudioData, 0, AudioData.Length);
waveWriterYour.Flush();
}
}
}
}
catch (Exception e)
{
MessageBox.Show("Recording: " + e.Message);
}
}
}
Or well, maybe not in a loop, but there are 2 threads feeding this.
But as you can see, since there are 2 (or more) threads that work independently, the audio will not start at the time of the date I wrote earlier; it will be a bit later, and it can vary depending on many things, processing time etc.
Which makes this a problem.
The best thing would be if I could take the time exactly when the first data is written; I don't know how to do that, however.
Any ideas?
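One possible approach (only a sketch; it assumes path, recieve and the wave format are kept in fields so the writer can be built from the recording thread) is to create the WaveFileWriter lazily, right before the first buffer is written, so the timestamp in the filename matches the first written data:
if (Queue.TryTake(out AudioData, 300))
{
    if (AudioData.Length == 0)
        break;

    if (Record)
    {
        // Create the writer only when the first buffer actually arrives,
        // so DateTime.Now is taken at the moment of the first write.
        if (waveWriter == null)
        {
            waveWriter = new WaveFileWriter(
                path + recieve + " - " + DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss-fff") + ".wav",
                new WaveFormat(48000, 16, 2));
        }

        waveWriter.Write(AudioData, 0, AudioData.Length);
        waveWriter.Flush();
    }
}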

My C# multi-threaded console application seems to not be multithreaded

Here's my C# console program that uses PowerPoint to convert .ppt files to folders of PNGs. This is supposed to be an automated process that runs on its own server.
I expect that as soon as a thread has created the images from a file, it should immediately move the images and the source file.
The actual behavior is that, if five threads are running, it waits for five folders of images to be created before any thread moves any files. I'm able to watch the images being created and compare that with the console readout, so I can see that no thread is even trying to move its files yet.
Only after all the other threads have made their images will any thread try to move the files. I suspect this is wrong.
This is an Amazon EC2 Medium instance, and it appears to max out the CPU, so five threads might be too much for this.
I also find that I can hardly use Windows Explorer while this program is running.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Threading;
using System.IO;
using Microsoft.Office.Core;
using PowerPoint = Microsoft.Office.Interop.PowerPoint;
using System.Diagnostics;
using System.Timers;
namespace converter
{
class Program
{
public static int threadLimit=0;
public static int currThreads = 0;
static void Main(string[] args)
{
var inDir = args[0];
var outDir = args[1]+"\\";
var procDir = args[2]+"\\";
Int32.TryParse(args[3],out threadLimit);
Thread[] converterThreads = new Thread[threadLimit];
while (true)
{
System.Threading.Thread.Sleep(1000);
var filePaths = Directory.GetFiles(inDir, "*.*", SearchOption.AllDirectories).Where(s => s.EndsWith(".pptx") && !s.Contains("~$") || s.EndsWith(".ppt") && !s.Contains("~$"));
var arrPaths = filePaths.ToArray();
for(var i=0; i< arrPaths.Length; i++)
{
if (currThreads < threadLimit && currThreads < arrPaths.Length)
{
Console.WriteLine("currThreads= " + currThreads + " paths found= " + arrPaths.Length);
try
{
var fileNameWithoutExtension = arrPaths[currThreads].Replace(inDir, "").Replace(".pptx", "").Replace(".ppt", "").Replace("\\", "");
var filenameWithExtension = arrPaths[currThreads].Substring(arrPaths[currThreads].LastIndexOf("\\") + 1);
var dir = arrPaths[currThreads].Replace(".pptx", "").Replace(".ppt", "");
Conversion con = new Conversion(arrPaths[currThreads], dir, outDir, procDir, filenameWithExtension, fileNameWithoutExtension);
converterThreads[i] = new Thread(new ThreadStart(con.convertPpt));
converterThreads[i].Start();
Console.WriteLine(converterThreads[i].ManagedThreadId + " is converting " + fileNameWithoutExtension);
}
catch (Exception e)
{
Console.WriteLine(string.Format("Unable to convert {0} ", arrPaths[i]) + e);
}
}
}
for (var i = 0; i < converterThreads.Length; i++)
{
if (converterThreads[i] != null)
{
if (!converterThreads[i].IsAlive)
{
converterThreads[i].Abort();
converterThreads[i].Join(1);
Console.WriteLine("thread " + converterThreads[i].ManagedThreadId + " finished, "+currThreads+" remaining");
converterThreads[i] = null;
}
}
}
if (currThreads == 0)
{
try
{
foreach (Process proc in Process.GetProcessesByName("POWERPNT"))
{
proc.Kill();
}
}
catch (Exception e3)
{
}
}
}
}
}
class Logger{
static void toLog(String msg)
{
//TODO: log file
}
}
class Conversion{
static int numberOfThreads=0;
String input;
String output;
String outDir;
String process;
String nameWith;
String nameWithout;
int elapsedTime;
System.Timers.Timer time;
public Conversion(String input, String output, String outDir, String processDir, String nameWith, String nameWithout)
{
this.input = input;
this.output = output;
this.outDir = outDir;
process = processDir;
this.nameWith = nameWith;
this.nameWithout = nameWithout;
numberOfThreads++;
Console.WriteLine("number of threads running: " + numberOfThreads);
Program.currThreads = numberOfThreads;
time = new System.Timers.Timer(1000);
time.Start();
time.Elapsed += new ElapsedEventHandler(OnTimedEvent);
elapsedTime = 0;
}
private void OnTimedEvent(object source, ElapsedEventArgs e)
{
elapsedTime++;
}
public void convertPpt()
{
var app = new PowerPoint.Application();
var pres = app.Presentations;
try
{
var file = pres.Open(input, MsoTriState.msoFalse, MsoTriState.msoFalse, MsoTriState.msoFalse);
file.SaveAs(output, Microsoft.Office.Interop.PowerPoint.PpSaveAsFileType.ppSaveAsPNG, MsoTriState.msoTrue);
file.Close();
app.Quit();
Console.WriteLine("file converted " + input);
}
catch (Exception e)
{
Console.WriteLine("convertPpt failed");
}
moveFile();
moveDir();
}
public void moveFile()
{
Console.WriteLine("moving" + input);
try
{
System.Threading.Thread.Sleep(500);
Console.WriteLine(string.Format("moving {0} to {1}", input, process + nameWith));
if (File.Exists(process + nameWith))
{
File.Replace(input, process + nameWith, null);
}
else
{
File.Move(input, process + nameWith);
}
}
catch (Exception e)
{
Console.WriteLine(string.Format("Unable to move the file {0} ", input) + e);
try
{
foreach (Process proc in Process.GetProcessesByName("POWERPNT"))
{
proc.Kill();
}
}
catch (Exception e3)
{
}
}
}
public void moveDir()
{
Console.WriteLine("moving dir " + output);
try
{
System.Threading.Thread.Sleep(500);
Console.WriteLine(string.Format("moving dir {0} to {1} ", output, outDir + nameWithout));
if (Directory.Exists(outDir + nameWithout))
{
Directory.Delete(outDir + nameWithout, true);
}
if (Directory.Exists(output))
{
Directory.Move(output, outDir + nameWithout);
}
}
catch (Exception e)
{
Console.WriteLine(string.Format("Unable to move the directory {0} ", output) + e);
try
{
foreach (Process proc in Process.GetProcessesByName("POWERPNT"))
{
proc.Kill();
}
}
catch (Exception e3)
{
}
}
finally
{
numberOfThreads--;
Program.currThreads = numberOfThreads;
Console.WriteLine("took " + elapsedTime + "seconds");
}
}
}
}
Every 1000ms you get a list of files in inDir and potentially start a thread to process each file. You have very complex logic surrounding whether or not to start a new thread, and how to manage the lifetime of the thread.
The logic is too complex for me to spot the error without debugging the code. However, I would propose an alternative.
Have a single thread watch for new files and place the file path into a BlockingCollection of files for processing. That thread does nothing else.
Have N additional threads that retrieve file paths from the BlockingCollection and process them.
This is known as a Producer / Consumer pattern and is ideal for what you are doing.
The example code at the bottom of the linked MSDN page shows an implementation example.
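A minimal sketch of that producer/consumer shape with BlockingCollection (the names, and the use of Tasks instead of raw Threads, are illustrative rather than the poster's code):
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Threading.Tasks;

class ConversionQueue
{
    // Bounded so the directory watcher can't race far ahead of the converters.
    private readonly BlockingCollection<string> _files = new BlockingCollection<string>(boundedCapacity: 100);

    public void Run(string inDir, int consumerCount)
    {
        // Producer: the single thread that scans the input directory.
        var producer = Task.Run(() =>
        {
            foreach (var path in Directory.EnumerateFiles(inDir, "*.ppt*", SearchOption.AllDirectories))
            {
                _files.Add(path);
            }
            _files.CompleteAdding(); // a long-running watcher would keep adding instead
        });

        // Consumers: N workers, each taking one file at a time off the queue.
        var consumers = new Task[consumerCount];
        for (int i = 0; i < consumerCount; i++)
        {
            consumers[i] = Task.Run(() =>
            {
                foreach (var path in _files.GetConsumingEnumerable())
                {
                    Convert(path); // convert, then move the file and its output folder
                }
            });
        }

        Task.WaitAll(consumers);
        producer.Wait();
    }

    private static void Convert(string path)
    {
        Console.WriteLine("converting " + path);
        // The PowerPoint interop and move logic from the question would go here.
    }
}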
On a side note, you are catching and swallowing Exception e3. Don't catch something you will not handle, it hides problems.
