I'm having an issue with thread pooling that I need some help with, please. I am trying to write a generator, and I need to allow users to generate up to 10,000 lines with the code below. The problem is the line
WaitHandle.WaitAll(doneEvents);
which can only wait on 64 handles at a time. How can I best apply thread pooling to my code in this case?
public void GenerateInsertStatements(int iRequiredRows)
{
// One event is used for each row object
ManualResetEvent[] doneEvents = new ManualResetEvent[iRequiredRows];
Row[] rows = new Row[iRequiredRows];
for (int i = 0; i < iRequiredRows; i++)
{
doneEvents[i] = new ManualResetEvent(false);
Row row = new Row(this.Name, this.TableColumns, doneEvents[i]);
rows[i] = row;
ThreadPool.QueueUserWorkItem(row.ThreadPoolCallback, i);
}
WaitHandle.WaitAll(doneEvents);
using (sr = new StreamWriter(this.Name + ".sql"))
{
for(int i=0; i<rows.Length; i++)
{
WriteStatementToFile(i, rows[i].GeneratedInsertStatement);
}
}
}
Thanks in advance
I would use just one WaitHandle and one int. Like:
int done_when_zero; // This is a field of the class
ManualResetEvent evt = new ManualResetEvent (false); // Field
...
done_when_zero = iRequiredRows; // This goes before the loop
...
evt.WaitOne (); // this goes after the loop
evt.Reset (); // Prepare for next execution if needed
And then, at the end of ThreadPoolCallback:
if (Interlocked.Decrement(ref done_when_zero) <= 0)
evt.Set ();
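To make this concrete, here is a minimal sketch of the whole method using that counter. One assumption here: the decrement is done in a small wrapper lambda around the existing callback, so Row no longer needs its own ManualResetEvent and can be passed null (or have that constructor parameter removed).
// Fields on the generator class
int done_when_zero;
ManualResetEvent evt = new ManualResetEvent(false);

public void GenerateInsertStatements(int iRequiredRows)
{
    Row[] rows = new Row[iRequiredRows];
    done_when_zero = iRequiredRows;   // must be set before anything is queued
    evt.Reset();

    for (int i = 0; i < iRequiredRows; i++)
    {
        Row row = new Row(this.Name, this.TableColumns, null);  // per-row event no longer needed
        rows[i] = row;
        ThreadPool.QueueUserWorkItem(state =>
        {
            row.ThreadPoolCallback(state);
            // The last work item to finish releases the waiting thread
            if (Interlocked.Decrement(ref done_when_zero) <= 0)
                evt.Set();
        }, i);
    }

    evt.WaitOne();   // a single handle, so the 64-handle limit never applies

    using (sr = new StreamWriter(this.Name + ".sql"))
    {
        for (int i = 0; i < rows.Length; i++)
            WriteStatementToFile(i, rows[i].GeneratedInsertStatement);
    }
}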
As was already suggested, a counter and a single ManualResetEvent should work fine for you. Below is the ThreadPoolWait class taken from .NET Matters: ThreadPoolWait and HandleLeakTracer (see Figure 3, "Better Implementation of ThreadPoolWait", for more information).
public class ThreadPoolWait : IDisposable
{
private int _remainingWorkItems = 1;
private ManualResetEvent _done = new ManualResetEvent(false);
public void QueueUserWorkItem(WaitCallback callback)
{
QueueUserWorkItem(callback, null);
}
public void QueueUserWorkItem(WaitCallback callback, object state)
{
ThrowIfDisposed();
QueuedCallback qc = new QueuedCallback();
qc.Callback = callback;
qc.State = state;
lock (_done) _remainingWorkItems++;
ThreadPool.QueueUserWorkItem(new WaitCallback(HandleWorkItem), qc);
}
public bool WaitOne() { return WaitOne(-1, false); }
public bool WaitOne(TimeSpan timeout, bool exitContext)
{
return WaitOne((int)timeout.TotalMilliseconds, exitContext);
}
public bool WaitOne(int millisecondsTimeout, bool exitContext)
{
ThrowIfDisposed();
DoneWorkItem();
bool rv = _done.WaitOne(millisecondsTimeout, exitContext);
lock (_done)
{
if (rv)
{
_remainingWorkItems = 1;
_done.Reset();
}
else _remainingWorkItems++;
}
return rv;
}
private void HandleWorkItem(object state)
{
QueuedCallback qc = (QueuedCallback)state;
try { qc.Callback(qc.State); }
finally { DoneWorkItem(); }
}
private void DoneWorkItem()
{
lock (_done)
{
--_remainingWorkItems;
if (_remainingWorkItems == 0) _done.Set();
}
}
private class QueuedCallback
{
public WaitCallback Callback;
public object State;
}
private void ThrowIfDisposed()
{
if (_done == null) throw new ObjectDisposedException(GetType().Name);
}
public void Dispose()
{
if (_done != null)
{
((IDisposable)_done).Dispose();
_done = null;
}
}
}
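For the original question, a sketch of using it could look like this (it assumes Row.ThreadPoolCallback keeps the WaitCallback signature and no longer needs a per-row ManualResetEvent):
public void GenerateInsertStatements(int iRequiredRows)
{
    Row[] rows = new Row[iRequiredRows];
    using (ThreadPoolWait tpw = new ThreadPoolWait())
    {
        for (int i = 0; i < iRequiredRows; i++)
        {
            rows[i] = new Row(this.Name, this.TableColumns, null);
            tpw.QueueUserWorkItem(rows[i].ThreadPoolCallback, i);
        }
        tpw.WaitOne();   // returns once every queued work item has completed
    }

    using (sr = new StreamWriter(this.Name + ".sql"))
    {
        for (int i = 0; i < rows.Length; i++)
            WriteStatementToFile(i, rows[i].GeneratedInsertStatement);
    }
}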
Probably not the most efficient solution, but it should work regardless of the 64 wait handle limit:
for(int i = 0; i < iRequiredRows; i++)
doneEvents[i].WaitOne();
My problem is:
I want to run through all of ListStudent; it has five rows.
I am using a for() loop to go through every element in ListStudent and start the corresponding thread.
It should run StartProcess(dtStudent.Rows[0].ToString(), 1); through StartProcess(dtStudent.Rows[5].ToString(), 1);.
But when I debug, at the line while (run_process[idxThread].bwIsRun == true) the variable idxThread is always 5.
I also want it to run only the first thread, because I will enter input into a program; then, when I click btnProcess again, it should continue with the next dtStudent.Rows.
Is there any way to do this? Thanks.
I don't know what the issue with my code is.
This is all my code:
LThread[] run_process =new LThread[0];
int num_process = 0;
public void btnProcess()
{
DataTable dtStudent = mysql_db.ExcelLoad("ListStudent");
int total_row_student = dtStudent.Rows.Count; // 5 rows
if (num_process != total_row_student)
{
run_process = new LThread[total_row_student];
for (int idx = 0; idx < total_row_student; idx++)
{
run_process[idx] = new LThread();
run_process[idx].StartedEvent += new LThread.startDelegate(delegate (string arg)
{
StartProcess(dtStudent.Rows[idx - 1]["number"].ToString(), idx - 1);
});
}
}
num_process = total_row_student;
if (num_process == 0)
return;
flag = true;
Start_all_thread();
}
private void Start_all_thread()
{
for (int i = 0; i < run_process.Length; i++)
if (run_process[i] != null)
run_process[i].Start();
}
private void Stop_all_thread()
{
for (int i = 0; i < run_process.Length; i++)
if (run_process[i] != null)
run_process[i].Stop();
}
private void StartProcess(string output, int idxThread)
{
while (run_process[idxThread].bwIsRun == true)
{
if (flag == false)
continue;
// some code at here
Stop_all_thread();
}
}
And the class LThread.cs is defined as:
public class LThread2
{
public delegate void startDelegate(string ID);
public event startDelegate StartedEvent;
public Boolean bwIsRun;
MicroTimer microTimer = new MicroTimer();
public LThread2()
{
microTimer.MicroTimerElapsed +=
new MicroTimer.MicroTimerElapsedEventHandler(OnTimedEvent);
microTimer.Interval = 2000;
}
private static int RandNumber(int Low, int High)
{
Random rndNum = new Random(int.Parse(Guid.NewGuid().ToString().Substring(0, 8), System.Globalization.NumberStyles.HexNumber));
int rnd = rndNum.Next(Low, High);
return rnd;
}
public void Start()
{
try
{
bwIsRun = true;
microTimer.Enabled = true;
}
catch { }
}
public void Stop()
{
try
{
bwIsRun = false;
microTimer.Enabled = false;
}
catch { }
}
private void OnTimedEvent(object sender, MicroTimerEventArgs timerEventArgs)
{
StartedEvent(RandNumber(100, 10000).ToString());
}
}
/// <summary>
/// Class emulates long process which runs in worker thread
/// and makes synchronous user UI operations.
/// </summary>
public class LThread : BackgroundWorker
{
#region Members
public delegate void startDelegate(string ID);
public event startDelegate StartedEvent;
private static int RandNumber(int Low, int High)
{
Random rndNum = new Random(int.Parse(Guid.NewGuid().ToString().Substring(0, 8), System.Globalization.NumberStyles.HexNumber));
int rnd = rndNum.Next(Low, High);
return rnd;
}
protected override void OnDoWork(DoWorkEventArgs e)
{
StartedEvent(RandNumber(100,10000).ToString()); //put whatever parameter suits you or nothing
base.OnDoWork(e);
e.Result = e.Argument;
}
BackgroundWorker bwThread;
// Main thread sets this event to stop worker thread:
public Boolean bwIsRun;
int m_time_delay = 10000;
Delegate m_form_method_run;
Delegate m_form_method_stop;
Form m_type_form;
#endregion
#region Functions
public void Start()
{
try
{
bwIsRun = true;
this.RunWorkerAsync();
}
catch { }
}
public void Stop()
{
try
{
bwIsRun = false;
}
catch { }
}
private void StartToListen(object sender, DoWorkEventArgs e)
{
while (true)
{
Thread.Sleep(m_time_delay);
if (bwIsRun == true)
{
m_type_form.Invoke(m_form_method_run);
}
else
{
BackgroundWorker bwAsync = sender as BackgroundWorker;
if (bwAsync.CancellationPending)
{
e.Cancel = true;
return;
}
break;
}
}
}
#endregion
}
Your issue is probably in the for (int idx = 0; idx < total_row_student; idx++) loop. You're starting threads that will run long after the loop has finished, so the captured variable idx is already at 5 before any of the threads actually start.
Try changing the code to this to fix it:
for (int idx = 0; idx < total_row_student; idx++)
{
int local_idx = idx;
run_process[idx] = new LThread();
run_process[idx].StartedEvent += new LThread.startDelegate(delegate (string arg)
{
StartProcess(dtStudent.Rows[local_idx - 1]["number"].ToString(), local_idx - 1);
});
}
I have a simple pattern to run code only once. It's mostly used to update something in the UI that may change very often in the background.
private bool _updating;
private void UpdateSomething()
{
if (!_updating)
{
_updating = true;
Application.Current.Dispatcher.BeginInvoke(new Action(() =>
{
_updating = false;
DoSomething();
}), DispatcherPriority.Background);
}
}
I would prefer to put the boilerplate code inside a simple method:
public static void RunOnce(Action action, ref bool guard)
{
if (!guard)
{
guard = true;
Application.Current.Dispatcher.BeginInvoke(new Action(() =>
{
guard = false;
action();
}), DispatcherPriority.Background);
}
}
and call it like this:
void UpdateSomething()
{
RunOnce(DoSomething, ref _updating);
}
However, this does not work, as you cannot use ref parameters inside anonymous methods.
Is there any workaround, e.g. to pin the ref parameter and release it once the method has executed?
You could do something like this:
public static void RunOnce(Action action, ref RunOnceToken token)
{
if (token == null || token.IsCompleted)
{
token = new RunOnceToken(
Application.Current.Dispatcher.BeginInvoke(
action,
DispatcherPriority.Background));
}
}
public sealed class RunOnceToken : IDisposable
{
private DispatcherOperation _operation;
public RunOnceToken(DispatcherOperation operation)
{
if (operation != null &&
operation.Status != DispatcherOperationStatus.Completed &&
operation.Status != DispatcherOperationStatus.Aborted)
{
_operation = operation;
_operation.Completed += OnCompletedOrAborted;
_operation.Aborted += OnCompletedOrAborted;
}
}
private void OnCompletedOrAborted(object sender, EventArgs e)
{
this.Dispose();
}
public bool IsCompleted
{
get { return _operation == null; }
}
public void Dispose()
{
var operation = _operation;
if (operation == null)
return;
_operation = null;
operation.Completed -= OnCompletedOrAborted;
operation.Aborted -= OnCompletedOrAborted;
}
}
Your example usage would change to:
private RunOnceToken _updateToken;
private void UpdateSomething()
{
RunOnce(DoSomething, ref _updateToken);
}
It doesn't really matter if you never clear your copy of the token, because the wrapped DispatcherOperation gets cleared out upon completion to avoid leaking action or any values it captures.
In case it wasn't obvious, none of this is concurrency-safe; I assume everything above is only accessed from the UI thread.
One useful enhancement might be to add an optional DispatcherPriority argument to RunOnce such that you can control the priority level used to schedule action (perhaps canceling an already-scheduled operation if it was scheduled at a lower priority).
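That enhancement could look roughly like this (a sketch built on the RunOnceToken above; it only adds the priority parameter, and cancelling an already-scheduled lower-priority operation would additionally require the token to expose the underlying operation):
public static void RunOnce(
    Action action,
    ref RunOnceToken token,
    DispatcherPriority priority = DispatcherPriority.Background)
{
    if (token == null || token.IsCompleted)
    {
        // Same overload as above, just with the caller-supplied priority
        token = new RunOnceToken(
            Application.Current.Dispatcher.BeginInvoke(action, priority));
    }
}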
I was not aware of DispatcherOperation's existence; however, having seen Mike Strobel's answer, I wrote the following code. I'm not 100% sure about it, but it seems to work without too much boilerplate.
public static class DispatcherExtensions {
public static int clearInterval = 10_000;
private static long time => DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
private static long lastClearTime = time;
private static Dictionary<int, DispatcherOperation> currOperations = new Dictionary<int, DispatcherOperation>();
private static object sync = new object();
public static void invokeLastAsync(this Dispatcher d, Action a, DispatcherPriority p = DispatcherPriority.Background, [CallerFilePath]object key1 = null, [CallerLineNumber]object key2 = null) {
lock (sync) {
DispatcherOperation dop;
var k = key1.GetHashCode() ^ key2.GetHashCode();
if (currOperations.ContainsKey(k)) {
dop = currOperations[k];
currOperations.Remove(k);
dop.Abort();
}
dop = d.BeginInvoke(a, p);
clearOperations(false);
currOperations.Add(k, dop);
}
}
public static void clearOperations(bool force = true) {
var ct = time;
if (!force && ct - lastClearTime < clearInterval) return;
var nd = new Dictionary<int, DispatcherOperation>();
foreach (var ao in currOperations) {
var s = ao.Value.Status;
            // keep only operations that are still pending; completed/aborted entries are dropped
            if (s != DispatcherOperationStatus.Completed
                && s != DispatcherOperationStatus.Aborted)
                nd.Add(ao.Key, ao.Value);
}
currOperations = nd;
lastClearTime = ct;
}
}
Basically, the extension method takes the caller's file path and line number as the key under which the DispatcherOperation instance is stored in a dictionary. If the key already has an operation, that operation is aborted and replaced with the new one. Periodically, the dictionary is cleared of completed/aborted operations that are no longer being invoked.
The usage is very simple:
private int initCount = 0;
private int invokeCount = 0;
private void updateSomething() {
initCount++;
view.Dispatcher.invokeLastAsync(() => {
Console.WriteLine($#"invoked {++invokeCount}/{initCount}");
});
}
I haven't run into any issues with this so far. Maybe someone else can spot a weakness.
I have the following code.
Starting the threads:
Thread[] thr;
static object locker = new object();
bool liking = true;
private void button2_Click(object sender, EventArgs e)
{
button2.Enabled = false;
button3.Enabled = true;
string post = create_note();
decimal value = Program.Data.numericUpDown1;
int i = 0;
int j = (int)(value);
thr = new Thread[j];
for (; i < j; i++)
{
thr[i] = new Thread(() => invite(post));
thr[i].IsBackground = true;
thr[i].Start();
}
}
public void invite(string post)
{
while (liking)
{
        if (/* some condition */)
            // exit all threads, and start string post = create_note(); again
}
}
If some condition in invite(string post) becomes true, I need to stop all the threads, go back to string post = create_note(); to get a new post, and start the threads again.
How can I do that?
Instead of manual thread management, use Parallel.For with CancellationToken:
var cts = new CancellationTokenSource();
var options = new ParallelOptions
{
CancellationToken = cts.Token,
MaxDegreeOfParallelism = System.Environment.ProcessorCount
};
var result = Parallel.For(0, j, options, i =>
{
invite(post);
options.CancellationToken.ThrowIfCancellationRequested();
});
When you want to cancel parallel calculations, just call cts.Cancel() from external code.
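A sketch of how invite could observe the token and how the restart might be driven is below; the restart policy and the ShouldRestart check are assumptions standing in for "some condition" in the question:
private CancellationTokenSource cts;

// Hypothetical stand-in for the condition in the question.
private bool ShouldRestart(string post) { return false; }

public void invite(string post, CancellationToken token)
{
    while (!token.IsCancellationRequested)
    {
        if (ShouldRestart(post))
        {
            cts.Cancel();   // ask Parallel.For and the other workers to stop
            return;
        }
        // ... the actual work of one iteration goes here ...
    }
}

private void RunBatches(int j)
{
    while (true)
    {
        cts = new CancellationTokenSource();
        string post = create_note();          // fresh post for this batch
        try
        {
            Parallel.For(0, j,
                new ParallelOptions { CancellationToken = cts.Token },
                i => invite(post, cts.Token));
            break;                            // finished without a restart request
        }
        catch (OperationCanceledException)
        {
            // a worker hit the condition; loop, create a new note, start again
        }
    }
}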
You can use lock and create a class that manages your threads, like this:
public class SyncClass
{
public Thread[] thr;
private int NumberOfWorkingThreads { get; set; }
private object Sync = new object();
public int ThreadNumber { get; private set; }
public event EventHandler TasksFinished;
public SyncClass(int threadNumber)
{
thr = new Thread[threadNumber];
ThreadNumber = threadNumber;
NumberOfWorkingThreads = ThreadNumber;
//LunchThreads(threadNumber);
}
protected void OnTasksFinished()
{
if (TasksFinished == null)
return;
lock (Sync)
{
NumberOfWorkingThreads--;
if (NumberOfWorkingThreads == 0)
TasksFinished(this, new EventArgs());
}
}
public void LunchThreads()
{
string post = create_note();
for (int i = 0; i < ThreadNumber; i++)
{
thr[i] = new Thread(() => invite(post));
thr[i].IsBackground = true;
thr[i].Start();
}
}
private void invite(string post)
{
while (true)
{
if (true)
{
break;
}
}
OnTasksFinished();
}
}
Use the event to be notified when all the threads have finished; the class is then used like this:
private void Operation()
{
var sync = new SyncClass(10);
sync.TasksFinished += sync_TasksFinished;
sync.LunchThreads();
}
void sync_TasksFinished(object sender, EventArgs e)
{
Operation();
}
I've recently made my simple graphics library multi-threaded. It is now faster, but the simulation jitters a lot, as if various places had cached old position data and then applied it after it had gone "stale".
Basically, the boxes move, then jerk back, then move, then jerk back...There's no collision as of yet, so it's not that.
Not sure what code to post.
Thanks.
Edit: Whatever it is, it also causes lag spikes.
Edit2:
TaskManager:
public class TaskManager
{
public delegate void MethodDel(float timestep);
private Queue<MethodDel> queue;
private List<TaskHandler> handlers;
private float value;
public float Value
{
get
{
return value;
}
set
{
this.value = value;
}
}
public TaskManager()
{
this.queue = new Queue<MethodDel>();
this.handlers = new List<TaskHandler>(System.Environment.ProcessorCount);
for (int t = 0; t < this.handlers.Capacity; ++t)
this.handlers.Add(new TaskHandler(this));
foreach (var handler in handlers)
handler.Start();
this.value = 0;
}
public void Start()
{
foreach (var handler in handlers)
handler.Wake();
}
public void Stop()
{
lock (queue)
queue.Clear();
foreach (var handler in handlers)
handler.StopWhenDone();
}
public void StopWhenDone()
{
foreach (var handler in handlers)
handler.StopWhenDone();
}
public void AddToQueue(MethodDel method)
{
lock (queue)
queue.Enqueue(method);
}
public bool GetFromQueue(out MethodDel method)
{
lock (queue)
{
if (queue.Count == 0) { method = null; return false; }
method = queue.Dequeue();
return true;
}
}
public int GetQueueCount()
{
return queue.Count;
}
public void Wait()
{
// Have to wait for them one at a time because the main thread is STA.
WaitHandle[] waitHandles = new WaitHandle[1];
// for (int t = 0; t < handlers.Count; ++t) waitHandles[t] = handlers[t].WaitHandle;
// WaitHandle.WaitAll(waitHandles);
for (int t = 0; t < handlers.Count; ++t) { waitHandles[0] = handlers[t].WaitHandle; WaitHandle.WaitAll(waitHandles); }
}
}
TaskHandler:
public class TaskHandler
{
private TaskManager manager;
private Thread thread;
private bool stopWhenDone;
private ManualResetEvent waitHandle;
public ManualResetEvent WaitHandle
{
get
{
return waitHandle;
}
}
public TaskHandler(TaskManager manager)
{
this.manager = manager;
}
public void Start()
{
waitHandle = new ManualResetEvent(false);
stopWhenDone = false;
thread = new Thread(Run);
thread.IsBackground = true;
thread.SetApartmentState(ApartmentState.MTA);
thread.Start();
}
public void StopWhenDone()
{
this.stopWhenDone = true;
}
private void Run()
{
TaskManager.MethodDel curMethod;
while (true)
{
while (!stopWhenDone || manager.GetQueueCount() > 0)
{
if (manager.GetFromQueue(out curMethod))
{
curMethod(manager.Value);
}
}
waitHandle.Set();
waitHandle.WaitOne();
}
}
public void Wake()
{
waitHandle.Set();
}
}
The main Update loop:
public virtual void Update(float timestep)
{
taskManager.Value = timestep; taskManager.Start();
foreach (Camera camera in cameraLookup.Values)
// camera.Update(timestep);
taskManager.AddToQueue(camera.Update);
taskManager.StopWhenDone();
taskManager.Wait();
/* foreach (IAffector affector in affectorLookup.Values)
affector.Update(timestep); */
foreach (IAffector affector in affectorLookup.Values)
taskManager.AddToQueue(affector.Update);
taskManager.StopWhenDone();
taskManager.Wait();
// taskManager.StopWhenDone();
// taskManager.Wait();
foreach (IConstraint constraint in constraintLookup.Values)
// constraint.Update(timestep);
taskManager.AddToQueue(constraint.Update);
taskManager.StopWhenDone();
taskManager.Wait();
foreach (Physic physic in physicLookup.Values)
// physic.Update(timestep);
taskManager.AddToQueue(physic.Update);
taskManager.StopWhenDone();
taskManager.Wait();
foreach (Body body in bodyLookup.Values)
// body.Update(timestep);
taskManager.AddToQueue(body.Update);
taskManager.StopWhenDone();
taskManager.Wait();
foreach (Model model in modelLookup.Values)
// model.Update(timestep);
taskManager.AddToQueue(model.Update);
taskManager.StopWhenDone();
taskManager.Wait();
}
How are you managing the data? Can you test, at the point it is read, whether it is stale? Providing advice on a multi-threaded app is pretty difficult. You could try setting up some tracing and logging the specific pieces where you think the problem might be. If you log when the data is changed and when it is read, you might be able to figure out where it is going wrong.
Post some example code to show us how you manage the data and we can take it from there.
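As a rough illustration of that kind of tracing, a hypothetical wrapper such as the one below logs every read and write of a shared value together with the thread id, so a stale read shows up in the trace as a READ of an old value after a newer WRITE:
using System;
using System.Diagnostics;
using System.Threading;

public class TracedValue<T>
{
    private readonly object gate = new object();
    private readonly string name;
    private T value;

    public TracedValue(string name, T initial)
    {
        this.name = name;
        this.value = initial;
    }

    public T Get()
    {
        lock (gate)
        {
            // Log who read the value and when
            Trace.WriteLine(string.Format("{0:HH:mm:ss.fff} [{1}] READ  {2} = {3}",
                DateTime.Now, Thread.CurrentThread.ManagedThreadId, name, value));
            return value;
        }
    }

    public void Set(T newValue)
    {
        lock (gate)
        {
            // Log who wrote the value and when
            Trace.WriteLine(string.Format("{0:HH:mm:ss.fff} [{1}] WRITE {2} = {3}",
                DateTime.Now, Thread.CurrentThread.ManagedThreadId, name, newValue));
            value = newValue;
        }
    }
}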
If the data is going "stale", then you need to fix your caching system to evict or update old data.
Threading really isn't that hard; the logic is simple. The problem with threading is identifying which of your data is shared and which is not, tracking that data, and making sure it is updated in the correct order. Most of this has to do with your program's structure. Structure is much, much more important once you add threads to your program.
I'm attempting to make my simple C# graphics library multi-threaded. However, after the introduction of this code:
/* foreach (IAffector affector in affectorLookup.Values)
affector.Update(timestep); */
taskManager.Value = timestep; taskManager.Start();
foreach (IAffector affector in affectorLookup.Values)
taskManager.AddToQueue(affector.Update);
taskManager.StopWhenDone();
taskManager.Wait();
the simulation starts experiencing sharp lag-spikes, which seem to originate in TaskHandler.Run (I can't tell for sure, because adding the previous code makes my code profiler ignore anything outside TaskHandler.Run).
The task manager:
public class TaskManager
{
public delegate void MethodDel(float timestep);
private Queue<MethodDel> queue;
private List<TaskHandler> handlers;
private float value;
public float Value
{
get
{
return value;
}
set
{
this.value = value;
}
}
public TaskManager()
{
this.queue = new Queue<MethodDel>();
this.handlers = new List<TaskHandler>(System.Environment.ProcessorCount);
for (int t = 0; t < this.handlers.Capacity; ++t)
this.handlers.Add(new TaskHandler(this));
this.value = 0;
}
public void Start()
{
foreach (var handler in handlers)
handler.Start();
}
public void Stop()
{
lock (queue)
queue.Clear();
foreach (var handler in handlers)
handler.StopWhenDone();
}
public void StopWhenDone()
{
foreach (var handler in handlers)
handler.StopWhenDone();
}
public void AddToQueue(MethodDel method)
{
lock (queue)
queue.Enqueue(method);
}
public bool GetFromQueue(out MethodDel method)
{
lock (queue)
{
if (queue.Count == 0) { method = null; return false; }
method = queue.Dequeue();
return true;
}
}
public int GetQueueCount()
{
return queue.Count;
}
internal void Wait()
{
// Have to wait for them one at a time because the main thread is STA.
WaitHandle[] waitHandles = new WaitHandle[1];
// for (int t = 0; t < handlers.Count; ++t)
// waitHandles[t] = handlers[t].WaitHandle;
// WaitHandle.WaitAll(waitHandles);
for (int t = 0; t < handlers.Count; ++t)
{ waitHandles[0] = handlers[t].WaitHandle; WaitHandle.WaitAll(waitHandles); }
}
}
And the task handler:
public class TaskHandler
{
private TaskManager manager;
private Thread thread;
private bool stopWhenDone;
private ManualResetEvent waitHandle;
public ManualResetEvent WaitHandle
{
get
{
return waitHandle;
}
}
public TaskHandler(TaskManager manager)
{
this.manager = manager;
}
public void Start()
{
waitHandle = new ManualResetEvent(false);
stopWhenDone = false;
thread = new Thread(Run);
thread.IsBackground = true;
thread.SetApartmentState(ApartmentState.MTA);
thread.Start();
}
public void StopWhenDone()
{
this.stopWhenDone = true;
}
// Possible source of slowdown
private void Run()
{
TaskManager.MethodDel curMethod;
while (!stopWhenDone || manager.GetQueueCount() > 0)
{
if (manager.GetFromQueue(out curMethod))
{
curMethod(manager.Value);
}
}
waitHandle.Set();
}
}
Starting a thread is a heavy operation. I'm not sure whether it's as heavy as what you are experiencing, but that could be it. Also, having all your processing run in parallel can put a big strain on your system with possibly little benefit...
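If it is the per-frame thread creation, one alternative sketch (assuming .NET 4 or later, since it uses CountdownEvent) is to queue each update to the ThreadPool and wait on a single CountdownEvent per phase instead of creating new handler threads every Update; the RunPhase helper and its signature are hypothetical:
// Runs one phase of updates on pooled threads and waits for all of them.
// 'updates' stands for e.g. cameraLookup.Values projected to their Update methods.
static void RunPhase(IEnumerable<Action<float>> updates, float timestep)
{
    List<Action<float>> items = new List<Action<float>>(updates);
    if (items.Count == 0) return;

    using (CountdownEvent done = new CountdownEvent(items.Count))
    {
        foreach (Action<float> update in items)
        {
            Action<float> u = update;          // copy to avoid capturing the loop variable
            ThreadPool.QueueUserWorkItem(_ =>
            {
                try { u(timestep); }
                finally { done.Signal(); }     // always count down, even on exceptions
            });
        }
        done.Wait();                           // one reusable handle per phase
    }
}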
I'm going to venture that the spikes have something to do with waitHandle.Set().
I like the overall design, but I have not used WaitHandle before, so I am unsure how it interacts with your design.