Looking for some sample code (C#) for a simple thread pool implementation.
I found one on CodeProject, but the code base was just huge and I don't need all that functionality.
This is more for educational purposes anyway.
This is the simplest, most naive thread-pool implementation for educational purposes that I could come up with (C# / .NET 3.5). It does not use .NET's built-in thread pool in any way.
using System;
using System.Collections.Generic;
using System.Threading;
namespace SimpleThreadPool
{
public sealed class Pool : IDisposable
{
public Pool(int size)
{
this._workers = new LinkedList<Thread>();
for (var i = 0; i < size; ++i)
{
var worker = new Thread(this.Worker) { Name = string.Concat("Worker ", i) };
worker.Start();
this._workers.AddLast(worker);
}
}
public void Dispose()
{
var waitForThreads = false;
lock (this._tasks)
{
if (!this._disposed)
{
GC.SuppressFinalize(this);
this._disallowAdd = true; // wait for all tasks to finish processing while not allowing any more new tasks
while (this._tasks.Count > 0)
{
Monitor.Wait(this._tasks);
}
this._disposed = true;
Monitor.PulseAll(this._tasks); // wake all workers (none of them will be active at this point; the disposed flag will cause them to finish so that we can join them)
waitForThreads = true;
}
}
if (waitForThreads)
{
foreach (var worker in this._workers)
{
worker.Join();
}
}
}
public void QueueTask(Action task)
{
lock (this._tasks)
{
if (this._disallowAdd) { throw new InvalidOperationException("This Pool instance is in the process of being disposed, can't add any more tasks"); }
if (this._disposed) { throw new ObjectDisposedException("This Pool instance has already been disposed"); }
this._tasks.AddLast(task);
Monitor.PulseAll(this._tasks); // pulse because tasks count changed
}
}
private void Worker()
{
Action task = null;
while (true) // loop until threadpool is disposed
{
lock (this._tasks) // finding a task needs to be atomic
{
while (true) // wait for our turn in _workers queue and an available task
{
if (this._disposed)
{
return;
}
if (null != this._workers.First && object.ReferenceEquals(Thread.CurrentThread, this._workers.First.Value) && this._tasks.Count > 0) // we can only claim a task if it's our turn (this worker thread is the first entry in the _workers queue) and there is a task available
{
task = this._tasks.First.Value;
this._tasks.RemoveFirst();
this._workers.RemoveFirst();
Monitor.PulseAll(this._tasks); // pulse because current (First) worker changed (so that next available sleeping worker will pick up its task)
break; // we found a task to process, break out from the above 'while (true)' loop
}
Monitor.Wait(this._tasks); // go to sleep, either not our turn or no task to process
}
}
task(); // process the found task
lock(this._tasks)
{
this._workers.AddLast(Thread.CurrentThread);
}
task = null;
}
}
private readonly LinkedList<Thread> _workers; // queue of worker threads ready to process actions
private readonly LinkedList<Action> _tasks = new LinkedList<Action>(); // actions to be processed by worker threads
private bool _disallowAdd; // set to true when disposing queue but there are still tasks pending
private bool _disposed; // set to true when disposing queue and no more tasks are pending
}
public static class Program
{
static void Main()
{
using (var pool = new Pool(5))
{
var random = new Random();
Action<int> randomizer = (index =>
{
Console.WriteLine("{0}: Working on index {1}", Thread.CurrentThread.Name, index);
Thread.Sleep(random.Next(20, 400));
Console.WriteLine("{0}: Ending {1}", Thread.CurrentThread.Name, index);
});
for (var i = 0; i < 40; ++i)
{
var i1 = i;
pool.QueueTask(() => randomizer(i1));
}
}
}
}
}
There is no need to implement your own, since it is not very hard to use the existing .NET implementation.
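In its simplest form, handing work to the built-in pool is a single call. Here is a minimal sketch of my own (the DoWork method and the "hello" argument are just placeholders, not part of the documentation example that follows):
using System;
using System.Threading;

class MinimalPoolDemo
{
    static void Main()
    {
        // Hand a work item to the ThreadPool; an idle pool thread will run it.
        ThreadPool.QueueUserWorkItem(DoWork, "hello");

        // Crude wait so the process doesn't exit before the work item runs;
        // a real program would signal completion with an event instead.
        Thread.Sleep(1000);
    }

    // Placeholder worker method matching the WaitCallback signature.
    static void DoWork(object state)
    {
        Console.WriteLine("Processing '{0}' on a pool thread", state);
    }
}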
From the ThreadPool documentation:
using System;
using System.Threading;
public class Fibonacci
{
public Fibonacci(int n, ManualResetEvent doneEvent)
{
_n = n;
_doneEvent = doneEvent;
}
// Wrapper method for use with thread pool.
public void ThreadPoolCallback(Object threadContext)
{
int threadIndex = (int)threadContext;
Console.WriteLine("thread {0} started...", threadIndex);
_fibOfN = Calculate(_n);
Console.WriteLine("thread {0} result calculated...", threadIndex);
_doneEvent.Set();
}
// Recursive method that calculates the Nth Fibonacci number.
public int Calculate(int n)
{
if (n <= 1)
{
return n;
}
return Calculate(n - 1) + Calculate(n - 2);
}
public int N { get { return _n; } }
private int _n;
public int FibOfN { get { return _fibOfN; } }
private int _fibOfN;
private ManualResetEvent _doneEvent;
}
public class ThreadPoolExample
{
static void Main()
{
const int FibonacciCalculations = 10;
// One event is used for each Fibonacci object
ManualResetEvent[] doneEvents = new ManualResetEvent[FibonacciCalculations];
Fibonacci[] fibArray = new Fibonacci[FibonacciCalculations];
Random r = new Random();
// Configure and launch threads using ThreadPool:
Console.WriteLine("launching {0} tasks...", FibonacciCalculations);
for (int i = 0; i < FibonacciCalculations; i++)
{
doneEvents[i] = new ManualResetEvent(false);
Fibonacci f = new Fibonacci(r.Next(20,40), doneEvents[i]);
fibArray[i] = f;
ThreadPool.QueueUserWorkItem(f.ThreadPoolCallback, i);
}
// Wait for all threads in the pool to finish calculating...
WaitHandle.WaitAll(doneEvents);
Console.WriteLine("All calculations are complete.");
// Display the results...
for (int i= 0; i<FibonacciCalculations; i++)
{
Fibonacci f = fibArray[i];
Console.WriteLine("Fibonacci({0}) = {1}", f.N, f.FibOfN);
}
}
}
I am new to the threading concept and am using threading for the first time in my application. One part of my application processes multiple pieces of data; without threading it takes approximately 2 minutes, while with threading it takes just 25 seconds. However, I want a notification when all the threads have finished their work.
private int z = 0 ;
startfunction()
{
z= 250;
start1step(z);
}
private void start1step(int i)
{
if (i < 0)
return;
else
{
Thread thread = new Thread(new ThreadStart(WorkThreadFunction));
thread.Start();
start1step( --i);
}
}
public void WorkThreadFunction( )
{
try
{
int x = z ;
z-- ;
// do some background work
if(x ==0)
MessageBox.Show("All Thread finished");
}
catch
{
//
}
}
The above sample code works perfectly except for the notification part: I want a notification when all the threads have finished their background work. There is one last step left, which sums up the work finished by all these threads.
Please help.
There are lots of ways to do this. I would say the two most convenient methods that remain similar to your current implementation involve using a CountdownEvent or switching to the Task class for your threading. A third method involves using the Parallel.ForEach() method, and that might actually suit your specific scenario better.
CountdownEvent looks like this:
CountdownEvent countDown = new CountdownEvent(250);
startfunction()
{
countDown.Reset();
start1step(250);
countDown.Wait();
}
private void start1step(int i)
{
while (i-- > 0)
{
new Thread(WorkThreadFunction).Start(i);
}
}
public void WorkThreadFunction(object o)
{
int x = (int)o;
try
{
// do some background work
}
catch
{
//
}
finally
{
countDown.Signal();
}
}
Task looks like this:
startfunction()
{
Task[] tasks = new Task[250];
start1step(tasks);
Task.WaitAll(tasks);
}
private void start1step(Task[] tasks)
{
for (int i = 0; i < tasks.Length; i++)
{
int taskParam = i;
tasks[i] = Task.Run(() => WorkThreadFunction(taskParam));
}
}
public void WorkThreadFunction(int x)
{
try
{
// do some background work
}
catch
{
//
}
}
Parallel.ForEach() looks like this:
startfunction()
{
Parallel.ForEach(Enumerable.Range(0, 250), i => WorkThreadFunction(i));
}
public void WorkThreadFunction(int x)
{
try
{
// do some background work
}
catch
{
//
}
}
In your main thread you can do something like this...
List<Thread> threads = new List<Thread>();
// CREATE THREADS and add them to the list of threads
while (threads.Any(x => x.IsAlive))
{
Thread.Sleep(500);
}
Console.WriteLine("done");
Essentially, keep track of the threads and poll their status every now and then, sleeping between checks, rather than blocking indefinitely on Join.
I am building a small application simulating a horse race in order to gain some basic skill in working with threads.
My code contains this loop:
for (int i = 0; i < numberOfHorses; i++)
{
horsesThreads[i] = new Thread(horsesTypes[i].Race);
horsesThreads[i].Start(100);
}
In order to keep the race 'fair', I've been looking for a way to make all newly created threads wait until the rest of the new threads are set, and only then launch all of them to start running their methods (Please note that I understand that technically the threads can't be launched at the 'same time')
So basically, I am looking for something like this:
for (int i = 0; i < numberOfHorses; i++)
{
horsesThreads[i] = new Thread(horsesTypes[i].Race);
}
Monitor.LaunchThreads(horsesThreads);
Threading does not promise fairness or deterministic results, so it's not a good way to simulate a race.
Having said that, there are some sync objects that might do what you ask. I think the Barrier class (.NET Framework 4 and later) is what you want.
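A bare-bones sketch of that idea, reusing the numberOfHorses, horsesThreads and horsesTypes names from the question (the Race(100) call is hypothetical, standing in for the question's Start(100)); the next answer has a complete runnable example:
// All horses plus the main thread participate in the barrier.
var barrier = new Barrier(numberOfHorses + 1);
for (int i = 0; i < numberOfHorses; i++)
{
    int horse = i; // copy the loop variable for the closure
    horsesThreads[horse] = new Thread(() =>
    {
        barrier.SignalAndWait();      // block until every horse and the main thread have signalled
        horsesTypes[horse].Race(100); // hypothetical call mirroring the question's code
    });
    horsesThreads[horse].Start();
}
barrier.SignalAndWait(); // the main thread joins in; once all participants have signalled, every horse is released together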
The Barrier class is designed to support this.
Here's an example:
using System;
using System.Threading;
namespace Demo
{
class Program
{
private void run()
{
int numberOfHorses = 12;
// Use a barrier with a participant count that is one more than the
// number of threads. The extra one is for the main thread,
// which is used to signal the start of the race.
using (Barrier barrier = new Barrier(numberOfHorses + 1))
{
var horsesThreads = new Thread[numberOfHorses];
for (int i = 0; i < numberOfHorses; i++)
{
int horseNumber = i;
horsesThreads[i] = new Thread(() => runRace(horseNumber, barrier));
horsesThreads[i].Start();
}
Console.WriteLine("Press <RETURN> to start the race!");
Console.ReadLine();
// Signals the start of the race. None of the threads that called
// SignalAndWait() will return from the call until *all* the
// participants have signalled the barrier.
barrier.SignalAndWait();
Console.WriteLine("Race started!");
Console.ReadLine();
}
}
private static void runRace(int horseNumber, Barrier barrier)
{
Console.WriteLine("Horse " + horseNumber + " is waiting to start.");
barrier.SignalAndWait();
Console.WriteLine("Horse " + horseNumber + " has started.");
}
private static void Main()
{
new Program().run();
}
}
}
[EDIT] I just noticed that Henk already mentioned Barrier, but I'll leave this answer here because it has some sample code.
I'd be looking at a ManualResetEvent as a gate; inside the Thread, decrement a counter; if it is still non-zero, wait on the gate; otherwise, open the gate. Basically:
using System;
using System.Threading;
class Program
{
static void Main()
{
ManualResetEvent gate = new ManualResetEvent(false);
int numberOfThreads = 10, pending = numberOfThreads;
Thread[] threads = new Thread[numberOfThreads];
ParameterizedThreadStart work = name =>
{
Console.WriteLine("{0} approaches the tape", name);
if (Interlocked.Decrement(ref pending) == 0)
{
Console.WriteLine("And they're off!");
gate.Set();
}
else gate.WaitOne();
Race();
Console.WriteLine("{0} crosses the line", name);
};
for (int i = 0; i < numberOfThreads; i++)
{
threads[i] = new Thread(work);
threads[i].Start(i);
}
for (int i = 0; i < numberOfThreads; i++)
{
threads[i].Join();
}
Console.WriteLine("all done");
}
static readonly Random rand = new Random();
static void Race()
{
int time;
lock (rand)
{
time = rand.Next(500,1000);
}
Thread.Sleep(time);
}
}
This is further to my question here
After doing some reading, I moved away from semaphores to the ThreadPool.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
namespace ThreadPoolTest
{
class Data
{
public int Pos { get; set; }
public int Num { get; set; }
}
class Program
{
static ManualResetEvent[] resetEvents = new ManualResetEvent[20];
static void Main(string[] args)
{
int s = 0;
for (int i = 0; i < 100000; i++)
{
resetEvents[s] = new ManualResetEvent(false);
Data d = new Data();
d.Pos = s;
d.Num = i;
ThreadPool.QueueUserWorkItem(new WaitCallback(Process), (object)d);
if (s >= 19)
{
WaitHandle.WaitAll(resetEvents);
Console.WriteLine("Press Enter to Move forward");
Console.ReadLine();
s = 0;
}
else
{
s = s + 1;
}
}
}
private static void Process(object o)
{
Data d = (Data) o;
Console.WriteLine(d.Num.ToString());
Thread.Sleep(10000);
resetEvents[d.Pos].Set();
}
}
}
This code works and I am able to process in sets of 20. But I don't like this code because of WaitAll. Say I start a batch of 20 and 3 threads take a long time while the other 17 have finished; even then, the loop keeps waiting for all 20 because of WaitAll, leaving those 17 slots idle.
WaitAny would have been better... but it seems rather messy that I would have to build so many control structures like stacks, lists, queues, etc. in order to use the pool efficiently.
The other thing I don't like is the class-level resetEvents array, because it has to be shared between the Process method and the main loop.
The above code works... but I need your help in improving it.
Again... I am on .NET 2.0 / VS 2008. I cannot use the .NET 4.0 parallel/async framework.
There are several ways you can do this. Probably the easiest, based on what you've posted above, would be:
const int MaxThreads = 4;
const int ItemsToProcess = 10000;
private Semaphore _sem = new Semaphore(MaxThreads, MaxThreads);
void DoTheWork()
{
int s = 0;
for (int i = 0; i < ItemsToProcess; ++i)
{
_sem.WaitOne();
Data d = new Data();
d.Pos = s;
d.Num = i;
ThreadPool.QueueUserWorkItem(Process, d);
++s;
if (s >= 19)
s = 0;
}
// All items have been assigned threads.
// Now, acquire the semaphore "MaxThreads" times.
// When counter reaches that number, we know all threads are done.
int semCount = 0;
while (semCount < MaxThreads)
{
_sem.WaitOne();
++semCount;
}
// All items are processed
// Clear the semaphore for next time.
_sem.Release(semCount);
}
void Process(object o)
{
// do the processing ...
// release the semaphore
_sem.Release();
}
I only used four threads in my example because that's how many cores I have. It makes little sense to use 20 threads when only four of them can be processing at any one time. But you're free to increase the MaxThreads number if you like.
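If you would rather tie the limit to the machine than hard-code it, a small variation (my suggestion, not part of the code above) is to size it from Environment.ProcessorCount, which is available in .NET 2.0:
// Use the number of logical processors instead of a hard-coded constant.
// This has to be 'static readonly' rather than 'const' because the value
// is only known at run time.
static readonly int MaxThreads = Environment.ProcessorCount;
private readonly Semaphore _sem = new Semaphore(MaxThreads, MaxThreads);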
So I'm pretty sure this is all .NET 2.0.
We'll start out defining Action, because I'm so used to using it. If using this solution in 3.5+, remove that definition.
Next, we create a queue of actions based on the input.
After that we define a callback; this callback is the meat of the method.
It first grabs the next item from the queue (under a lock, since Queue<T> isn't thread safe). If it got an item, it executes it and then queues a new work item onto the thread pool: itself. This is a recursive anonymous method (you don't come across uses of that very often). So the first time the callback runs it executes one item, then schedules a work item that executes another item, and that one schedules another, and so on. Eventually the queue runs out, and no more work items are queued.
We also want the method to block until everything is done, so we keep track of how many of these callback chains have finished by incrementing a counter. When that counter reaches the concurrency limit, we signal the event.
Finally we start N of these callbacks in the thread pool.
public delegate void Action();
public static void Execute(IEnumerable<Action> actions, int maxConcurrentItems)
{
object key = new object();
Queue<Action> queue = new Queue<Action>(actions);
int count = 0;
AutoResetEvent whenDone = new AutoResetEvent(false);
WaitCallback callback = null;
callback = delegate
{
Action action = null;
lock (key)
{
if (queue.Count > 0)
action = queue.Dequeue();
}
if (action != null)
{
action();
ThreadPool.QueueUserWorkItem(callback);
}
else
{
if (Interlocked.Increment(ref count) == maxConcurrentItems)
whenDone.Set();
}
};
for (int i = 0; i < maxConcurrentItems; i++)
{
ThreadPool.QueueUserWorkItem(callback);
}
whenDone.WaitOne();
}
Here's another option that doesn't use the thread pool, and just uses a fixed number of threads:
public static void Execute(IEnumerable<Action> actions, int maxConcurrentItems)
{
Thread[] threads = new Thread[maxConcurrentItems];
object key = new object();
Queue<Action> queue = new Queue<Action>(actions);
for (int i = 0; i < maxConcurrentItems; i++)
{
threads[i] = new Thread(new ThreadStart(delegate
{
Action action = null;
do
{
lock (key)
{
if (queue.Count > 0)
action = queue.Dequeue();
else
action = null;
}
if (action != null)
{
action();
}
} while (action != null);
}));
threads[i].Start();
}
for (int i = 0; i < maxConcurrentItems; i++)
{
threads[i].Join();
}
}
I am trying to create some kind of framework that simplifies the process of writing object interaction algorithms (one object, many clients/algorithms).
For example, I want to implement an algorithm that does some very simple job and then waits for some condition to be met, in a loop:
public void MakeVerySimpleJob() { }
public void AsyncLoop()
{ // real algorithm can be more complex, but job is always very simple
while (true)
{
MakeVerySimpleJob();
WakeUpCondition = "As fast as u can!";
JobComplete.Set();
WakeUp.WaitOne();
}
}
void main()
{
Thread MyThread = new Thread(AsyncLoop);
MyThread.Start();
var w = new System.Diagnostics.Stopwatch(); w.Start();
for (int i = 0; i < 100000; i++)
{
// waiting for the thread
JobComplete.WaitOne();
// ok we did it
WakeUpCondition = null;
WakeUp.Set();
}
w.Stop();
}
AutoResetEvent JobComplete = new AutoResetEvent(false);
AutoResetEvent WakeUp = new AutoResetEvent(false);
Unfortunately it takes about 500 ms to do 100,000 simple jobs.
OK, so multithreading is not acceptable in my case, but I don't want to force users to write algorithms in this manner:
// invoke it again and again
public void PseudoAsyncLoop()
{
if (CurrentState == 1)
{
MakeVerySimpleJob();
CurrentState = 2;
return;
}
else if (CurrentState == some_state)
{
}
}
int CurrentState = 0;
So I looked at enumerators. With enumerators, users can implement their own algorithms in a traditional style:
public IEnumerable<bool> PseudoAsyncLoop()
{
while (true)
{
MakeVerySimpleJob();
WakeUpCondition = "As fast as u can!";
yield return true;
}
}
public string WakeUpCondition { get; private set; }
void main()
{
var MyLoop = PseudoAsyncLoop();
var LoopEnumerator = MyLoop.GetEnumerator();
var w = new System.Diagnostics.Stopwatch(); w.Start();
for(int i = 0; i < 100000; i ++)
{
LoopEnumerator.MoveNext();
// ok we did it
WakeUpCondition = null;
}
w.Stop();
}
Now it takes about 3 ms, which is great. But I think there is something wrong with all of that...
My questions are:
Am I heading in the right direction?
How do professional programmers solve these types of problems?
Maybe there are some ways to optimize the multithreaded version?
I don't completely understand what you are doing or why, but if this is actually representative of your code, then you can speed it up by using one of the -Slim synchronization primitives. There is no AutoResetEventSlim, but you can use SemaphoreSlim instead:
private readonly SemaphoreSlim JobComplete = new SemaphoreSlim(0, 1);
private readonly SemaphoreSlim WakeUp = new SemaphoreSlim(0, 1);
private void AsyncLoop()
{
while (true)
{
MakeVerySimpleJob();
WakeUpCondition = "As fast as u can!";
JobComplete.Release();
WakeUp.Wait();
}
}
private void main()
{
Thread MyThread = new Thread(AsyncLoop);
MyThread.Start();
for (int i = 0; i < 100000; i++)
{
JobComplete.Wait();
WakeUpCondition = null;
WakeUp.Release();
}
}
This results in about 5.5× faster execution on my machine.
There is a string array myDownloadList containing 100 URIs. I want to start 5 thread jobs that each pop the next URI from myDownloadList (like a stack) and do something with it (download it), until there are no URIs left on the stack (myDownloadList).
What would be the best practice to do this?
Use the ThreadPool, and just set up all of your requests. The ThreadPool will automatically schedule them appropriately.
This will get easier with .NET 4, using the Task Parallel Library. Setting up each request as a Task is very efficient and easy.
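As a rough sketch of that suggestion (Download(string uri) is a hypothetical method standing in for the real download code, and myDownloadList is the array from the question):
// Queue every URI onto the ThreadPool; the pool decides how many run concurrently.
foreach (string uri in myDownloadList)
{
    string current = uri; // copy for the closure (pre-C# 5 foreach capture semantics)
    ThreadPool.QueueUserWorkItem(delegate { Download(current); });
}
And the .NET 4 / Task Parallel Library shape of the same idea (assuming using System.Threading.Tasks), which also makes it easy to wait for everything to finish:
// Start one task per URI and block until all downloads are done.
var tasks = new List<Task>();
foreach (string uri in myDownloadList)
{
    string current = uri;
    tasks.Add(Task.Factory.StartNew(() => Download(current)));
}
Task.WaitAll(tasks.ToArray());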
Make sure each thread locks myDownloadList when accessing it. You could use recursion to keep getting the next item; when the list is empty, the function can just stop.
See the example below.
public static List<string> MyList { get; set; }
public static object LockObject { get; set; }
static void Main(string[] args)
{
Console.Clear();
Program.LockObject = new object();
// Create the list
Program.MyList = new List<string>();
// Add 100 items to it
for (int i = 0; i < 100; i++)
{
Program.MyList.Add(string.Format("Item Number = {0}", i));
}
// Start Threads
for (int i = 0; i < 5; i++)
{
Thread thread = new Thread(new ThreadStart(Program.PopItemFromStackAndPrint));
thread.Name = string.Format("Thread # {0}", i);
thread.Start();
}
}
public static void PopItemFromStackAndPrint()
{
string item = string.Empty;
lock (Program.LockObject)
{
// Check inside the lock so two threads cannot both try to take the last item
if (Program.MyList.Count == 0)
{
return;
}
// Get first item
item = Program.MyList[0];
Program.MyList.RemoveAt(0);
}
Console.WriteLine("{0}:{1}", System.Threading.Thread.CurrentThread.Name, item);
// Sleep to show other processing for examples only
System.Threading.Thread.Sleep(10);
Program.PopItemFromStackAndPrint();
}