I'm trying to implement a basic Future class (yeah, I know about Task, but this is for educational purposes) and ran into strange behavior of the Monitor class. The constructor enters the lock and queues an action to the thread pool; that action exits the lock when it finishes. The Result getter checks an instance variable to see whether the action has completed and, if it hasn't, enters the lock before returning the result. The problem is that the Result getter doesn't actually wait for the queued action to finish and proceeds anyway, leading to incorrect results. Here's the code.
// The class itself
public class Future<T>
{
private readonly Func<T> _f;
private volatile bool _complete = false;
private T _result;
private Exception _error = new Exception("WTF");
private volatile bool _success = false;
private readonly ConcurrentStack<Action<T>> _callbacks = new ConcurrentStack<Action<T>>();
private readonly ConcurrentStack<Action<Exception>> _errbacks = new ConcurrentStack<Action<Exception>>();
private readonly object _lock = new object();
public Future(Func<T> f)
{
_f = f;
Monitor.Enter(_lock);
ThreadPool.QueueUserWorkItem(Run);
}
public void OnSuccess(Action<T> a)
{
_callbacks.Push(a);
if (_complete && _success)
a(_result);
}
public void OnError(Action<Exception> a)
{
_errbacks.Push(a);
if (_complete && !_success)
a(_error);
}
private void Run(object state)
{
try {
_result = _f();
_success = true;
_complete = true;
foreach (var cb in _callbacks) {
cb(_result);
}
} catch (Exception e) {
_error = e;
_complete = true;
foreach (var cb in _errbacks) {
cb(e);
}
} finally {
Monitor.Exit(_lock);
}
}
public T Result {
get {
if (!_complete) {
Monitor.Enter(_lock);
}
if (_success) {
return _result;
} else {
Console.WriteLine("Throwing error complete={0} success={1}", _complete, _success);
throw _error;
}
}
}
// Failing test
public void TestResultSuccess() {
var f = new Future<int>(() => 1);
var x = f.Result;
Assert.AreEqual (1, x);
}
I'm using Mono 3.2.3 on Mac OS X 10.9.
Only the thread that took the lock can exit the lock. You can't Enter it in the constructor on the calling thread and then Exit from the thread-pool worker when it completes - the thread-pool worker does not hold the lock.
And conversely: presumably it is the same thread that created the future that is accessing the getter, and that thread is allowed to Enter again, because Monitor is re-entrant. Also, you need to Exit the same number of times that you Enter, otherwise the lock isn't actually released.
Basically, I don't think Monitor is the right approach here.
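For illustration, here is a minimal sketch of the same Future shape using a ManualResetEventSlim instead of Monitor; unlike a monitor, the event can be set from the worker thread no matter which thread ends up waiting (the callback handling from the original class is omitted):
public class Future<T>
{
    private readonly ManualResetEventSlim _done = new ManualResetEventSlim(false);
    private T _result;
    private Exception _error;

    public Future(Func<T> f)
    {
        ThreadPool.QueueUserWorkItem(_ =>
        {
            try { _result = f(); }
            catch (Exception e) { _error = e; }
            finally { _done.Set(); }      // signal completion from the worker thread
        });
    }

    public T Result
    {
        get
        {
            _done.Wait();                 // blocks until the worker has called Set()
            if (_error != null) throw _error;
            return _result;
        }
    }
}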
I have a class that constantly refreshes devices physically connected to the PC via USB. The monitoring method runs on a thread checking a _monitoring flag, and the Start and Stop methods just set and clear that flag.
My current problem is: while the thread is running I get the expected "busy" and "not busy" console prints, but when I call the Stop method it spins in while(_busy) forever, because somehow _monitoringThread seems to stop running!
I suspect it stops running because the last print is always "busy", that is, ExecuteMonitoring runs midway and then nobody knows what happens (at least I don't).
Pausing the debugger and looking at the stack trace didn't help either, because it stays in the while(_busy) statement inside Stop() forever.
public class DeviceMonitor
{
bool _running;
bool _monitoring;
bool _busy = false;
MonitoringMode _monitoringMode;
Thread _monitoringThread;
readonly object _lockObj = new object();
// CONSTRUCTOR
public DeviceMonitor()
{
_monitoringThread = new Thread(new ThreadStart(ExecuteMonitoring));
_monitoringThread.IsBackground = true;
_running = true;
_monitoringThread.Start();
}
public void Start()
{
_monitoring = true;
}
public void Stop()
{
_monitoring = false;
while (_busy)
{
Thread.Sleep(5);
}
}
void ExecuteMonitoring()
{
while (_running)
{
Console.WriteLine("ExecuteMonitoring()");
if (_monitoring)
{
lock (_lockObj)
{
_busy = true;
}
Console.WriteLine("busy");
if (_monitoringMode == MonitoringMode.SearchDevices)
{
SearchDevices();
}
else
if (_monitoringMode == MonitoringMode.MonitorDeviceConnection)
{
MonitorDeviceConnection();
}
lock (_lockObj)
{
_busy = false;
}
Console.WriteLine("not busy");
}
Thread.Sleep(1000);
_busy = false;
}
}
private void SearchDevices()
{
var connected = ListDevices();
if (connected.Count > 0)
{
Device = connected.First();
ToggleMonitoringMode();
}
else
Device = null;
}
void MonitorDeviceConnection()
{
if (Device == null)
{
ToggleMonitoringMode();
}
else
{
bool responding = Device.isConnected;
Console.WriteLine("responding " + responding);
if (!responding)
{
Device = null;
ToggleMonitoringMode();
}
}
}
void ToggleMonitoringMode()
{
if (_monitoringMode == MonitoringMode.SearchDevices)
_monitoringMode = MonitoringMode.MonitorDeviceConnection;
else
if (_monitoringMode == MonitoringMode.MonitorDeviceConnection)
_monitoringMode = MonitoringMode.SearchDevices;
}
enum MonitoringMode
{
SearchDevices,
MonitorDeviceConnection
}
}
The most likely explanation is optimization: the compiler sees that _busy is never changed inside the Stop method and is therefore allowed to turn this into an endless loop by replacing _busy with true. This is valid because the _busy field is not marked volatile, so the optimizer doesn't have to assume changes happening on another thread.
So, try marking _busy as volatile. Or, even better - actually A LOT BETTER - use a ManualResetEvent:
ManualResetEvent _stopMonitoring = new ManualResetEvent(false);
ManualResetEvent _monitoringStopped = new ManualResetEvent(false);
ManualResetEvent _stopRunning = new ManualResetEvent(false);
public void Stop()
{
_stopMonitoring.Set();
_monitoringStopped.WaitOne();
}
void ExecuteMonitoring()
{
while (!_stopRunning.WaitOne(0))
{
Console.WriteLine("ExecuteMonitoring()");
if (!_stopMonitoring.WaitOne(0))
{
_monitoringStopped.Reset();
// ...
}
_monitoringStopped.Set();
Thread.Sleep(1000);
}
}
Code is from memory, might contain some typos.
I need a synchronizing class that behaves exactly like the AutoResetEvent class, but with one minor exception:
A call to the Set() method must release all waiting threads, and not just one.
How can I construct such a class? I am simply out of ideas.
Martin.
So you have multiple threads doing a .WaitOne() and you want to release them?
Use the ManualResetEvent class and all the waiting threads should release...
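For illustration, a minimal sketch (the thread count and delays are arbitrary):
var gate = new ManualResetEvent(false);

for (int i = 0; i < 3; i++)
{
    int id = i;
    new Thread(() =>
    {
        gate.WaitOne();                            // all three threads block here
        Console.WriteLine("Thread {0} released", id);
    }).Start();
}

Thread.Sleep(500);   // give the threads time to reach WaitOne()
gate.Set();          // a single Set() releases every waiting thread
// gate.Reset();     // unlike AutoResetEvent, it stays signaled until you Reset() it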
Thank you very much for all your thoughts and inputs, which I have read with great interest. I did some more searching here on Stack Overflow, and suddenly I found this, which turned out to be just what I was looking for. By cutting it down to just the two methods I need, I ended up with this small class:
public sealed class Signaller
{
public void PulseAll()
{
lock (_lock)
{
Monitor.PulseAll(_lock);
}
}
public bool Wait(TimeSpan maxWaitTime)
{
lock (_lock)
{
return Monitor.Wait(_lock, maxWaitTime);
}
}
private readonly object _lock = new object();
}
and it does exactly what it should! I'm amazed that a solution could be that simple, and I love such simplicity. It's beautiful. Thank you, Matthew Watson!
Martin.
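For reference, a minimal usage sketch of the Signaller class above (the thread bodies and timings are just illustrative). As with Monitor.PulseAll in general, only threads that are already inside Wait() at the moment of the pulse are woken; the pulse is not remembered for threads that arrive later.
var signaller = new Signaller();

// Waiting side: each call blocks until PulseAll() fires or the timeout elapses.
for (int i = 0; i < 3; i++)
{
    int id = i;
    new Thread(() =>
    {
        bool signalled = signaller.Wait(TimeSpan.FromSeconds(10));
        Console.WriteLine("Thread {0}: {1}", id, signalled ? "pulsed" : "timed out");
    }).Start();
}

// Signalling side: wakes every thread currently waiting.
Thread.Sleep(100);
signaller.PulseAll();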
Two things you might try.
Using a Barrier object, conditionally adding threads to it and signaling them.
The other might be to use a publisher/subscriber setup like in Rx. Each thread waits on an object that it passes into a collection; when you want to call 'set', loop over a snapshot of that collection calling Set on each member (a rough sketch follows).
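A rough sketch of that per-waiter idea (all names here are made up for illustration): each waiting thread registers its own event in a collection, and 'set' walks a snapshot of the collection and releases them all.
public sealed class MultiReleaseSignal
{
    private readonly List<ManualResetEventSlim> _waiters = new List<ManualResetEventSlim>();
    private readonly object _sync = new object();

    public void Wait()
    {
        var handle = new ManualResetEventSlim(false);
        lock (_sync) { _waiters.Add(handle); }
        try
        {
            handle.Wait();                               // block until this waiter is released
        }
        finally
        {
            lock (_sync) { _waiters.Remove(handle); }
            handle.Dispose();
        }
    }

    public void Set()
    {
        ManualResetEventSlim[] snapshot;
        lock (_sync) { snapshot = _waiters.ToArray(); }  // snapshot, so later waiters are unaffected
        foreach (var handle in snapshot)
            handle.Set();
    }
}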
Or you could try bears.
If the event is being referenced by all threads in a common field or property, you could replace the common field or property with a new non-signaled event and then signal the old one. It has some cost to it since you'll be regularly creating new synchronization objects, but it would work. Here's an example of how I would do that:
public static class Example
{
private static volatile bool stopRunning;
private static ReleasingAutoResetEvent myEvent;
public static void RunExample()
{
using (Example.myEvent = new ReleasingAutoResetEvent())
{
WaitCallback work = new WaitCallback(WaitThread);
for (int i = 0; i < 5; ++i)
{
ThreadPool.QueueUserWorkItem(work, i.ToString());
}
Thread.Sleep(500);
for (int i = 0; i < 3; ++i)
{
Example.myEvent.Set();
Thread.Sleep(5000);
}
Example.stopRunning = true;
Example.myEvent.Set();
}
}
private static void WaitThread(object state)
{
while (!Example.stopRunning)
{
Example.myEvent.WaitOne();
Console.WriteLine("Thread {0} is released!", state);
}
}
}
public sealed class ReleasingAutoResetEvent : IDisposable
{
private volatile ManualResetEvent manualResetEvent = new ManualResetEvent(false);
public void Set()
{
ManualResetEvent eventToSet = this.manualResetEvent;
this.manualResetEvent = new ManualResetEvent(false);
eventToSet.Set();
eventToSet.Dispose();
}
public bool WaitOne()
{
return this.manualResetEvent.WaitOne();
}
public bool WaitOne(int millisecondsTimeout)
{
return this.manualResetEvent.WaitOne(millisecondsTimeout);
}
public bool WaitOne(TimeSpan timeout)
{
return this.manualResetEvent.WaitOne(timeout);
}
public void Dispose()
{
this.manualResetEvent.Dispose();
}
}
Another more lightweight solution you could try that uses the Monitor class to lock and unlock objects is below. However, I'm not as happy with the cleanup story for this version of ReleasingAutoResetEvent since Monitor may hold a reference to it and keep it alive indefinitely if it is not properly disposed.
There are a few limitations/gotchas with this implementation. First, the thread that creates this object will be the only one that will be able to signal it with a call to Set; other threads that attempt to do the same thing will receive a SynchronizationLockException. Second, the thread that created it will never be able to wait on it successfully since it already owns the lock. This will only be an effective solution if you have exactly one controlling thread and several other waiting threads.
public static class Example
{
private static volatile bool stopRunning;
private static ReleasingAutoResetEvent myEvent;
public static void RunExample()
{
using (Example.myEvent = new ReleasingAutoResetEvent())
{
WaitCallback work = new WaitCallback(WaitThread);
for (int i = 0; i < 5; ++i)
{
ThreadPool.QueueUserWorkItem(work, i.ToString());
}
Thread.Sleep(500);
for (int i = 0; i < 3; ++i)
{
Example.myEvent.Set();
Thread.Sleep(5000);
}
Example.stopRunning = true;
Example.myEvent.Set();
}
}
private static void WaitThread(object state)
{
while (!Example.stopRunning)
{
Example.myEvent.WaitOne();
Console.WriteLine("Thread {0} is released!", state);
}
}
}
public sealed class ReleasingAutoResetEvent : IDisposable
{
private volatile object lockObject = new object();
public ReleasingAutoResetEvent()
{
Monitor.Enter(this.lockObject);
}
public void Set()
{
object objectToSignal = this.lockObject;
object objectToLock = new object();
Monitor.Enter(objectToLock);
this.lockObject = objectToLock;
Monitor.Exit(objectToSignal);
}
public void WaitOne()
{
object objectToMonitor = this.lockObject;
Monitor.Enter(objectToMonitor);
Monitor.Exit(objectToMonitor);
}
public bool WaitOne(int millisecondsTimeout)
{
object objectToMonitor = this.lockObject;
bool succeeded = Monitor.TryEnter(objectToMonitor, millisecondsTimeout);
if (succeeded)
{
Monitor.Exit(objectToMonitor);
}
return succeeded;
}
public bool WaitOne(TimeSpan timeout)
{
object objectToMonitor = this.lockObject;
bool succeeded = Monitor.TryEnter(objectToMonitor, timeout);
if (succeeded)
{
Monitor.Exit(objectToMonitor);
}
return succeeded;
}
public void Dispose()
{
Monitor.Exit(this.lockObject);
}
}
I have a simple pattern to run code only once. It's mostly used to update something on the UI that may change very often in the background.
private bool _updating;
private void UpdateSomething()
{
if (!_updating)
{
_updating = true;
Application.Current.Dispatcher.BeginInvoke(new Action(() =>
{
_updating = false;
DoSomething();
}), DispatcherPriority.Background);
}
}
I would prefer to put the boilerplate code inside a simple method:
public static void RunOnce(Action action, ref bool guard)
{
if (!guard)
{
guard = true;
Application.Current.Dispatcher.BeginInvoke(new Action(() =>
{
guard = false;
action();
}), DispatcherPriority.Background);
}
}
and call it like this:
void UpdateSomething()
{
RunOnce(DoSomething, ref _updating);
}
However, this does not work because you cannot use ref parameters inside anonymous methods.
Is there any workaround, e.g. to pin the ref parameter and free it when the method was executed?
You could do something like this:
public static void RunOnce(Action action, ref RunOnceToken token)
{
if (token == null || token.IsCompleted)
{
token = new RunOnceToken(
Application.Current.Dispatcher.BeginInvoke(
action,
DispatcherPriority.Background));
}
}
public sealed class RunOnceToken : IDisposable
{
private DispatcherOperation _operation;
public RunOnceToken(DispatcherOperation operation)
{
if (operation != null &&
operation.Status != DispatcherOperationStatus.Completed &&
operation.Status != DispatcherOperationStatus.Aborted)
{
_operation = operation;
_operation.Completed += OnCompletedOrAborted;
_operation.Aborted += OnCompletedOrAborted;
}
}
private void OnCompletedOrAborted(object sender, EventArgs e)
{
this.Dispose();
}
public bool IsCompleted
{
get { return _operation == null; }
}
public void Dispose()
{
var operation = _operation;
if (operation == null)
return;
_operation = null;
operation.Completed -= OnCompletedOrAborted;
operation.Aborted -= OnCompletedOrAborted;
}
}
Your example usage would change to:
private RunOnceToken _updateToken;
private void UpdateSomething()
{
RunOnce(DoSomething, ref _updateToken);
}
It doesn't really matter if you never clear your copy of the token, because the wrapped DispatcherOperation gets cleared out upon completion to avoid leaking action or any values it captures.
In case it wasn't obvious, none of this is concurrency-safe; I assume everything above is only accessed from the UI thread.
One useful enhancement might be to add an optional DispatcherPriority argument to RunOnce such that you can control the priority level used to schedule action (perhaps canceling an already-scheduled operation if it was scheduled at a lower priority).
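A minimal sketch of that enhancement, just threading the priority through (the cancel-and-reschedule variant would additionally need RunOnceToken to expose the wrapped operation, so it is left out here):
public static void RunOnce(
    Action action,
    ref RunOnceToken token,
    DispatcherPriority priority = DispatcherPriority.Background)
{
    if (token == null || token.IsCompleted)
    {
        token = new RunOnceToken(
            Application.Current.Dispatcher.BeginInvoke(action, priority));
    }
}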
I was not aware of DispatcherOperation's existence; however, having seen Mike Strobel's answer, I wrote the following code. I'm not 100% sure about it, but it seems to work without too much boilerplate.
public static class DispatcherExtensions {
public static int clearInterval = 10_000;
private static long time => DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
private static long lastClearTime = time;
private static Dictionary<int, DispatcherOperation> currOperations = new Dictionary<int, DispatcherOperation>();
private static object sync = new object();
public static void invokeLastAsync(this Dispatcher d, Action a, DispatcherPriority p = DispatcherPriority.Background, [CallerFilePath]object key1 = null, [CallerLineNumber]object key2 = null) {
lock (sync) {
DispatcherOperation dop;
var k = key1.GetHashCode() ^ key2.GetHashCode();
if (currOperations.ContainsKey(k)) {
dop = currOperations[k];
currOperations.Remove(k);
dop.Abort();
}
dop = d.BeginInvoke(a, p);
clearOperations(false);
currOperations.Add(k, dop);
}
}
public static void clearOperations(bool force = true) {
var ct = time;
if (!force && ct - lastClearTime < clearInterval) return;
var nd = new Dictionary<int, DispatcherOperation>();
foreach (var ao in currOperations) {
var s = ao.Value.Status;
// keep only operations that are still pending; drop completed/aborted ones
if (s != DispatcherOperationStatus.Completed
&& s != DispatcherOperationStatus.Aborted)
nd.Add(ao.Key, ao.Value);
}
currOperations = nd;
lastClearTime = ct;
}
}
Basically, the extension method takes the caller's file path and line number as a key to store the DispatcherOperation instance in a dictionary, and if the key already has an operation, it is aborted and replaced with the new operation. Periodically, the dictionary is purged of completed/aborted operations that are no longer needed.
The usage is very simple:
private int initCount = 0;
private int invokeCount = 0;
private void updateSomething() {
initCount++;
view.Dispatcher.invokeLastAsync(() => {
Console.WriteLine($#"invoked {++invokeCount}/{initCount}");
});
}
I haven't run into any issues with this so far. Maybe someone else can see a weak spot.
All the examples I have seen using SynchronizationContext.Post have been used in the same class. What I have is the UI thread passing some by-ref arguments to a thread wrapper class, which updates the arguments, and then I want it to update some labels etc. on the UI thread.
internal class ConnThreadWrapper
{
....
public event EventHandler<MyEventArgs<String, Boolean>> updateConnStatus =
delegate { };
public void updateUIThread(string conn, bool connected)
{
uiContext.Post(new SendOrPostCallback((o) =>
{
updateConnStatus(this,
new MyEventArgs<String, Boolean>(conn,
connected));
}),
null);
}
}
//on ui thread
public void updateConnStatus(object sender, MyEventArgs<String, Boolean> e)
{
switch (e.val1)
{
case "CADS" :
if (e.val2 == true)
{
}
The event seems to fire without any errors, but nothing is ever received on the UI thread - I'm not sure if my signature for the updateConnStatus handler is correct or whether it works like this. I obviously want the event to be handled on the UI thread and to update the labels from that handler.
In a previous VB.NET project I used to reference the form directly from the thread and used a delegate to invoke a callback, but apparently this was bad design as I was mixing application layers. I wanted to use the sync context as it is meant to be thread-safe, but most of the examples I've seen have used Invoke.
Any ideas what I'm missing? Thanks
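(For context, the uiContext field used in Post above is presumably captured on the UI thread and the handler subscribed to the event, along these lines; the constructor parameter and form names are assumptions, not from the question.)
public partial class MainForm : Form
{
    private ConnThreadWrapper _wrapper;

    private void MainForm_Load(object sender, EventArgs e)
    {
        // SynchronizationContext.Current is only the UI context when read on the UI thread
        var uiContext = SynchronizationContext.Current;
        _wrapper = new ConnThreadWrapper(uiContext);      // hypothetical constructor
        _wrapper.updateConnStatus += updateConnStatus;    // the handler shown above
    }
}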
I wrote this helper class, which works for me. Prior to using this class, call InitializeUiContext() on the UI thread somewhere at application start.
public static class UiScheduler
{
private static TaskScheduler _scheduler;
private static readonly ConcurrentQueue<Action> OldActions =
new ConcurrentQueue<Action>();
public static void InitializeUiContext()
{
_scheduler = TaskScheduler.FromCurrentSynchronizationContext();
}
private static void ExecuteOld()
{
if(_scheduler != null)
{
while(OldActions.Count > 0)
{
Action a;
if(OldActions.TryDequeue(out a))
{
UiExecute(_scheduler, a);
}
}
}
}
private static void UiExecute(TaskScheduler scheduler,
Action a,
bool wait = false)
{
//1 is usually UI thread, dunno how to check this better:
if (Thread.CurrentThread.ManagedThreadId == 1)
{
a();
}
else
{
Task t = Task.Factory.StartNew(a,
CancellationToken.None,
TaskCreationOptions.LongRunning,
scheduler);
if (wait) t.Wait();
}
}
public static void UiExecute(Action a, bool wait = false)
{
if (a != null)
{
if (_scheduler != null)
{
ExecuteOld();
UiExecute(_scheduler, a, wait);
}
else
{
OldActions.Enqueue(a);
}
}
}
}
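A minimal usage sketch under the assumptions above (statusLabel is a hypothetical UI control):
// Somewhere during application start-up, on the UI thread:
UiScheduler.InitializeUiContext();

// Later, from any worker thread:
UiScheduler.UiExecute(() =>
{
    statusLabel.Text = "Connected";   // runs on the UI thread via the captured scheduler
});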
In the end I ditched the ThreadWrapper and the attempt to marshal the event to the UI thread, and used a Task instead; in fact, I think I can use Tasks for most of the stuff in this project, so happy days.
Task<bool> t1 = new Task<bool>(() => testBB(ref _bbws_wrapper));
t1.Start();
Task cwt1 = t1.ContinueWith(task =>
{
    if (t1.Result == true)
        this.ssi_bb_conn.BackColor = Color.Green;
    else
        this.ssi_bb_conn.BackColor = Color.Red;
}, TaskScheduler.FromCurrentSynchronizationContext());
.....
private static bool testBB(ref BBWebserviceWrapper _bbwsw)
{
try
{
//test the connections
if (_bbwsw.initialize_v1() == true)
{
if (_bbwsw.loginUser("XXXXXXXX", "XXXXXXXXX") == true)
{
return true;
}
else
{
return false;
}
}
else
{
return false;
}
}
catch
{
return false;
}
}
I have two threads, one thread processes a queue and the other thread adds stuff into the queue.
I want to put the queue-processing thread to sleep when it's finished processing the queue
I want to have the 2nd thread tell it to wake up when it has added an item to the queue
However, these functions throw System.Threading.SynchronizationLockException ("Object synchronization method was called from an unsynchronized block of code") on the Monitor.PulseAll(waiting) call, because I haven't synchronized the function with the waiting object [which I don't want to do; I want to be able to process while adding items to the queue]. How can I achieve this?
Queue<object> items = new Queue<object>();
object waiting = new object();
1st Thread
public void ProcessQueue()
{
while (true)
{
if (items.Count == 0)
Monitor.Wait(waiting);
object real = null;
lock(items) {
object item = items.Dequeue();
real = item;
}
if(real == null)
continue;
.. bla bla bla
}
}
2nd Thread involves
public void AddItem(object o)
{
... bla bla bla
lock(items)
{
items.Enqueue(o);
}
Monitor.PulseAll(waiting);
}
The answer is in the error message you posted:
"Object synchronization method was called from an unsynchronized block of code on the Monitor.PulseAll(waiting);"
You have to call Monitor.PulseAll(waiting) from inside the lock(waiting) block.
Also... you have to call Monitor.Wait from within a lock block as well.
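For illustration, a sketch of the two methods reworked so that both the wait and the pulse happen while holding a lock; here a single lock object (the queue itself) guards both, which also makes the separate waiting object unnecessary:
public void ProcessQueue()
{
    while (true)
    {
        object item;
        lock (items)
        {
            while (items.Count == 0)
                Monitor.Wait(items);    // releases the lock while blocked, reacquires on pulse
            item = items.Dequeue();
        }
        // ... process item outside the lock ...
    }
}

public void AddItem(object o)
{
    lock (items)
    {
        items.Enqueue(o);
        Monitor.PulseAll(items);        // legal here: we own the lock on items
    }
}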
If you have access to .NET 4.0, what you want to do can be achieved by BlockingCollection<T>.
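For example, a minimal sketch of the same producer/consumer shape with BlockingCollection<T> (class and member names are just illustrative):
class QueueProcessor
{
    private readonly BlockingCollection<object> items = new BlockingCollection<object>();

    // Consumer thread: GetConsumingEnumerable() blocks while the collection is empty
    // and ends once CompleteAdding() has been called and the remaining items are drained.
    public void ProcessQueue()
    {
        foreach (object item in items.GetConsumingEnumerable())
        {
            // ... process item ...
        }
    }

    // Producer thread: Add() wakes a consumer that is blocked waiting for an item.
    public void AddItem(object o)
    {
        items.Add(o);
    }

    // Call this when no more items will ever be added, so ProcessQueue can exit.
    public void Shutdown()
    {
        items.CompleteAdding();
    }
}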
If you want to do it yourself by means of the Monitor class and signaling with Pulse(), you are actually on the right track.
You get the exception because to call Wait(), Pulse() and PulseAll(), you have to own the lock on the specified object. You happen to be missing this for waiting.
A sample basic thread-safe queue that can be used with foreach on the consumer side and with while (or your favorite conditional construct) on the producer side; it handles multiple producers/consumers and uses lock(), Monitor.Pulse(), Monitor.PulseAll() and Monitor.Wait():
public class SignaledQueue<T>
{
Queue<T> queue = new Queue<T>();
volatile bool shutDown = false;
public bool Enqueue(T item)
{
if (!shutDown)
{
lock (queue)
{
queue.Enqueue(item);
//Pulse only if there can be waiters.
if (queue.Count == 1)
{
Monitor.PulseAll(queue);
}
}
return true;
}
//Indicate that processing should stop.
return false;
}
public IEnumerable<T> DequeueAll()
{
while (!shutDown)
{
do
{
T item;
lock (queue)
{
//If the queue is empty, wait.
if (queue.Count == 0)
{
if (shutDown) break;
Monitor.Wait(queue);
if (queue.Count == 0) break;
}
item = queue.Dequeue();
}
yield return item;
} while (!shutDown);
}
}
public void SignalShutDown()
{
shutDown = true;
lock (queue)
{
//Signal all waiting consumers with PulseAll().
Monitor.PulseAll(queue);
}
}
}
Sample usage:
class Program
{
static void Main(string[] args)
{
int numProducers = 4, numConsumers = 2;
SignaledQueue<int> queue = new SignaledQueue<int>();
ParameterizedThreadStart produce = delegate(object obj)
{
Random rng = new Random((int)obj);
int num = 0;
while (queue.Enqueue(++num))
{
Thread.Sleep(rng.Next(100));
}
};
ThreadStart consume = delegate
{
foreach (int num in queue.DequeueAll())
{
Console.Write(" {0}", num);
}
};
Random seedRng = new Random();
for (int i = 0; i < numProducers; i++)
{
new Thread(produce).Start(seedRng.Next());
}
for (int i = 0; i < numConsumers; i++)
{
new Thread(consume).Start();
}
Console.ReadKey(true);
queue.SignalShutDown();
}
}
Use a Semaphore (http://msdn.microsoft.com/library/system.threading.semaphore.aspx); it was designed exactly for this.
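A rough sketch of that approach (names are illustrative): the semaphore's count mirrors the number of queued items, so the consumer blocks in WaitOne() whenever the queue is empty.
class SemaphoreQueue
{
    private readonly Queue<object> items = new Queue<object>();
    private readonly Semaphore available = new Semaphore(0, int.MaxValue);

    public void AddItem(object o)
    {
        lock (items)
        {
            items.Enqueue(o);
        }
        available.Release();            // count goes up by one: wakes one waiting consumer
    }

    public object TakeItem()
    {
        available.WaitOne();            // blocks until at least one Release() has happened
        lock (items)
        {
            return items.Dequeue();
        }
    }
}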
I prefer to use a callback that launches a processing thread that continues until it's caught up, with locks causing simultaneous readers and writers to wait in line:
public delegate void CallbackDelegate();
class Program
{
static void Main(string[] args)
{
Queue<object> items = new Queue<object>();
Processor processor = new Processor(items);
Adder adder = new Adder(items, new CallbackDelegate(processor.CallBack));
Thread addThread = new Thread(new ParameterizedThreadStart(adder.AddItem));
object objectToAdd = new object();
addThread.Start(objectToAdd);
}
}
class Processor
{
Queue<object> items;
public Processor(Queue<object> itemsArg)
{
items = itemsArg;
}
public void ProcessQueue()
{
lock (items)
{
while (items.Count > 0)
{
object real = items.Dequeue();
// process real
}
}
}
public void CallBack()
{
Thread processThread = new Thread(ProcessQueue);
processThread.IsBackground = true;
processThread.Start();
}
}
class Adder
{
Queue<object> items;
CallbackDelegate callback;
public Adder(Queue<object> itemsArg, CallbackDelegate callbackArg)
{
items = itemsArg;
callback = callbackArg;
}
public void AddItem(object o)
{
lock (items) { items.Enqueue(o); }
callback();
}
}