Multithreaded reading of lines from a text file, then sending a POST/GET request with that data and, after receiving a positive response, writing it to another text file.
public int index = -1;
public int count = 1000;
private static readonly object SyncIndex = new object();
private static readonly object SyncFiles = new object();
public void CreateThreads(int threads)
{
Thread[] threadArray;
for (int i = 0; i < (threadArray = new Thread[threads]).Length; i++)
{
threadArray[i] = new Thread(this.Run) { IsBackground = true };
threadArray[i].Start();
}
}
public void Run()
{
while (true)
{
lock(SyncIndex) { index++;}
//if (index > count) { break; }
string resp = Check(index.ToString());
lock (SyncFiles)
{
if (resp == "true")
{
SaveText("good.txt", index.ToString());
}
else
{
SaveText("bad.txt", index.ToString());
}
}
}
}
public string Check(string login)
{
try
{
System.Net.WebRequest reqGET = System.Net.WebRequest.Create(@"http://www.minecraft.net/haspaid.jsp?user=" + login);
System.Net.WebResponse resp = reqGET.GetResponse();
System.IO.Stream stream = resp.GetResponseStream();
System.IO.StreamReader sr = new System.IO.StreamReader(stream);
string s = sr.ReadToEnd();
if (s.Contains("true"))
{
return "true";
}
else
{
return "false";
}
}
catch
{
Check(login);
}
return "";
}
//static ReaderWriterLockSlim cacheLock = new ReaderWriterLockSlim();
private static void SaveText(string file, string text)
{
//cacheLock.EnterWriteLock();
try
{
var write = new StreamWriter(file, true);
write.WriteLine(text);
write.Close();
}
catch (Exception ex)
{
MessageBox.Show(ex.Message);
}
//finally
//{
// cacheLock.ExitWriteLock();
//}
}
private void btStart_Click(object sender, EventArgs e)
{
CreateThreads(300);
}
What is the problem?
count = 1000
threads = 300
bad.txt
299
300
301
302
303
304
305
306
307
308
310
311
312
313
314
315
316
good.txt
309
Why does the output only start at 300?
Where did I go wrong?
First off, I'd recommend not launching one thread per work item. Use Parallel.For instead of making the threads yourself.
Not only will this prevent the bug (you can use the index directly), but it will also load balance more effectively.
That being said, your problem is that you're using the same variable in every thread. You need to make a temporary:
int localIndex;
lock(SyncIndex)
{
index++;
localIndex = index; // Copy this here, before another thread can change it
}
// Use localIndex, not index, from here on...
In your current code, you synchronize the increment of index, but other threads will still "change" its value before you can use it.
I'm working on a JSON text editor with syntax highlighting support.
I'm using the DevExpress RichEditControl and overriding ISyntaxHighlightService (via a class that implements it).
After the pretty-printed JSON text appears in the view, the highlighter runs constantly, searching for the different tokens and classifying them so each can be highlighted with a specific color. This search takes too long once there are more than 300 lines of text, and it freezes the UI while typing.
I think I should run this work on another thread, but then I get errors like "control is in another thread"...
The last thing I tried was invoking that method once or twice per second using a DispatcherTimer, to make the problem less annoying, but because of the delay it still doesn't feel as smooth as popular text editors.
So, what would you suggest? Maybe there are alternative approaches to syntax coloring? Or could I use a third-party text editor control with these features instead of RichEditControl? Maybe this could be achieved with converters and XAML?
ISyntaxHighlightService implementation:
public class CustomSyntaxHighlightService : ISyntaxHighlightService
{
Document document;
Regex _numberString = new Regex(@"(\d+\W\d+)|[\d]+| (.*?)");
Regex _quotedString = new Regex("\"([^\"]*)\"[:]");
public CustomSyntaxHighlightService(Document document)
{
this.document = document;
DispatcherTimer dispatcherTimer = new DispatcherTimer();
dispatcherTimer.Tick += new EventHandler(dispatcherExecute);
dispatcherTimer.Interval = new TimeSpan(0, 0, 1);
dispatcherTimer.Start();
}
private void dispatcherExecute(object sender, EventArgs e)
{
List<SyntaxHighlightToken> JSONTokens = ParseTokens();
document.ApplySyntaxHighlight(JSONTokens);
}
public void ForceExecute()
{
}
public void Execute()
{
}
private List<SyntaxHighlightToken> ParseTokens()
{
List<SyntaxHighlightToken> tokens = new List<SyntaxHighlightToken>();
DocumentRange[] ranges = null;
ranges = document.FindAll(_quotedString).GetAsFrozen() as DocumentRange[];
for (int i = 0; i < ranges.Length; i++)
{
tokens.Add(CreateToken(ranges[i].Start.ToInt(), ranges[i].End.ToInt(), Color.FromArgb(207, 101, 239)));
}
// When I try to add such code block with another token string I get an error...
//ranges = document.FindAll(_anotherString).GetAsFrozen() as DocumentRange[];
//for (int i = 0; i < ranges.Length; i++)
//{
// tokens.Add(CreateToken(ranges[i].Start.ToInt(), ranges[i].End.ToInt(), Color.FromArgb(207, 101, 239)));
//}
ranges = document.FindAll(_numberString).GetAsFrozen() as DocumentRange[];
for (int j = 0; j < ranges.Length; j++)
{
if (!IsRangeInTokens(ranges[j], tokens))
{
tokens.Add(CreateToken(ranges[j].Start.ToInt(), ranges[j].End.ToInt(), Color.DeepSkyBlue));
}
}
tokens.Sort(new SyntaxHighlightTokenComparer());
tokens = CombineWithPlainTextTokens(tokens);
return tokens;
}
List<SyntaxHighlightToken> CombineWithPlainTextTokens(List<SyntaxHighlightToken> tokens)
{
List<SyntaxHighlightToken> result = new List<SyntaxHighlightToken>(tokens.Count * 2 + 1);
int documentStart = document.Range.Start.ToInt();
int documentEnd = document.Range.End.ToInt();
if (tokens.Count == 0)
{
result.Add(CreateToken(documentStart, documentEnd, Color.Black));
}
else
{
SyntaxHighlightToken firstToken = tokens[0];
if (documentStart < firstToken.Start)
{
result.Add(CreateToken(documentStart, firstToken.Start, Color.Black));
}
result.Add(firstToken);
for (int i = 1; i < tokens.Count; i++)
{
SyntaxHighlightToken token = tokens[i];
SyntaxHighlightToken prevToken = tokens[i - 1];
if (prevToken.End != token.Start)
{
result.Add(CreateToken(prevToken.End, token.Start, Color.Black));
}
result.Add(token);
}
SyntaxHighlightToken lastToken = tokens[tokens.Count - 1];
if (documentEnd > lastToken.End)
{
result.Add(CreateToken(lastToken.End, documentEnd, Color.Black));
}
}
return result;
}
private bool IsRangeInTokens(DocumentRange range, List<SyntaxHighlightToken> tokens)
{
return tokens.Any(t => IsIntersect(range, t));
}
bool IsIntersect(DocumentRange range, SyntaxHighlightToken token)
{
int start = range.Start.ToInt();
if (start >= token.Start && start < token.End)
{
return true;
}
int end = range.End.ToInt() - 1;
if (end >= token.Start && end < token.End)
{
return true;
}
if (start < token.Start && end >= token.End)
{
return true;
}
return false;
}
SyntaxHighlightToken CreateToken(int start, int end, Color foreColor)
{
SyntaxHighlightProperties properties = new SyntaxHighlightProperties();
properties.ForeColor = foreColor;
return new SyntaxHighlightToken(start, end - start, properties);
}
public class SyntaxHighlightTokenComparer : IComparer<SyntaxHighlightToken>
{
public int Compare(SyntaxHighlightToken x, SyntaxHighlightToken y)
{
return x.Start - y.Start;
}
}
EDIT:
I've understood the idea of the cancellation token, but I don't know how to implement it correctly. For now I have a static class for the token:
public static class CTTest
{
public static CancellationTokenSource CancellationTokenSource = new CancellationTokenSource();
public static CancellationToken Token = CancellationTokenSource.Token;
public static void SetNewTokenValues()
{
CancellationTokenSource = new CancellationTokenSource();
Token = CancellationTokenSource.Token;
}
}
And I use it here:
void ParseAndHighLight(CancellationToken token)
{
if (!token.IsCancellationRequested)
{
List<SyntaxHighlightToken> JSONTokens = ParseTokens();
Dispatcher.CurrentDispatcher.BeginInvoke(new ThreadStart(delegate { document.ApplySyntaxHighlight(JSONTokens); }));
}
else
{
CTTest.SetNewTokenValues();
ForceExecute();
}
}
public void ForceExecute()
{
Execute();
}
public void Execute()
{
t = new Task(() => ParseAndHighLight(CTTest.Token));
t.Start();
}
Cancelling when text changes:
private void xParsedRichEditControl_TextChanged(object sender, EventArgs e)
{
CTTest.CancellationTokenSource.Cancel();
}
I have written a simple "latency simulator" which works, but at times, messages are delayed for longer than the time specified. I need help to ensure that messages are delayed for the correct amount of time.
The main problem, I believe, is that I am using Thread.Sleep(x), which depends on various factors but mainly on the clock interrupt rate, giving Thread.Sleep() a resolution of roughly 15 ms. Further, intensive tasks will demand more CPU time and will occasionally result in a delay greater than the one requested. If you are not familiar with the resolution issues of Thread.Sleep, you can read these SO posts: here, here and here.
This is my LatencySimulator:
public class LatencySimulatorResult: EventArgs
{
public int messageNumber { get; set; }
public byte[] message { get; set; }
}
public class LatencySimulator
{
private int messageNumber;
private int latency = 0;
private int processedMessageCount = 0;
public event EventHandler messageReady;
public void Delay(byte[] message, int delay)
{
latency = delay;
var result = new LatencySimulatorResult();
result.message = message;
result.messageNumber = messageNumber;
if (latency == 0)
{
if (messageReady != null)
messageReady(this, result);
}
else
{
ThreadPool.QueueUserWorkItem(ThreadPoolCallback, result);
}
Interlocked.Increment(ref messageNumber);
}
private void ThreadPoolCallback(object threadContext)
{
Thread.Sleep(latency);
var next = (LatencySimulatorResult)threadContext;
var ready = next.messageNumber == processedMessageCount + 1;
while (ready == false)
{
ready = next.messageNumber == processedMessageCount + 1;
}
if (messageReady != null)
messageReady(this, next);
Interlocked.Increment(ref processedMessageCount);
}
}
To use it, you create a new instance and bind to the event handler:
var latencySimulator = new LatencySimulator();
latencySimulator.messageReady += MessageReady;
You then call latencySimulator.Delay(someBytes, someDelay);
When a message has finished being delayed, the event is fired and you can then process the delayed message.
It is important that the order in which messages are added is maintained. I cannot have them coming out the other end of the latency simulator in some random order.
Here is a test program to use the latency simulator and to see how long messages have been delayed for:
private static LatencySimulator latencySimulator;
private static ConcurrentDictionary<int, PendingMessage> pendingMessages;
private static List<long> measurements;
static void Main(string[] args)
{
var results = TestLatencySimulator();
var anomalies = results.Result.Where(x=>x > 32).ToList();
foreach (var result in anomalies)
{
Console.WriteLine(result);
}
Console.ReadLine();
}
static async Task<List<long>> TestLatencySimulator()
{
latencySimulator = new LatencySimulator();
latencySimulator.messageReady += MessageReady;
var numberOfMeasurementsMax = 1000;
pendingMessages = new ConcurrentDictionary<int, PendingMessage>();
measurements = new List<long>();
var sendTask = Task.Factory.StartNew(() =>
{
for (var i = 0; i < numberOfMeasurementsMax; i++)
{
var message = new Message { Id = i };
pendingMessages.TryAdd(i, new PendingMessage() { Id = i });
latencySimulator.Delay(Serialize(message), 30);
Thread.Sleep(50);
}
});
//Spin some tasks up to simulate high CPU usage
Task.Factory.StartNew(() => { FindPrimeNumber(100000); });
Task.Factory.StartNew(() => { FindPrimeNumber(100000); });
Task.Factory.StartNew(() => { FindPrimeNumber(100000); });
sendTask.Wait();
return measurements;
}
static long FindPrimeNumber(int n)
{
int count = 0;
long a = 2;
while (count < n)
{
long b = 2;
int prime = 1;// to check if found a prime
while (b * b <= a)
{
if (a % b == 0)
{
prime = 0;
break;
}
b++;
}
if (prime > 0)
{
count++;
}
a++;
}
return (--a);
}
private static void MessageReady(object sender, EventArgs e)
{
LatencySimulatorResult result = (LatencySimulatorResult)e;
var message = (Message)Deserialize(result.message);
if (pendingMessages.ContainsKey(message.Id) != true) return;
pendingMessages[message.Id].stopwatch.Stop();
measurements.Add(pendingMessages[message.Id].stopwatch.ElapsedMilliseconds);
}
static object Deserialize(byte[] arrBytes)
{
using (var memStream = new MemoryStream())
{
var binForm = new BinaryFormatter();
memStream.Write(arrBytes, 0, arrBytes.Length);
memStream.Seek(0, SeekOrigin.Begin);
var obj = binForm.Deserialize(memStream);
return obj;
}
}
static byte[] Serialize<T>(T obj)
{
BinaryFormatter bf = new BinaryFormatter();
using (var ms = new MemoryStream())
{
bf.Serialize(ms, obj);
return ms.ToArray();
}
}
If you run this code, you will see that about 5% of the messages are delayed for more than the expected 30ms. In fact, some are as high as 60ms. Without any background tasks or high CPU usage, the simulator behaves as expected.
I need them all to be 30ms (or as close to as possible) - I do not want some arbitrary 50-60ms delays.
Can anyone suggest how I can refactor this code so that I can achieve the desired result, but without the use of Thread.Sleep() and with as little CPU overhead as possible?
I'm having a problem with a C# Windows Forms application.
My goal is to slice a big file (maybe >5 GB) into smaller files, each containing a million lines.
With the code below, I have no idea why it runs out of memory.
Thanks.
StreamReader readfile = new StreamReader(...);
StreamWriter writefile = new StreamWriter(...);
string content;
while ((content = readfile.ReadLine()) != null)
{
writefile.Write(content + "\r\n");
i++;
if (i % 1000000 == 0)
{
index++;
writefile.Close();
writefile.Dispose();
writefile = new StreamWriter(...);
}
label5.Text = i.ToString();
label5.Update();
}
The error is probably in the
label5.Text = i.ToString();
label5.Update();
Just to make a test, I've written something like:
for (int i = 0; i < int.MaxValue; i++)
{
label1.Text = i.ToString();
label1.Update();
}
The app freezes around 16000-18000 (Windows 7 Pro SP1 x64, the app running both x86 and x64).
What probably happens is that by running your long operation in the main thread of the app, you stall the message queue of the window, and at a certain point it freezes. You can see that this is the problem by adding a
Application.DoEvents();
instead of the
label5.Update();
But even this is a false solution. The correct solution is to move the copying to another thread and update the control every x milliseconds, using the Invoke method (because you are on a secondary thread).
For example:
public void Copy(string source, string dest)
{
const int updateMilliseconds = 100;
int index = 0;
int i = 0;
StreamWriter writefile = null;
try
{
using (StreamReader readfile = new StreamReader(source))
{
writefile = new StreamWriter(dest + index);
// Initial value "back in time". Forces initial update
int milliseconds = unchecked(Environment.TickCount - updateMilliseconds);
string content;
while ((content = readfile.ReadLine()) != null)
{
writefile.Write(content);
writefile.Write("\r\n"); // Splitted to remove a string concatenation
i++;
if (i % 1000000 == 0)
{
index++;
writefile.Dispose();
writefile = new StreamWriter(dest + index);
// Force update
milliseconds = unchecked(milliseconds - updateMilliseconds);
}
int milliseconds2 = Environment.TickCount;
int diff = unchecked(milliseconds2 - milliseconds);
if (diff >= updateMilliseconds)
{
milliseconds = milliseconds2;
Invoke((Action)(() => label5.Text = string.Format("File {0}, line {1}", index, i)));
}
}
}
}
finally
{
if (writefile != null)
{
writefile.Dispose();
}
}
// Last update
Invoke((Action)(() => label5.Text = string.Format("File {0}, line {1} Finished", index, i)));
}
and call it with:
var thread = new Thread(() => Copy(@"C:\Temp\lst.txt", @"C:\Temp\output"));
thread.Start();
Note how it updates label5 every 100 milliseconds, plus once at the beginning (by setting the initial value of milliseconds "back in time"), each time the output file changes (again by moving milliseconds "back in time"), and once after everything has been disposed.
An even better example can be written using the BackgroundWorker class, which exists exactly for this scenario. It has a ProgressChanged event that can be subscribed to in order to update the window.
Something like this:
private void button1_Click(object sender, EventArgs e)
{
BackgroundWorker backgroundWorker = new BackgroundWorker();
backgroundWorker.WorkerReportsProgress = true;
backgroundWorker.ProgressChanged += backgroundWorker_ProgressChanged;
backgroundWorker.RunWorkerCompleted += backgroundWorker_RunWorkerCompleted;
backgroundWorker.DoWork += backgroundWorker_DoWork;
backgroundWorker.RunWorkerAsync(new string[] { @"C:\Temp\lst.txt", @"C:\Temp\output" });
}
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
BackgroundWorker worker = sender as BackgroundWorker;
string[] arguments = (string[])e.Argument;
string source = arguments[0];
string dest = arguments[1];
const int updateMilliseconds = 100;
int index = 0;
int i = 0;
StreamWriter writefile = null;
try
{
using (StreamReader readfile = new StreamReader(source))
{
writefile = new StreamWriter(dest + index);
// Initial value "back in time". Forces initial update
int milliseconds = unchecked(Environment.TickCount - updateMilliseconds);
string content;
while ((content = readfile.ReadLine()) != null)
{
writefile.Write(content);
writefile.Write("\r\n"); // Splitted to remove a string concatenation
i++;
if (i % 1000000 == 0)
{
index++;
writefile.Dispose();
writefile = new StreamWriter(dest + index);
// Force update
milliseconds = unchecked(milliseconds - updateMilliseconds);
}
int milliseconds2 = Environment.TickCount;
int diff = unchecked(milliseconds2 - milliseconds);
if (diff >= updateMilliseconds)
{
milliseconds = milliseconds2;
worker.ReportProgress(0, new int[] { index, i });
}
}
}
}
finally
{
if (writefile != null)
{
writefile.Dispose();
}
}
// For the RunWorkerCompleted
e.Result = new int[] { index, i };
}
void backgroundWorker_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
int[] state = (int[])e.UserState;
label5.Text = string.Format("File {0}, line {1}", state[0], state[1]);
}
void backgroundWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
int[] state = (int[])e.Result;
label5.Text = string.Format("File {0}, line {1} Finished", state[0], state[1]);
}
I'm facing a dilemma (!).
In a first scenario, I implemented a solution that replicates data from one database to another using SqlBulkCopy synchronously, and I had no problem at all.
Now, using the ThreadPool, I implemented the same thing asynchronously, with one thread per table, and everything works fine, but after some time (usually 1 hour, because the copy operations take about that long), the work items sent to the ThreadPool stop being executed. There is a separate SqlBulkCopy using a separate SqlConnection per thread.
I have already checked the number of free threads, and they are all free at the beginning of the invocation. I have an AutoResetEvent to wait for the threads to finish their job before launching again, and a FIFO semaphore that holds the count of active threads.
Is there some issue I have forgotten or should evaluate when using SqlBulkCopy? I'd appreciate some help, because I'm out of ideas ;)
->Usage
SemaphoreFIFO waitingThreads = new SemaphoreFIFO();
AutoResetEvent autoResetEvent = new AutoResetEvent(false);
(...)
List<TableMappingHolder> list = BulkCopy.Mapping(tables);
waitingThreads.Put(list.Count, 300000);
for (int i = 0; i < list.Count; i++){
ThreadPool.QueueUserWorkItem(call =>
//Replication
(...)
waitingThreads.Get();
if (waitingThreads.Counter == 0)
autoResetEvent.Set();
);
}
bool finalized = autoResetEvent.WaitOne(300000);
(...)
//Bulk Copy
public bool SetData(SqlDataReader reader, string _destinationTableName, List<SqlBulkCopyColumnMapping> _sqlBulkCopyColumnMappings)
{
using (SqlConnection destinationConnection =
new SqlConnection(ConfigurationManager.ConnectionStrings["dconn"].ToString()))
{
destinationConnection.Open();
// Set up the bulk copy object.
// Note that the column positions in the source
// data reader match the column positions in
// the destination table so there is no need to
// map columns.
using (SqlBulkCopy bulkCopy =
new SqlBulkCopy(destinationConnection)) {
bulkCopy.BulkCopyTimeout = 300000;
bulkCopy.DestinationTableName = _destinationTableName;
// Set up the column mappings by name.
foreach (SqlBulkCopyColumnMapping columnMapping in _sqlBulkCopyColumnMappings)
bulkCopy.ColumnMappings.Add(columnMapping);
try{
// Write from the source to the destination.
bulkCopy.WriteToServer(reader);
}
catch (Exception ex){return false;}
finally
{
try{reader.Close();}
catch (Exception e){//log}
try{bulkCopy.Close();}
catch (Exception e){//log}
try{destinationConnection.Close(); }
catch (Exception e){ //log }
}
}
}
return true;
}
//Semaphore
public sealed class SemaphoreFIFO
{
private int _counter;
private readonly LinkedList<int> waitQueue = new LinkedList<int>();
public int Counter
{
get { return _counter; }
}
private void internalNotify()
{
if (waitQueue.Count > 0 && _counter == 0)
{
Monitor.PulseAll(waitQueue);
}
}
public void Get()
{
lock (waitQueue)
{
_counter --;
internalNotify();
}
}
public bool Put(int n, int timeout)
{
if (timeout < 0 && timeout != Timeout.Infinite)
throw new ArgumentOutOfRangeException("timeout");
if (n < 0)
throw new ArgumentOutOfRangeException("n");
lock (waitQueue)
{
if (waitQueue.Count == 0 && _counter ==0)
{
_counter +=n;
internalNotify();
return true;
}
int endTime = Environment.TickCount + timeout;
LinkedListNode<int> me = waitQueue.AddLast(n);
try
{
while (true)
{
Monitor.Wait(waitQueue, timeout);
if (waitQueue.First == me && _counter ==0)
{
_counter += n;
waitQueue.RemoveFirst();
internalNotify();
return true;
}
if (timeout != Timeout.Infinite)
{
int remainingTime = endTime - Environment.TickCount;
if (remainingTime <= 0)
{
// TIMEOUT
if (waitQueue.First == me)
{
waitQueue.RemoveFirst();
internalNotify();
}
else
waitQueue.Remove(me);
return false;
}
timeout = remainingTime;
}
}
}
catch (ThreadInterruptedException e)
{
// INTERRUPT
if (waitQueue.First == me)
{
waitQueue.RemoveFirst();
internalNotify();
}
else
waitQueue.Remove(me);
throw e;
}
}
}
}
I would just go back to using SQLBulkCopy synchronously. I'm not sure what you gain by doing a bunch of bulk copies all at the same time (instead of one after the other). It may complete everything a bit faster, but I'm not even sure of that.
I am spawning an external console application and using async output redirection,
as shown in this SO post.
My problem is that the spawned process seems to need to produce a certain amount of output before I get the OutputDataReceived event notification.
I want to receive the OutputDataReceived event as soon as possible.
I have a bare-bones redirecting application, and here are some observations:
1. When I call a simple 'while(true) print("X");' console application (C#), I receive the output event immediately.
2. When I call the 3rd-party app I am trying to wrap from the command line, I see line-by-line output.
3. When I call that 3rd-party app from my bare-bones wrapper (see 1), the output comes in chunks (about one page in size).
What happens inside that app?
FYI: The app in question is a "USBee DX Data Extractor (Async bus) v1.0".
I did some more research and have a fix for Microsoft's Process class. But as my last answer was deleted without a reason, I have had to create a new one.
So take this example...
Create a Windows Forms app and stick a RichTextBox on the main form, then add this to the form's Load handler...
Process p = new Process()
{
StartInfo = new ProcessStartInfo()
{
FileName = "cmd.exe",
CreateNoWindow = true,
UseShellExecute = false,
ErrorDialog = false,
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
},
EnableRaisingEvents = true,
SynchronizingObject = this
};
p.OutputDataReceived += (s, ea) => this.richTextBox1.AppendText(ea.Data);
p.Start();
p.BeginOutputReadLine();
This will output something like this...
Microsoft Windows [Version 6.1.7601]
Copyright (c) 2009 Microsoft Corporation. All rights reserved.
The OutputDataReceived event is not fired for the last line. After some ILSpying it appears that this is deliberate: because the last line does not end with a CRLF, the reader assumes there is more coming and appends it to the start of the next event.
To correct this, I have written a wrapper for the Process class and taken some of the required internal classes out with it so that it all works neatly. Here is the FixedProcess class...
using System;
using System.Collections;
using System.IO;
using System.Text;
using System.Threading;
namespace System.Diagnostics
{
internal delegate void UserCallBack(string data);
public delegate void DataReceivedEventHandler(object sender, DataReceivedEventArgs e);
public class FixedProcess : Process
{
internal AsyncStreamReader output;
internal AsyncStreamReader error;
public event DataReceivedEventHandler OutputDataReceived;
public event DataReceivedEventHandler ErrorDataReceived;
public new void BeginOutputReadLine()
{
Stream baseStream = StandardOutput.BaseStream;
this.output = new AsyncStreamReader(this, baseStream, new UserCallBack(this.FixedOutputReadNotifyUser), StandardOutput.CurrentEncoding);
this.output.BeginReadLine();
}
public void BeginErrorReadLine()
{
Stream baseStream = StandardError.BaseStream;
this.error = new AsyncStreamReader(this, baseStream, new UserCallBack(this.FixedErrorReadNotifyUser), StandardError.CurrentEncoding);
this.error.BeginReadLine();
}
internal void FixedOutputReadNotifyUser(string data)
{
DataReceivedEventHandler outputDataReceived = this.OutputDataReceived;
if (outputDataReceived != null)
{
DataReceivedEventArgs dataReceivedEventArgs = new DataReceivedEventArgs(data);
if (this.SynchronizingObject != null && this.SynchronizingObject.InvokeRequired)
{
this.SynchronizingObject.Invoke(outputDataReceived, new object[]
{
this,
dataReceivedEventArgs
});
return;
}
outputDataReceived(this, dataReceivedEventArgs);
}
}
internal void FixedErrorReadNotifyUser(string data)
{
DataReceivedEventHandler errorDataReceived = this.ErrorDataReceived;
if (errorDataReceived != null)
{
DataReceivedEventArgs dataReceivedEventArgs = new DataReceivedEventArgs(data);
if (this.SynchronizingObject != null && this.SynchronizingObject.InvokeRequired)
{
this.SynchronizingObject.Invoke(errorDataReceived, new object[]
{
this,
dataReceivedEventArgs
});
return;
}
errorDataReceived(this, dataReceivedEventArgs);
}
}
}
internal class AsyncStreamReader : IDisposable
{
internal const int DefaultBufferSize = 1024;
private const int MinBufferSize = 128;
private Stream stream;
private Encoding encoding;
private Decoder decoder;
private byte[] byteBuffer;
private char[] charBuffer;
private int _maxCharsPerBuffer;
private Process process;
private UserCallBack userCallBack;
private bool cancelOperation;
private ManualResetEvent eofEvent;
private Queue messageQueue;
private StringBuilder sb;
private bool bLastCarriageReturn;
public virtual Encoding CurrentEncoding
{
get
{
return this.encoding;
}
}
public virtual Stream BaseStream
{
get
{
return this.stream;
}
}
internal AsyncStreamReader(Process process, Stream stream, UserCallBack callback, Encoding encoding)
: this(process, stream, callback, encoding, 1024)
{
}
internal AsyncStreamReader(Process process, Stream stream, UserCallBack callback, Encoding encoding, int bufferSize)
{
this.Init(process, stream, callback, encoding, bufferSize);
this.messageQueue = new Queue();
}
private void Init(Process process, Stream stream, UserCallBack callback, Encoding encoding, int bufferSize)
{
this.process = process;
this.stream = stream;
this.encoding = encoding;
this.userCallBack = callback;
this.decoder = encoding.GetDecoder();
if (bufferSize < 128)
{
bufferSize = 128;
}
this.byteBuffer = new byte[bufferSize];
this._maxCharsPerBuffer = encoding.GetMaxCharCount(bufferSize);
this.charBuffer = new char[this._maxCharsPerBuffer];
this.cancelOperation = false;
this.eofEvent = new ManualResetEvent(false);
this.sb = null;
this.bLastCarriageReturn = false;
}
public virtual void Close()
{
this.Dispose(true);
}
void IDisposable.Dispose()
{
this.Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (disposing && this.stream != null)
{
this.stream.Close();
}
if (this.stream != null)
{
this.stream = null;
this.encoding = null;
this.decoder = null;
this.byteBuffer = null;
this.charBuffer = null;
}
if (this.eofEvent != null)
{
this.eofEvent.Close();
this.eofEvent = null;
}
}
internal void BeginReadLine()
{
if (this.cancelOperation)
{
this.cancelOperation = false;
}
if (this.sb == null)
{
this.sb = new StringBuilder(1024);
this.stream.BeginRead(this.byteBuffer, 0, this.byteBuffer.Length, new AsyncCallback(this.ReadBuffer), null);
return;
}
this.FlushMessageQueue();
}
internal void CancelOperation()
{
this.cancelOperation = true;
}
private void ReadBuffer(IAsyncResult ar)
{
int num;
try
{
num = this.stream.EndRead(ar);
}
catch (IOException)
{
num = 0;
}
catch (OperationCanceledException)
{
num = 0;
}
if (num == 0)
{
lock (this.messageQueue)
{
if (this.sb.Length != 0)
{
this.messageQueue.Enqueue(this.sb.ToString());
this.sb.Length = 0;
}
this.messageQueue.Enqueue(null);
}
try
{
this.FlushMessageQueue();
return;
}
finally
{
this.eofEvent.Set();
}
}
int chars = this.decoder.GetChars(this.byteBuffer, 0, num, this.charBuffer, 0);
this.sb.Append(this.charBuffer, 0, chars);
this.GetLinesFromStringBuilder();
this.stream.BeginRead(this.byteBuffer, 0, this.byteBuffer.Length, new AsyncCallback(this.ReadBuffer), null);
}
private void GetLinesFromStringBuilder()
{
int i = 0;
int num = 0;
int length = this.sb.Length;
if (this.bLastCarriageReturn && length > 0 && this.sb[0] == '\n')
{
i = 1;
num = 1;
this.bLastCarriageReturn = false;
}
while (i < length)
{
char c = this.sb[i];
if (c == '\r' || c == '\n')
{
if (c == '\r' && i + 1 < length && this.sb[i + 1] == '\n')
{
i++;
}
string obj = this.sb.ToString(num, i + 1 - num);
num = i + 1;
lock (this.messageQueue)
{
this.messageQueue.Enqueue(obj);
}
}
i++;
}
// Flush Fix: Send Whatever is left in the buffer
string endOfBuffer = this.sb.ToString(num, length - num);
lock (this.messageQueue)
{
this.messageQueue.Enqueue(endOfBuffer);
num = length;
}
// End Flush Fix
if (this.sb[length - 1] == '\r')
{
this.bLastCarriageReturn = true;
}
if (num < length)
{
this.sb.Remove(0, num);
}
else
{
this.sb.Length = 0;
}
this.FlushMessageQueue();
}
private void FlushMessageQueue()
{
while (this.messageQueue.Count > 0)
{
lock (this.messageQueue)
{
if (this.messageQueue.Count > 0)
{
string data = (string)this.messageQueue.Dequeue();
if (!this.cancelOperation)
{
this.userCallBack(data);
}
}
continue;
}
break;
}
}
internal void WaitUtilEOF()
{
if (this.eofEvent != null)
{
this.eofEvent.WaitOne();
this.eofEvent.Close();
this.eofEvent = null;
}
}
}
public class DataReceivedEventArgs : EventArgs
{
internal string _data;
/// <summary>Gets the line of characters that was written to a redirected <see cref="T:System.Diagnostics.Process" /> output stream.</summary>
/// <returns>The line that was written by an associated <see cref="T:System.Diagnostics.Process" /> to its redirected <see cref="P:System.Diagnostics.Process.StandardOutput" /> or <see cref="P:System.Diagnostics.Process.StandardError" /> stream.</returns>
/// <filterpriority>2</filterpriority>
public string Data
{
get
{
return this._data;
}
}
internal DataReceivedEventArgs(string data)
{
this._data = data;
}
}
}
Stick that in your project and then change ...
Process p = new Process()
{
....
to
FixedProcess p = new FixedProcess()
{
....
Now your application should display something like this...
Microsoft Windows [Version 6.1.7601]
Copyright (c) 2009 Microsoft Corporation. All rights reserved.
C:\Projects\FixedProcess\bin\Debug>
without needing to make any other changes to your existing code. It is also still async and wrapped up nicely. The one caveat is that now you will get multiple events for large output with potential breaks in-between, so you will need to handle this scenario yourself. Other than that, it should be all good.
It seems the problem was that the dummy app was written in C#, which flushes the output automatically on every println, while the 3rd-party app was written in C/C++ and therefore only wrote when the stdout buffer was full. The only solution I've found is to make sure the C/C++ app flushes after every print or sets its buffer size to 0.
Check out this answer.
How to send input to the console as if the user is typing?
The idea is that you will receive OutputDataReceived events whenever the process emits output after it has been started.