How to pass a stream from NAudio to MS SpeechRecognitionEngine? - C#

I am trying to use the SetInputToAudioStream method of the speech recognition engine, but it does not work.
I've also read a few articles and tried every approach I could find, but it still does not work.
https://stackoverflow.com/a/6203533/1336662
I had to use the SpeechStreamer class described in Sean's response in order for the SpeechRecognitionEngine to work.
Here is my code; if anyone can help, that would be great.
using NAudio.Wave;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Speech.Recognition;
using System.Speech.AudioFormat;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.Threading;
namespace WpfAppNAudio
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow : Window
{
public MainWindow()
{
InitializeComponent();
console.AppendText("Click to start recording");
}
public WaveIn waveSource = null;
public WaveFileWriter waveFile = null;
private SpeechRecognitionEngine _recognizer = null;
//Stream a = null;
SpeechStreamer stream = null;
private bool _recognizing;
void RecognizeSpeechAndWriteToConsole()
{
_recognizer = new SpeechRecognitionEngine();
try
{
//_recognizer.
// Create and load a grammar.
Grammar dictation = new DictationGrammar();
dictation.Name = "Dictation Grammar";
_recognizer.LoadGrammar(dictation);
_recognizer.SpeechRecognized += _recognizer_SpeechRecognized; // if speech is recognized, call the specified method
_recognizer.SpeechRecognitionRejected += _recognizer_SpeechRecognitionRejected; // if recognized speech is rejected, call the specified method
_recognizer.SpeechDetected += _recognizer_SpeechDetected;
_recognizer.RecognizeCompleted += _recognizer_RecognizeCompleted;
}
catch (Exception)
{
}
}
private void _recognizer_RecognizeCompleted(object sender, RecognizeCompletedEventArgs e)
{
}
private void _recognizer_SpeechDetected(object sender, SpeechDetectedEventArgs e)
{
}
private void _recognizer_SpeechRecognitionRejected(object sender, SpeechRecognitionRejectedEventArgs e)
{
console.AppendText("speech rejected");
}
private void _recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
console.AppendText("speech recognized" + e.Result.Text);
}
private void StartBtn_Click()
{
waveSource = new WaveIn();
waveSource.WaveFormat = new WaveFormat(22050, 8, 1);
waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);
waveFile = new WaveFileWriter(@"C:\Temp\Test0001.wav", waveSource.WaveFormat);
console.AppendText("Starting recording");
RecognizeSpeechAndWriteToConsole();
waveSource.StartRecording();
}
void StopBtn_Click(object sender, EventArgs e)
{
waveSource.StopRecording();
}
void waveSource_DataAvailable(object sender, WaveInEventArgs e)
{
if (waveFile != null)
{
stream = new SpeechStreamer(e.Buffer.Length);
stream.Write(e.Buffer, 0, e.BytesRecorded);
waveFile.Write(e.Buffer, 0, e.BytesRecorded);
waveFile.Flush();
if (!_recognizing)
{
_recognizing = true;
_recognizer.SetInputToAudioStream(stream, new System.Speech.AudioFormat.SpeechAudioFormatInfo(22050, System.Speech.AudioFormat.AudioBitsPerSample.Eight, System.Speech.AudioFormat.AudioChannel.Mono));
var s = _recognizer.RecognizerInfo.SupportedAudioFormats;
_recognizer.RecognizeAsync(RecognizeMode.Multiple);
}
}
}
void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
{
if (waveSource != null)
{
waveSource.Dispose();
waveSource = null;
}
if (waveFile != null)
{
waveFile.Dispose();
waveFile = null;
}
}
private void Button_Click(object sender, RoutedEventArgs e)
{
StartBtn_Click();
}
private void Button_Click_1(object sender, RoutedEventArgs e)
{
waveSource.StopRecording();
}
}
class SpeechStreamer : Stream
{
private AutoResetEvent _writeEvent;
private List<byte> _buffer;
private int _buffersize;
private int _readposition;
private int _writeposition;
private bool _reset;
public SpeechStreamer(int bufferSize)
{
_writeEvent = new AutoResetEvent(false);
_buffersize = bufferSize;
_buffer = new List<byte>(_buffersize);
for (int i = 0; i < _buffersize; i++)
_buffer.Add(new byte());
_readposition = 0;
_writeposition = 0;
}
public override bool CanRead
{
get { return true; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return true; }
}
public override long Length
{
get { return -1L; }
}
public override long Position
{
get { return 0L; }
set { }
}
public override long Seek(long offset, SeekOrigin origin)
{
return 0L;
}
public override void SetLength(long value)
{
}
public override int Read(byte[] buffer, int offset, int count)
{
int i = 0;
while (i < count && _writeEvent != null)
{
if (!_reset && _readposition >= _writeposition)
{
_writeEvent.WaitOne(100, true);
continue;
}
buffer[i] = _buffer[_readposition + offset];
_readposition++;
if (_readposition == _buffersize)
{
_readposition = 0;
_reset = false;
}
i++;
}
return count;
}
public override void Write(byte[] buffer, int offset, int count)
{
for (int i = offset; i < offset + count; i++)
{
_buffer[_writeposition] = buffer[i];
_writeposition++;
if (_writeposition == _buffersize)
{
_writeposition = 0;
_reset = true;
}
}
_writeEvent.Set();
}
public override void Close()
{
_writeEvent.Close();
_writeEvent = null;
base.Close();
}
public override void Flush()
{
}
}
}

stream = new SpeechStreamer(e.Buffer.Length);
stream.Write(e.Buffer, 0, e.BytesRecorded);
Is this really the only place that writes to the stream, and nowhere else in the application? If it only writes a few bytes when it is initialized, audio will not keep flowing into the recognizer.
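A minimal sketch of that idea against the question's own code (an illustration, not a verified fix): create the SpeechStreamer once, give it to the recognizer once, and keep writing every captured buffer into the same instance. The buffer size below is an arbitrary value chosen for the example.

private void StartBtn_Click()
{
    waveSource = new WaveIn();
    waveSource.WaveFormat = new WaveFormat(22050, 8, 1);
    waveSource.DataAvailable += waveSource_DataAvailable;

    // One long-lived circular buffer reused for the whole session.
    stream = new SpeechStreamer(100000);

    RecognizeSpeechAndWriteToConsole();
    _recognizer.SetInputToAudioStream(stream,
        new SpeechAudioFormatInfo(22050, AudioBitsPerSample.Eight, AudioChannel.Mono));
    _recognizer.RecognizeAsync(RecognizeMode.Multiple);

    waveSource.StartRecording();
}

void waveSource_DataAvailable(object sender, WaveInEventArgs e)
{
    // Keep feeding the same stream; do not create a new SpeechStreamer here.
    // (The WAV file writing from the original code is omitted for brevity.)
    stream.Write(e.Buffer, 0, e.BytesRecorded);
}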

Related

Bad types trying to use audio recognition loopback

The Microsoft Speech system has good example code, but I have a problem adding loopback so that it records what the machine is playing rather than what is coming in through the mic (for example, to produce a text description of a video without playing it on the speakers). NAudio seems to be the library for this, but I am getting type errors when pushing its capture into the recognizer's audio stream:
using System;
using System.Speech.Recognition;
using NAudio.Wave;
using NAudio.CoreAudioApi.Interfaces;
using NAudio.CoreAudioApi;
using System.IO;
using System.Speech.AudioFormat;
namespace SpeechRecognitionApp
{
class Program
{
static void Main(string[] args)
{
// Create an in-process speech recognizer for the en-US locale.
using (
SpeechRecognitionEngine recognizer =
new SpeechRecognitionEngine(
new System.Globalization.CultureInfo("en-US")))
{
// Create and load a dictation grammar.
recognizer.LoadGrammar(new DictationGrammar());
// Add a handler for the speech recognized event.
recognizer.SpeechRecognized +=
new EventHandler<SpeechRecognizedEventArgs>(recognizer_SpeechRecognized);
// Configure input to the speech recognizer.
//recognizer.SetInputToDefaultAudioDevice();
WasapiLoopbackCapture capture = new WasapiLoopbackCapture();
Stream captureStream = new System.IO.MemoryStream();
capture.DataAvailable += (s, a) =>
{
captureStream.Write(a.Buffer, 0, a.BytesRecorded);
captureStream.Flush();
};
capture.StartRecording();
Console.WriteLine(capture.WaveFormat.AverageBytesPerSecond);
Console.WriteLine(capture.WaveFormat.BitsPerSample);
recognizer.SetInputToAudioStream(captureStream, new SpeechAudioFormatInfo(
capture.WaveFormat.AverageBytesPerSecond, AudioBitsPerSample.Sixteen, AudioChannel.Stereo));
// Start asynchronous, continuous speech recognition.
recognizer.RecognizeAsync(RecognizeMode.Multiple);
// Keep the console window open.
while (true)
{
Console.ReadLine();
}
}
}
// Handle the SpeechRecognized event.
static void recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
Console.WriteLine("Recognized text: " + e.Result.Text);
}
}
}
Update: As you can see in the revised code, this now compiles at least, but it does not recognize any speech, internal or external. In fact it outputs:
384000
32
Since there is no "ThirtyTwo" value in AudioBitsPerSample, perhaps I can't use the NAudio capture class to get system audio at all?
Update: This seems to work somewhat, based on another answer, but it doesn't pick up very much; I'm guessing it is feeding the audio in too slowly or too quickly?
using System;
using System.Speech.Recognition;
using NAudio.Wave;
using NAudio.CoreAudioApi.Interfaces;
using NAudio.CoreAudioApi;
using System.IO;
using System.Speech.AudioFormat;
namespace SpeechRecognitionApp
{
class FakeStreamer : Stream
{
public bool bExit = false;
Stream stream;
Stream client;
public FakeStreamer(Stream client)
{
this.client = client;
this.stream = client;
}
public override bool CanRead
{
get { return stream.CanRead; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return stream.CanWrite; }
}
public override long Length
{
get { return -1L; }
}
public override long Position
{
get { return 0L; }
set { }
}
public override long Seek(long offset, SeekOrigin origin)
{
return 0L;
}
public override void SetLength(long value)
{
stream.SetLength(value);
}
public override int Read(byte[] buffer, int offset, int count)
{
int len = 0, c = count;
while (c > 0 && !bExit)
{
try
{
len = stream.Read(buffer, offset, c);
}
catch (Exception e)
{
Console.WriteLine("ouch");
}
/*if (!client.Connected || len == 0)
{
//Exit read loop
return 0;
}*/
offset += len;
c -= len;
}
return count;
}
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
}
public override void Close()
{
stream.Close();
base.Close();
}
public override void Flush()
{
stream.Flush();
}
}
class Program
{
static void Main(string[] args)
{
// Create an in-process speech recognizer for the en-US locale.
using (
SpeechRecognitionEngine recognizer =
new SpeechRecognitionEngine(
new System.Globalization.CultureInfo("en-US")))
{
// Create and load a dictation grammar.
recognizer.LoadGrammar(new DictationGrammar());
// Add a handler for the speech recognized event.
recognizer.SpeechRecognized +=
new EventHandler<SpeechRecognizedEventArgs>(recognizer_SpeechRecognized);
// Configure input to the speech recognizer.
//recognizer.SetInputToDefaultAudioDevice();
WasapiLoopbackCapture capture = new WasapiLoopbackCapture();
Stream captureStream = new System.IO.MemoryStream();
Stream buffStream = new FakeStreamer(captureStream);
capture.DataAvailable += (s, a) =>
{
captureStream.Write(a.Buffer, 0, a.BytesRecorded);
};
capture.StartRecording();
Console.WriteLine(capture.WaveFormat.AverageBytesPerSecond);
Console.WriteLine(capture.WaveFormat.BitsPerSample);
//recognizer.SetInputToDefaultAudioDevice();
recognizer.SetInputToAudioStream(buffStream, new SpeechAudioFormatInfo(
capture.WaveFormat.AverageBytesPerSecond/4, AudioBitsPerSample.Eight, AudioChannel.Stereo));
// Start asynchronous, continuous speech recognition.
recognizer.RecognizeAsync(RecognizeMode.Multiple);
// Keep the console window open.
while (true)
{
Console.ReadLine();
}
}
}
// Handle the SpeechRecognized event.
static void recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
Console.WriteLine("Recognized text: " + e.Result.Text);
}
}
}
Update 3: Trying to re-encode the audio stream into something the voice recognition will accept. Unfortunately it never gets to the re-encoded capture audio, as you can see...
using System;
using System.Speech.Recognition;
using NAudio.Wave;
using NAudio.CoreAudioApi.Interfaces;
using NAudio.CoreAudioApi;
using System.IO;
using System.Speech.AudioFormat;
namespace SpeechRecognitionApp
{
class FakeStreamer : Stream
{
public bool bExit = false;
Stream stream;
Stream client;
public FakeStreamer(Stream client)
{
this.client = client;
this.stream = client;
}
public override bool CanRead
{
get { return stream.CanRead; }
}
public override bool CanSeek
{
get { return false; }
}
public override bool CanWrite
{
get { return stream.CanWrite; }
}
public override long Length
{
get { return -1L; }
}
public override long Position
{
get { return 0L; }
set { }
}
public override long Seek(long offset, SeekOrigin origin)
{
return 0L;
}
public override void SetLength(long value)
{
stream.SetLength(value);
}
public override int Read(byte[] buffer, int offset, int count)
{
int len = 0, c = count;
while (c > 0 && !bExit)
{
try
{
len = stream.Read(buffer, offset, c);
}
catch (Exception e)
{
Console.WriteLine("ouch");
}
/*if (!client.Connected || len == 0)
{
//Exit read loop
return 0;
}*/
offset += len;
c -= len;
}
return count;
}
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
}
public override void Close()
{
stream.Close();
base.Close();
}
public override void Flush()
{
stream.Flush();
}
}
class Program
{
static void Main(string[] args)
{
// Create an in-process speech recognizer for the en-US locale.
using (
SpeechRecognitionEngine recognizer =
new SpeechRecognitionEngine(
new System.Globalization.CultureInfo("en-US")))
{
// Create and load a dictation grammar.
recognizer.LoadGrammar(new DictationGrammar());
// Add a handler for the speech recognized event.
recognizer.SpeechRecognized +=
new EventHandler<SpeechRecognizedEventArgs>(recognizer_SpeechRecognized);
// Configure input to the speech recognizer.
//recognizer.SetInputToDefaultAudioDevice();
WasapiLoopbackCapture capture = new WasapiLoopbackCapture();
Stream captureStream = new System.IO.MemoryStream();
//Stream buffStream = new FakeStreamer(captureStream);
capture.DataAvailable += (s, a) =>
{
captureStream.Write(a.Buffer, 0, a.BytesRecorded);
};
Console.WriteLine(capture.WaveFormat.AverageBytesPerSecond);
Console.WriteLine(capture.WaveFormat.BitsPerSample);
var newFormat = new WaveFormat(8000, 16, 1);
//using (var conversionStream = new WaveFormatConversionStream(newFormat, capture)
var resampler = new MediaFoundationResampler(new NAudio.Wave.RawSourceWaveStream(captureStream,capture.WaveFormat), newFormat);
Stream captureConvertStream = new System.IO.MemoryStream();
resampler.ResamplerQuality = 60;
//WaveFileWriter.WriteWavFileToStream(captureConvertStream, resampler);
//recognizer.SetInputToDefaultAudioDevice();
Stream buffStream = new FakeStreamer(captureConvertStream);
recognizer.SetInputToAudioStream(buffStream, new SpeechAudioFormatInfo(
8000, AudioBitsPerSample.Sixteen, AudioChannel.Mono));
// Start asynchronous, continuous speech recognition.
recognizer.RecognizeAsync(RecognizeMode.Multiple);
capture.StartRecording();
var arr = new byte[128];
while (resampler.Read(arr, 0, arr.Length) > 0)
{
captureConvertStream.Write(arr, 0, arr.Length);
Console.WriteLine("Never getting here");
}
// Keep the console window open.
while (true)
{
Console.ReadLine();
}
}
}
// Handle the SpeechRecognized event.
static void recognizer_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
Console.WriteLine("Recognized text: " + e.Result.Text);
}
}
}
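One possible direction (a sketch only, under assumptions; not a confirmed fix): the 384000 / 32 output above points to 48 kHz stereo 32-bit float, which WASAPI loopback typically delivers, so the capture has to be resampled to a PCM format the recognizer can describe, and the resampled bytes have to be pumped on a background thread into a blocking FIFO stream such as the SpeechStreamer class from the first question above. A plain MemoryStream shares a single read/write position, which is why the read loop in Update 3 never sees new data. The target format and chunk size are illustration values.

var capture = new WasapiLoopbackCapture();
var buffered = new BufferedWaveProvider(capture.WaveFormat)
{
    DiscardOnBufferOverflow = true          // drop audio rather than throw if the reader falls behind
};
capture.DataAvailable += (s, a) => buffered.AddSamples(a.Buffer, 0, a.BytesRecorded);

var target = new WaveFormat(16000, 16, 1);  // 16 kHz, 16-bit, mono PCM
var resampler = new MediaFoundationResampler(buffered, target);
var fifo = new SpeechStreamer(target.AverageBytesPerSecond * 5); // blocking circular buffer

recognizer.SetInputToAudioStream(fifo, new SpeechAudioFormatInfo(
    target.SampleRate, AudioBitsPerSample.Sixteen, AudioChannel.Mono));
recognizer.RecognizeAsync(RecognizeMode.Multiple);
capture.StartRecording();

var pump = new System.Threading.Thread(() =>
{
    var chunk = new byte[target.AverageBytesPerSecond / 10];  // roughly 100 ms per read
    while (true)
    {
        // BufferedWaveProvider pads with silence, so this returns promptly.
        int read = resampler.Read(chunk, 0, chunk.Length);
        if (read > 0) fifo.Write(chunk, 0, read);
    }
});
pump.IsBackground = true;
pump.Start();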

Visual Studio 2010 - application as a service in release mode

I have a desktop console application created in Visual Studio 2010. How do I convert it to a Windows service? Basically, in debug mode I want it to run as a normal app, but in release mode I want the build output to be a service.
You can do it this way:
using System;
using System.IO;
using System.Reflection;
using System.ServiceProcess;
using System.Threading;
using System.Windows.Forms;
using ProgramIO; // Interactive console simulator, defined further down
namespace Program
{
static class Program
{
public static bool Stopped = false;
[STAThread]
static void Main(string[] args)
{
Interactive.Initialize();
Interactive.OnStopped += new Interactive.StopedDelegate(OnStopped);
Interactive.Title = Path.GetFileNameWithoutExtension(
Assembly.GetExecutingAssembly().Location);
if (args.Length == 0) Interactive.Run(RunProc);
else if (args[0] == "-svc") ServiceBase.Run(new Service());
}
public static void RunProc() { yourConsoleMain(); }
public static void OnStopped() { Stopped = true; exitFromMain(); }
}
public class Service : ServiceBase
{
public static string Name = Path.GetFileNameWithoutExtension(
Assembly.GetExecutingAssembly().Location);
public static string CmdLineSwitch = "-svc";
public static ServiceStartMode StartMode = ServiceStartMode.Automatic;
public static bool DesktopInteract = true;
public bool Stopped = false;
public Service() { ServiceName = Name; }
public void Start() { OnStart(null); }
protected override void OnStart(string[] args)
{
System.Diagnostics.EventLog.WriteEntry(
ServiceName, ServiceName + " service started.");
Thread thread = new Thread(MainThread);
thread.Start();
}
protected override void OnStop()
{
System.Diagnostics.EventLog.WriteEntry(
ServiceName, ServiceName + " service stopped.");
Stopped = true;
Application.Exit();
}
private void MainThread()
{
Interactive.Run(Program.RunProc);
if (!Stopped) Stop();
}
}
}
Let me explain. Basically, in Main you specify that your program starts as a service when it is launched with the '-svc' argument.
Put what you normally do in main() into RunProc(), and put code that causes main() to exit into the OnStopped() event handler.
Then derive from ServiceBase and implement basic service start/stop handling.
In Windows 7 and later you must explicitly declare that your service can interact with the desktop if you want to see any output. There is another problem, though: a console window cannot be shown from a service, so I created this console simulator, which can both write output and read input.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Threading;
using System.Runtime.InteropServices;
namespace ProgramIO.Control
{
public delegate void WriteDelegate(string value, int x, int y);
public delegate void ReadDelegate(out string value, bool readLine);
public delegate void EnableInputDelegate(bool enable);
public partial class InteractiveForm : Form
{
private delegate void ClearInputBufferDelegate();
public enum EIOOperation { None = 0, Write, Read }
private EventWaitHandle eventInvoke =
new EventWaitHandle(false, EventResetMode.AutoReset);
private EventWaitHandle eventInput =
new EventWaitHandle(false, EventResetMode.AutoReset);
private bool readLine = false;
private string inputBuffer = "";
private int inputPosition = 0;
private int inputBufferPosition = 0;
private EIOOperation IOOperation;
private int bufferSize = 0x10000;
private bool CaretShown = false;
private delegate object DoInvokeDelegate(Delegate method, params object[] args);
private delegate void SetTitleDelegate(string value);
private delegate void SetForegroundcolorDelegate(Color value);
public string Title {
get { return Text; }
set {
if (InvokeRequired) InvokeEx(
(SetTitleDelegate)delegate(string title) { Text = title; },
1000, new object[] { value });
else Text = value; }}
public Color ForegroundColor {
get { return ForeColor; }
set {
if (InvokeRequired) InvokeEx(
(SetForegroundcolorDelegate)delegate(Color color) { ForeColor = color; },
1000, new object[] { value });
else ForeColor = value; }}
public InteractiveForm()
{
InitializeComponent();
DoubleBuffered = true;
}
#region Asynchronous Methods
private bool InvokeEx(Delegate method, int timeout, params object[] args)
{
BeginInvoke((DoInvokeDelegate)DoInvoke, new object[] { method, args });
if (eventInvoke.WaitOne(timeout)) return true;
else return false;
}
private void EnableInput(bool enable)
{
if (InvokeRequired)
InvokeEx((EnableInputDelegate)DoEnableInput, 1000, new object[] { enable });
else DoEnableInput(enable);
}
private void ClearInputBuffer()
{
if (InvokeRequired)
InvokeEx((ClearInputBufferDelegate)DoClearInputBuffer, 1000, new object[0]);
else DoClearInputBuffer();
}
public void Write(string value, int x = -1, int y = -1)
{
lock (this) {
IOOperation = EIOOperation.Write;
if (InvokeRequired)
InvokeEx((WriteDelegate)DoWrite, 1000, new object[] { value, x, y });
else DoWrite(value, x, y);
IOOperation = EIOOperation.None; }
}
public string Read(bool readLine)
{
lock (this) {
EnableInput(true);
IOOperation = EIOOperation.Read; this.readLine = readLine; string value = "";
ClearInputBuffer(); eventInput.WaitOne();
object[] args = new object[] { value, readLine };
if (InvokeRequired) {
InvokeEx((ReadDelegate)DoRead, 1000, args); value = (string) args[0]; }
else DoRead(out value, readLine);
//inputPosition = textBox.Text.Length; inputBuffer = "";
ClearInputBuffer();
IOOperation = EIOOperation.None;
EnableInput(false);
return value;
}
}
#endregion //Asynchronous Methods
#region Synchronous Methods
protected override void OnShown(EventArgs e) { base.OnShown(e); textBox.Focus(); }
public object DoInvoke(Delegate method, params object[] args)
{
object obj = method.DynamicInvoke(args);
eventInvoke.Set();
return obj;
}
private void CorrectSelection()
{
if (textBox.SelectionStart < inputPosition) {
if (textBox.SelectionLength > (inputPosition - textBox.SelectionStart))
textBox.SelectionLength -= inputPosition - textBox.SelectionStart;
else textBox.SelectionLength = 0;
textBox.SelectionStart = inputPosition; }
}
protected void DoClearInputBuffer()
{
inputPosition = textBox.Text.Length; inputBuffer = "";
}
protected void DoEnableInput(bool enable)
{
if (enable) { textBox.ReadOnly = false; textBox.SetCaret(true); }
else { textBox.ReadOnly = true; textBox.SetCaret(false); }
}
protected void DoWrite(string value, int x, int y)
{
string[] lines = textBox.Text.Split(new string[] { "\r\n" }, StringSplitOptions.None);
string[] addLines = new string[0];
if (y == -1) y = lines.Length - 1;
if (lines.Length - 1 < y) addLines = new string[y - lines.Length - 1];
if (y < lines.Length) {
if (x == -1) x = lines[y].Length;
if (lines[y].Length < x)
lines[y] += new String(' ', x - lines[y].Length) + value;
else
lines[y] = lines[y].Substring(0, x) + value +
((x + value.Length) < lines[y].Length ?
lines[y].Substring(x + value.Length) : ""); }
else {
y -= lines.Length;
if (x == -1) x = addLines[y].Length;
addLines[y] += new String(' ', x - addLines[y].Length) + value; }
textBox.Text = (string.Join("\r\n", lines) +
(addLines.Length > 0 ? "\r\n" : "") + string.Join("\r\n", addLines));
textBox.Select(textBox.Text.Length, 0); textBox.ScrollToCaret();
inputBuffer = "";
}
protected void DoRead(out string value, bool readLine)
{
value = "";
if (readLine) {
int count = inputBuffer.IndexOf("\r\n");
if (count > 0) { value = inputBuffer.Substring(0, count); }}
else if (inputBuffer.Length > 0) {
value = inputBuffer.Substring(0, 1); }
inputBuffer = "";
}
private void textBox_TextChanged(object sender, EventArgs e)
{
if (IOOperation == EIOOperation.Read) {
inputBuffer = textBox.Text.Substring(inputPosition);
if (!readLine || inputBuffer.Contains("\r\n")) eventInput.Set(); }
if (textBox.Text.Length > bufferSize) { textBox.Text =
textBox.Text.Substring(textBox.Text.Length - bufferSize, bufferSize);
textBox.Select(textBox.Text.Length, 0); textBox.ScrollToCaret(); }
}
private void textBox_KeyDown(object sender, KeyEventArgs e)
{
if (IOOperation != EIOOperation.Read ||
(e.KeyCode == Keys.Back && inputBuffer.Length == 0))
e.SuppressKeyPress = true;
}
private void textBox_MouseUp(object sender, MouseEventArgs e)
{
CorrectSelection();
}
private void textBox_KeyUp(object sender, KeyEventArgs e)
{
if (!(IOOperation == EIOOperation.Read) ||
((e.KeyCode == Keys.Left || e.KeyCode == Keys.Up) &&
textBox.SelectionStart < inputPosition))
CorrectSelection();
}
private void InteractiveForm_FormClosing(object sender, FormClosingEventArgs e)
{
eventInput.Set();
lock (this) { }
}
#endregion //Synchronous Methods
}
public class InteractiveWindow : TextBox
{
[DllImport("user32.dll")]
static extern bool HideCaret(IntPtr hWnd);
[DllImport("user32.dll")]
static extern bool ShowCaret(IntPtr hWnd);
private delegate void SetCaretDelegate(bool visible);
private const int WM_SETFOCUS = 0x0007;
private bool CaretVisible = true;
public void SetCaret(bool visible)
{
if (InvokeRequired) Invoke((SetCaretDelegate)DoSetCaret, new object[] { visible });
else DoSetCaret(visible);
}
private void DoSetCaret(bool visible)
{
if (CaretVisible != visible)
{
CaretVisible = visible;
if (CaretVisible) ShowCaret(Handle);
else HideCaret(Handle);
}
}
protected override void WndProc(ref Message m)
{
base.WndProc(ref m);
if (m.Msg == WM_SETFOCUS)
{
if (CaretVisible) { ShowCaret(Handle); }
else HideCaret(Handle);
}
}
}
}
namespace ProgramIO
{
using ProgramIO.Control;
public static class Interactive
{
public delegate void StopedDelegate();
public delegate void RunDelegate();
public static bool Initialized = false;
private static InteractiveForm frmIO = null;
private static Thread IOThread = null;
private static EventWaitHandle EventStarted =
new EventWaitHandle(false, EventResetMode.AutoReset);
public static string Title {
get { return frmIO.Title; }
set { frmIO.Title = value; } }
public static Color ForegroundColor {
get {return frmIO.ForeColor; }
set { frmIO.ForeColor = value; } }
public static event StopedDelegate OnStopped = null;
private static void form_Show(object sender, EventArgs e)
{
frmIO = sender as InteractiveForm;
EventStarted.Set();
}
private static void form_FormClosed(object sender, FormClosedEventArgs e)
{
lock (frmIO) {
frmIO = null;
Application.Exit(); }
}
public static void Initialize()
{
IOThread = new Thread(IOThreadProc);
IOThread.Name = "Interactive Thread"; IOThread.Start();
EventStarted.WaitOne();
Initialized = true;
}
public static void Run(RunDelegate runProc = null)
{
if (!Initialized) Initialize();
if (runProc != null) runProc();
Application.Run();
if (OnStopped != null) OnStopped();
}
public static void IOThreadProc()
{
InteractiveForm form = new InteractiveForm();
form.Shown += new EventHandler(form_Show);
form.FormClosed += new FormClosedEventHandler(form_FormClosed);
Application.Run(form);
}
public static void Write(string value, int x = -1, int y = -1)
{
if (frmIO != null) lock (frmIO) { frmIO.Write(value, x, y); }
}
public static void WriteLine(string value)
{
if (frmIO != null) lock (frmIO) {
Interactive.Write(value); Interactive.Write("\r\n"); }
}
public static int Read()
{
if (frmIO != null) lock (frmIO) {
string input = frmIO.Read(false);
if (input.Length > 0) return input[0]; }
return 0;
}
public static string ReadLine()
{
if (frmIO != null) lock (frmIO) { return frmIO.Read(true); }
else return "";
}
}
}
This last class, Interactive, serves as the invoker for the asynchronous methods, and it is used in Main() at the beginning.
You can skip this whole second section of code if you don't need a console window when the program runs as a Windows service.
I have also created an Installer class for this, but that would be too much code for this page.
EDIT: InteractiveForm is a form with a designer class, but a very simple one, consisting only of the Form and a text box (the InteractiveWindow control) filling its area.
Basically, you need three projects in your solution:
Application itself
WinService for production
Console application for test purposes
Your application must have some kind of Start() method, e.g. with an infinite loop that does all the work, and perhaps a Stop() method to stop processing.
The Windows service project must contain a class derived from ServiceBase; it will have an OnStart method that calls your application's Start and an OnStop that calls the application's Stop method.
Next, in the console application you do pretty much the same thing: call the Start method from the console's entry point.
For debugging you run your console app, and for release you publish your Windows service project (see the entry-point sketch after the code below).
Updated: the Windows service class might look like this:
public class MyWinService : ServiceBase
{
IMyApplicationService _myApplicationService;
//constructor - resolve dependencies here
public MyWinService()
{
_myApplicationService = new MyApplicationService();
}
protected override void OnStart(string[] args)
{
base.OnStart(args);
try
{
_myApplicationService.Start();
}
catch (Exception exception)
{
//log exception
}
}
protected override void OnStop()
{
base.OnStop();
try
{
_myApplicationService.Stop();
}
catch (Exception exception)
{
//log exception
}
}
}
Application service:
public class MyApplicationService : IMyApplicationService
{
public MyApplicationService()
{
//some initializations
}
public void Start()
{
//do work here
}
public void Stop()
{
//...
}
}
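Neither snippet above shows the entry-point switch the question asks about (console app in a debug build, service in a release build). A minimal sketch, assuming the MyApplicationService and MyWinService classes shown above and standard conditional compilation:

using System;
using System.ServiceProcess;

static class Program
{
    static void Main(string[] args)
    {
#if DEBUG
        // Debug build: run the work directly as a console app.
        var app = new MyApplicationService();
        app.Start();
        Console.WriteLine("Running. Press Enter to stop...");
        Console.ReadLine();
        app.Stop();
#else
        // Release build: hand control to the Service Control Manager.
        ServiceBase.Run(new MyWinService());
#endif
    }
}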

Need help with a button to switch on the webcam for a C#-based QR code project

I am currently learning about QR code webcam decoding. I have taken the example from https://zxingnet.svn.codeplex.com/svn/trunk/Clients/AForgeDemo/ and built it successfully without errors. However, when I run it with my webcam connected, there is no input and the webcam does not switch on. As I understand it, the webcam should switch on when the user selects it in the combobox. Since the build has no errors, I can't pinpoint what went wrong. I have also looked at a project that switches on the webcam when the user presses a button, and I plan to implement that in this project. I have already added the button, but I don't know what to program into it to switch on the webcam directly instead of having to choose from the combobox.
Would someone kindly advise or guide me through it?
Below are the main program and the classes.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using AForge.Video;
using ZXing;
using System.Threading;
namespace AForgeDemo
{
public partial class AForgeDemoForm : Form
{
private struct Device
{
public int Index;
public string Name;
public override string ToString()
{
return Name;
}
}
private readonly CameraDevices camDevices;
private Bitmap currentBitmapForDecoding;
private readonly Thread decodingThread;
private Result currentResult;
private readonly Pen resultRectPen;
public AForgeDemoForm()
{
InitializeComponent();
camDevices = new CameraDevices();
decodingThread = new Thread(DecodeBarcode);
decodingThread.Start();
pictureBox1.Paint += pictureBox1_Paint;
resultRectPen = new Pen(Color.Green, 10);
}
void pictureBox1_Paint(object sender, PaintEventArgs e)
{
if (currentResult == null)
return;
if (currentResult.ResultPoints != null && currentResult.ResultPoints.Length > 0)
{
var resultPoints = currentResult.ResultPoints;
var rect = new Rectangle((int)resultPoints[0].X, (int)resultPoints[0].Y, 1, 1);
foreach (var point in resultPoints)
{
if (point.X < rect.Left)
rect = new Rectangle((int)point.X, rect.Y, rect.Width + rect.X - (int)point.X, rect.Height);
if (point.X > rect.Right)
rect = new Rectangle(rect.X, rect.Y, rect.Width + (int)point.X - rect.X, rect.Height);
if (point.Y < rect.Top)
rect = new Rectangle(rect.X, (int)point.Y, rect.Width, rect.Height + rect.Y - (int)point.Y);
if (point.Y > rect.Bottom)
rect = new Rectangle(rect.X, rect.Y, rect.Width, rect.Height + (int)point.Y - rect.Y);
}
using (var g = pictureBox1.CreateGraphics())
{
g.DrawRectangle(resultRectPen, rect);
}
}
}
protected override void OnLoad(EventArgs e)
{
base.OnLoad(e);
LoadDevicesToCombobox();
}
protected override void OnClosing(System.ComponentModel.CancelEventArgs e)
{
base.OnClosing(e);
if (!e.Cancel)
{
decodingThread.Abort();
if (camDevices.Current != null)
{
camDevices.Current.NewFrame -= Current_NewFrame;
if (camDevices.Current.IsRunning)
{
camDevices.Current.SignalToStop();
}
}
}
}
private void LoadDevicesToCombobox()
{
cmbDevice.Items.Clear();
for (var index = 0; index < camDevices.Devices.Count; index++)
{
cmbDevice.Items.Add(new Device { Index = index, Name = camDevices.Devices[index].Name });
}
}
private void cmbDevice_SelectedIndexChanged(object sender, EventArgs e)
{
if (camDevices.Current != null)
{
camDevices.Current.NewFrame -= Current_NewFrame;
if (camDevices.Current.IsRunning)
{
camDevices.Current.SignalToStop();
}
}
camDevices.SelectCamera(((Device)(cmbDevice.SelectedItem)).Index);
camDevices.Current.NewFrame += Current_NewFrame;
camDevices.Current.Start();
}
private void Current_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
if (IsDisposed)
{
return;
}
try
{
if (currentBitmapForDecoding == null)
{
currentBitmapForDecoding = (Bitmap)eventArgs.Frame.Clone();
}
Invoke(new Action<Bitmap>(ShowFrame), eventArgs.Frame.Clone());
}
catch (ObjectDisposedException)
{
// not sure, why....
}
}
private void ShowFrame(Bitmap frame)
{
if (pictureBox1.Width < frame.Width)
{
pictureBox1.Width = frame.Width;
}
if (pictureBox1.Height < frame.Height)
{
pictureBox1.Height = frame.Height;
}
pictureBox1.Image = frame;
}
private void DecodeBarcode()
{
var reader = new BarcodeReader();
while (true)
{
if (currentBitmapForDecoding != null)
{
var result = reader.Decode(currentBitmapForDecoding);
if (result != null)
{
Invoke(new Action<Result>(ShowResult), result);
}
currentBitmapForDecoding.Dispose();
currentBitmapForDecoding = null;
}
Thread.Sleep(200);
}
}
private void ShowResult(Result result)
{
currentResult = result;
txtBarcodeFormat.Text = result.BarcodeFormat.ToString();
txtContent.Text = result.Text;
}
private void button1_Click(object sender, EventArgs e)
{
}
}
}
The CameraDevices class:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using AForge.Video.DirectShow;
namespace AForgeDemo
{
internal class CameraDevices
{
public FilterInfoCollection Devices { get; private set; }
public VideoCaptureDevice Current { get; private set; }
public CameraDevices()
{
Devices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
}
public void SelectCamera(int index)
{
if (index >= Devices.Count)
{
throw new ArgumentOutOfRangeException("index");
}
Current = new VideoCaptureDevice(Devices[index].MonikerString);
}
}
}
Again, I kindly ask for help with what I should put in the button handler to activate the webcam directly instead of choosing from the combobox.
Thanks a million.
The code you need in the event handler behind the button is similar to the code inside the cmbDevice_SelectedIndexChanged method. I think it should look like the following:
// select the first available camera and start capturing
camDevices.SelectCamera(0);
camDevices.Current.NewFrame += Current_NewFrame;
camDevices.Current.Start();
But I think the main challenge is to find out why the original example doesn't work as expected; it makes no difference whether the same code is called from the combobox handler or from the button handler.
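Dropped into the question's empty button1_Click handler, and with a guard added for the case where no camera was enumerated (the guard is an illustrative addition, not part of the original answer), it could look like this:

private void button1_Click(object sender, EventArgs e)
{
    if (camDevices.Devices.Count == 0)
    {
        MessageBox.Show("No video capture devices found.");
        return;
    }

    // Stop whichever camera is currently running, mirroring cmbDevice_SelectedIndexChanged.
    if (camDevices.Current != null)
    {
        camDevices.Current.NewFrame -= Current_NewFrame;
        if (camDevices.Current.IsRunning)
        {
            camDevices.Current.SignalToStop();
        }
    }

    // Select the first available camera and start capturing.
    camDevices.SelectCamera(0);
    camDevices.Current.NewFrame += Current_NewFrame;
    camDevices.Current.Start();
}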

NullReferenceException was unhandled - NAudio MP3 streaming

I am using NAudio's demo app for a small demonstration. I found that when I play some MP3 files, at the end of the sound/MP3 I get the error:
NullReferenceException was unhandled: object reference not set to an instance of an object
in the file Mp3FrameDecompressor.cs.
using System;
using System.Collections.Generic;
using System.Text;
using NAudio.Wave;
using NAudio.Wave.Compression;
using System.Diagnostics;
namespace NAudio.Wave
{
/// <summary>
/// MP3 Frame Decompressor using ACM
/// </summary>
public class AcmMp3FrameDecompressor : IDisposable, IMp3FrameDecompressor
{
private AcmStream conversionStream;
private WaveFormat pcmFormat;
/// <summary>
/// Creates a new ACM frame decompressor
/// </summary>
/// <param name="sourceFormat">The MP3 source format</param>
public AcmMp3FrameDecompressor(WaveFormat sourceFormat)
{
this.pcmFormat = AcmStream.SuggestPcmFormat(sourceFormat);
conversionStream = new AcmStream(sourceFormat, pcmFormat);
}
/// <summary>
/// Output format (PCM)
/// </summary>
public WaveFormat OutputFormat { get { return pcmFormat; } }
/// <summary>
/// Decompresses a frame
/// </summary>
/// <param name="frame">The MP3 frame</param>
/// <param name="dest">destination buffer</param>
/// <param name="destOffset">Offset within destination buffer</param>
/// <returns>Bytes written into destination buffer</returns>
public int DecompressFrame(Mp3Frame frame, byte[] dest, int destOffset)
{
Array.Copy(frame.RawData, conversionStream.SourceBuffer, frame.FrameLength);
int sourceBytesConverted = 0;
int converted = conversionStream.Convert(frame.FrameLength, out sourceBytesConverted);
if (sourceBytesConverted != frame.FrameLength)
{
throw new InvalidOperationException(String.Format("Couldn't convert the whole MP3 frame (converted {0}/{1})",
sourceBytesConverted, frame.FrameLength));
}
Array.Copy(conversionStream.DestBuffer, 0, dest, destOffset, converted);
return converted;
}
/// <summary>
/// Disposes of this MP3 frame decompressor
/// </summary>
public void Dispose()
{
if (this.conversionStream != null)
{
this.conversionStream.Dispose();
this.conversionStream = null;
}
}
}
}
This is the form I use:
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using NAudio.Wave;
using System.Net;
using System.Threading;
using System.Net.Sockets;
using System.IO;
using System.Diagnostics;
using System.ComponentModel.Composition;
namespace NAudioDemo
{
public partial class MP3StreamingPanel : Form
{
public MP3StreamingPanel()
{
InitializeComponent();
this.volumeSlider1.VolumeChanged += new EventHandler(volumeSlider1_VolumeChanged);
this.Disposed += this.MP3StreamingPanel_Disposing;
}
enum StreamingPlaybackState
{
Stopped,
Playing,
Buffering,
Paused
}
void volumeSlider1_VolumeChanged(object sender, EventArgs e)
{
if (this.volumeProvider != null)
{
this.volumeProvider.Volume = this.volumeSlider1.Volume;
}
}
private BufferedWaveProvider bufferedWaveProvider;
private IWavePlayer waveOut;
private volatile StreamingPlaybackState playbackState;
private volatile bool fullyDownloaded;
private HttpWebRequest webRequest;
private VolumeWaveProvider16 volumeProvider;
delegate void ShowErrorDelegate(string message);
private void ShowError(string message)
{
if (this.InvokeRequired)
{
this.BeginInvoke(new ShowErrorDelegate(ShowError), message);
}
else
{
MessageBox.Show(message);
}
}
private void StreamMP3(object state)
{
this.fullyDownloaded = false;
string url = (string)state;
webRequest = (HttpWebRequest)WebRequest.Create(url);
HttpWebResponse resp = null;
try
{
resp = (HttpWebResponse)webRequest.GetResponse();
}
catch(WebException e)
{
if (e.Status != WebExceptionStatus.RequestCanceled)
{
ShowError(e.Message);
}
return;
}
byte[] buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
IMp3FrameDecompressor decompressor = null;
try
{
using (var responseStream = resp.GetResponseStream())
{
var readFullyStream = new ReadFullyStream(responseStream);
do
{
if (bufferedWaveProvider != null && bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes < bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
{
Debug.WriteLine("Buffer getting full, taking a break");
Thread.Sleep(500);
}
else
{
Mp3Frame frame = null;
try
{
frame = Mp3Frame.LoadFromStream(readFullyStream);
}
catch (EndOfStreamException)
{
this.fullyDownloaded = true;
// reached the end of the MP3 file / stream
break;
}
catch (WebException)
{
// probably we have aborted download from the GUI thread
break;
}
if (decompressor == null)
{
// don't think these details matter too much - just help ACM select the right codec
// however, the buffered provider doesn't know what sample rate it is working at
// until we have a frame
WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate, frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
decompressor = new AcmMp3FrameDecompressor(waveFormat);
this.bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat);
this.bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(20); // allow us to get well ahead of ourselves
//this.bufferedWaveProvider.BufferedDuration = 250;
}
int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
//Debug.WriteLine(String.Format("Decompressed a frame {0}", decompressed));
bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
}
} while (playbackState != StreamingPlaybackState.Stopped);
Debug.WriteLine("Exiting");
// was doing this in a finally block, but for some reason
// we are hanging on response stream .Dispose so never get there
decompressor.Dispose();
}
}
finally
{
if (decompressor != null)
{
decompressor.Dispose();
}
}
}
private void buttonPlay_Click(object sender, EventArgs e)
{
if (playbackState == StreamingPlaybackState.Stopped)
{
playbackState = StreamingPlaybackState.Buffering;
this.bufferedWaveProvider = null;
ThreadPool.QueueUserWorkItem(new WaitCallback(StreamMP3), textBoxStreamingUrl.Text);
timer1.Enabled = true;
}
else if (playbackState == StreamingPlaybackState.Paused)
{
playbackState = StreamingPlaybackState.Buffering;
}
}
private void StopPlayback()
{
if (playbackState != StreamingPlaybackState.Stopped)
{
if (!fullyDownloaded)
{
webRequest.Abort();
}
this.playbackState = StreamingPlaybackState.Stopped;
if (waveOut != null)
{
waveOut.Stop();
waveOut.Dispose();
waveOut = null;
}
timer1.Enabled = false;
// n.b. streaming thread may not yet have exited
Thread.Sleep(500);
ShowBufferState(0);
}
}
private void ShowBufferState(double totalSeconds)
{
}
private void timer1_Tick(object sender, EventArgs e)
{
if (playbackState != StreamingPlaybackState.Stopped)
{
if (this.waveOut == null && this.bufferedWaveProvider != null)
{
Debug.WriteLine("Creating WaveOut Device");
this.waveOut = CreateWaveOut();
waveOut.PlaybackStopped += waveOut_PlaybackStopped;
this.volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider);
this.volumeProvider.Volume = this.volumeSlider1.Volume;
waveOut.Init(volumeProvider);
progressBarBuffer.Maximum = (int)bufferedWaveProvider.BufferDuration.TotalMilliseconds;
}
else if (bufferedWaveProvider != null)
{
var bufferedSeconds = bufferedWaveProvider.BufferedDuration.TotalSeconds;
ShowBufferState(bufferedSeconds);
// make it stutter less if we buffer up a decent amount before playing
if (bufferedSeconds < 0.5 && this.playbackState == StreamingPlaybackState.Playing && !this.fullyDownloaded)
{
this.playbackState = StreamingPlaybackState.Buffering;
waveOut.Pause();
Debug.WriteLine(String.Format("Paused to buffer, waveOut.PlaybackState={0}", waveOut.PlaybackState));
}
else if (bufferedSeconds > 1 && this.playbackState == StreamingPlaybackState.Buffering)
{
waveOut.Play();
Debug.WriteLine(String.Format("Started playing, waveOut.PlaybackState={0}", waveOut.PlaybackState));
this.playbackState = StreamingPlaybackState.Playing;
}
else if (this.fullyDownloaded && bufferedSeconds == 0)
{
Debug.WriteLine("Reached end of stream");
StopPlayback();
}
}
}
}
private IWavePlayer CreateWaveOut()
{
return new WaveOut();
//return new DirectSoundOut();
}
private void MP3StreamingPanel_Disposing(object sender, EventArgs e)
{
StopPlayback();
}
private void buttonPause_Click(object sender, EventArgs e)
{
if (playbackState == StreamingPlaybackState.Playing || playbackState == StreamingPlaybackState.Buffering)
{
waveOut.Pause();
Debug.WriteLine(String.Format("User requested Pause, waveOut.PlaybackState={0}", waveOut.PlaybackState));
playbackState = StreamingPlaybackState.Paused;
}
}
private void buttonStop_Click(object sender, EventArgs e)
{
StopPlayback();
}
private void waveOut_PlaybackStopped(object sender, StoppedEventArgs e)
{
Debug.WriteLine("Playback Stopped");
if (e.Exception != null)
{
MessageBox.Show(String.Format("Playback Error {0}", e.Exception.Message));
}
}
}
[Export(typeof(INAudioDemoPlugin))]
public class MP3StreamingPanelPlugin : INAudioDemoPlugin
{
public string Name
{
get { return "MP3 Streaming"; }
}
public Control CreatePanel()
{
return new MP3StreamingPanel();
}
}
}
I get the error on this line:
Array.Copy(frame.RawData, conversionStream.SourceBuffer, frame.FrameLength);
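No answer is recorded here, but one plausible cause (an assumption, not confirmed in the thread) is that Mp3Frame.LoadFromStream can return null when the stream ends instead of throwing, so a null frame reaches DecompressFrame and frame.RawData dereferences null. A defensive sketch for the frame-reading part of StreamMP3:

Mp3Frame frame = null;
try
{
    frame = Mp3Frame.LoadFromStream(readFullyStream);
}
catch (EndOfStreamException)
{
    this.fullyDownloaded = true;
    break;
}
if (frame == null)
{
    // End of stream reached without an exception; stop reading instead of
    // handing a null frame to the decompressor.
    this.fullyDownloaded = true;
    break;
}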

Saving to a file in C#, Visual Studio 2010, errors

G'day guys,
I have a small error with my program: when I try to save to a file, an error occurs that says "A required privilege is not held by the client." I'm not sure how to fix this, as I am running it on my laptop, which only I use, and unless I have set up administrator status incorrectly I don't know what is going on.
I've posted my code below just to be sure.
Cheers.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.IO;
using System.IO.Ports;
using System.Threading;
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
namespace WindowsFormsApplication1
{
public partial class Form1 : Form
{
delegate void addlistitemcallback(string value);
public static string inputdata;
public static int MaximumSpeed, maximumRiderInput, RiderInput, Time, CurrentSpeed, DistanceTravelled, MaximumMotorOutput, MotorOutput, InputSpeed;
public static string SaveDataString;
public Thread Serial;
public static SerialPort SerialData;
public static string[] portlist = SerialPort.GetPortNames();
public static string[] SaveData = new string[4];
public static string directory = "C:\\";
public Form1()
{
Serial = new Thread(ReadData);
InitializeComponent();
int Count = 0;
for (Count = 0; Count < portlist.Length; Count++)
{
ComPortCombo.Items.Add(portlist[Count]);
}
}
private void Form1_Load(object sender, EventArgs e)
{
}
private void StartDataButton_Click(object sender, EventArgs e)
{
SerialData = new SerialPort(ComPortCombo.Text, 19200, Parity.None, 8, StopBits.One);
SerialData.Open();
SerialData.WriteLine("P");
Serial.Start();
StartDataButton.Enabled = false;
EndDataButton.Enabled = true;
ComPortCombo.Enabled = false;
CurrentSpeed = 0;
MaximumSpeed = 0;
Time = 0;
DistanceTravelled = 0;
MotorOutput = 0;
RiderInput = 0;
SaveData[0] = "";
SaveData[1] = "";
SaveData[2] = "";
SaveData[3] = "";
SaveDataButton.Enabled = false;
if (SerialData.IsOpen)
{
ComPortStatusLabel.Text = "OPEN";
SerialData.NewLine = "/n";
SerialData.WriteLine("0");
SerialData.WriteLine("/n");
}
}
private void EndDataButton_Click(object sender, EventArgs e)
{
SerialData.Close();
SaveDataButton.Enabled = true;
//SerialData.WriteLine("1");
//SerialData.WriteLine("0");
if (!SerialData.IsOpen)
{
ComPortStatusLabel.Text = "CLOSED";
}
int i = 0;
for (i = 0; i < 4; i++)
{
if (i == 0)
{
SaveDataString = "MaximumSpeed during the Ride was = " + Convert.ToString(MaximumSpeed) + "m/h";
SaveData[i] = SaveDataString;
}
if (i == 1)
{
SaveDataString = "Total Distance Travelled = " + Convert.ToString(DistanceTravelled) + "m";
SaveData[i] = SaveDataString;
}
if (i == 2)
{
SaveDataString = "Maximum Rider Input Power = " + Convert.ToString(maximumRiderInput) + "Watts";
SaveData[i] = SaveDataString;
}
if (i == 3)
{
SaveDataString = "Maximum Motor Output Power = " + Convert.ToString(MaximumMotorOutput) + "Watts";
SaveData[i] = SaveDataString;
}
}
}
private void SaveDataButton_Click(object sender, EventArgs e)
{
//File.WriteAllBytes(directory + "image" + imageNO + ".txt", ); //saves the file to Disk
File.WriteAllLines("C:\\" + "BikeData.txt", SaveData);
}
public void updateSpeedtextbox(string value)
{
if (SpeedTextBox.InvokeRequired)
{
addlistitemcallback d = new addlistitemcallback(updateSpeedtextbox);
Invoke(d, new object[] { value });
}
else
{
SpeedTextBox.Text = value;
}
}
public void updatePowertextbox(string value)
{
if (RiderInputTextBox.InvokeRequired)
{
addlistitemcallback d = new addlistitemcallback(updatePowertextbox);
Invoke(d, new object[] { value });
}
else
{
RiderInputTextBox.Text = value;
}
}
public void updateDistancetextbox(string value)
{
if (DistanceTravelledTextBox.InvokeRequired)
{
addlistitemcallback d = new addlistitemcallback(updateDistancetextbox);
Invoke(d, new object[] { value });
}
else
{
DistanceTravelledTextBox.Text = value;
}
}
public void updateMotortextbox(string value)
{
if (MotorOutputTextBox.InvokeRequired)
{
addlistitemcallback d = new addlistitemcallback(updateMotortextbox);
Invoke(d, new object[] { value });
}
else
{
MotorOutputTextBox.Text = value;
}
}
public void ReadData()
{
int counter = 0;
while (SerialData.IsOpen)
{
if (counter == 0)
{
try
{
InputSpeed = Convert.ToInt16(SerialData.ReadChar());
if (CurrentSpeed > MaximumSpeed)
{
MaximumSpeed = CurrentSpeed;
}
updateSpeedtextbox("Current Wheel Speed = " + Convert.ToString(InputSpeed) + "Km/h");
DistanceTravelled = DistanceTravelled + (Convert.ToInt16(InputSpeed) * Time);
updateDistancetextbox("Total Distance Travelled = " + Convert.ToString(DistanceTravelled) + "Km");
}
catch (Exception) { }
}
if (counter == 1)
{
try
{
RiderInput = Convert.ToInt16(SerialData.ReadChar());
if (RiderInput > maximumRiderInput)
{
maximumRiderInput = RiderInput;
}
updatePowertextbox("Current Rider Input Power =" + Convert.ToString(RiderInput) + "Watts");
}
catch (Exception) { }
}
if (counter == 2)
{
try
{
MotorOutput = Convert.ToInt16(SerialData.ReadChar());
if (MotorOutput > MaximumMotorOutput)
{
MaximumMotorOutput = MotorOutput;
}
updateMotortextbox("Current Motor Output = " + Convert.ToString(MotorOutput) + "Watts");
}
catch (Exception) { }
}
counter++;
if (counter == 3)
{
counter = 0;
}
}
}
private void Form1_Closed(object sender, EventArgs e)
{
if (SerialData.IsOpen)
{
SerialData.Close();
}
}
private void ComPortCombo_SelectedIndexChanged(object sender, EventArgs e)
{
StartDataButton.Enabled = true;
}
private void DistanceTravelledTextBox_TextChanged(object sender, EventArgs e)
{
}
}
}
You probably don't have write access to C:\. Try changing the save path to "C:\Users\{YourName}\Documents\BikeData.txt" instead.
Or start Visual Studio with administrative privileges by right-clicking its icon and choosing "Run as Administrator".
File.WriteAllLines("C:\" + "BikeData.txt", SaveData);
File.WriteAllLines(string, string[]) throws a SecurityException when the user does not have the rights to write to a particular directory or drive, so you either have to grant write permission or save somewhere you can write; refer to the File.WriteAllLines documentation.
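A small sketch of the path change suggested above, using Environment.GetFolderPath so the location resolves for whichever user is logged in (the file name comes from the question; everything else is illustrative):

// In SaveDataButton_Click: save under the current user's Documents folder
// instead of the root of C:\.
string path = Path.Combine(
    Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments),
    "BikeData.txt");
File.WriteAllLines(path, SaveData);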
