NLayer MpegFile to SFML.Net SoundStream - C#

I'm currently working on extending SFML.Net with MP3 support, so I wrote a stream class which uses NLayer's MpegFile to decode the MP3.
public class Mp3StreamSFML : SoundStream
{
private MpegFile mp3file;
private int currentBufferSize;
private short[] currentBuffer;
public Mp3StreamSFML(String _filename)
{
mp3file = new MpegFile(_filename);
Initialize((uint)mp3file.Channels, (uint)mp3file.SampleRate);
currentBufferSize = 0;
currentBuffer = new short[currentBufferSize];
}
#region implemented abstract members of SoundStream
protected override bool OnGetData(out short[] samples)
{
if (currentBufferSize <= mp3file.Position)
{
byte[] buffer = new byte[512];
if (mp3file.ReadSamples(buffer, 0, buffer.Length) > 0)
{
Array.Resize(ref currentBuffer, currentBuffer.Length + (buffer.Length / 2));
Buffer.BlockCopy(buffer, 0, currentBuffer, currentBufferSize, buffer.Length);
currentBufferSize = currentBuffer.Length;
}
samples = currentBuffer;
return true;
}
else
{
samples = currentBuffer;
return false;
}
}
protected override void OnSeek(TimeSpan timeOffset)
{
mp3file.Position = (long)timeOffset.TotalSeconds;
}
#endregion
}
I use it this way:
try
{
stream = new Mp3StreamSFML(this.objProgram.getObjCuesheet().getAudiofilePath(true));
stream.Play();
log.debug("samplerate = " + stream.SampleRate);
}
catch(Exception ex)
{
log.fatal(ex.ToString());
}
Unfortunately, the correct sound isn't played; it just stutters and sounds really weird. What am I doing wrong? It seems like a problem between the two frameworks.
Thanks for your help.
Sven

Solved the problem this way:
using System;
using SFML.Audio;
using NLayer;
using System.Threading;
namespace AudioCuesheetEditor.AudioBackend.SFML
{
/// <summary>
/// Class for mp3 decoded audio files to use in SFML as Soundstream, since SFML doesn't support mp3 decoding (for legal reasons).
/// </summary>
public class Mp3StreamSFML : SoundStream
{
private static readonly Logfile log = Logfile.getLogfile(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
private MpegFile mp3file;
private Mutex mutex;
/// <summary>
/// Initializes a new instance of the <see cref="AudioCuesheetEditor.AudioBackend.SFML.Mp3StreamSFML"/> class.
/// </summary>
/// <param name="_filename">Full path to the file</param>
public Mp3StreamSFML(String _filename)
{
log.debug("Constructor called with " + _filename);
this.mp3file = new MpegFile(_filename);
this.Initialize((uint)this.mp3file.Channels, (uint)this.mp3file.SampleRate);
this.mutex = new Mutex();
}
public TimeSpan Duration
{
get
{
log.debug("Duration = " + this.mp3file.Duration);
return this.mp3file.Duration;
}
}
#region implemented abstract members of SoundStream
protected override bool OnGetData(out short[] samples)
{
log.debug("OnGetData called");
this.mutex.WaitOne();
//Buffer data for about 1 second
float[] normalizedaudiodata = new float[48000];
int readSamples = this.mp3file.ReadSamples(normalizedaudiodata, 0, normalizedaudiodata.Length);
short[] pcmaudiodata;
if (readSamples > 0)
{
pcmaudiodata = new short[readSamples]; // converted data
for (int i = 0; i < readSamples; i++)
{
// clip the data
if (normalizedaudiodata[i] > 1.0f)
{
normalizedaudiodata[i] = 1.0f;
}
else
{
if (normalizedaudiodata[i] < -1.0f)
{
normalizedaudiodata[i] = -1.0f;
}
}
// convert to pcm data
pcmaudiodata[i] = (short)(normalizedaudiodata[i] * short.MaxValue);
}
samples = pcmaudiodata;
this.mutex.ReleaseMutex();
return true;
}
else
{
samples = null;
this.mutex.ReleaseMutex();
return false;
}
}
protected override void OnSeek(TimeSpan timeOffset)
{
log.debug("OnSeek called with " + timeOffset);
this.mutex.WaitOne();
if ((timeOffset <= this.mp3file.Duration) && (timeOffset >= TimeSpan.Zero))
{
this.mp3file.Time = timeOffset;
}
this.mutex.ReleaseMutex();
}
#endregion
}
}
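For reference, a minimal usage sketch of the class above (the file path and the logger are placeholders, mirroring the snippet from the question):
try
{
    var stream = new Mp3StreamSFML(@"C:\music\example.mp3");
    log.debug("duration = " + stream.Duration);
    log.debug("samplerate = " + stream.SampleRate);
    stream.Play();
}
catch (Exception ex)
{
    log.fatal(ex.ToString());
}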

Related

Using Named Pipes in C#

I've been trying to use named pipes in C# for a while but can't get it to work.
I'm working with the following code:
internal class SplitManager
{
public static void initialize()
{
Debug.Log("Initializing LiveSplit pipe");
SplitManager.pipeClientStream = new NamedPipeClientStream("//.//pipe//LiveSplit");
Debug.Log("Successfully initialized LiveSplit pipe");
}
public static void startRun()
{
Debug.Log("[PIPE]: start");
SplitManager.WriteString("start");
}
public static void performSplit()
{
Debug.Log("[PIPE]: split");
SplitManager.WriteString("split");
}
private static void WriteString(string str)
{
SplitManager.pipeClientStream.Connect();
new StreamString(SplitManager.pipeClientStream).WriteString(str);
SplitManager.pipeClientStream.Close();
}
private static NamedPipeClientStream pipeClientStream;
}
public class StreamString
{
public StreamString(Stream ioStream)
{
this.ioStream = ioStream;
this.streamEncoding = new UnicodeEncoding();
}
public string ReadString()
{
int num = this.ioStream.ReadByte() * 256;
num += this.ioStream.ReadByte();
byte[] array = new byte[num];
this.ioStream.Read(array, 0, num);
return this.streamEncoding.GetString(array);
}
public int WriteString(string outString)
{
byte[] bytes = this.streamEncoding.GetBytes(outString);
int num = bytes.Length;
if (num > 65535)
{
num = 65535;
}
this.ioStream.WriteByte((byte)(num / 256));
this.ioStream.WriteByte((byte)(num & 255));
this.ioStream.Write(bytes, 0, num);
this.ioStream.Flush();
return bytes.Length + 2;
}
private Stream ioStream;
private UnicodeEncoding streamEncoding;
}
When I run this code I get a Win32Exception with an error message saying that it cannot find the specified file. I'm 100% sure the path is fine, since I checked it with the PowerShell command [System.IO.Directory]::GetFiles("\\.\\pipe\\"). Any ideas why this error happens?
It turns out that NamedPipeClientStream adds the pipe prefix to the name itself, so simply replacing SplitManager.pipeClientStream = new NamedPipeClientStream("//.//pipe//LiveSplit"); with SplitManager.pipeClientStream = new NamedPipeClientStream("LiveSplit"); worked!
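For illustration, a minimal sketch of the corrected setup (the server name "." means the local machine; the pipe name "LiveSplit" and the StreamString helper are taken from the question):
using System.IO.Pipes;
// The constructor takes only the pipe name; the \\.\pipe\ prefix is added internally.
var pipeClientStream = new NamedPipeClientStream(".", "LiveSplit", PipeDirection.Out);
pipeClientStream.Connect(1000); // wait up to one second for the server to appear
new StreamString(pipeClientStream).WriteString("start");
pipeClientStream.Close();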

Unity 3D game using accelerometer sensor

I am developing a virtual reality Windows-based game. I will be using an accelerometer sensor to capture the player's hand movements and use them in the game, where the player uses them to fight an enemy. I managed to get the accelerometer readings from the sensor.
I just need an idea of how I can now integrate this with my game in Unity. I used .NET to get the readings from the sensor. The sensor is a TI SimpleLink SensorTag (CC2650STK), which connects via Bluetooth to my Windows Phone.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Windows.Devices.Bluetooth.GenericAttributeProfile;
using Windows.Devices.Enumeration;
using Windows.Storage.Streams;
using Buffer = Windows.Storage.Streams.Buffer;
namespace SensorTag
{
/// This class provides access to the SensorTag Accelerometer BLE data
public class BleAccelerometerService : BleGenericGattService
{
public BleAccelerometerService()
{
}
/// The version of the SensorTag device. 1=CC2541, 2=CC2650.
public int Version { get; set; }
static Guid AccelerometerServiceUuid = Guid.Parse("f000aa10-0451-4000-b000-000000000000");
static Guid AccelerometerCharacteristicUuid = Guid.Parse("f000aa11-0451-4000-b000-000000000000");
static Guid AccelerometerCharacteristicConfigUuid = Guid.Parse("f000aa12-0451-4000-b000-000000000000");
static Guid AccelerometerCharacteristicPeriodUuid = Guid.Parse("f000aa13-0451-4000-b000-000000000000");
Delegate _accelerometerValueChanged;
public event EventHandler<AccelerometerMeasurementEventArgs> AccelerometerMeasurementValueChanged
{
add
{
if (_accelerometerValueChanged != null)
{
_accelerometerValueChanged = Delegate.Combine(_accelerometerValueChanged, value);
}
else
{
_accelerometerValueChanged = value;
RegisterForValueChangeEvents(AccelerometerCharacteristicUuid);
}
}
remove
{
if (_accelerometerValueChanged != null)
{
_accelerometerValueChanged = Delegate.Remove(_accelerometerValueChanged, value);
if (_accelerometerValueChanged == null)
{
UnregisterForValueChangeEvents(AccelerometerCharacteristicUuid);
}
}
}
}
private async Task<int> GetConfig()
{
var ch = GetCharacteristic(AccelerometerCharacteristicConfigUuid);
if (ch != null)
{
var properties = ch.CharacteristicProperties;
if ((properties & GattCharacteristicProperties.Read) != 0)
{
var result = await ch.ReadValueAsync();
IBuffer buffer = result.Value;
DataReader reader = DataReader.FromBuffer(buffer);
var value = reader.ReadByte();
Debug.WriteLine("Acceleration config = " + value);
return (int)value;
}
}
return -1;
}
bool isReading;
public async Task StartReading()
{
if (!isReading)
{
await WriteCharacteristicByte(AccelerometerCharacteristicConfigUuid, 1);
isReading = true;
}
}
public async Task StopReading()
{
if (isReading)
{
isReading = false;
await WriteCharacteristicByte(AccelerometerCharacteristicConfigUuid, 0);
}
}
/// <summary>
/// Get the rate at which the accelerometer is being polled, in milliseconds.
/// </summary>
/// <returns>Returns the value read from the sensor or -1 if something goes wrong.</returns>
public async Task<int> GetPeriod()
{
byte v = await ReadCharacteristicByte(AccelerometerCharacteristicPeriodUuid, Windows.Devices.Bluetooth.BluetoothCacheMode.Uncached);
return (int)(v * 10);
}
/// <summary>
/// Set the rate at which accelerometer is being polled, in milliseconds.
/// </summary>
/// <param name="milliseconds">The delay between updates, accurate only to 10ms intervals. Maximum value is 2550.</param>
public async Task SetPeriod(int milliseconds)
{
int delay = milliseconds / 10;
byte p = (byte)delay;
if (p < 1)
{
p = 1;
}
await WriteCharacteristicByte(AccelerometerCharacteristicPeriodUuid, p);
}
private void OnAccelerationMeasurementValueChanged(AccelerometerMeasurementEventArgs args)
{
if (_accelerometerValueChanged != null)
{
((EventHandler<AccelerometerMeasurementEventArgs>)_accelerometerValueChanged)(this, args);
}
}
public async Task<bool> ConnectAsync(string deviceContainerId)
{
return await this.ConnectAsync(AccelerometerServiceUuid, deviceContainerId);
}
protected override void OnCharacteristicValueChanged(GattCharacteristic sender, GattValueChangedEventArgs eventArgs)
{
if (sender.Uuid == AccelerometerCharacteristicUuid)
{
if (_accelerometerValueChanged != null)
{
uint dataLength = eventArgs.CharacteristicValue.Length;
using (DataReader reader = DataReader.FromBuffer(eventArgs.CharacteristicValue))
{
if (dataLength == 3)
{
var data = new byte[dataLength];
reader.ReadBytes(data);
AccelerometerMeasurement measurement = new AccelerometerMeasurement();
sbyte x = (sbyte)data[0];
sbyte y = (sbyte)data[1];
sbyte z = (sbyte)data[2];
measurement.X = (double)x / 64.0;
measurement.Y = (double)y / 64.0;
measurement.Z = (double)z / 64.0;
OnAccelerationMeasurementValueChanged(new AccelerometerMeasurementEventArgs(measurement, eventArgs.Timestamp));
}
}
}
}
}
}
public class AccelerometerMeasurement
{
/// <summary>
/// Get/Set X accelerometer in units of 1 g (9.81 m/s^2).
/// </summary>
public double X { get; set;}
/// <summary>
/// Get/Set Y accelerometer in units of 1 g (9.81 m/s^2).
/// </summary>
public double Y { get; set;}
/// <summary>
/// Get/Set Z accelerometer in units of 1 g (9.81 m/s^2).
/// </summary>
public double Z { get; set;}
public AccelerometerMeasurement()
{
}
}
public class AccelerometerMeasurementEventArgs : EventArgs
{
public AccelerometerMeasurementEventArgs(AccelerometerMeasurement measurement, DateTimeOffset timestamp)
{
Measurement = measurement;
Timestamp = timestamp;
}
public AccelerometerMeasurement Measurement
{
get;
private set;
}
public DateTimeOffset Timestamp
{
get;
private set;
}
}
}
One requirement would be to convert the acceleration values into a quaternion:
private void ReadAcceleration(AccelerometerMeasurement measure){
Vector3 accel = new Vector3((float)measure.X,
(float)measure.Y, (float)measure.Z);
Quaternion rotation = Quaternion.LookRotation(accel,Vector3.forward);
}
The rotation quaternion can be used to set an object to the device rotation.
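As an illustration (not from the original post), here is a minimal MonoBehaviour sketch that applies such a rotation every frame; how the sensor readings actually reach Unity (a shared field, a thread-safe queue, etc.) is an assumption left open here:
using UnityEngine;
public class SensorRotation : MonoBehaviour
{
    // Hypothetical field updated from the BLE accelerometer callback.
    public Vector3 latestAcceleration = Vector3.up;
    void Update()
    {
        // Build a rotation from the measured acceleration vector and apply it to this object.
        Quaternion rotation = Quaternion.LookRotation(latestAcceleration, Vector3.forward);
        transform.rotation = rotation;
    }
}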

Beep in C# .NET application on server side

To debug a firewall delay issue, I need an application that will produce a beep on the server side when it detects an HTTP GET request.
This code (test.ashx):
<%@ WebHandler Language="C#" Class="TestHandler" %>
using System;
using System.Web;
public class TestHandler : IHttpHandler
{
public void ProcessRequest(HttpContext context)
{
HttpResponse Response = context.Response;
try
{
Response.Write("Before beep");
Console.Beep();
Response.Write("After beep");
}
catch (Exception ex)
{
Response.Write(ex.Message + "<br />\n" + ex.InnerException.Message);
}
}
public bool IsReusable { get { return false; } }
}
produces sound only when debugging in IIS Express. After moving the web app to IIS, the sound disappears.
The three easy ways of producing a sound are System.Console.Beep(), System.Media.SoundPlayer, and System.Media.SystemSounds.Beep().
Unfortunately, these methods only work in desktop applications, and won't work in service applications. When ASP.Net apps are run under IIS Express (a desktop app), these sound methods work. However, when ASP.Net apps are run under the IIS service, the sound methods don't work.
System.Console.Beep() ultimately calls the kernel32.dll Beep() function, which is restricted to desktop apps only (see the Requirements section of its documentation).
Same for System.Media.SoundPlayer and System.Media.SystemSounds.Beep(): they call the winmm.dll PlaySound() and the user32.dll MessageBeep() functions, respectively. They, too, are restricted to desktop apps.
One way to get sounds to play in a service is to use NAudio. It's easy to install via NuGet.
This chunk of code is the only way I could get the sound to play. It has to be played on a separate worker thread, and the execution of the worker thread needs to be paused to let the .wav file finish playing.
using System;
using System.Diagnostics;
using System.Threading;
using NAudio.Dsp;
using NAudio.Wave;
...
protected void Button1_Click(object sender, EventArgs e)
{
var waveFilename = @"c:\Windows\Media\tada.wav";
/* Trying to play the .wav file on the main thread
doesn't seem to work. */
ThreadPool.QueueUserWorkItem(
(state) =>
{
using (var audioPlayback = new AudioPlayback())
{
audioPlayback.Load(waveFilename);
audioPlayback.Play(); // Asynchronous.
/* Need to sleep for the approximate length of .wav file,
otherwise no sound is produced because of the
asynchronous Play() call. */
Thread.Sleep(2000);
}
});
}
Here's the supporting code taken from code in NAudio's NAudioWPFDemo project:
public class MaxSampleEventArgs : EventArgs
{
[DebuggerStepThrough]
public MaxSampleEventArgs(float minValue, float maxValue)
{
this.MaxSample = maxValue;
this.MinSample = minValue;
}
public float MaxSample { get; private set; }
public float MinSample { get; private set; }
}
public class FftEventArgs : EventArgs
{
[DebuggerStepThrough]
public FftEventArgs(Complex[] result)
{
this.Result = result;
}
public Complex[] Result { get; private set; }
}
public class SampleAggregator : ISampleProvider
{
// volume
public event EventHandler<MaxSampleEventArgs> MaximumCalculated;
private float maxValue;
private float minValue;
public int NotificationCount { get; set; }
int count;
// FFT
public event EventHandler<FftEventArgs> FftCalculated;
public bool PerformFFT { get; set; }
private readonly Complex[] fftBuffer;
private readonly FftEventArgs fftArgs;
private int fftPos;
private readonly int fftLength;
private int m;
private readonly ISampleProvider source;
private readonly int channels;
public SampleAggregator(ISampleProvider source, int fftLength = 1024)
{
channels = source.WaveFormat.Channels;
if (!IsPowerOfTwo(fftLength))
throw new ArgumentException("FFT Length must be a power of two");
this.m = (int) Math.Log(fftLength, 2.0);
this.fftLength = fftLength;
this.fftBuffer = new Complex[fftLength];
this.fftArgs = new FftEventArgs(fftBuffer);
this.source = source;
}
private bool IsPowerOfTwo(int x)
{
return (x & (x - 1)) == 0;
}
public void Reset()
{
count = 0;
maxValue = minValue = 0;
}
private void Add(float value)
{
if (PerformFFT && FftCalculated != null)
{
fftBuffer[fftPos].X = (float) (value * FastFourierTransform.HammingWindow(fftPos, fftLength));
fftBuffer[fftPos].Y = 0;
fftPos++;
if (fftPos >= fftBuffer.Length)
{
fftPos = 0;
// 1024 = 2^10
FastFourierTransform.FFT(true, m, fftBuffer);
FftCalculated(this, fftArgs);
}
}
maxValue = Math.Max(maxValue, value);
minValue = Math.Min(minValue, value);
count++;
if (count >= NotificationCount && NotificationCount > 0)
{
if (MaximumCalculated != null)
MaximumCalculated(this, new MaxSampleEventArgs(minValue, maxValue));
Reset();
}
}
public WaveFormat WaveFormat { get { return source.WaveFormat; } }
public int Read(float[] buffer, int offset, int count)
{
var samplesRead = source.Read(buffer, offset, count);
for (int n = 0; n < samplesRead; n += channels)
Add(buffer[n + offset]);
return samplesRead;
}
}
public class AudioPlayback : IDisposable
{
private IWavePlayer _playbackDevice;
private WaveStream _fileStream;
public void Load(string fileName)
{
Stop();
CloseFile();
EnsureDeviceCreated();
OpenFile(fileName);
}
private void CloseFile()
{
if (_fileStream != null)
{
_fileStream.Dispose();
_fileStream = null;
}
}
private void OpenFile(string fileName)
{
try
{
var inputStream = new AudioFileReader(fileName);
_fileStream = inputStream;
var aggregator = new SampleAggregator(inputStream);
aggregator.NotificationCount = inputStream.WaveFormat.SampleRate / 100;
aggregator.PerformFFT = true;
_playbackDevice.Init(aggregator);
}
catch
{
CloseFile();
throw;
}
}
private void EnsureDeviceCreated()
{
if (_playbackDevice == null)
CreateDevice();
}
private void CreateDevice()
{
_playbackDevice = new WaveOut { DesiredLatency = 200 };
}
public void Play()
{
if (_playbackDevice != null && _fileStream != null && _playbackDevice.PlaybackState != PlaybackState.Playing)
_playbackDevice.Play();
}
public void Pause()
{
if (_playbackDevice != null)
_playbackDevice.Pause();
}
public void Stop()
{
if (_playbackDevice != null)
_playbackDevice.Stop();
if (_fileStream != null)
_fileStream.Position = 0;
}
public void Dispose()
{
Stop();
CloseFile();
if (_playbackDevice != null)
_playbackDevice.Dispose();
}
}
Try this: System.Media.SystemSounds.Beep.Play();

Binding Library Mono for Android

I want to build an application with MonoDroid to stream live video from an IP camera (in MJPEG format) to my tablet. After digging around the internet, I found that there is an MJPEG library project written in Java from here. It has two files, MjpegView.java and MjpegInputStream.java, both of which I include here:
MjpegView.java
package de.mjpegsample.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) { super(context); init(context); }
public void surfaceCreated(SurfaceHolder holder) { surfaceDone = true; }
public void showFps(boolean b) { showFps = b; }
public void setSource(MjpegInputStream source) { mIn = source; startPlayback();}
public void setOverlayPaint(Paint p) { overlayPaint = p; }
public void setOverlayTextColor(int c) { overlayTextColor = c; }
public void setOverlayBackgroundColor(int c) { overlayBackgroundColor = c; }
public void setOverlayPosition(int p) { ovlPos = p; }
public void setDisplayMode(int s) { displayMode = s; }
}
MjpegInputStream.Java
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
So I converted it (actually, created a C# wrapper) with a Binding Library project. I followed the sample code tutorial of that project, as shown below:
The sample itself:
public class MjpegSample extends Activity {
private MjpegView mv;
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://webcam5.hrz.tu-darmstadt.de/axis-cgi/mjpg/video.cgi?resolution=320x240";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
What I have done in MonoDroid:
namespace AndroidApplication8
{
[Activity(Label = "AndroidApplication8", MainLauncher = true, Icon = "#drawable/icon")]
public class Activity1 : Activity
{
int count = 1;
protected override void OnCreate(Bundle bundle)
{
base.OnCreate(bundle);
String URL = "rtsp://192.168.1.3/Mjpeg/video.cgi";
var mv = new MjpegView(this);
SetContentView(mv);
**mv.SetSource(MjpegInputStream.Read(URL));
mv.SetDisplayMode(MjpegView.SizeBestFit);
mv.StartPlayback();
}
}
}
But it gives me an error on the line marked with ** when it tries to execute MjpegInputStream.Read(), and it just jumps into the class converted from the native Java files without any further information.
You should check your video type. For example, if the video is compressed at the source (before it reaches your Android device), you need to decode it before displaying it. You could write code in Java, for example, to verify the incoming stream from the camera first (don't use Android's built-in browser) and then decode it manually.
Good luck!

Is there a way to fake a DirectShow filter in a program?

I have an IP camera that delivers a char buffer containing an image over the network. I can't access it until I set up the connection to it in a program. I am trying to dissect the Windows source filter code and I'm not making much progress, so I thought I'd ask whether it's possible to just take a buffer like that and cast it to something that could then connect a pin to an AVI Splitter or similar in DirectShow/.NET.
(video buffer from IP Cam) -> (???) -> (AVI Splitter) -> (Profit)
Update
I have my program capturing video in one namespace, and I have this code from the GSSF in its own namespace. I pass a pointer with an image from the cam namespace to the GSSF namespace. This only occurs once, but the graph streams from this one image while the camera streams from the network. Is there a way to continually pass the buffer from the cam to the GSSF, or should I combine the namespaces somehow? I tried sending the main camera pointer to the GSSF, but it crashed because it was accessing the pointer while it was being written. Maybe if I grabbed an image, passed the pointer, and waited to grab a new one?
Update
I shrunk my code, and now that I look at it, I don't believe I'm handling the namespaces correctly either.
namespace Cam_Controller
{
static byte[] mainbyte = new byte[1280*720*2];
static IntPtr main_ptr = new IntPtr();
//(this function is threaded)
static void Trial(NPvBuffer mBuffer, NPvDisplayWnd mDisplayWnd, VideoCompression compressor)
{
Functions function = new Functions();
Defines define = new Defines();
NPvResult operationalResult = new NPvResult();
VideoCompression mcompressor = new VideoCompression();
int framecount = 0;
while (!Stopping && AcquiringImages)
{
Mutex lock_video = new Mutex();
NPvResult result = mDevice.RetrieveNextBuffer(mBuffer, operationalResult);
if(result.isOK())
{
framecount++;
wer = (int)mDisplayWnd.Display(mBuffer, wer);
main_ptr = (IntPtr)mBuffer.GetMarshalledBuffer();
Marshal.Copy(main_ptr, mainbyte, 0, 720 * 2560);
}
}
}
private void button7_Click(object sender, EventArgs e)
{
IntPtr dd = (IntPtr)mBuffer.GetMarshalledBuffer();
Marshal.Copy(dd, main_byte1, 0, 720 * 2560);
play = new VisiCam_Controller.DxPlay.DxPlay("", panel9, main_byte1);
play.Start();
}
namespace DxPlay
{
public class DxPlay
{
public DxPlay(string sPath, Control hWin, byte[] color)
{
try
{
// pick one of our image providers
//m_ImageHandler = new ImageFromFiles(sPath, 24);
m_ImageHandler = new ImageFromPixels(20, color);
//m_ImageHandler = new ImageFromMpg(@"c:\c1.mpg");
//m_ImageHandler = new ImageFromMpg(sPath);
//m_ImageHandler = new ImageFromMP3(@"c:\vss\media\track3.mp3");
// Set up the graph
SetupGraph(hWin);
}
catch
{
Dispose();
throw;
}
}
}
abstract internal class imagehandler
internal class imagefrompixels
{
private int[] mainint = new int[720 * 1280];
unsafe public ImageFromPixels(long FPS, byte[] x)
{
long fff = 720 * 1280 * 3;
mainptr = new IntPtr(fff);
for (int p = 0; p < 720 * 640; p++)
{
U = (x[ p * 4 + 0]);
Y = (x[p * 4 + 1]);
V = (x[p * 4 + 2]);
Y2 = (x[p * 4 + 3]);
int one = V << 16 | Y << 8 | U;
int two = V << 16 | Y2 << 8 | U;
mainint[p * 2 + 0] = one;
mainint[p * 2 + 1] = two;
}
m_FPS = UNIT / FPS;
m_b = 211;
m_g = 197;
}
}
}
}
There's also GetImage, but that's relatively the same: copy the buffer into the pointer. What happens is I grab a buffer of the image and send it to the DxPlay class. It is able to process it and put it on the DirectShow line with no problems, but it never updates nor gets updated because it's just a single buffer. If I instead send DxPlay an IntPtr holding the address of the image buffer, the code crashes for accessing memory. I assume the ImageFromPixels code (which isn't shown now; change
(x[p * 4 + #])
to
(IntPtr)((x passed as an IntPtr).ToInt64() + p * 4 + #)
)
is accessing the memory of the pointer while the Cam_Controller class is editing it. I make and pass copies of the IntPtrs, and new IntPtrs, but they fail halfway through the conversion.
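One way to avoid that race (a sketch with illustrative names, not the actual project code) is to copy each frame into a snapshot buffer under a lock on the camera thread, and let the DirectShow side read only the snapshot:
using System;
using System.Runtime.InteropServices;
static class FrameExchange
{
    static readonly object frameLock = new object();
    static readonly byte[] latestFrame = new byte[1280 * 720 * 2];
    // Called on the acquisition thread after RetrieveNextBuffer succeeds.
    public static void PublishFrame(IntPtr cameraBuffer, int length)
    {
        lock (frameLock)
        {
            Marshal.Copy(cameraBuffer, latestFrame, 0, length);
        }
    }
    // Called from GetImage/SampleCallback on the graph thread.
    public static void CopyLatestFrame(byte[] destination)
    {
        lock (frameLock)
        {
            int count = Math.Min(latestFrame.Length, destination.Length);
            Buffer.BlockCopy(latestFrame, 0, destination, 0, count);
        }
    }
}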
If you want to do this in .NET, the following steps are needed:
Use the DirectShow.NET Generic Sample Source Filter (GSSF.AX) from the Misc/GSSF directory within the sample package. A source filter is always a COM module, so you need to register it too using "RegSvr32 GSSF.ax".
Implement a bitmap provider in .NET
Setup a graph, and connect the pin from the GSSF to the implementation of the bitmap provider.
Pray.
I am using the following within a project, and made it reusable for future usage.
The code (not the best, and not finished, but a working start); this takes an IVideoSource, which is below:
public class VideoSourceToVideo : IDisposable
{
object locker = new object();
public event EventHandler<EventArgs> Starting;
public event EventHandler<EventArgs> Stopping;
public event EventHandler<EventArgs> Completed;
/// <summary> graph builder interface. </summary>
private DirectShowLib.ICaptureGraphBuilder2 captureGraphBuilder = null;
DirectShowLib.IMediaControl mediaCtrl = null;
IMediaEvent mediaEvent = null;
bool stopMediaEventLoop = false;
Thread mediaEventThread;
/// <summary> Dimensions of the image, calculated once in constructor. </summary>
private readonly VideoInfoHeader videoInfoHeader;
IVideoSource source;
public VideoSourceToVideo(IVideoSource source, string destFilename, string encoderName)
{
try
{
this.source = source;
// Set up the capture graph
SetupGraph(destFilename, encoderName);
}
catch
{
Dispose();
throw;
}
}
/// <summary> release everything. </summary>
public void Dispose()
{
StopMediaEventLoop();
CloseInterfaces();
}
/// <summary> build the capture graph for grabber. </summary>
private void SetupGraph(string destFilename, string encoderName)
{
int hr;
// Get the graphbuilder object
captureGraphBuilder = new DirectShowLib.CaptureGraphBuilder2() as DirectShowLib.ICaptureGraphBuilder2;
IFilterGraph2 filterGraph = new DirectShowLib.FilterGraph() as DirectShowLib.IFilterGraph2;
mediaCtrl = filterGraph as DirectShowLib.IMediaControl;
IMediaFilter mediaFilt = filterGraph as IMediaFilter;
mediaEvent = filterGraph as IMediaEvent;
captureGraphBuilder.SetFiltergraph(filterGraph);
IBaseFilter aviMux;
IFileSinkFilter fileSink = null;
hr = captureGraphBuilder.SetOutputFileName(MediaSubType.Avi, destFilename, out aviMux, out fileSink);
DsError.ThrowExceptionForHR(hr);
DirectShowLib.IBaseFilter compressor = DirectShowUtils.GetVideoCompressor(encoderName);
if (compressor == null)
{
throw new InvalidCodecException(encoderName);
}
hr = filterGraph.AddFilter(compressor, "compressor");
DsError.ThrowExceptionForHR(hr);
// Our data source
IBaseFilter source = (IBaseFilter)new GenericSampleSourceFilter();
// Get the pin from the filter so we can configure it
IPin ipin = DsFindPin.ByDirection(source, PinDirection.Output, 0);
try
{
// Configure the pin using the provided BitmapInfo
ConfigurePusher((IGenericSampleConfig)ipin);
}
finally
{
Marshal.ReleaseComObject(ipin);
}
// Add the source filter to the graph
hr = filterGraph.AddFilter(source, "GenericSampleSourceFilter");
DsError.ThrowExceptionForHR(hr);
hr = captureGraphBuilder.RenderStream(null, null, source, compressor, aviMux);
DsError.ThrowExceptionForHR(hr);
IMediaPosition mediaPos = filterGraph as IMediaPosition;
hr = mediaCtrl.Run();
DsError.ThrowExceptionForHR(hr);
}
private void ConfigurePusher(IGenericSampleConfig ips)
{
int hr;
source.SetMediaType(ips);
// Specify the callback routine to call with each sample
hr = ips.SetBitmapCB(source);
DsError.ThrowExceptionForHR(hr);
}
private void StartMediaEventLoop()
{
mediaEventThread = new Thread(MediaEventLoop)
{
Name = "Offscreen Vid Player Medialoop",
IsBackground = false
};
mediaEventThread.Start();
}
private void StopMediaEventLoop()
{
stopMediaEventLoop = true;
if (mediaEventThread != null)
{
mediaEventThread.Join();
}
}
public void MediaEventLoop()
{
MediaEventLoop(x => PercentageCompleted = x);
}
public double PercentageCompleted
{
get;
private set;
}
// FIXME this needs some work, to be completely in-tune with needs.
public void MediaEventLoop(Action<double> UpdateProgress)
{
mediaEvent.CancelDefaultHandling(EventCode.StateChange);
//mediaEvent.CancelDefaultHandling(EventCode.Starvation);
while (stopMediaEventLoop == false)
{
try
{
EventCode ev;
IntPtr p1, p2;
if (mediaEvent.GetEvent(out ev, out p1, out p2, 0) == 0)
{
switch (ev)
{
case EventCode.Complete:
Stopping.Fire(this, null);
if (UpdateProgress != null)
{
UpdateProgress(source.PercentageCompleted);
}
return;
case EventCode.StateChange:
FilterState state = (FilterState)p1.ToInt32();
if (state == FilterState.Stopped || state == FilterState.Paused)
{
Stopping.Fire(this, null);
}
else if (state == FilterState.Running)
{
Starting.Fire(this, null);
}
break;
// FIXME add abort and stuff, and propagate this.
}
// Trace.WriteLine(ev.ToString() + " " + p1.ToInt32());
mediaEvent.FreeEventParams(ev, p1, p2);
}
else
{
if (UpdateProgress != null)
{
UpdateProgress(source.PercentageCompleted);
}
// FiXME use AutoResetEvent
Thread.Sleep(100);
}
}
catch (Exception e)
{
Trace.WriteLine("MediaEventLoop: " + e);
}
}
}
/// <summary> Shut down capture </summary>
private void CloseInterfaces()
{
int hr;
try
{
if (mediaCtrl != null)
{
// Stop the graph
hr = mediaCtrl.Stop();
mediaCtrl = null;
}
}
catch (Exception ex)
{
Debug.WriteLine(ex);
}
if (captureGraphBuilder != null)
{
Marshal.ReleaseComObject(captureGraphBuilder);
captureGraphBuilder = null;
}
GC.Collect();
}
public void Start()
{
StartMediaEventLoop();
}
}
IVideoSource:
public interface IVideoSource : IGenericSampleCB
{
double PercentageCompleted { get; }
int GetImage(int iFrameNumber, IntPtr ip, int iSize, out int iRead);
void SetMediaType(global::IPerform.Video.Conversion.Interops.IGenericSampleConfig psc);
int SetTimeStamps(global::DirectShowLib.IMediaSample pSample, int iFrameNumber);
}
ImageVideoSource (mostly taken from DirectShow.NET examples):
// A generic class to support easily changing between my different sources of data.
// Note: You DON'T have to use this class, or anything like it. The key is the SampleCallback
// routine. How/where you get your bitmaps is ENTIRELY up to you. Having SampleCallback call
// members of this class was just the approach I used to isolate the data handling.
public abstract class ImageVideoSource : IDisposable, IVideoSource
{
#region Definitions
/// <summary>
/// 100 ns - used by a number of DS methods
/// </summary>
private const long UNIT = 10000000;
#endregion
/// <summary>
/// Number of callbacks that returned a positive result
/// </summary>
private int m_iFrameNumber = 0;
virtual public void Dispose()
{
}
public abstract double PercentageCompleted { get; protected set; }
abstract public void SetMediaType(IGenericSampleConfig psc);
abstract public int GetImage(int iFrameNumber, IntPtr ip, int iSize, out int iRead);
virtual public int SetTimeStamps(IMediaSample pSample, int iFrameNumber)
{
return 0;
}
/// <summary>
/// Called by the GenericSampleSourceFilter. This routine populates the MediaSample.
/// </summary>
/// <param name="pSample">Pointer to a sample</param>
/// <returns>0 = success, 1 = end of stream, negative values for errors</returns>
virtual public int SampleCallback(IMediaSample pSample)
{
int hr;
IntPtr pData;
try
{
// Get the buffer into which we will copy the data
hr = pSample.GetPointer(out pData);
if (hr >= 0)
{
// Set TRUE on every sample for uncompressed frames
hr = pSample.SetSyncPoint(true);
if (hr >= 0)
{
// Find out the amount of space in the buffer
int cbData = pSample.GetSize();
hr = SetTimeStamps(pSample, m_iFrameNumber);
if (hr >= 0)
{
int iRead;
// Get copy the data into the sample
hr = GetImage(m_iFrameNumber, pData, cbData, out iRead);
if (hr == 0) // 1 == End of stream
{
pSample.SetActualDataLength(iRead);
// increment the frame number for next time
m_iFrameNumber++;
}
}
}
}
}
finally
{
// Release our pointer to the media sample. THIS IS ESSENTIAL! If
// you don't do this, the graph will stop after about 2 samples.
Marshal.ReleaseComObject(pSample);
}
return hr;
}
}
RawVideoSource (an example of a concrete managed source generator for a DirectShow pipeline):
internal class RawVideoSource : ImageVideoSource
{
private byte[] buffer;
private byte[] demosaicBuffer;
private RawVideoReader reader;
public override double PercentageCompleted
{
get;
protected set;
}
public RawVideoSource(string sourceFile)
{
reader = new RawVideoReader(sourceFile);
}
override public void SetMediaType(IGenericSampleConfig psc)
{
BitmapInfoHeader bmi = new BitmapInfoHeader();
bmi.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
bmi.Width = reader.Header.VideoSize.Width;
bmi.Height = reader.Header.VideoSize.Height;
bmi.Planes = 1;
bmi.BitCount = 24;
bmi.Compression = 0;
bmi.ImageSize = (bmi.BitCount / 8) * bmi.Width * bmi.Height;
bmi.XPelsPerMeter = 0;
bmi.YPelsPerMeter = 0;
bmi.ClrUsed = 0;
bmi.ClrImportant = 0;
int hr = psc.SetMediaTypeFromBitmap(bmi, 0);
buffer = new byte[reader.Header.FrameSize];
demosaicBuffer = new byte[reader.Header.FrameSize * 3];
DsError.ThrowExceptionForHR(hr);
}
long startFrameTime;
long endFrameTime;
unsafe override public int GetImage(int iFrameNumber, IntPtr ip, int iSize, out int iRead)
{
int hr = 0;
if (iFrameNumber < reader.Header.NumberOfFrames)
{
reader.ReadFrame(buffer, iFrameNumber, out startFrameTime, out endFrameTime);
Demosaic.DemosaicGBGR24Bilinear(buffer, demosaicBuffer, reader.Header.VideoSize);
Marshal.Copy(demosaicBuffer, 0, ip, reader.Header.FrameSize * 3);
PercentageCompleted = ((double)iFrameNumber / reader.Header.NumberOfFrames) * 100.0;
}
else
{
PercentageCompleted = 100;
hr = 1; // End of stream
}
iRead = iSize;
return hr;
}
override public int SetTimeStamps(IMediaSample pSample, int iFrameNumber)
{
reader.ReadTimeStamps(iFrameNumber, out startFrameTime, out endFrameTime);
DsLong rtStart = new DsLong(startFrameTime);
DsLong rtStop = new DsLong(endFrameTime);
int hr = pSample.SetTime(rtStart, rtStop);
return hr;
}
}
And the interops to the GSSF.AX COM:
namespace IPerform.Video.Conversion.Interops
{
[ComImport, Guid("6F7BCF72-D0C2-4449-BE0E-B12F580D056D")]
public class GenericSampleSourceFilter
{
}
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
Guid("33B9EE57-1067-45fa-B12D-C37517F09FC0")]
public interface IGenericSampleCB
{
[PreserveSig]
int SampleCallback(IMediaSample pSample);
}
[Guid("CE50FFF9-1BA8-4788-8131-BDE7D4FFC27F"),
InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IGenericSampleConfig
{
[PreserveSig]
int SetMediaTypeFromBitmap(BitmapInfoHeader bmi, long lFPS);
[PreserveSig]
int SetMediaType([MarshalAs(UnmanagedType.LPStruct)] AMMediaType amt);
[PreserveSig]
int SetMediaTypeEx([MarshalAs(UnmanagedType.LPStruct)] AMMediaType amt, int lBufferSize);
[PreserveSig]
int SetBitmapCB(IGenericSampleCB pfn);
}
}
Good luck, try to get it working using this. Or comment with further questions so we can iron out other issues.
