In Unity3D WebGL I download a big file (about 100 MB and more) and write it to the cache (IndexedDB).
I download the file with UnityWebRequest and write it to disk in a custom downloadHandler.
Sometimes, after a call to filestream.Write() or filestream.Flush(), Chrome crashes.
No exception is available; only the Chrome tab crashes.
The crash happens in about 50 percent of downloads.
using System;
using System.Collections;
using System.IO;
using UnityEngine;
using UnityEngine.Networking;
public class Downloader : MonoBehaviour
{
public IEnumerator DownloadFileCoroutine(string url, string filePath, string clientName, int fileId, Action<int> downloadedCallback)
{
var currentWebRequest = UnityWebRequest.Get(url);
currentWebRequest.downloadHandler = new ToFileDownloadHandler(new byte[64 * 1024], filePath);
var downloadHandler = (currentWebRequest.downloadHandler as ToFileDownloadHandler);
currentWebRequest.Send();
while (!currentWebRequest.isDone && !currentWebRequest.isNetworkError && !currentWebRequest.isHttpError)
{
// Yield each frame so the coroutine does not block the main thread while the request runs.
yield return null;
}
if (currentWebRequest.isNetworkError)
{
downloadHandler.Cancel();
}
else
{
// success
}
}
public class ToFileDownloadHandler : DownloadHandlerScript
{
private int _expected = -1;
private long _received = 0;
private readonly string _filepath;
private readonly FileStream _fileStream = null;
private bool _canceled = false;
private bool _isComplete = false;
public ToFileDownloadHandler(byte[] buffer, string filepath) : base(buffer)
{
CreateDir();
_filepath = filepath;
_fileStream = new FileStream(filepath, FileMode.Create, FileAccess.Write);
}
protected override byte[] GetData() { return null; }
protected override bool ReceiveData(byte[] data, int dataLength)
{
if (data == null || data.Length < 1)
{
return false;
}
_received += dataLength;
if (!_canceled)
{
_fileStream.Write(data, 0, dataLength);
_fileStream.Flush();
}
return true;
}
protected override float GetProgress()
{
if (_expected < 0) return 0;
return (float)_received / _expected;
}
protected override void CompleteContent()
{
_fileStream.Close();
_isComplete = true;
}
protected override void ReceiveContentLength(int contentLength)
{
_expected = contentLength;
}
public void Cancel()
{
if (_canceled) return;
_canceled = true;
if (_fileStream != null)
{
_fileStream.Close();
}
File.Delete(_filepath);
}
private void CreateDir()
{
var cachePath = Application.persistentDataPath;
if (!Directory.Exists(cachePath))
{
Directory.CreateDirectory(cachePath);
}
}
}
}
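For completeness, a minimal sketch of how this downloader could be started from another MonoBehaviour. The URL, file name, client name, id and callback below are placeholder values, not part of the original code:
using System.IO;
using UnityEngine;
// Usage sketch only; all argument values are placeholders.
public class DownloadStarter : MonoBehaviour
{
    private void Start()
    {
        var downloader = gameObject.AddComponent<Downloader>();
        StartCoroutine(downloader.DownloadFileCoroutine(
            "https://example.com/bigfile.bin",
            Path.Combine(Application.persistentDataPath, "bigfile.bin"),
            "client-1",
            42,
            fileId => Debug.Log("Finished downloading file " + fileId)));
    }
}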
I'm trying to create a TCP client-server connection. The client can connect to the server successfully, but I can't receive the data I'm sending from the server, and I get this error:
NullReferenceException: Object reference not set to an instance of an object ClientHandlePackets.HandleDataPackets (System.Byte[] data) (at Assets/Client/Scripts/ClientHanlePackets.cs:89) ClientHandlePackets.HandleData (System.Byte[] data) (at Assets/Client/Scripts/ClientHanlePackets.cs:61) ClientTCP.Update () (at Assets/Client/Scripts/ClientTCP.cs:31)
How can I solve this?
using System.Collections.Generic;
using UnityEngine;
public enum ServerPackets
{
S_INFORMATION = 1,
S_EXECUTEMETHODONCLIENT,
}
public class ClientHandlePackets : MonoBehaviour
{
public static Bytebuffer playerbuffer;
public delegate void Packet_(byte[] data);
private static Dictionary<long, Packet_> packets;
private static long pLength;
private void Awake()
{
initalizePackets();
}
private static void initalizePackets()
{
packets = new Dictionary<long, Packet_>();
packets.Add((long)ServerPackets.S_INFORMATION, PacketInformation);
}
public static void HandleData(byte[] data)
{
byte[] Buffer;
Buffer = (byte[])data.Clone();
if (playerbuffer == null) { playerbuffer = new Bytebuffer(); };
playerbuffer.WriteBytes(Buffer);
if (playerbuffer.Count() == 0)
{
playerbuffer.Clear();
return;
}
if (playerbuffer.Length() >= 8)
{
pLength = playerbuffer.ReadLong(false);
if (pLength <= 0)
{
playerbuffer.Clear();
return;
}
}
while (pLength > 0 & pLength <= playerbuffer.Length() - 8)
{
if (pLength <= playerbuffer.Length() - 8)
{
playerbuffer.ReadLong(); // Reads out the packet identifier
data = playerbuffer.Readbytes((int)pLength); // Reads the full packet data
HandleDataPackets(data);
}
pLength = 0;
if (playerbuffer.Length() >= 8)
{
pLength = playerbuffer.ReadLong(false);
if (pLength < 0)
{
playerbuffer.Clear();
return;
}
}
}
}
private static void HandleDataPackets(byte[] data)
{
long packetIdentifier;
Bytebuffer Buffer;
Packet_ packet;
Buffer = new Bytebuffer();
Buffer.WriteBytes(data);
packetIdentifier = Buffer.ReadLong();
Buffer.Dispose();
if (packets.TryGetValue(packetIdentifier, out packet))
{
packet.Invoke(data);
}
}
private static void PacketInformation(byte[] data)
{
Bytebuffer buffer = new Bytebuffer();
buffer.WriteBytes(data);
long packetIdentifier = buffer.ReadLong();
string msg1 = buffer.Readstring();
string msg2 = buffer.Readstring();
int Level = buffer.ReadInteger();
Debug.Log(msg1);
Debug.Log(msg2);
Debug.Log(Level);
}
}
using System;
using System.Net.Sockets;
using UnityEngine;
using UnityEngine.UI;
public class ClientTCP: MonoBehaviour
{
public Text info;
public static ClientTCP instance;
public TcpClient client;
public NetworkStream mystream;
private byte[] AsynchBuffer;
public bool IsConnected;
public byte[] Receivebyte;
public bool handleData = false;
private string IP_Adress= "127.0.0.1";
private int port=5555;
private void Awake()
{
instance = this;
}
private void Update()
{
if (handleData == true)
{
ClientHandlePackets.HandleData(Receivebyte);
handleData = false;
}
}
public void Connect()
{
Debug.Log("Trying to connect to the sever...");
client = new TcpClient();
client.ReceiveBufferSize = 4096;
client.SendBufferSize = 4096;
AsynchBuffer = new byte[8192];
try
{
client.BeginConnect(IP_Adress, port, new AsyncCallback(ConnectCallback), client);
}
catch
{
Debug.Log("unable to connect to the server");
}
}
private void ConnectCallback(IAsyncResult result)
{
try
{
client.EndConnect(result);
if (client.Connected == false)
{
return;
}
else
{
mystream = client.GetStream();
mystream.BeginRead(AsynchBuffer,0,8192,OnRecieveData,null);
IsConnected = true;
Debug.Log("You are connected to the server successfully!");
}
}
catch (Exception)
{
IsConnected = false;
return;
}
}
private void OnRecieveData(IAsyncResult result)
{
try
{
int packetlength = mystream.EndRead(result);
Receivebyte = new byte[packetlength];
Buffer.BlockCopy(AsynchBuffer, 0, Receivebyte, 0, packetlength);
if (packetlength == 0)
{
Debug.Log("disconnected");
Application.Quit();
return;
}
handleData = true;
mystream.BeginRead(AsynchBuffer, 0, 8192, OnRecieveData, null);
}
catch (Exception)
{
Debug.Log("disconnected");
Application.Quit();
return;
}
}
}
The problem is here:
if (packets.TryGetValue(packetIdentifier, out packet))
{
packet.Invoke(data);
}
To avoid this error you can use:
if (packets.TryGetValue(packetIdentifier, out packet))
{
packet?.Invoke(data);
}
But the problem is that I don't see where you fill your packets dictionary with data in your code.
Can you show your ByteBuffer class?
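In the meantime, here is a defensive version of HandleDataPackets, as a sketch only, assuming the dictionary is simply never initialized (for example because no ClientHandlePackets component is active in the scene, so Awake never runs):
private static void HandleDataPackets(byte[] data)
{
    // Hypothetical guard: lazily initialize the dictionary instead of relying on Awake().
    if (packets == null)
    {
        initalizePackets();
    }
    Bytebuffer buffer = new Bytebuffer();
    buffer.WriteBytes(data);
    long packetIdentifier = buffer.ReadLong();
    buffer.Dispose();
    Packet_ packet;
    if (packets.TryGetValue(packetIdentifier, out packet))
    {
        packet?.Invoke(data);
    }
}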
I am trying to integrate Thrift with ActiveMQ in C#. My publisher and subscriber messages are getting serialized and deserialized the right way, but at the very last step I get an exception like "Remote side has closed" under TJSONProtocol. The following snippet shows the line that reveals the error.
///<summary>
/// Return and consume the next byte to be Read, either taking it from the
/// data buffer if present or getting it from the transport otherwise.
///</summary>
public byte Read()
{
if (hasData)
{
hasData = false;
}
else
{
proto.trans.ReadAll(data, 0, 1);
}
return data[0];
}
This is the exact error I am getting
This is my custom transport for the server side
class ThriftJMSServerTransport : TServerTransport
{
Listener listener = new Listener();
protected override TTransport AcceptImpl()
{
try
{
return new ActiveMQTransport().Accept();
// return listener.Accept(); ;
}
catch (Exception ex)
{
return null;
}
}
public override void Close()
{
throw new NotImplementedException();
}
public override void Listen()
{
// listener.Initialize();
listener.Intitializelistner();
// throw new NotImplementedException();
}
}
ActiveMQTransport class
public delegate void MessageReceivedEventHandler(string message, string correlationId);
public class ActiveMQTransport : TTransport
{
private MemoryStream _inputStream;
private MemoryStream _outputStream;
private ActiveMQServerUtil _activeMq;
private string _corelationId;
/// <summary>
/// constructor
/// </summary>
/// <param name="activeMq"></param>
public ActiveMQTransport()
: this(new MemoryStream(), new MemoryStream())
{
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="inputStream"></param>
/// <param name="outputstream"></param>
/// <param name="activeMq"></param>
public ActiveMQTransport(MemoryStream inputStream, MemoryStream outputstream)
{
_inputStream = inputStream;
_outputStream = outputstream;
_activeMq = ActiveMQServerUtil.GetActiveMQServerUtil();
_activeMq.MessageReceived += new MessageReceivedEventHandler(_activeMq_MessageReceived);
}
public ActiveMQTransport Accept()
{
IMessage message = _activeMq.Receive();
ITextMessage textMessage = message as ITextMessage;
_activeMq_MessageReceived(textMessage.Text, textMessage.NMSCorrelationID);
return this;
}
/// <summary>
/// Read Message and write onto transport input stream
/// </summary>
/// <param name="message"></param>
/// <param name="correlationId"></param>
private void _activeMq_MessageReceived(string message, string correlationId)
{
byte[] opMessage = Encoding.UTF8.GetBytes(message);
_corelationId = correlationId;
//_inputStream = new MemoryStream();
_inputStream.Write(opMessage, 0, message.Length);
_inputStream.Position = 0;
Console.WriteLine("CorelationId: {0}, Listner Message: {1}"
, correlationId, message);
}
public override bool IsOpen
{
get { return true; }
}
public override void Open()
{
}
public override void Close()
{
}
public override int Read(byte[] buf, int off, int len)
{
try
{
int i= _inputStream.Read(buf, off, len);
return i;
}
catch(Exception ex)
{
return 0;
}
}
public override void Write(byte[] buf, int off, int len)
{
_outputStream.Write(buf, off, len);
}
/// <summary>
/// Flush result to ActiveMQ
/// </summary>
public override void Flush()
{
_activeMq.Send(Encoding.UTF8.GetString(_outputStream.ToArray()), this._corelationId);
_outputStream = new MemoryStream();
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
}
}
}
Listener class
public class Listener
{
public const string DESTINATION = "Topic1to2";
ActiveMQTransport activeMq = new ActiveMQTransport();
public static Queue<IMessage> Processqueue = new Queue<IMessage>();
public Listener()
{
}
private void PutMessageinQueue(ITextMessage message)
{
if (!string.IsNullOrEmpty(message.Text))
{
Processqueue.Enqueue(message);
}
}
private void consumer_Listener(IMessage message)
{
ITextMessage textMessage = message as ITextMessage;
Console.WriteLine(textMessage.Text);
if (!string.IsNullOrEmpty(textMessage.Text))
{
Processqueue.Enqueue(textMessage);
}
}
public void Intitializelistner()
{
IConnectionFactory factory = new
ConnectionFactory("activemq:tcp://localhost:61616/");
//Create the connection
using (IConnection connection =
factory.CreateConnection())
{
// connection.ClientId = "testing listener";
connection.Start();
//Create the Session
using (ISession session = connection.CreateSession())
{
//Create the Consumer
IMessageConsumer consumer = session.CreateConsumer(
new Apache.NMS.ActiveMQ.Commands.ActiveMQTopic(
DESTINATION));
consumer.Listener += new MessageListener(
consumer_Listener);
Console.ReadLine();
}
}
}
//static void consumer_Listener1(IMessage message)
//{
// Console.WriteLine("Receive: " +
// ((ITextMessage)message).Text);
//}
}
Publisher class
public class Publisher : BaseClass
{
public const string DESTINATION = "Topic2to1";
public Publisher()
{
}
public void SendMessage(string message)
{
IConnectionFactory factory = new ConnectionFactory("activemq:tcp://localhost:61616/");
using (IConnection connection = factory.CreateConnection())
{
//Create the Session
using (ISession session = connection.CreateSession())
{
//Create the Producer for the topic/queue
IMessageProducer prod = session.CreateProducer(new ActiveMQTopic(DESTINATION));
//Send Messages
// int i = 0;
// while (!Console.KeyAvailable)
{
ITextMessage msg = prod.CreateTextMessage();
msg.Text = message;
Console.WriteLine("Sending: " + msg.Text);
prod.Send(msg);
System.Threading.Thread.Sleep(250);
// i++;
}
}
}
Console.ReadLine();
}
}
Following is the client-side custom transport
public class ThriftJmsTransport : TTransport
{
public delegate void MessageReceivedEventHandler(string message);
protected MemoryStream inputStream=new MemoryStream();
protected MemoryStream outputStream=new MemoryStream();
Listener listener = new Listener();
public ThriftJmsTransport()
{
}
public ThriftJmsTransport(MemoryStream inputStream, MemoryStream outputStream)
{
this.inputStream = inputStream;
this.outputStream = outputStream;
}
public Stream OutputStream
{
get { return outputStream; }
}
public Stream InputStream
{
get { return inputStream; }
}
public override void Close()
{
if (inputStream != null)
{
inputStream.Close();
inputStream = null;
}
if (outputStream != null)
{
outputStream.Close();
outputStream = null;
}
}
public override bool IsOpen
{
get { return true; }
}
public override void Open()
{
// listener.Initialize();
listener.Intitializelistner();
// Listener.MessageReceived += new MessageReceivedEventHandler(_activeMq_MessageReceived);
}
void _activeMq_MessageReceived(string message)
{
inputStream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(message));
Console.WriteLine(message);
inputStream.Position = 0;
// _signal.Set();
}
public override int Read(byte[] buf, int off, int len)
{
if (inputStream == null)
{
throw new TTransportException(TTransportException.ExceptionType.NotOpen, "Cannot read from null inputstream");
}
return inputStream.Read(buf, off, len);
}
public override void Write(byte[] buf, int off, int len)
{
if (!outputStream.CanWrite)
outputStream = new MemoryStream();
outputStream.Write(buf, off, len);
}
public override void Flush()
{
//if (outputStream == null)
//{
// throw new TTransportException(TTransportException.ExceptionType.NotOpen, "Cannot flush null outputstream");
//}
if (outputStream != null)
{
// StreamReader reader = new StreamReader(outputStream);
string text = System.Text.Encoding.Default.GetString(outputStream.ToArray());
Send(text);
}
}
public void Send(string message)
{
var publisher=new Publisher();
publisher.SendMessage(message);
}
#region " IDisposable Support "
private bool _IsDisposed;
// IDisposable
protected override void Dispose(bool disposing)
{
if (!_IsDisposed)
{
if (disposing)
{
if (InputStream != null)
InputStream.Dispose();
if (OutputStream != null)
OutputStream.Dispose();
}
}
_IsDisposed = true;
}
#endregion
}
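One observation on the transports above, offered as a guess rather than a confirmed diagnosis: Thrift's TTransport.ReadAll throws the "Remote side has closed" TTransportException as soon as the transport's Read returns zero bytes, and ActiveMQTransport.Read returns 0 both when its MemoryStream is drained and when an exception occurs. A minimal sketch of a Read that instead waits until _activeMq_MessageReceived has filled the input stream (the _messageReady event is hypothetical and would have to be Set() at the end of _activeMq_MessageReceived):
// Sketch only: block Read until a message has been written into _inputStream,
// so ReadAll never sees a zero-byte read and reports "Remote side has closed".
private readonly System.Threading.ManualResetEvent _messageReady =
    new System.Threading.ManualResetEvent(false);
public override int Read(byte[] buf, int off, int len)
{
    _messageReady.WaitOne();                     // wait for the next ActiveMQ message
    int read = _inputStream.Read(buf, off, len); // serve bytes from the buffered message
    if (_inputStream.Position >= _inputStream.Length)
    {
        _messageReady.Reset();                   // message drained; wait for the next one
    }
    return read;
}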
To debug a firewall delay issue I need an application that will produce a beep on server side when it detects an HTTP GET request.
This code (test.ashx):
<%@ WebHandler Language="C#" Class="TestHandler" %>
using System;
using System.Web;
public class TestHandler : IHttpHandler
{
public void ProcessRequest(HttpContext context)
{
HttpResponse Response = context.Response;
try
{
Response.Write("Before beep");
Console.Beep();
Response.Write("After beep");
}
catch (Exception ex)
{
Response.Write(ex.Message + "<br />\n" + ex.InnerException.Message);
}
}
public bool IsReusable { get { return false; } }
}
produces sound only when debugging in IIS Express. After moving the web app to IIS, the sound disappears.
The three easy ways of producing a sound are System.Console.Beep(), System.Media.SoundPlayer, and System.Media.SystemSounds.Beep().
Unfortunately, these methods only work in desktop applications, and won't work in service applications. When ASP.Net apps are run under IIS Express (a desktop app), these sound methods work. However, when ASP.Net apps are run under the IIS service, the sound methods don't work.
System.Console.Beep() ultimately calls the kernel32.dll Beep() function. It's restricted to desktop apps only (scroll down to the Requirements section).
Same for System.Media.SoundPlayer and System.Media.SystemSounds.Beep(). They call the kernel32.dll MessageBeep() and the winmm.dll PlaySound() functions, respectively. They, too, are restricted to desktop apps.
One way to get sounds to play in a service is to use NAudio. It's easy to install via NuGet.
This chunk of code is the only way I could get the sound to play. It has to be played on a separate worker thread, and the execution of the worker thread needs to be paused to let the .wav file finish playing.
using System;
using System.Diagnostics;
using System.Threading;
using NAudio.Dsp;
using NAudio.Wave;
...
protected void Button1_Click(object sender, EventArgs e)
{
var waveFilename = @"c:\Windows\Media\tada.wav";
/* Trying to play the .wav file on the main thread
doesn't seem to work. */
ThreadPool.QueueUserWorkItem(
(state) =>
{
using (var audioPlayback = new AudioPlayback())
{
audioPlayback.Load(waveFilename);
audioPlayback.Play(); // Asynchronous.
/* Need to sleep for the approximate length of .wav file,
otherwise no sound is produced because of the
asynchronous Play() call. */
Thread.Sleep(2000);
}
});
}
Here's the supporting code, taken from NAudio's NAudioWPFDemo project:
public class MaxSampleEventArgs : EventArgs
{
[DebuggerStepThrough]
public MaxSampleEventArgs(float minValue, float maxValue)
{
this.MaxSample = maxValue;
this.MinSample = minValue;
}
public float MaxSample { get; private set; }
public float MinSample { get; private set; }
}
public class FftEventArgs : EventArgs
{
[DebuggerStepThrough]
public FftEventArgs(Complex[] result)
{
this.Result = result;
}
public Complex[] Result { get; private set; }
}
public class SampleAggregator : ISampleProvider
{
// volume
public event EventHandler<MaxSampleEventArgs> MaximumCalculated;
private float maxValue;
private float minValue;
public int NotificationCount { get; set; }
int count;
// FFT
public event EventHandler<FftEventArgs> FftCalculated;
public bool PerformFFT { get; set; }
private readonly Complex[] fftBuffer;
private readonly FftEventArgs fftArgs;
private int fftPos;
private readonly int fftLength;
private int m;
private readonly ISampleProvider source;
private readonly int channels;
public SampleAggregator(ISampleProvider source, int fftLength = 1024)
{
channels = source.WaveFormat.Channels;
if (!IsPowerOfTwo(fftLength))
throw new ArgumentException("FFT Length must be a power of two");
this.m = (int) Math.Log(fftLength, 2.0);
this.fftLength = fftLength;
this.fftBuffer = new Complex[fftLength];
this.fftArgs = new FftEventArgs(fftBuffer);
this.source = source;
}
private bool IsPowerOfTwo(int x)
{
return (x & (x - 1)) == 0;
}
public void Reset()
{
count = 0;
maxValue = minValue = 0;
}
private void Add(float value)
{
if (PerformFFT && FftCalculated != null)
{
fftBuffer[fftPos].X = (float) (value * FastFourierTransform.HammingWindow(fftPos, fftLength));
fftBuffer[fftPos].Y = 0;
fftPos++;
if (fftPos >= fftBuffer.Length)
{
fftPos = 0;
// 1024 = 2^10
FastFourierTransform.FFT(true, m, fftBuffer);
FftCalculated(this, fftArgs);
}
}
maxValue = Math.Max(maxValue, value);
minValue = Math.Min(minValue, value);
count++;
if (count >= NotificationCount && NotificationCount > 0)
{
if (MaximumCalculated != null)
MaximumCalculated(this, new MaxSampleEventArgs(minValue, maxValue));
Reset();
}
}
public WaveFormat WaveFormat { get { return source.WaveFormat; } }
public int Read(float[] buffer, int offset, int count)
{
var samplesRead = source.Read(buffer, offset, count);
for (int n = 0; n < samplesRead; n += channels)
Add(buffer[n + offset]);
return samplesRead;
}
}
public class AudioPlayback : IDisposable
{
private IWavePlayer _playbackDevice;
private WaveStream _fileStream;
public void Load(string fileName)
{
Stop();
CloseFile();
EnsureDeviceCreated();
OpenFile(fileName);
}
private void CloseFile()
{
if (_fileStream != null)
{
_fileStream.Dispose();
_fileStream = null;
}
}
private void OpenFile(string fileName)
{
try
{
var inputStream = new AudioFileReader(fileName);
_fileStream = inputStream;
var aggregator = new SampleAggregator(inputStream);
aggregator.NotificationCount = inputStream.WaveFormat.SampleRate / 100;
aggregator.PerformFFT = true;
_playbackDevice.Init(aggregator);
}
catch
{
CloseFile();
throw;
}
}
private void EnsureDeviceCreated()
{
if (_playbackDevice == null)
CreateDevice();
}
private void CreateDevice()
{
_playbackDevice = new WaveOut { DesiredLatency = 200 };
}
public void Play()
{
if (_playbackDevice != null && _fileStream != null && _playbackDevice.PlaybackState != PlaybackState.Playing)
_playbackDevice.Play();
}
public void Pause()
{
if (_playbackDevice != null)
_playbackDevice.Pause();
}
public void Stop()
{
if (_playbackDevice != null)
_playbackDevice.Stop();
if (_fileStream != null)
_fileStream.Position = 0;
}
public void Dispose()
{
Stop();
CloseFile();
if (_playbackDevice != null)
_playbackDevice.Dispose();
}
}
Try this: System.Media.SystemSounds.Beep.Play();
I want to build an application with MonoDroid to get a live video stream from an IP camera (in MJPEG format) to my tablet. After digging through the internet I found that there is an MJPEG library project written in Java from here. It has two files, MjpegView.java and MjpegInputStream.java, both of which I include here:
MjpegView.java
package de.mjpegsample.MjpegView;
import java.io.IOException;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class MjpegView extends SurfaceView implements SurfaceHolder.Callback {
public final static int POSITION_UPPER_LEFT = 9;
public final static int POSITION_UPPER_RIGHT = 3;
public final static int POSITION_LOWER_LEFT = 12;
public final static int POSITION_LOWER_RIGHT = 6;
public final static int SIZE_STANDARD = 1;
public final static int SIZE_BEST_FIT = 4;
public final static int SIZE_FULLSCREEN = 8;
private MjpegViewThread thread;
private MjpegInputStream mIn = null;
private boolean showFps = false;
private boolean mRun = false;
private boolean surfaceDone = false;
private Paint overlayPaint;
private int overlayTextColor;
private int overlayBackgroundColor;
private int ovlPos;
private int dispWidth;
private int dispHeight;
private int displayMode;
public class MjpegViewThread extends Thread {
private SurfaceHolder mSurfaceHolder;
private int frameCounter = 0;
private long start;
private Bitmap ovl;
public MjpegViewThread(SurfaceHolder surfaceHolder, Context context) { mSurfaceHolder = surfaceHolder; }
private Rect destRect(int bmw, int bmh) {
int tempx;
int tempy;
if (displayMode == MjpegView.SIZE_STANDARD) {
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_BEST_FIT) {
float bmasp = (float) bmw / (float) bmh;
bmw = dispWidth;
bmh = (int) (dispWidth / bmasp);
if (bmh > dispHeight) {
bmh = dispHeight;
bmw = (int) (dispHeight * bmasp);
}
tempx = (dispWidth / 2) - (bmw / 2);
tempy = (dispHeight / 2) - (bmh / 2);
return new Rect(tempx, tempy, bmw + tempx, bmh + tempy);
}
if (displayMode == MjpegView.SIZE_FULLSCREEN) return new Rect(0, 0, dispWidth, dispHeight);
return null;
}
public void setSurfaceSize(int width, int height) {
synchronized(mSurfaceHolder) {
dispWidth = width;
dispHeight = height;
}
}
private Bitmap makeFpsOverlay(Paint p, String text) {
Rect b = new Rect();
p.getTextBounds(text, 0, text.length(), b);
int bwidth = b.width()+2;
int bheight = b.height()+2;
Bitmap bm = Bitmap.createBitmap(bwidth, bheight, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(bm);
p.setColor(overlayBackgroundColor);
c.drawRect(0, 0, bwidth, bheight, p);
p.setColor(overlayTextColor);
c.drawText(text, -b.left+1, (bheight/2)-((p.ascent()+p.descent())/2)+1, p);
return bm;
}
public void run() {
start = System.currentTimeMillis();
PorterDuffXfermode mode = new PorterDuffXfermode(PorterDuff.Mode.DST_OVER);
Bitmap bm;
int width;
int height;
Rect destRect;
Canvas c = null;
Paint p = new Paint();
String fps = "";
while (mRun) {
if(surfaceDone) {
try {
c = mSurfaceHolder.lockCanvas();
synchronized (mSurfaceHolder) {
try {
bm = mIn.readMjpegFrame();
destRect = destRect(bm.getWidth(),bm.getHeight());
c.drawColor(Color.BLACK);
c.drawBitmap(bm, null, destRect, p);
if(showFps) {
p.setXfermode(mode);
if(ovl != null) {
height = ((ovlPos & 1) == 1) ? destRect.top : destRect.bottom-ovl.getHeight();
width = ((ovlPos & 8) == 8) ? destRect.left : destRect.right -ovl.getWidth();
c.drawBitmap(ovl, width, height, null);
}
p.setXfermode(null);
frameCounter++;
if((System.currentTimeMillis() - start) >= 1000) {
fps = String.valueOf(frameCounter)+"fps";
frameCounter = 0;
start = System.currentTimeMillis();
ovl = makeFpsOverlay(overlayPaint, fps);
}
}
} catch (IOException e) {}
}
} finally { if (c != null) mSurfaceHolder.unlockCanvasAndPost(c); }
}
}
}
}
private void init(Context context) {
SurfaceHolder holder = getHolder();
holder.addCallback(this);
thread = new MjpegViewThread(holder, context);
setFocusable(true);
overlayPaint = new Paint();
overlayPaint.setTextAlign(Paint.Align.LEFT);
overlayPaint.setTextSize(12);
overlayPaint.setTypeface(Typeface.DEFAULT);
overlayTextColor = Color.WHITE;
overlayBackgroundColor = Color.BLACK;
ovlPos = MjpegView.POSITION_LOWER_RIGHT;
displayMode = MjpegView.SIZE_STANDARD;
dispWidth = getWidth();
dispHeight = getHeight();
}
public void startPlayback() {
if(mIn != null) {
mRun = true;
thread.start();
}
}
public void stopPlayback() {
mRun = false;
boolean retry = true;
while(retry) {
try {
thread.join();
retry = false;
} catch (InterruptedException e) {}
}
}
public MjpegView(Context context, AttributeSet attrs) { super(context, attrs); init(context); }
public void surfaceChanged(SurfaceHolder holder, int f, int w, int h) { thread.setSurfaceSize(w, h); }
public void surfaceDestroyed(SurfaceHolder holder) {
surfaceDone = false;
stopPlayback();
}
public MjpegView(Context context) { super(context); init(context); }
public void surfaceCreated(SurfaceHolder holder) { surfaceDone = true; }
public void showFps(boolean b) { showFps = b; }
public void setSource(MjpegInputStream source) { mIn = source; startPlayback();}
public void setOverlayPaint(Paint p) { overlayPaint = p; }
public void setOverlayTextColor(int c) { overlayTextColor = c; }
public void setOverlayBackgroundColor(int c) { overlayBackgroundColor = c; }
public void setOverlayPosition(int p) { ovlPos = p; }
public void setDisplayMode(int s) { displayMode = s; }
}
MjpegInputStream.java
package de.mjpegsample.MjpegView;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
public class MjpegInputStream extends DataInputStream {
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
private final String CONTENT_LENGTH = "Content-Length";
private final static int HEADER_MAX_LENGTH = 100;
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
private int mContentLength = -1;
public static MjpegInputStream read(String url) {
HttpResponse res;
DefaultHttpClient httpclient = new DefaultHttpClient();
try {
res = httpclient.execute(new HttpGet(URI.create(url)));
return new MjpegInputStream(res.getEntity().getContent());
} catch (ClientProtocolException e) {
} catch (IOException e) {}
return null;
}
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
int seqIndex = 0;
byte c;
for(int i=0; i < FRAME_MAX_LENGTH; i++) {
c = (byte) in.readUnsignedByte();
if(c == sequence[seqIndex]) {
seqIndex++;
if(seqIndex == sequence.length) return i + 1;
} else seqIndex = 0;
}
return -1;
}
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
int end = getEndOfSeqeunce(in, sequence);
return (end < 0) ? (-1) : (end - sequence.length);
}
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
ByteArrayInputStream headerIn = new ByteArrayInputStream(headerBytes);
Properties props = new Properties();
props.load(headerIn);
return Integer.parseInt(props.getProperty(CONTENT_LENGTH));
}
public Bitmap readMjpegFrame() throws IOException {
mark(FRAME_MAX_LENGTH);
int headerLen = getStartOfSequence(this, SOI_MARKER);
reset();
byte[] header = new byte[headerLen];
readFully(header);
try {
mContentLength = parseContentLength(header);
} catch (NumberFormatException nfe) {
mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
}
reset();
byte[] frameData = new byte[mContentLength];
skipBytes(headerLen);
readFully(frameData);
return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
}
So I converted that (actually, I created a C# wrapper) with a Binding Library project, and I followed the sample code tutorial of this project, as shown below.
The sample itself:
public class MjpegSample extends Activity {
private MjpegView mv;
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
//sample public cam
String URL = "http://webcam5.hrz.tu-darmstadt.de/axis-cgi/mjpg/video.cgi?resolution=320x240";
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
mv = new MjpegView(this);
setContentView(mv);
mv.setSource(MjpegInputStream.read(URL));
mv.setDisplayMode(MjpegView.SIZE_BEST_FIT);
mv.showFps(true);
}
}
What I have done in MonoDroid:
namespace AndroidApplication8
{
[Activity(Label = "AndroidApplication8", MainLauncher = true, Icon = "@drawable/icon")]
public class Activity1 : Activity
{
int count = 1;
protected override void OnCreate(Bundle bundle)
{
base.OnCreate(bundle);
String URL = "rtsp://192.168.1.3/Mjpeg/video.cgi";
var mv = new MjpegView(this);
SetContentView(mv);
**mv.SetSource(MjpegInputStream.Read(URL));
mv.SetDisplayMode(MjpegView.SizeBestFit);
mv.StartPlayback();
}
}
}
But it gives me an error on the line indicated with ** when it executes MjpegInputStream.Read(), and it just jumps into the class converted from the native Java files without any further information.
You should check your video type. For example, if the video is compressed at the source (before it reaches your Android device), you need to handle that encoding yourself instead of relying on Android's built-in browser. That means writing code (in Java, for example) that first verifies the incoming stream from the camera and then decodes it manually.
Good luck!
class Sound
{
private NAudio.Wave.BlockAlignReductionStream stream = null;
private NAudio.Wave.DirectSoundOut output = null;
private string fileName;
public Sound(string fileName)
{
this.fileName = fileName;
}
public void PlaySound()
{
if(fileName.EndsWith(".mp3"))
{
NAudio.Wave.WaveStream pcm = NAudio.Wave.WaveFormatConversionStream.CreatePcmStream(new NAudio.Wave.Mp3FileReader(fileName));
stream = new NAudio.Wave.BlockAlignReductionStream(pcm);
}
else if (fileName.EndsWith(".wav"))
{
NAudio.Wave.WaveStream pcm = new NAudio.Wave.WaveChannel32(new NAudio.Wave.WaveFileReader(fileName));
stream = new NAudio.Wave.BlockAlignReductionStream(pcm);
}
else throw new InvalidOperationException("Not a correct audio file type.");
output = new NAudio.Wave.DirectSoundOut();
output.Init(stream);
output.Play();
output.Volume = 0.5f;
}
public void Volume(float vol)
{
}
public void PausePlay()
{
if (output != null)
{
if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing) output.Pause();
else if (output.PlaybackState == NAudio.Wave.PlaybackState.Paused) output.Play();
}
}
public void Pause()
{
if (output != null)
{
if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing) output.Pause();
}
}
public void Play()
{
if (output != null)
{
if (output.PlaybackState == NAudio.Wave.PlaybackState.Paused) output.Play();
}
}
public void DisposeWave()
{
if (output != null)
{
if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing) output.Stop();
output.Dispose();
output = null;
}
if (stream != null)
{
stream.Dispose();
stream = null;
}
}
public bool Over()
{
if (stream.Position == stream.Length)
return true;
return false;
}
public void Loop()
{
if (Over())
{
stream.Position = 0;
output.Play();
}
}
}
I really don't know what the problem is here; I'd be glad for some help. I'm trying to change the volume of the output audio.
When I run this code I get an error on the output.Volume = 0.5f line. The error is:
Setting volume not supported on DirectSoundOut, adjust the volume on your WaveProvider instead.
It means: use the Volume property on WaveChannel32 instead. Also, unless you are using an old version of NAudio, the BlockAlignReductionStream and the WaveFormatConversionStream are unnecessary, since Mp3FileReader already emits PCM.
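Here is a minimal sketch of PlaySound rewritten along those lines, assuming a reasonably recent NAudio; WaveChannel32 is the wave provider whose Volume property the error message points at:
public void PlaySound()
{
    NAudio.Wave.WaveStream reader;
    if (fileName.EndsWith(".mp3"))
        reader = new NAudio.Wave.Mp3FileReader(fileName);   // already emits PCM
    else if (fileName.EndsWith(".wav"))
        reader = new NAudio.Wave.WaveFileReader(fileName);
    else
        throw new InvalidOperationException("Not a correct audio file type.");
    // WaveChannel32 wraps the reader and exposes Volume (1.0f = full volume).
    var volumeStream = new NAudio.Wave.WaveChannel32(reader);
    volumeStream.Volume = 0.5f;
    output = new NAudio.Wave.DirectSoundOut();
    output.Init(volumeStream);   // adjust volume on the provider, not on DirectSoundOut
    output.Play();
}
If the other methods (Over, Loop, DisposeWave) still need the stream field, its type can be changed from BlockAlignReductionStream to WaveStream and volumeStream assigned to it instead.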