Unity: Live Video Streaming ARCore Camera Texture - c#

I'm trying to use TCP sockets to stream the texture from the ARCore camera on one device to the background image on another device. I've found some great resources that have got me close. This question helped me a lot: Unity: Live Video Streaming
The only difference is that that example uses a WebCamTexture, which I've got working smoothly and without any lag.
However, obtaining the camera texture from ARCore is a bit of a different process. I think I'm doing it correctly, but maybe I'm missing something. When I run this with the ARCore camera texture and my minor changes, the server lags significantly.
Below is my code. I made no changes to the client-side code, so I know the problem has to do with my server-side changes.
SERVER:
public class MySocketServer : MonoBehaviour
{
WebCamTexture webCam;
public RawImage myImage;
public bool enableLog = false;
Texture2D currentTexture;
private TcpListener listner;
private const int port = 8010;
private bool stop = false;
private List<TcpClient> clients = new List<TcpClient>();
//This must be the same as SEND_RECEIVE_COUNT on the client
const int SEND_RECEIVE_COUNT = 15;
private void Start()
{
Application.runInBackground = true;
//Start WebCam coroutine
StartCoroutine(initAndWaitForWebCamTexture());
}
//Converts the data size to byte array and put result to the fullBytes array
void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes)
{
//Clear old data
Array.Clear(fullBytes, 0, fullBytes.Length);
//Convert int to bytes
byte[] bytesToSendCount = BitConverter.GetBytes(byteLength);
//Copy result to fullBytes
bytesToSendCount.CopyTo(fullBytes, 0);
}
//Converts the byte array to the data size and returns the result
int frameByteArrayToByteLength(byte[] frameBytesLength)
{
int byteLength = BitConverter.ToInt32(frameBytesLength, 0);
return byteLength;
}
IEnumerator initAndWaitForWebCamTexture()
{
// Open the Camera on the desired device, in my case IPAD pro
//webCam = new WebCamTexture();
// Get all devices , front and back camera
//webCam.deviceName = WebCamTexture.devices[WebCamTexture.devices.Length - 1].name;
// request the lowest width and heigh possible
//webCam.requestedHeight = 10;
//webCam.requestedWidth = 10;
//myImage.texture = webCam;
//webCam.Play();
//currentTexture = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24,false);
//currentTexture = new Texture2D(webCam.width, webCam.height);
// Connect to the server
listner = new TcpListener(IPAddress.Any, port);
listner.Start();
while (Screen.width < 100)
{
yield return null;
}
//Start sending coroutine
StartCoroutine(senderCOR());
}
WaitForEndOfFrame endOfFrame = new WaitForEndOfFrame();
IEnumerator senderCOR()
{
bool isConnected = false;
TcpClient client = null;
NetworkStream stream = null;
// Wait for client to connect in another Thread
Loom.RunAsync(() =>
{
while (!stop)
{
// Wait for client connection
client = listner.AcceptTcpClient();
// We are connected
clients.Add(client);
isConnected = true;
stream = client.GetStream();
}
});
//Wait until client has connected
while (!isConnected)
{
yield return null;
}
LOG("Connected!");
bool readyToGetFrame = true;
byte[] frameBytesLength = new byte[SEND_RECEIVE_COUNT];
while (!stop)
{
//Wait for End of frame
yield return endOfFrame;
//currentTexture.ReadPixels(new Rect(0,0,Screen.width,Screen.height), 0, 0);
//currentTexture.Apply();
//NEW CODE TO TRY
using (var image = Frame.CameraImage.AcquireCameraImageBytes()) {
if (!image.IsAvailable)
{
//Skip this frame if the camera image isn't ready yet
yield return null;
continue;
}
_OnImageAvailable(image.Width, image.Height, image.Y, image.Width * image.Height);
}
//currentTexture.SetPixels(webCam.GetPixels());
byte[] pngBytes = currentTexture.EncodeToPNG();
//Fill total byte length to send. Result is stored in frameBytesLength
byteLengthToFrameByteArray(pngBytes.Length, frameBytesLength);
//Set readyToGetFrame false
readyToGetFrame = false;
Loom.RunAsync(() =>
{
//Send total byte count first
stream.Write(frameBytesLength, 0, frameBytesLength.Length);
LOG("Sent Image byte Length: " + frameBytesLength.Length);
//Send the image bytes
stream.Write(pngBytes, 0, pngBytes.Length);
LOG("Sending Image byte array data : " + pngBytes.Length);
//Sent. Set readyToGetFrame true
readyToGetFrame = true;
});
//Wait until we are ready to get new frame(Until we are done sending data)
while (!readyToGetFrame)
{
LOG("Waiting To get new frame");
yield return null;
}
}
}
void LOG(string message)
{
if (enableLog)
Debug.Log(message);
}
private void Update()
{
myImage.texture = webCam;
}
// stop everything
private void OnApplicationQuit()
{
if (webCam != null && webCam.isPlaying)
{
webCam.Stop();
stop = true;
}
if (listner != null)
{
listner.Stop();
}
foreach (TcpClient c in clients)
c.Close();
}
private void _OnImageAvailable(int width, int height, IntPtr pixelBuffer, int bufferSize){
currentTexture = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
byte[] bufferYUV = new byte[width * height * 3 / 2];
bufferSize = width * height * 3 / 2;
System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, bufferYUV, 0, bufferSize);
Color color = new Color();
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
float Yvalue = bufferYUV[y * width + x];
float Uvalue = bufferYUV[(y / 2) * (width / 2) + x / 2 + (width * height)];
float Vvalue = bufferYUV[(y / 2) * (width / 2) + x / 2 + (width * height) + (width * height) / 4];
color.r = Yvalue + (float)(1.37705 * (Vvalue - 128.0f));
color.g = Yvalue - (float)(0.698001 * (Vvalue - 128.0f)) - (float)(0.337633 * (Uvalue - 128.0f));
color.b = Yvalue + (float)(1.732446 * (Uvalue - 128.0f));
color.r /= 255.0f;
color.g /= 255.0f;
color.b /= 255.0f;
if (color.r < 0.0f)
color.r = 0.0f;
if (color.g < 0.0f)
color.g = 0.0f;
if (color.b < 0.0f)
color.b = 0.0f;
if (color.r > 1.0f)
color.r = 1.0f;
if (color.g > 1.0f)
color.g = 1.0f;
if (color.b > 1.0f)
color.b = 1.0f;
color.a = 1.0f;
currentTexture.SetPixel(width - 1 - x, y, color);
}
}
currentTexture.Apply();
//this.GetComponent<RawImage>().texture = m_TextureRender;
}
}
CLIENT:
public class MySocketsClient : MonoBehaviour
{
public RawImage image;
public bool enableLog = false;
const int port = 8010;
//public string IP = "xxx.xxx.x.xx";
public string IP = "xxx.xxx.x.x";
TcpClient client;
Texture2D tex;
private bool stop = false;
//This must be the same as SEND_RECEIVE_COUNT on the server
const int SEND_RECEIVE_COUNT = 15;
// Use this for initialization
void Start()
{
Application.runInBackground = true;
tex = new Texture2D(0, 0);
client = new TcpClient();
//Connect to server from another Thread
Loom.RunAsync(() =>
{
LOGWARNING("Connecting to server...");
// if on desktop
//client.Connect(IPAddress.Loopback, port);
// if using the IPAD
client.Connect(IPAddress.Parse(IP), port);
LOGWARNING("Connected!");
imageReceiver();
});
}
void imageReceiver()
{
Debug.Log("Here");
//While loop in another Thread is fine so we don't block main Unity Thread
Loom.RunAsync(() =>
{
while (!stop)
{
//Read Image Count
int imageSize = readImageByteSize(SEND_RECEIVE_COUNT);
LOGWARNING("Received Image byte Length: " + imageSize);
//Read Image Bytes and Display it
readFrameByteArray(imageSize);
}
});
}
//Converts the data size to byte array and put result to the fullBytes array
void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes)
{
//Clear old data
Array.Clear(fullBytes, 0, fullBytes.Length);
//Convert int to bytes
byte[] bytesToSendCount = BitConverter.GetBytes(byteLength);
//Copy result to fullBytes
bytesToSendCount.CopyTo(fullBytes, 0);
}
//Converts the byte array to the data size and returns the result
int frameByteArrayToByteLength(byte[] frameBytesLength)
{
int byteLength = BitConverter.ToInt32(frameBytesLength, 0);
return byteLength;
}
/////////////////////////////////////////////////////Read Image SIZE from Server///////////////////////////////////////////////////
private int readImageByteSize(int size)
{
bool disconnected = false;
NetworkStream serverStream = client.GetStream();
byte[] imageBytesCount = new byte[size];
var total = 0;
do
{
var read = serverStream.Read(imageBytesCount, total, size - total);
//Debug.LogFormat("Client received {0} bytes", total);
if (read == 0)
{
disconnected = true;
break;
}
total += read;
} while (total != size);
int byteLength;
if (disconnected)
{
byteLength = -1;
}
else
{
byteLength = frameByteArrayToByteLength(imageBytesCount);
}
return byteLength;
}
/////////////////////////////////////////////////////Read Image Data Byte Array from Server///////////////////////////////////////////////////
private void readFrameByteArray(int size)
{
bool disconnected = false;
NetworkStream serverStream = client.GetStream();
byte[] imageBytes = new byte[size];
var total = 0;
do
{
var read = serverStream.Read(imageBytes, total, size - total);
//Debug.LogFormat("Client received {0} bytes", total);
if (read == 0)
{
disconnected = true;
break;
}
total += read;
} while (total != size);
bool readyToReadAgain = false;
//Display Image
if (!disconnected)
{
//Display Image on the main Thread
Loom.QueueOnMainThread(() =>
{
displayReceivedImage(imageBytes);
readyToReadAgain = true;
});
}
//Wait until old Image is displayed
while (!readyToReadAgain)
{
System.Threading.Thread.Sleep(1);
}
}
void displayReceivedImage(byte[] receivedImageBytes)
{
tex.LoadImage(receivedImageBytes);
image.texture = tex;
}
// Update is called once per frame
void Update()
{
}
void LOG(string message)
{
if (enableLog)
Debug.Log(message);
}
void LOGWARNING(string message)
{
if (enableLog)
Debug.LogWarning(message);
}
void OnApplicationQuit()
{
LOGWARNING("OnApplicationQuit");
stop = true;
if (client != null)
{
client.Close();
}
}
}
Would appreciate any help or thoughts on what might be causing the severe lag. Thanks!
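In case it helps narrow things down, this is the direction I was considering for the conversion step: reuse one texture, fill a Color32[] buffer instead of calling SetPixel per pixel, and send JPG instead of PNG. This is only an untested sketch of the idea (ConvertYuvToTexture and pixelBufferRGBA are names I made up, not from the example I linked):
// Untested sketch: reuse the texture and a Color32 buffer instead of
// allocating a new Texture2D and calling SetPixel for every pixel.
private Color32[] pixelBufferRGBA;

private void ConvertYuvToTexture(int width, int height, IntPtr yPlane)
{
    if (currentTexture == null || currentTexture.width != width || currentTexture.height != height)
    {
        currentTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
        pixelBufferRGBA = new Color32[width * height];
    }
    byte[] bufferY = new byte[width * height];
    System.Runtime.InteropServices.Marshal.Copy(yPlane, bufferY, 0, bufferY.Length);
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            // Grayscale from the Y plane only, just to test throughput;
            // the full YUV-to-RGB math could be added back on top of this.
            byte v = bufferY[y * width + x];
            pixelBufferRGBA[y * width + (width - 1 - x)] = new Color32(v, v, v, 255);
        }
    }
    currentTexture.SetPixels32(pixelBufferRGBA);
    currentTexture.Apply();
}

// And in senderCOR, EncodeToJPG is usually much cheaper than EncodeToPNG:
// byte[] jpgBytes = currentTexture.EncodeToJPG(50);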

Related

C# Winform OnPaint flickering

I am recreating a simple tile-based game (ref: Javidx9 C++ tile game) in C# WinForms, and the screen flickers as I move, even though I have DoubleBuffered = true. I will show an example with textures and one without.
TEXTURES > e.Graphics.DrawImage()
NO TEXTURES > e.Graphics.FillRectangle()
In the code I made a GameManager, a CameraManager, a PlayerModel, and lastly the form's OnPaint, which draws the game information. The way it works is that the GameManager tells the Player to update itself depending on user input (move, jump, etc.), then tells the Camera to update depending on the player's position. At first I called GameManager.Update() from the Paint event, but then I separated the Paint event from the GameManager and made the GameManager update asynchronously, because the Paint event updates too slowly. That's when the problem started.
//GameManager
public void CreateSoloGame(MapModel map)
{
CurrentMap = map;
ResetPlayer();
_inGame = true;
new Task(() =>
{
while (_inGame)
{
Elapsed = _stopwatch.ElapsedMilliseconds;
_stopwatch.Restart();
int i = 0;
Step(Elapsed);
while (i < _gameTime) //_gameTime controls the speed of the game
{
i++;
}
}
}).Start();
}
public void Step(double elapsed)
{
Player.Update(CurrentMap, elapsed);
Camera.SetCamera(Player.Position, CurrentMap);
}
//PlayerModel
public void DetectCollision(MapModel CurrentMap, double Elapsed)
{
//adds velocity to players position
float NextPlayerX = Position.X + (VelX * (float)Elapsed);
float NextPlayerY = Position.Y + (VelY * (float)Elapsed);
//collision detection
OnFloor = false;
if (VelY > 0)
{
//bottom
if (CurrentMap.GetTile((int)(Position.X + .1), (int)(NextPlayerY + 1)) == '#' || CurrentMap.GetTile((int)(Position.X + .9), (int)(NextPlayerY + 1)) == '#')
{
NextPlayerY = (int)NextPlayerY;
VelY = 0;
OnFloor = true;
_jumps = 2;
}
}
else
{
//top
if (CurrentMap.GetTile((int)(Position.X + .1), (int)NextPlayerY) == '#' || CurrentMap.GetTile((int)(Position.X + .9), (int)NextPlayerY) == '#')
{
NextPlayerY = (int)NextPlayerY + 1;
VelY = 0;
}
}
if (VelX < 0)
{
//left
if (CurrentMap.GetTile((int)NextPlayerX, (int)Position.Y) == '#' || CurrentMap.GetTile((int)NextPlayerX, (int)(Position.Y + .9)) == '#')
{
NextPlayerX = (int)NextPlayerX + 1;
VelX = 0;
}
}
else
{
//right
if (CurrentMap.GetTile((int)(NextPlayerX + 1), (int)Position.Y) == '#' || CurrentMap.GetTile((int)(NextPlayerX + 1), (int)(Position.Y + .9)) == '#')
{
NextPlayerX = (int)NextPlayerX;
VelX = 0;
}
}
//updates player position
Position = new PointF(NextPlayerX, NextPlayerY);
}
public void Jump()
{
if (_jumps > 0)
{
VelY = -.06f;
_jumps--;
}
}
public void ReadInput(double elapsed)
{
//resets velocity back to 0 if player isnt moving
if (Math.Abs(VelY) < 0.001f) VelY = 0;
if (Math.Abs(VelX) < 0.001f) VelX = 0;
//sets velocity according to player input - S and W are used for no clip free mode
//if (UserInput.KeyInput[Keys.W]) _playerVelY -= .001f;
//if (UserInput.KeyInput[Keys.S]) _playerVelY += .001f;
if (Input.KEYINPUT[Keys.A]) VelX -= .001f * (float)elapsed;
else if (Input.KEYINPUT[Keys.D]) VelX += .001f * (float)elapsed;
else if (Math.Abs(VelX) > 0.001f && OnFloor) VelX += -0.06f * VelX * (float)elapsed;
//resets jumping
if (!OnFloor)
VelY += .0004f * (float)elapsed;
//limits velocity
//if (_playerVelY <= -.014) _playerVelY = -.014f; //disabled to allow jumps
if (VelY >= .05) VelY = .05f;
if (VelX >= .02 && !Input.KEYINPUT[Keys.ShiftKey]) VelX = .02f;
else if (VelX >= .005 && Input.KEYINPUT[Keys.ShiftKey]) VelX = .005f;
if (VelX <= -.02 && !Input.KEYINPUT[Keys.ShiftKey]) VelX = -.02f;
else if (VelX <= -.005 && Input.KEYINPUT[Keys.ShiftKey]) VelX = -.005f;
}
public void Update(MapModel map, double elapsed)
{
ReadInput(elapsed);
DetectCollision(map, elapsed);
}
//CameraManager
public void SetCamera(PointF center, MapModel map, bool clamp = true)
{
//changes the tile size according to the screen size
TileSize = Input.ClientScreen.Width / Tiles;
//amount of tiles along thier axis
TilesX = Input.ClientScreen.Width / TileSize;
TilesY = Input.ClientScreen.Height / TileSize;
//camera offset
OffsetX = center.X - TilesX / 2.0f;
OffsetY = center.Y - TilesY / 2.0f;
//make sure the offset does not go beyond bounds
if (OffsetX < 0 && clamp) OffsetX = 0;
if (OffsetY < 0 && clamp) OffsetY = 0;
if (OffsetX > map.MapWidth - TilesX && clamp) OffsetX = map.MapWidth - TilesX;
if (OffsetY > map.MapHeight - TilesY && clamp) OffsetY = map.MapHeight - TilesY;
//smooths out movement for tiles
TileOffsetX = (OffsetX - (int)OffsetX) * TileSize;
TileOffsetY = (OffsetY - (int)OffsetY) * TileSize;
}
//Form Paint event
private void Draw(object sender, PaintEventArgs e)
{
Brush b;
Input.ClientScreen = ClientRectangle;
for (int x = -1; x < _camera.TilesX + 1; x++)
{
for (int y = -1; y < _camera.TilesY + 1; y++)
{
switch (_map.GetTile(x + (int)_camera.OffsetX, y + (int)_camera.OffsetY))
{
case '.':
//e.Graphics.DrawImage(sky, x * _camera.TileSize - _camera.TileOffsetX, y * _camera.TileSize - _camera.TileOffsetY, _camera.TileSize, _camera.TileSize);
//continue;
b = Brushes.MediumSlateBlue;
break;
case '#':
//e.Graphics.DrawImage(block, x * _camera.TileSize - _camera.TileOffsetX, y * _camera.TileSize - _camera.TileOffsetY, _camera.TileSize, _camera.TileSize);
//continue;
b = Brushes.DarkGray;
break;
case 'o':
b = Brushes.Yellow;
break;
case '%':
b = Brushes.Green;
break;
default:
b = Brushes.MediumSlateBlue;
break;
}
e.Graphics.FillRectangle(b, x * _camera.TileSize - _camera.TileOffsetX, y * _camera.TileSize - _camera.TileOffsetY, (x + 1) * _camera.TileSize, (y + 1) * _camera.TileSize);
}
}
e.Graphics.FillRectangle(Brushes.Purple, (_manager.Player.Position.X - _camera.OffsetX) * _camera.TileSize, (_manager.Player.Position.Y - _camera.OffsetY) * _camera.TileSize, _camera.TileSize, _camera.TileSize);
//e.Graphics.DrawImage(chef, (_manager.Player.Position.X - _camera.OffsetX) * _camera.TileSize, (_manager.Player.Position.Y - _camera.OffsetY) * _camera.TileSize, _camera.TileSize, _camera.TileSize);
Invalidate();
}
P.S. I use WinForms because I don't work with GUIs much and it's the one I'm most familiar with; this was just something quick I wanted to try out, but I've never had this issue. I tried a couple of things but nothing worked, so this is my last resort. If you think I should use another GUI framework, let me know and I'll look into it. Also, if you think my code is ugly, let me know why.
Fill the form with a PictureBox and hook into the .Paint() event. For some reason, there is no flicker drawing on a PictureBox compared to drawing on a form.
Also having a game loop improves things a lot. I am getting 600+ fps with my example code.
Full code below:
public partial class RunningForm1 : Form
{
static readonly Random rng = new Random();
float offset;
int index;
Queue<int> town;
const int grid = 12;
Color[] pallete;
FpsCounter clock;
#region Windows API - User32.dll
[StructLayout(LayoutKind.Sequential)]
public struct WinMessage
{
public IntPtr hWnd;
public Message msg;
public IntPtr wParam;
public IntPtr lParam;
public uint time;
public System.Drawing.Point p;
}
[System.Security.SuppressUnmanagedCodeSecurity] // We won't use this maliciously
[DllImport("User32.dll", CharSet = CharSet.Auto)]
public static extern bool PeekMessage(out WinMessage msg, IntPtr hWnd, uint messageFilterMin, uint messageFilterMax, uint flags);
#endregion
public RunningForm1()
{
InitializeComponent();
this.pic.Paint += new PaintEventHandler(this.pic_Paint);
this.pic.SizeChanged += new EventHandler(this.pic_SizeChanged);
//Initialize the machine
this.clock = new FpsCounter();
}
protected override void OnLoad(EventArgs e)
{
base.OnLoad(e);
Setup();
System.Windows.Forms.Application.Idle += new EventHandler(OnApplicationIdle);
}
void UpdateMachine()
{
pic.Refresh();
}
#region Main Loop
private void OnApplicationIdle(object sender, EventArgs e)
{
while (AppStillIdle)
{
// Render a frame during idle time (no messages are waiting)
UpdateMachine();
}
}
private bool AppStillIdle
{
get
{
WinMessage msg;
return !PeekMessage(out msg, IntPtr.Zero, 0, 0, 0);
}
}
#endregion
private void pic_SizeChanged(object sender, EventArgs e)
{
pic.Refresh();
}
private void pic_Paint(object sender, PaintEventArgs e)
{
// Show FPS counter
var fps = clock.Measure();
var text = $"{fps:F2} fps";
var sz = e.Graphics.MeasureString(text, SystemFonts.DialogFont);
var pt = new PointF(pic.Width - 1 - sz.Width - 4, 4);
e.Graphics.DrawString(text, SystemFonts.DialogFont, Brushes.Black, pt);
// draw on e.Graphics
e.Graphics.SmoothingMode = SmoothingMode.HighQuality;
e.Graphics.TranslateTransform(0, pic.ClientSize.Height - 1);
e.Graphics.ScaleTransform(1, -1);
int wt = pic.ClientSize.Width / (grid - 1);
int ht = pic.ClientSize.Height / (grid + 1);
SolidBrush fill = new SolidBrush(Color.Black);
for (int i = 0; i < town.Count; i++)
{
float x = offset + i * wt;
var building = new RectangleF(
x, 0,
wt, ht * town.ElementAt(i));
fill.Color = pallete[(index + i) % pallete.Length];
e.Graphics.FillRectangle(fill, building);
}
offset -= 0.4f;
if (offset <= -grid - wt)
{
UpdateTown();
offset += wt;
}
}
private void Setup()
{
offset = 0;
index = 0;
town = new Queue<int>();
pallete = new Color[]
{
Color.FromKnownColor(KnownColor.Purple),
Color.FromKnownColor(KnownColor.Green),
Color.FromKnownColor(KnownColor.Yellow),
Color.FromKnownColor(KnownColor.SlateBlue),
Color.FromKnownColor(KnownColor.LightCoral),
Color.FromKnownColor(KnownColor.Red),
Color.FromKnownColor(KnownColor.Blue),
Color.FromKnownColor(KnownColor.LightCyan),
Color.FromKnownColor(KnownColor.Crimson),
Color.FromKnownColor(KnownColor.GreenYellow),
Color.FromKnownColor(KnownColor.Orange),
Color.FromKnownColor(KnownColor.LightGreen),
Color.FromKnownColor(KnownColor.Gold),
};
for (int i = 0; i <= grid; i++)
{
town.Enqueue(rng.Next(grid) + 1);
}
}
private void UpdateTown()
{
town.Dequeue();
town.Enqueue(rng.Next(grid) + 1);
index = (index + 1) % pallete.Length;
}
}
public class FpsCounter
{
public FpsCounter()
{
this.PrevFrame = 0;
this.Frames = 0;
this.PollOverFrames = 100;
this.Clock = Stopwatch.StartNew();
}
/// <summary>
/// Use this method to poll the FPS counter
/// </summary>
/// <returns>The last measured FPS</returns>
public float Measure()
{
Frames++;
PrevFrame++;
var dt = Clock.Elapsed.TotalSeconds;
if (PrevFrame > PollOverFrames || dt > PollOverFrames / 50)
{
LastFps = (float)(PrevFrame / dt);
PrevFrame = 0;
Clock.Restart();
}
return LastFps;
}
public float LastFps { get; private set; }
public long Frames { get; private set; }
private Stopwatch Clock { get; }
private int PrevFrame { get; set; }
/// <summary>
/// The number of frames to average to get a more accurate frame count.
/// The higher this is the more stable the result, but it will update
/// slower. The lower this is, the more chaotic the result of <see cref="Measure()"/>
/// but it will get a new result sooner. Default is 100 frames.
/// </summary>
public int PollOverFrames { get; set; }
}

How to adjust the brightness of an Image efficiently

Does anyone know of a more efficient way of adjusting the brightness of an image at runtime in UWP?
I found this question, whose approach works but runs terribly slowly.
However, I can't find any documentation online suggesting there is an alternative method.
Here is my problematic code.
// TODO Make Image Brightness Slider quicker and more intuitive.
private WriteableBitmap ChangeBrightness(WriteableBitmap source, int increment)
{
var dest = new WriteableBitmap(source.PixelWidth, source.PixelHeight);
byte[] color = new byte[4];
using (var srcBuffer = source.PixelBuffer.AsStream())
using (var dstBuffer = dest.PixelBuffer.AsStream())
{
while (srcBuffer.Read(color, 0, 4) > 0)
{
for (int i = 0; i < 3; i++)
{
var value = (float)color[i];
var alpha = color[3] / (float)255;
value /= alpha;
value += increment;
value *= alpha;
if (value > 255)
{
value = 255;
}
color[i] = (byte)value;
}
dstBuffer.Write(color, 0, 4);
}
}
return dest;
}
This might work. I didn't test it:
private async Task<WriteableBitmap> ChangeBrightness(WriteableBitmap source, float increment)
{
var canvasBitmap = CanvasBitmap.CreateFromBytes(CanvasDevice.GetSharedDevice(), source.PixelBuffer,
source.PixelWidth, source.PixelHeight, DirectXPixelFormat.B8G8R8A8UIntNormalized);
var brightnessFx = new BrightnessEffect
{
Source = canvasBitmap,
BlackPoint = new Vector2(0, increment)
};
var crt = new CanvasRenderTarget(CanvasDevice.GetSharedDevice(), source.PixelWidth, source.PixelHeight, 96);
using (var ds = crt.CreateDrawingSession())
{
ds.DrawImage(brightnessFx);
}
crt.GetPixelBytes(source.PixelBuffer);
return source;
}
You have to reference the Win2D NuGet package.
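Usage would look roughly like this (also untested; myWriteableBitmap and imageControl are just placeholders for your bitmap and the XAML Image element):
// Untested usage sketch, e.g. inside an async event handler: the method above
// writes the adjusted pixels back into the same WriteableBitmap's buffer.
var adjusted = await ChangeBrightness(myWriteableBitmap, 0.3f);
adjusted.Invalidate();            // tell the WriteableBitmap its pixels changed
imageControl.Source = adjusted;   // WriteableBitmap is an ImageSource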

Xamarin Urho IOS how to set up an application?

I am following this example but it is not that useful:
https://github.com/xamarin/urho-samples/tree/master/FeatureSamples/Core/29_SoundSynthesis
Anyhow, I am getting a runtime error that says: "The application is not configured yet."
But I did make an application object.
The error happens at node = new Node();
What am I missing?
This is my class:
using System;
using Urho.Audio;
using Urho;
using Urho.Resources;
using Urho.Gui;
using System.Diagnostics;
using System.Globalization;
namespace Brain_Entrainment
{
public class IsochronicTones : Urho.Application
{
/// Scene node for the sound component.
Node node;
/// Sound stream that we update.
BufferedSoundStream soundStream;
public double Frequency { get; set; }
public double Beat { get; set; }
public double Amplitude { get; set; }
public float Bufferlength { get; set; }
const int numBuffers = 3;
public IsochronicTones(ApplicationOptions AppOption) : base(AppOption)
{
Amplitude = 1;
Frequency = 100;
Beat = 0;
Bufferlength = Int32.MaxValue;
}
public void play()
{
Start();
}
protected override void OnUpdate(float timeStep)
{
UpdateSound();
base.OnUpdate(timeStep);
}
protected override void Start()
{
base.Start();
CreateSound();
}
void CreateSound()
{
// Sound source needs a node so that it is considered enabled
node = new Node();
SoundSource source = node.CreateComponent<SoundSource>();
soundStream = new BufferedSoundStream();
// Set format: 44100 Hz, sixteen bit, mono
soundStream.SetFormat(44100, true, false);
// Start playback. We don't have data in the stream yet, but the
//SoundSource will wait until there is data
// as the stream is by default in the "don't stop at end" mode
source.Play(soundStream);
}
void UpdateSound()
{
// Try to keep 1/10 second of sound in the buffer, to avoid both dropouts and unnecessary latency
float targetLength = 1.0f / 10.0f;
float requiredLength = targetLength - Bufferlength; // soundStream.BufferLength;
float w = 0;
if (requiredLength < 0.0f)
return;
uint numSamples = (uint)(soundStream.Frequency * requiredLength);
if (numSamples == 0)
return;
// Allocate a new buffer and fill it with a simple two-oscillator algorithm.
// The sound is over-amplified (distorted), clamped to the 16-bit range,
// and finally lowpass-filtered according to the coefficient.
var newData = new short[numSamples];
for (int i = 0; i < numSamples; ++i)
{
float newValue = 0;
if (Beat == 0)
{
newValue = (float)(Amplitude * Math.Sin(Math.PI * Frequency * i / 44100D));
}
else
{
w = (float)(1D * Math.Sin(i * Math.PI * Beat / 44100D));
if (w < 0)
{
w = 0;
}
newValue = (float)(Amplitude * Math.Sin(Math.PI * Frequency * i / 44100D));
}
//accumulator = MathHelper.Lerp(accumulator, newValue, filter);
newData[i] = (short)newValue;
}
// Queue buffer to the stream for playback
soundStream.AddData(newData, 0, newData.Length);
}
}
}
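One thing I'm not sure about: I currently call play(), which calls Start() directly. Do I have to let UrhoSharp run the application itself before any nodes are created? Something like this is what I mean (just a guess on my part, not verified):
// Guess: let UrhoSharp configure the engine by calling Run() on the
// application instead of invoking Start()/CreateSound() by hand.
var options = new ApplicationOptions("Data"); // "Data" = assets folder, placeholder
var app = new IsochronicTones(options);
app.Run(); // the engine is configured here, then Urho calls Start() itself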

AVT Firewrap.net Camera zooms in even though it wasn't coded

For my internship I have to take photos with a Prosilica camera, and I have been looking at different APIs to work with it. Now that I have found one that works and have written the code the way a previous intern wrote it ("guessing", he says), I get images, but they are all really zoomed in, while in the official FireGrab program the pictures look fine and aren't zoomed in at all. You can look at the images here. The code I wrote to connect to the camera is as follows:
Ctrl = FireWrap_CtrlCenter.GetInstance();
Ctrl.OnFrameReady += OnFrameReady;
Result = Ctrl.FGInitModule();
if (Result == enFireWrapResult.E_NOERROR)
{
Result = InfoContainer.FGGetNodeList();
var NodeCnt = InfoContainer.Size();
InfoContainer.GetAt(NodeInfo, 0);
Result = Cam.Connect(NodeInfo.Guid);
cCamera.Items.Add(Cam.DeviceAll);
if (Result == enFireWrapResult.E_NOERROR)
{
Cam.m_Guid = NodeInfo.Guid;
}
if (Result == enFireWrapResult.E_NOERROR)
{
Result = Cam.SetParameter(enFGParameter.E_IMAGEFORMAT,
(((uint)enFGResolution.E_RES_SCALABLE << 16) |
((uint)enColorMode.E_CCOLORMODE_Y8 << 8) |
0));
}
if (Result == enFireWrapResult.E_NOERROR)
Result = Cam.OpenCapture();
// Print device settings
Result = Cam.GetParameter(enFGParameter.E_XSIZE, ref XSize);
Result = Cam.GetParameter(enFGParameter.E_YSIZE, ref YSize);
width = XSize;
height = YSize;
// Start camera
if (Result == enFireWrapResult.E_NOERROR)
{
Result = Cam.StartDevice();
}
}
When I connect to the camera, I also tell it to start recording instantly. The frames I get when the camera turns on are processed in OnFrameReady, for which I used the following code:
Debug.WriteLine("OnFrameReady is called");
FGEventArgs args = (FGEventArgs)__p2;
FGFrame Frame;
Guid.High = args.High;
Guid.Low = args.Low;
if (Guid.Low == Cam.m_Guid.Low)
{
Result = Cam.GetFrame(Frame, 0);
// Process frame, skip FrameStart notification
if (Result == enFireWrapResult.E_NOERROR & Frame.Length > 0)
{
byte[] data = new byte[Frame.Length];
// Access to frame data
if (Frame.CloneData(data))
{
//DisplayImage(data.Clone());
SaveImageFromByteArray(data);
// Here you can start your image processsing logic on data
string debug = String.Format("[{6}] Frame #{0} length:{1}byte [ {2} {3} {4} {5} ... ]",
Frame.Id, Frame.Length, data[0], data[1], data[2], data[3], Cam.m_Guid.Low);
Debug.WriteLine(debug);
}
// Return frame to module as fast as posible after this the Frame is not valid
Result = Cam.PutFrame(Frame);
}
}
So in this function I get the frame and put it in a byte[], then I call SaveImageFromByteArray(), where I put the byte[] in a list so I can access all my pictures later on to save them. The code for SaveImageFromByteArray is as follows:
public void SaveImageFromByteArray(byte[] byteArray)
{
try
{
//bytearray size determined
byte[] data = new byte[width * height * 4];
int o = 0;
//bytearray size filled
for (int io = 0; io < width * height; io++)
{
byte value = byteArray[io];
data[o++] = value;
data[o++] = value;
data[o++] = value;
data[o++] = 0;
}
bytearrayList.Add(data);
}
catch (Exception ex)
{
MessageBox.Show(ex.ToString());
}
}
After I'm done recording all of my frames, I click save, which stops the camera, and then I call the following functions to save them to bitmap files:
public void SaveData()
{
try
{
foreach (byte[] data1 in bytearrayList)
{
byte[] data = Save(data1);
lock (this)
{
unsafe
{
fixed (byte* ptr = data)
{
try
{
using (image = new Bitmap((int) width, (int) height, (int) width * 4,
System.Drawing.Imaging.PixelFormat.Format32bppPArgb, new IntPtr(ptr)))
{
image.Save(path + nextpicture + ".bmp", ImageFormat.Bmp);
Debug.WriteLine("Image saved at " + path + nextpicture + ".bmp");
nextpicture++;
}
}
catch (Exception ex)
{
Debug.Write(ex.ToString());
}
}
}
}
}
}
catch (Exception ex)
{
Debug.Write(ex.ToString());
}
}
The Save function called in the function above is written as follows:
private byte[] Save(byte[] data1)
{
//bytearray size determined
byte[] data = new byte[width * height * 4];
int o = 0;
//bytearray size filled
for (int io = 0; io < width * height; io++)
{
byte value = data1[io];
data[o++] = value;
data[o++] = value;
data[o++] = value;
data[o++] = 0;
}
return data;
}
I think the zooming problem happens when we connect to the camera and execute this block of code:
if (Result == enFireWrapResult.E_NOERROR)
{
Result = Cam.SetParameter(enFGParameter.E_IMAGEFORMAT,
(((uint)enFGResolution.E_RES_SCALABLE << 16) |
((uint)enColorMode.E_CCOLORMODE_Y8 << 8)|
0));
}
But the problem is that there is no documentation to be found about FireWrap.NET or its API. Even when we try to change the 16 to, say, 15, the camera won't even start up.
Found the problem! Pixels were stretched out into 4 pixels horizontally, because we did this expansion twice (once in SaveImageFromByteArray and again in Save):
byte[] data = new byte[width * height * 4];
int o = 0;
//bytearray size filled
for (int io = 0; io < width * height; io++)
{
byte value = byteArray[io];
data[o++] = value;
data[o++] = value;
data[o++] = value;
data[o++] = 0;
}
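So the fix is to do that expansion only once: keep the raw Y8 frame bytes in the list as they come in, and leave the conversion to 32 bpp to Save(). Roughly:
// Store the frame exactly as the camera delivered it (one byte per pixel);
// Save() already expands it to the 4-byte-per-pixel layout for the Bitmap.
public void SaveImageFromByteArray(byte[] byteArray)
{
    bytearrayList.Add(byteArray);
}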

I'd like to assign data to an object

I receive data, which are X, Y, Z position and X, Y, Z rotation, over UDP in real time (60 Hz).
I used the code by la1n (http://forum.unity3d.com/threads/simple-udp-implementation-send-read-via-mono-c.15900/).
I wrote the code myself based on la1n's code, but I have some problems.
First, I think I've completed the string split and parse using an array.
Is there any problem with the split and parse?
Second, I have a problem matching the data (x, y, z position and x, y, z rotation) to the object (a box).
I attached my code (a C# script and a Lua script).
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
using System.Text;
using System.Net;
using System.Net.Sockets;
using System.Threading;
public class UDPReceive : MonoBehaviour
{
// receiving Thread
Thread receiveThread;
// udpclient object
UdpClient client;
// port
public int port = 8051; // define > init
// infos
public string lastReceivedUDPPacket = "";
// start from shell
private static void Main()
{
UDPReceive receiveObj = new UDPReceive();
receiveObj.init();
string text = "";
do
{
text = Console.ReadLine();
}
while (!text.Equals("exit"));
}
// start from unity3d
public void Start()
{
init();
}
void SetTransform(float XPos, float YPos, float ZPos)
{
transform.position = new Vector3(XPos, YPos, ZPos);
}
void SetRotation(float XRot, float YRot, float ZRot)
{
transform.eulerAngles = new Vector3(XRot, YRot, ZRot);
}
void Update()
{
SetTransform();
SetRotation();
}
// OnGUI
void OnGUI()
{
Rect rectObj = new Rect(40, 10, 200, 400);
GUIStyle style = new GUIStyle();
style.alignment = TextAnchor.UpperLeft;
GUI.Box(rectObj, "\nLast Packet: \n" + lastReceivedUDPPacket, style);
}
// init
private void init()
{
// Define the endpoint from which messages are sent.
print("UDPSend.init()");
// define port
port = 8051;
// status
print("Sending to 127.0.0.1 : " + port);
print("Test-Sending to this Port: nc -u 127.0.0.1 " + port + "");
// ----------------------------
// Listen
// ----------------------------
// Define the local endpoint (where messages are received).
// Create a new thread to receive incoming messages.
receiveThread = new Thread(
new ThreadStart(ReceiveData));
receiveThread.IsBackground = true;
receiveThread.Start();
}
// Unity Application Quit Function
void OnApplicationQuit()
{
stopThread();
}
// Stop reading UDP messages
private void stopThread()
{
if (receiveThread.IsAlive)
{
receiveThread.Abort();
}
client.Close();
}
// receive thread
private void ReceiveData()
{
client = new UdpClient(port);
while (true)
{
try
{
// Receive bytes.
IPEndPoint anyIP = new IPEndPoint(IPAddress.Any, 0);
byte[] data = client.Receive(ref anyIP);
// Decode the bytes into text using UTF-8.
string text = Encoding.UTF8.GetString(data);
// Data Received from simulation
print("Received Data >> " + text);
// latest UDPpacket
lastReceivedUDPPacket = text;
}
catch (Exception err)
{
print(err.ToString());
}
}
}
private void parseDataToCrane(string text)
{
try
{
float[] floatData = Array.ConvertAll(text.Split(';'), float.Parse);
float XPos = floatData[0];
float YPos = floatData[1];
float ZPos = floatData[2];
float XRot = floatData[3];
float YRot = floatData[4];
float ZRot = floatData[5];
}
catch (Exception err)
{
print(err.ToString());
}
}
}
This is the simulation code, written in Lua:
socket = require("socket")
print(socket._VERSION)
function dataListener:post( t )
local XPos = ship.rb:getPosition():x()
local YPos = ship.rb:getPosition():y()
local ZPos = ship.rb:getPosition():z()
local XXPos = math.floor( XPos * 1000 + 0.5 ) / 1000
local YYPos = math.floor( YPos * 1000 + 0.5 ) / 1000
local ZZPos = math.floor( ZPos * 1000 + 0.5 ) / 1000
local XRot = ship.rb:getRotation():x()
local YRot = ship.rb:getRotation():y()
local ZRot = ship.rb:getRotation():z()
local XXRot = math.floor( XPos * 1000 + 0.5 ) / 1000
local YYRot = math.floor( YPos * 1000 + 0.5 ) / 1000
local ZZRot = math.floor( ZPos * 1000 + 0.5 ) / 1000
udp=socket.udp();
udp:setpeername("127.0.0.1",8051)
udp:send(string.format("%f; %f; %f; %f; %f; %f", XXPos, YYPos, ZZPos, XXRot, YYRot, ZZRot));
end
I have worked on this for 3 hours, but whenever I debug, errors occur and it doesn't work...
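To make my intent clearer, this is the kind of wiring I'm aiming for in the C# script: store the parsed values in fields and apply them to the box from Update() on the main thread (just a sketch of the idea, and the field names are placeholders I made up):
// Sketch of the intended flow (not working code): ReceiveData() would pass the
// received text to parseDataToCrane(), which stores the latest values, and
// Update() applies them, since transforms can only be touched on the main thread.
private Vector3 latestPosition;
private Vector3 latestRotation;

private void parseDataToCrane(string text)
{
    // Lua formats numbers with '.', so parse with the invariant culture
    // to avoid locale issues with the decimal separator.
    float[] f = Array.ConvertAll(text.Split(';'),
        s => float.Parse(s, System.Globalization.CultureInfo.InvariantCulture));
    latestPosition = new Vector3(f[0], f[1], f[2]);
    latestRotation = new Vector3(f[3], f[4], f[5]);
}

void Update()
{
    transform.position = latestPosition;
    transform.eulerAngles = latestRotation;
}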
