Hand tracking with Kinect SDK 1.7 - C#

I want to detect whether a hand is gripped or released using Kinect SDK v1.7 with C#, but I keep getting this exception:
Exception thrown: 'System.InvalidOperationException' in
Microsoft.Kinect.Toolkit.Interaction.dll
The code:
private void InteractionStreamOnInteractionFrameReady(object sender, InteractionFrameReadyEventArgs e)
{
    using (InteractionFrame frame = e.OpenInteractionFrame())
    {
        if (frame != null)
        {
            if (this.userInfos == null)
            {
                this.userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
            }
            frame.CopyInteractionDataTo(this.userInfos);
        }
        else
        {
            return;
        }
    }
    foreach (UserInfo userInfo in this.userInfos)
    {
        foreach (InteractionHandPointer handPointer in userInfo.HandPointers)
        {
            string action = null;
            switch (handPointer.HandEventType)
            {
                case InteractionHandEventType.Grip:
                    action = "gripped";
                    break;
                case InteractionHandEventType.GripRelease:
                    action = "released";
                    break;
            }
            if (action != null)
            {
                string handSide = "unknown";
                switch (handPointer.HandType)
                {
                    case InteractionHandType.Left:
                        handSide = "left";
                        break;
                    case InteractionHandType.Right:
                        handSide = "right";
                        break;
                }
                if (handSide == "left")
                {
                    if (action == "released")
                    {
                        // left hand released code here
                        //MessageBox.Show("left hand released");
                        System.Console.WriteLine("left hand released");
                    }
                    else
                    {
                        // left hand gripped code here
                        System.Console.WriteLine("left hand gripped");
                    }
                }
                else
                {
                    if (action == "released")
                    {
                        // right hand released code here
                        System.Console.WriteLine("right hand released");
                    }
                    else
                    {
                        // right hand gripped code here
                        System.Console.WriteLine("right hand gripped");
                    }
                }
            }
        }
    }
}
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    // Create the drawing group we'll use for drawing
    this.drawingGroup = new DrawingGroup();
    // Create an image source that we can use in our image control
    this.imageSource = new DrawingImage(this.drawingGroup);
    // Display the drawing using our image control
    Image.Source = this.imageSource;
    sensor = KinectSensor.KinectSensors.FirstOrDefault();
    if (null != this.sensor)
    {
        System.Console.WriteLine("Start streaming");
        skeletons = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];
        userInfos = new UserInfo[InteractionFrame.UserInfoArrayLength];
        this.sensor.SkeletonStream.Enable();
        this.interactionStream = new InteractionStream(this.sensor, new DummyInteractionClient());
        this.interactionStream.InteractionFrameReady += InteractionStreamOnInteractionFrameReady;
        sensor.SkeletonFrameReady += SensorSkeletonFrameReady;
        // Start the sensor!
        try
        {
            this.sensor.Start();
        }
        catch (IOException)
        {
            // this.sensor = null;
        }
    }
    if (null == this.sensor)
    {
        System.Console.WriteLine("NoKinectReady");
    }
}
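For what it's worth, the usual cause of this InvalidOperationException is that the interaction stream is never fed any data: InteractionStream needs both depth and skeleton frames, and the code above only enables the skeleton stream. Below is a minimal sketch of the missing plumbing, assuming the fields from the code above (sensor, skeletons, interactionStream) and that SensorSkeletonFrameReady is the handler wired up in WindowLoaded:
// In WindowLoaded, also enable the depth stream and subscribe to it:
//   this.sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
//   this.sensor.DepthFrameReady += SensorDepthFrameReady;

private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            // Hand the raw depth pixels to the interaction stream.
            this.interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
    }
}

private void SensorSkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
{
    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
    {
        if (skeletonFrame != null)
        {
            skeletonFrame.CopySkeletonDataTo(this.skeletons);
            // The accelerometer reading tells the stream which way is up.
            this.interactionStream.ProcessSkeleton(this.skeletons,
                this.sensor.AccelerometerGetCurrentReading(), skeletonFrame.Timestamp);
        }
    }
}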

Related

Turn camera picture depending on the device orientation (Xamarin.iOS)

I want to make a Xamarin.iOS app where I can capture pictures like the camera app. My app supports only portrait.
I want to rotate the captured picture to portrait if the device was in landscape when the picture was taken.
Does anyone know how I can do this?
Code
public async void CapturePhoto()
{
    var videoConnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
    var sampleBuffer = await stillImageOutput.CaptureStillImageTaskAsync(videoConnection);
    var jpegImageAsBytes = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer).ToArray();
    string base64StringImage = Convert.ToBase64String(jpegImageAsBytes);
    FaceRecognition faceRecognition = new FaceRecognition();
    int result = faceRecognition.SendPhoto(base64StringImage);
}
Try this:
var currentOrientation = UIApplication.SharedApplication.StatusBarOrientation;
if (currentOrientation == UIInterfaceOrientation.Portrait)
{
    videoConnection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
}
else if (currentOrientation == UIInterfaceOrientation.LandscapeRight)
{
    videoConnection.VideoOrientation = AVCaptureVideoOrientation.LandscapeRight;
}
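The remaining two orientations presumably follow the same pattern (my extrapolation; the original answer elided them):
else if (currentOrientation == UIInterfaceOrientation.LandscapeLeft)
{
    videoConnection.VideoOrientation = AVCaptureVideoOrientation.LandscapeLeft;
}
else if (currentOrientation == UIInterfaceOrientation.PortraitUpsideDown)
{
    videoConnection.VideoOrientation = AVCaptureVideoOrientation.PortraitUpsideDown;
}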
Update:
If the app only supports one orientation, or the screen is locked, there is another, older way to detect device orientation: Core Motion.
public void LockOrientation()
{
    CMMotionManager CMManager = new CMMotionManager();
    CMManager.DeviceMotionUpdateInterval = 0.2f;
    CMManager.StartDeviceMotionUpdates(NSOperationQueue.MainQueue, (motion, error) =>
    {
        if (Math.Abs(motion.Gravity.X) > Math.Abs(motion.Gravity.Y))
        {
            Console.WriteLine("Lan");
            if (motion.Gravity.X > 0)
            {
                UIDevice.CurrentDevice.SetValueForKey(new NSNumber((int)UIInterfaceOrientation.LandscapeLeft), new NSString("orientation"));
                Console.WriteLine("Left");
            }
            else
            {
                UIDevice.CurrentDevice.SetValueForKey(new NSNumber((int)UIInterfaceOrientation.LandscapeRight), new NSString("orientation"));
                Console.WriteLine("Right");
            }
        }
        else
        {
            if (motion.Gravity.Y >= 0)
            {
                Console.WriteLine("Down");
            }
            else
            {
                Console.WriteLine("UP");
            }
        }
        CMManager.StopDeviceMotionUpdates();
    });
}

Leap Motion gets confused between different gesture types. How to optimize?

The swipe gestures Left, Right, Up, and Down are not reliably distinguished. If a task is bound to an upward swipe, a left swipe gets confused with it and still triggers it. Please suggest which parameters to tune, or which settings to use, for reliable swipe recognition.
It also confuses a circle with a swipe.
I have added my code below. Only one gesture should be detected in a single frame; how do I enforce that here?
public MainWindow()
{
    InitializeComponent();
    this.controller = new Controller();
    this.listener = new LeapListener(this);
    controller.AddListener(listener);
    for (int iCount = 1; iCount < 23; iCount++)
    {
        images[iCount] = new BitmapImage();
        images[iCount].BeginInit();
        images[iCount].UriSource = new Uri("Images/" + iCount + ".png", UriKind.Relative);
        images[iCount].EndInit();
    }
    image.Source = images[1];
}

delegate void LeapEventDelegate(string EventName);

public void LeapEventNotification(string EventName)
{
    if (this.CheckAccess())
    {
        switch (EventName)
        {
            case "onInit":
                break;
            case "onConnect":
                this.connectHandler();
                break;
            case "onFrame":
                this.checkGestures(this.controller.Frame());
                break;
        }
    }
    else
    {
        Dispatcher.Invoke(new LeapEventDelegate(LeapEventNotification), new object[] { EventName });
    }
}

public void connectHandler()
{
    controller.EnableGesture(Gesture.GestureType.TYPE_SWIPE);
    this.controller.EnableGesture(Gesture.GestureType.TYPE_CIRCLE);
    this.controller.EnableGesture(Gesture.GestureType.TYPE_KEY_TAP);
    this.controller.EnableGesture(Gesture.GestureType.TYPE_SCREEN_TAP);
    // controller.Config.SetFloat("Gesture.Swipe.Speed", 4000.0f);
    controller.Config.SetFloat("Gesture.Swipe.MinVelocity", 750f);
    controller.Config.SetFloat("Gesture.Swipe.MinLength", 200f);
    controller.Config.SetFloat("Gesture.KeyTap.MinDownVelocity", 40.0f);
    controller.Config.SetFloat("Gesture.KeyTap.HistorySeconds", .2f);
    controller.Config.SetFloat("Gesture.KeyTap.MinDistance", 40.0f);
    controller.Config.SetFloat("Gesture.Circle.MinRadius", 15.0f);
    controller.Config.SetFloat("Gesture.Circle.MinArc", 15f);
    controller.Config.SetFloat("InteractionBox.Width", 1600.0f);
    controller.Config.SetFloat("InteractionBox.Height", 1600.0f);
    controller.Config.Save();
}

public void checkGestures(Frame frame)
{
    GestureList gestures = frame.Gestures();
    foreach (Gesture gesture in gestures)
    {
        // For Image 1
        if (image.Source.Equals(images[1]))
        {
            if (gesture.Type == Gesture.GestureType.TYPE_SWIPE)
            {
                SwipeGesture swipe = new SwipeGesture(gesture);
                if (swipe.State == Gesture.GestureState.STATE_START && swipe.Direction.x > 0 && Math.Abs(swipe.Direction.y) < 5)
                {
                    image.Source = images[2];
                    // Console.WriteLine("Second");
                }
            }
        }
        // For Image 2
        else if (image.Source.Equals(images[2]))
        {
            if (gesture.Type == Gesture.GestureType.TYPE_SWIPE)
            {
                SwipeGesture swipe = new SwipeGesture(gesture);
                if (swipe.State == Gesture.GestureState.STATE_START && swipe.Direction.y > 0)
                {
                    image.Source = images[3];
                }
                else if (swipe.State == Gesture.GestureState.STATE_START && swipe.Direction.x > 0 && Math.Abs(swipe.Direction.y) < 5)
                {
                    image.Source = images[7];
                }
            }
            if (gesture.Type == Gesture.GestureType.TYPE_KEY_TAP)
            {
                KeyTapGesture TapGesture = new KeyTapGesture(gesture);
                image.Source = images[1];
                Console.WriteLine("Circle");
            }
        }
        // For Image 3
        else if (image.Source.Equals(images[3]))
        {
            if (gesture.Type == Gesture.GestureType.TYPE_SWIPE)
            {
                SwipeGesture swipe = new SwipeGesture(gesture);
                if (swipe.State == Gesture.GestureState.STATE_START && swipe.Direction.y < 0)
                {
                    image.Source = images[4];
                }
                else if (swipe.State == Gesture.GestureState.STATE_START && swipe.Direction.x < 0 && Math.Abs(swipe.Direction.y) < 5)
                {
                    image.Source = images[16];
                }
            }
            if (gesture.Type == Gesture.GestureType.TYPE_KEY_TAP)
            {
                KeyTapGesture TapGesture = new KeyTapGesture(gesture);
                image.Source = images[1];
            }
        }
    } // foreach
}
The out-of-the-box gestures were deprecated in 3.x (Orion) because they weren't reliable.
The best thing to do is to learn the SDK and create your own gestures; you can go beyond what's available out of the box and make it more reliable.
Everything is about 3D positioning, so pay attention to your X, Y, and Z values. Use the debug hands, or create your own debug view, to see how the data behaves.
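If you do roll your own, here is a minimal sketch (my illustration, not Leap sample code) that classifies a swipe from the palm velocity in the v2 C# API the question uses. It requires the dominant axis to clearly dominate, which avoids the up/left confusion, and it returns at most one result per frame, which also addresses the one-gesture-per-frame question. DetectSwipe is a hypothetical helper and the thresholds are guesses to tune:
// Classify at most one swipe per frame from palm velocity (mm/s).
private string DetectSwipe(Frame frame)
{
    const float minSpeed = 500f;   // ignore slow hand drift
    const float dominance = 2.0f;  // dominant axis must double the other
    foreach (Hand hand in frame.Hands)
    {
        Vector v = hand.PalmVelocity;
        float ax = Math.Abs(v.x), ay = Math.Abs(v.y);
        if (ax > minSpeed && ax > dominance * ay)
            return v.x > 0 ? "right" : "left";
        if (ay > minSpeed && ay > dominance * ax)
            return v.y > 0 ? "up" : "down";
    }
    return null; // no unambiguous swipe this frame
}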

How to set the capture source of AudioVideoCaptureDevice on Windows Phone

I'm trying to capture video on Windows Phone using AudioVideoCaptureDevice, but when I try to set the capture source I get an error saying "Operation not valid due to the state of the object". Can you tell me what I'm doing wrong in the following code?
Code snippet:
// Viewfinder for capturing video.
private VideoBrush videoRecorderBrush;
// Source and device for capturing video.
private CaptureSource _cs;
private VideoCaptureDevice _cd;
private AudioVideoCaptureDevice vcDevice;
double w, h;
// File details for storing the recording.
private IsolatedStorageFileStream isoVideoFile;
private FileSink fileSink;
private string isoVideoFileName = "iClips_Video.mp4";
private StorageFile sfVideoFile;
// For managing button and application state.
private enum ButtonState { Initialized, Stopped, Ready, Recording, Playback, Paused, NoChange, CameraNotSupported };
private ButtonState currentAppState;
// Constructor
public MainPage()
{
try
{
InitializeComponent();
//setup recording
// Prepare ApplicationBar and buttons.
PhoneAppBar = (ApplicationBar)ApplicationBar;
PhoneAppBar.IsVisible = true;
StartRecording = ((ApplicationBarIconButton)ApplicationBar.Buttons[0]);
StopPlaybackRecording = ((ApplicationBarIconButton)ApplicationBar.Buttons[1]);
StartPlayback = ((ApplicationBarIconButton)ApplicationBar.Buttons[2]);
PausePlayback = ((ApplicationBarIconButton)ApplicationBar.Buttons[3]);
SetScreenResolution();
//initialize the camera task
cameraCaptureTask = new CameraCaptureTask();
cameraCaptureTask.Completed += new EventHandler<PhotoResult>(cameraCaptureTask_Completed);
if (isFilePresent("username") && isFilePresent("Password"))
{
if (isFilePresent("IsProfilePhotoOnServer"))
{
connectToSocket();
}
else
{
SignUpProfilePhoto();
}
}
else
{
SignIn();
}
}
catch (Exception ex)
{
this.Dispatcher.BeginInvoke(delegate()
{
MessageBox.Show("Constructor Error:\n"+ ex.Message);
});
}
}
protected override void OnNavigatedTo(NavigationEventArgs e)
{
base.OnNavigatedTo(e);
// Initialize the video recorder.
InitializeVideoRecorder();
//prepare shutter hot keys
CameraButtons.ShutterKeyHalfPressed += OnButtonHalfPress;
CameraButtons.ShutterKeyPressed += OnButtonFullPress;
CameraButtons.ShutterKeyReleased += OnButtonRelease;
}
protected override void OnNavigatedFrom(NavigationEventArgs e)
{
// Dispose of camera and media objects.
DisposeVideoPlayer();
DisposeVideoRecorder();
base.OnNavigatedFrom(e);
CameraButtons.ShutterKeyHalfPressed -= OnButtonHalfPress;
CameraButtons.ShutterKeyPressed -= OnButtonFullPress;
CameraButtons.ShutterKeyReleased -= OnButtonRelease;
}
protected override void OnOrientationChanged(OrientationChangedEventArgs e)
{
if (vcDevice != null)
{
if (e.Orientation == PageOrientation.LandscapeLeft)
{
txtDebug.Text = "LandscapeLeft";
videoRecorderBrush.RelativeTransform =
new CompositeTransform() { CenterX = 0.5, CenterY = 0.5, Rotation = 90 };
//rotate logo
if (logo != null)
{
RotateTransform rt = new RotateTransform();
rt.Angle = 90;
//default rotation is around top left corner of the control,
//but you sometimes want to rotate around the center of the control
//to do that, you need to set the RenderTransFormOrigin
//of the item you're going to rotate
//I did not test this approach, maybe You're going to need to use actual coordinates
//so this bit is for information purposes only
logo.RenderTransformOrigin = new Point(0.5, 0.5);
logo.RenderTransform = rt;
}
//rotate sign in link
if (MyGrid != null)
{
RotateTransform rt = new RotateTransform();
rt.Angle = 90;
//default rotation is around top left corner of the control,
//but you sometimes want to rotate around the center of the control
//to do that, you need to set the RenderTransFormOrigin
//of the item you're going to rotate
//I did not test this approach, maybe You're going to need to use actual coordinates
//so this bit is for information purposes only
MyGrid.RenderTransformOrigin = new Point(0.5, 0.5);
MyGrid.RenderTransform = rt;
}
}
if (e.Orientation == PageOrientation.PortraitUp)
{
txtDebug.Text = "PortraitUp";
videoRecorderBrush.RelativeTransform =
new CompositeTransform() { CenterX = 0.5, CenterY = 0.5, Rotation = 90 };
//rotate logo
if (logo != null)
{
RotateTransform rt = new RotateTransform();
rt.Angle = 0;
//default rotation is around top left corner of the control,
//but you sometimes want to rotate around the center of the control
//to do that, you need to set the RenderTransFormOrigin
//of the item you're going to rotate
//I did not test this approach, maybe You're going to need to use actual coordinates
//so this bit is for information purposes only
logo.RenderTransformOrigin = new Point(0.5, 0.5);
logo.RenderTransform = rt;
}
//rotate sign in link
if (MyGrid != null)
{
RotateTransform rt = new RotateTransform();
rt.Angle = 0;
//default rotation is around top left corner of the control,
//but you sometimes want to rotate around the center of the control
//to do that, you need to set the RenderTransFormOrigin
//of the item you're going to rotate
//I did not test this approach, maybe You're going to need to use actual coordinates
//so this bit is for information purposes only
MyGrid.RenderTransformOrigin = new Point(0.5, 0.5);
MyGrid.RenderTransform = rt;
}
}
if (e.Orientation == PageOrientation.LandscapeRight)
{
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = "LandscapeRight";
// Rotate for LandscapeRight orientation.
//videoRecorderBrush.RelativeTransform =
//new CompositeTransform() { CenterX = 0.5, CenterY = 0.5, Rotation = 180 };
//rotate logo
if (logo != null)
{
RotateTransform rt = new RotateTransform();
rt.Angle = -90;
//default rotation is around top left corner of the control,
//but you sometimes want to rotate around the center of the control
//to do that, you need to set the RenderTransFormOrigin
//of the item you're going to rotate
//I did not test this approach, maybe You're going to need to use actual coordinates
//so this bit is for information purposes only
logo.RenderTransformOrigin = new Point(0.5, 0.5);
logo.RenderTransform = rt;
}
//rotate MyGrid
if (MyGrid != null)
{
RotateTransform rt = new RotateTransform();
rt.Angle = -90;
//default rotation is around top left corner of the control,
//but you sometimes want to rotate around the center of the control
//to do that, you need to set the RenderTransFormOrigin
//of the item you're going to rotate
//I did not test this approach, maybe You're going to need to use actual coordinates
//so this bit is for information purposes only
MyGrid.RenderTransformOrigin = new Point(0.5, 0.5);
MyGrid.RenderTransform = rt;
}
});
}
if (e.Orientation == PageOrientation.PortraitDown)
{
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = "PortraitDown";
videoRecorderBrush.RelativeTransform =
new CompositeTransform() { CenterX = 0.5, CenterY = 0.5, Rotation = 270 };
});
}
}
}
// Hardware shutter button Hot-Actions.
private void OnButtonHalfPress(object sender, EventArgs e)
{
//toggle between video- play and pause
try
{
this.Dispatcher.BeginInvoke(delegate()
{
if (StartPlayback.IsEnabled)
{
PlayVideo();
}
if (PausePlayback.IsEnabled)
{
PauseVideo();
}
});
}
catch (Exception focusError)
{
// Cannot focus when a capture is in progress.
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = focusError.Message;
});
}
}
private void OnButtonFullPress(object sender, EventArgs e)
{
// Focus when a capture is not in progress.
try
{
this.Dispatcher.BeginInvoke(delegate()
{
if (vcDevice != null)
{
//stopVideoPlayer if it's playing back
if (currentAppState == ButtonState.Playback || currentAppState == ButtonState.Paused)
{
DisposeVideoPlayer();
StartVideoPreview();
}
if (StartRecording.IsEnabled)
{
StartVideoRecording();
}
else
{
StopVideoRecording();
}
}
});
}
catch (Exception focusError)
{
// Cannot focus when a capture is in progress.
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = focusError.Message;
});
}
}
private void OnButtonRelease(object sender, EventArgs e)
{
try
{
this.Dispatcher.BeginInvoke(delegate()
{
});
}
catch (Exception focusError)
{
// Cannot focus when a capture is in progress.
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = focusError.Message;
});
}
}
// Update the buttons and text on the UI thread based on app state.
private void UpdateUI(ButtonState currentButtonState, string statusMessage)
{
// Run code on the UI thread.
Dispatcher.BeginInvoke(delegate
{
switch (currentButtonState)
{
// When the camera is not supported by the phone.
case ButtonState.CameraNotSupported:
StartRecording.IsEnabled = false;
StopPlaybackRecording.IsEnabled = false;
StartPlayback.IsEnabled = false;
PausePlayback.IsEnabled = false;
break;
// First launch of the application, so no video is available.
case ButtonState.Initialized:
StartRecording.IsEnabled = true;
StopPlaybackRecording.IsEnabled = false;
StartPlayback.IsEnabled = false;
PausePlayback.IsEnabled = false;
break;
// Ready to record, so video is available for viewing.
case ButtonState.Ready:
StartRecording.IsEnabled = true;
StopPlaybackRecording.IsEnabled = false;
StartPlayback.IsEnabled = true;
PausePlayback.IsEnabled = false;
break;
// Video recording is in progress.
case ButtonState.Recording:
StartRecording.IsEnabled = false;
StopPlaybackRecording.IsEnabled = true;
StartPlayback.IsEnabled = false;
PausePlayback.IsEnabled = false;
break;
// Video playback is in progress.
case ButtonState.Playback:
StartRecording.IsEnabled = false;
StopPlaybackRecording.IsEnabled = true;
StartPlayback.IsEnabled = false;
PausePlayback.IsEnabled = true;
break;
// Video playback has been paused.
case ButtonState.Paused:
StartRecording.IsEnabled = false;
StopPlaybackRecording.IsEnabled = true;
StartPlayback.IsEnabled = true;
PausePlayback.IsEnabled = false;
break;
default:
break;
}
// Display a message.
txtDebug.Text = statusMessage;
// Note the current application state.
currentAppState = currentButtonState;
});
}
public async void InitializeVideoRecorder()
{
try
{
if (_cs == null)
{
_cs = new CaptureSource();
fileSink = new FileSink();
_cd = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();
CameraSensorLocation location = CameraSensorLocation.Back;
var captureResolutions =
AudioVideoCaptureDevice.GetAvailableCaptureResolutions(location);
vcDevice = await AudioVideoCaptureDevice.OpenAsync(location, captureResolutions[0]);
vcDevice.RecordingFailed += OnCaptureFailed;
vcDevice.VideoEncodingFormat = CameraCaptureVideoFormat.H264;
vcDevice.AudioEncodingFormat = CameraCaptureAudioFormat.Aac;
// Initialize the camera if it exists on the phone.
if (vcDevice != null)
{
//initialize fileSink
await InitializeFileSink();
// Create the VideoBrush for the viewfinder.
videoRecorderBrush = new VideoBrush();
videoRecorderBrush.SetSource(_cs);
// Display the viewfinder image on the rectangle.
viewfinderRectangle.Fill = videoRecorderBrush;
_cs.Start();
// Set the button state and the message.
UpdateUI(ButtonState.Initialized, "Tap record to start recording...");
}
else
{
// Disable buttons when the camera is not supported by the phone.
UpdateUI(ButtonState.CameraNotSupported, "A camera is not supported on this phone.");
}
}
}
catch(Exception ex)
{
MessageBox.Show("InitializeVideoRecorder Error:\n" + ex.Message);
}
}
public async Task InitializeFileSink()
{
StorageFolder isoStore = ApplicationData.Current.LocalFolder;
sfVideoFile = await isoStore.CreateFileAsync(
isoVideoFileName,
CreationCollisionOption.ReplaceExisting);
}
private void OnCaptureFailed(AudioVideoCaptureDevice sender, CaptureFailedEventArgs args)
{
MessageBox.Show(args.ToString());
}
private void OnCaptureSourceFailed(object sender, ExceptionRoutedEventArgs e)
{
MessageBox.Show(e.ErrorException.Message.ToString());
}
// Set the recording state: display the video on the viewfinder.
private void StartVideoPreview()
{
try
{
// Display the video on the viewfinder.
if (_cs.VideoCaptureDevice != null
&& _cs.State == CaptureState.Stopped)
{
// Add captureSource to videoBrush.
videoRecorderBrush.SetSource(_cs);
// Add videoBrush to the visual tree.
viewfinderRectangle.Fill = videoRecorderBrush;
_cs.Start();
// Set the button states and the message.
UpdateUI(ButtonState.Ready, "Ready to record.");
//Create optional Resolutions
}
}
// If preview fails, display an error.
catch (Exception e)
{
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = "ERROR: " + e.Message.ToString();
});
}
}
// Set recording state: start recording.
private void StartVideoRecording()
{
try
{
// Connect fileSink to captureSource.
if (_cs.VideoCaptureDevice != null
&& _cs.State == CaptureState.Started)
{
_cs.Stop();
// Connect the input and output of fileSink.
fileSink.CaptureSource = _cs;
fileSink.IsolatedStorageFileName = isoVideoFileName;
}
// Begin recording.
if (_cs.VideoCaptureDevice != null
&& _cs.State == CaptureState.Stopped)
{
_cs.Start();
}
// Set the button states and the message.
UpdateUI(ButtonState.Recording, "Recording...");
StartTimer();
}
// If recording fails, display an error.
catch (Exception e)
{
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = "ERROR: " + e.Message.ToString();
});
}
}
// Set the recording state: stop recording.
private void StopVideoRecording()
{
try
{
// Stop recording.
if (_cs.VideoCaptureDevice != null
&& _cs.State == CaptureState.Started)
{
_cs.Stop();
// Disconnect fileSink.
fileSink.CaptureSource = null;
fileSink.IsolatedStorageFileName = null;
// Set the button states and the message.
UpdateUI(ButtonState.Stopped, "Preparing viewfinder...");
StopTimer();
StartVideoPreview();
}
}
// If stop fails, display an error.
catch (Exception e)
{
this.Dispatcher.BeginInvoke(delegate()
{
txtDebug.Text = "ERROR: " + e.Message.ToString();
});
}
}
// Start the video recording.
private void StartRecording_Click(object sender, EventArgs e)
{
// Avoid duplicate taps.
StartRecording.IsEnabled = false;
StartVideoRecording();
}
private void DisposeVideoRecorder()
{
if (_cs != null)
{
// Stop captureSource if it is running.
if (_cs.VideoCaptureDevice != null
&& _cs.State == CaptureState.Started)
{
_cs.Stop();
}
// Remove the event handler for captureSource.
_cs.CaptureFailed -= OnCaptureFailed;
// Remove the video recording objects.
_cs = null;
vcDevice = null;
fileSink = null;
videoRecorderBrush = null;
}
}
private void OnCaptureFailed(object sender, ExceptionRoutedEventArgs e)
{
throw new NotImplementedException();
}
//STOP WATCH
private void StartTimer()
{
dispatcherTimer = new System.Windows.Threading.DispatcherTimer();
dispatcherTimer.Tick += new EventHandler(dispatcherTimer_Tick);
dispatcherTimer.Interval = new TimeSpan(0, 0, 1);
dispatcherTimer.Start();
startTime = System.DateTime.Now;
}
private void StopTimer()
{
dispatcherTimer.Stop();
}
private void dispatcherTimer_Tick(object sender, EventArgs e)
{
System.DateTime now = System.DateTime.Now;
txtRecTime.Text = now.Subtract(startTime).ToString();
}
The error is thrown inside InitializeVideoRecorder().
It seems like your code is too long to analyze, as @john mentioned in the comments. But since you suggested posting some helpful links, here you go: following the steps in the MSDN walkthrough works without any issues, as I've tried it once.
If you really need a sample to kick off, there's one from MSDN. Hope you'll make the most of it.
I figured out that I have to pass the capture device to the VideoBrush to make it work. I'm still not sure where, and whether, I should use CaptureSource at all when capturing with AudioVideoCaptureDevice. Anyway, here is the solution code:
// Create the VideoBrush for the viewfinder.
videoRecorderBrush = new VideoBrush();
// Pass the AudioVideoCaptureDevice (vcDevice) to the brush instead of the CaptureSource.
videoRecorderBrush.SetSource(vcDevice);
// Display the viewfinder image on the rectangle.
viewfinderRectangle.Fill = videoRecorderBrush;
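A possible explanation, for context (my reading, not stated in the original answer): CaptureSource/FileSink and AudioVideoCaptureDevice are two separate capture stacks on Windows Phone 8, and mixing them can leave the source in an invalid state, which matches the exception above. If you stay on AudioVideoCaptureDevice alone, it can also record by itself; a minimal sketch, assuming the vcDevice, sfVideoFile, and isoVideoFileName fields from the question and an async calling context:
// Sketch: record directly with AudioVideoCaptureDevice instead of FileSink.
sfVideoFile = await ApplicationData.Current.LocalFolder.CreateFileAsync(
    isoVideoFileName, CreationCollisionOption.ReplaceExisting);
await vcDevice.StartRecordingToStorageFileAsync(sfVideoFile);
// ... later, when the user stops recording:
await vcDevice.StopRecordingAsync();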

Recognize two skeletons Kinect

I have made a game (Pong) using a Kinect. I can recognize one skeleton and make gestures to control the left stick, but when I try to play with two players it doesn't recognize the other player's skeleton. This is what I have so far:
private void SensorSkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
{
    Skeleton[] skeletons = new Skeleton[0];
    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
    {
        if (skeletonFrame != null)
        {
            skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
            skeletonFrame.CopySkeletonDataTo(skeletons);
        }
        if (skeletons.Length != 0)
        {
            foreach (Skeleton skel in skeletons)
            {
                if (skel.TrackingState == SkeletonTrackingState.Tracked)
                {
                    this.tracked(skel);
                    this.trackedLeft(skel);
                }
            }
        }
    }
}

public void tracked(Skeleton skeleton)
{
    Joint jHandRight = skeleton.Joints[JointType.HandRight];
    Joint jHipCenter = skeleton.Joints[JointType.HipCenter];
    if ((jHipCenter.Position.Z - jHandRight.Position.Z) > 0.2)
    {
        // Hand is raised in front of the user.
        //System.Diagnostics.Debug.WriteLine("Hand: Raised");
        movement[0] = true;
        movement[1] = false;
    }
    else
    {
        // Hand is lowered by the user's side.
        //System.Diagnostics.Debug.WriteLine("Hand: Lowered");
        movement[1] = true;
        movement[0] = false;
    }
}
Could someone help me?
You need a way to distinguish between skeleton one and skeleton two; see Kinect user Detection for how to do this. You can then pass the skeletons for players one and two to your two different methods. I use the player ID because, if a skeleton is lost for a frame or two, its ID remains the same:
int id1 = 0, id2 = 0;
...
if (skeletons.Length != 0)
{
    foreach (Skeleton skel in skeletons)
    {
        if (skel.TrackingState == SkeletonTrackingState.Tracked)
        {
            if (skel.TrackingId == id1)
                this.tracked(skel);
            else if (skel.TrackingId == id2)
                this.trackedLeft(skel);
            else
            {
                // Claim a free player slot for a newly tracked skeleton.
                if (id1 == 0)
                    id1 = skel.TrackingId;
                else if (id2 == 0)
                    id2 = skel.TrackingId;
            }
        }
    }
}
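One refinement worth considering (my addition, not part of the original answer): if a player walks away for good, their slot never frees up, because the stored ID is never cleared. A small sketch, placed at the top of the frame handler, that releases a slot once its skeleton is no longer tracked (assumes using System.Linq):
// Release a slot when its skeleton is no longer tracked in this frame,
// so a new player can claim it later.
var trackedIds = skeletons
    .Where(s => s.TrackingState == SkeletonTrackingState.Tracked)
    .Select(s => s.TrackingId)
    .ToList();
if (id1 != 0 && !trackedIds.Contains(id1)) id1 = 0;
if (id2 != 0 && !trackedIds.Contains(id2)) id2 = 0;
In practice you may want to wait a few frames before clearing, to keep the short-dropout tolerance mentioned above.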

Implementing the APNG Render Function

Hey everyone,
So, I'm currently trying to implement the APNG Specification, but am having some trouble with the frame rendering. My function is:
private void UpdateUI()
{
    foreach (PictureBox pb in pics)
    {
        APNGBox box = (APNGBox)pb.Tag;
        APNGLib.APNG png = box.png;
        if (box.buffer == null)
        {
            box.buffer = new Bitmap((int)png.Width, (int)png.Height);
        }
        APNGLib.Frame f = png.GetFrame(box.frameNum);
        using (Graphics g = Graphics.FromImage(box.buffer))
        {
            switch (f.DisposeOp)
            {
                case APNGLib.Frame.DisposeOperation.NONE:
                    break;
                case APNGLib.Frame.DisposeOperation.BACKGROUND:
                    g.Clear(Color.Transparent);
                    break;
                case APNGLib.Frame.DisposeOperation.PREVIOUS:
                    if (box.prevBuffer != null)
                    {
                        g.DrawImage(box.prevBuffer, Point.Empty);
                    }
                    else
                    {
                        g.Clear(Color.Transparent);
                    }
                    break;
                default:
                    break;
            }
            Bitmap read = png.ToBitmap(box.frameNum++);
            switch (f.BlendOp)
            {
                case APNGLib.Frame.BlendOperation.OVER:
                    g.CompositingMode = System.Drawing.Drawing2D.CompositingMode.SourceOver;
                    break;
                case APNGLib.Frame.BlendOperation.SOURCE:
                    g.CompositingMode = System.Drawing.Drawing2D.CompositingMode.SourceCopy;
                    break;
                default:
                    break;
            }
            g.DrawImage(read, new Point((int)f.XOffset, (int)f.YOffset));
        }
        box.prevBuffer = box.buffer;
        pb.Image = box.buffer;
        if (box.frameNum >= box.png.FrameCount)
        {
            box.frameNum = 0;
            box.buffer = null;
            box.prevBuffer = null;
        }
    }
}
The APNGBox class is:
internal class APNGBox
{
    public int frameNum = 0;
    public APNGLib.APNG png;
    public Bitmap buffer;
    public Bitmap prevBuffer;
}
I think this is mostly correct, as I've run it against all the images in the APNG Gallery. However, some of them render incorrectly (one has artifacting issues, and another does not consistently retain the red bar on the left). Please note, you'll have to view the page in Firefox 3 or higher to see the animations.
I'm led to believe the issue has to do with how I handle DISPOSE_OP_PREVIOUS, but I can't figure out what I'm doing wrong. Can anyone suggest what I might be missing?
So, I was able to figure it out in the end, and will post the fix here in case anyone comes across a similar problem in the future. It turns out the issue was largely threefold:
1. One should not change the 'previous buffer' if the frame's dispose type is PREVIOUS.
2. One should use the previous frame's DisposeOp, not the current frame's, to determine how to dispose.
3. One should dispose only the old frame's region, not the whole frame.
The new code is:
internal class APNGBox
{
    public int frameNum { get; set; }
    public APNGLib.APNG apng { get; set; }
    public Bitmap buffer { get; set; }
    public Bitmap prevBuffer { get; set; }

    public APNGBox(APNGLib.APNG png)
    {
        frameNum = 0;
        apng = png;
        buffer = apng.ToBitmap(0);
        prevBuffer = null;
    }
}

private void UpdateUI()
{
    foreach (PictureBox pb in pics)
    {
        APNGBox box = (APNGBox)pb.Tag;
        APNGLib.APNG png = box.apng;
        if (!png.IsAnimated)
        {
            if (pb.Image == null)
            {
                pb.Image = png.ToBitmap();
            }
        }
        else
        {
            if (box.frameNum != png.FrameCount - 1)
            {
                Bitmap prev = box.prevBuffer == null ? null : new Bitmap(box.prevBuffer);
                APNGLib.Frame f1 = png.GetFrame(box.frameNum);
                if (f1.DisposeOp != APNGLib.Frame.DisposeOperation.PREVIOUS)
                {
                    box.prevBuffer = new Bitmap(box.buffer);
                }
                DisposeBuffer(box.buffer, new Rectangle((int)f1.XOffset, (int)f1.YOffset, (int)f1.Width, (int)f1.Height), f1.DisposeOp, prev);
                box.frameNum++;
                APNGLib.Frame f2 = png.GetFrame(box.frameNum);
                RenderNextFrame(box.buffer, new Point((int)f2.XOffset, (int)f2.YOffset), png.ToBitmap(box.frameNum), f2.BlendOp);
            }
            else
            {
                box.frameNum = 0;
                box.prevBuffer = null;
                ClearFrame(box.buffer);
                RenderNextFrame(box.buffer, Point.Empty, png.ToBitmap(box.frameNum), APNGLib.Frame.BlendOperation.SOURCE);
            }
            pb.Invalidate();
        }
    }
}

private void DisposeBuffer(Bitmap buffer, Rectangle region, APNGLib.Frame.DisposeOperation dispose, Bitmap prevBuffer)
{
    using (Graphics g = Graphics.FromImage(buffer))
    {
        g.CompositingMode = System.Drawing.Drawing2D.CompositingMode.SourceCopy;
        Brush b = new SolidBrush(Color.Transparent);
        switch (dispose)
        {
            case APNGLib.Frame.DisposeOperation.NONE:
                break;
            case APNGLib.Frame.DisposeOperation.BACKGROUND:
                g.FillRectangle(b, region);
                break;
            case APNGLib.Frame.DisposeOperation.PREVIOUS:
                if (prevBuffer != null)
                {
                    g.FillRectangle(b, region);
                    g.DrawImage(prevBuffer, region, region, GraphicsUnit.Pixel);
                }
                break;
            default:
                break;
        }
    }
}

private void RenderNextFrame(Bitmap buffer, Point point, Bitmap nextFrame, APNGLib.Frame.BlendOperation blend)
{
    using (Graphics g = Graphics.FromImage(buffer))
    {
        switch (blend)
        {
            case APNGLib.Frame.BlendOperation.OVER:
                g.CompositingMode = System.Drawing.Drawing2D.CompositingMode.SourceOver;
                break;
            case APNGLib.Frame.BlendOperation.SOURCE:
                g.CompositingMode = System.Drawing.Drawing2D.CompositingMode.SourceCopy;
                break;
            default:
                break;
        }
        g.DrawImage(nextFrame, point);
    }
}

private void ClearFrame(Bitmap buffer)
{
    using (Graphics g = Graphics.FromImage(buffer))
    {
        g.Clear(Color.Transparent);
    }
}
