using System;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
using Microsoft.Kinect;

namespace kinectTest
{
    public partial class MainWindow : Window
    {
        // Grab the first Kinect sensor. Required to initialize the device.
        // IMPORTANT NOTE: Use a different index (or inspect KinectSensor.KinectSensors)
        // if more than one Kinect device is connected.
        KinectSensor sensor = KinectSensor.KinectSensors[0];

        byte[] pixelData;
        Skeleton[] skeletons;

        public MainWindow()
        {
            InitializeComponent();

            // Runtime initialization is handled when the window is opened. When the window
            // is closed, the runtime MUST be uninitialized.
            this.Loaded += new RoutedEventHandler(MainWindow_Loaded);
            this.Unloaded += new RoutedEventHandler(MainWindow_Unloaded);

            sensor.ColorStream.Enable();
            sensor.SkeletonStream.Enable();
        }

        void runtime_SkeletonFrameReady(object sender, SkeletonFrameReadyEventArgs e)
        {
            bool receivedData = false;

            using (SkeletonFrame SFrame = e.OpenSkeletonFrame())
            {
                if (SFrame == null)
                {
                    // The image processing took too long. More than 2 frames behind.
                }
                else
                {
                    skeletons = new Skeleton[SFrame.SkeletonArrayLength];
                    SFrame.CopySkeletonDataTo(skeletons);
                    receivedData = true;
                }
            }

            if (receivedData)
            {
                // Pick the first fully tracked skeleton, if any.
                Skeleton currentSkeleton = (from s in skeletons
                                            where s.TrackingState == SkeletonTrackingState.Tracked
                                            select s).FirstOrDefault();

                if (currentSkeleton != null)
                {
                    SetEllipsePosition(head, currentSkeleton.Joints[JointType.Head]);
                    SetEllipsePosition(leftHand, currentSkeleton.Joints[JointType.HandLeft]);
                    SetEllipsePosition(rightHand, currentSkeleton.Joints[JointType.HandRight]);
                }
            }
        }

        public void ClampPositionToCircle(SkeletonPoint center, float radius, ref SkeletonPoint position)
        {
            // Calculate the offset vector from the center of the circle to our position.
            SkeletonPoint offset = new SkeletonPoint();
            offset.X = position.X - center.X;
            offset.Y = position.Y - center.Y;
            offset.Z = position.Z;

            // Calculate the linear distance of this offset vector.
            double distance = Math.Sqrt((offset.X * offset.X) + (offset.Y * offset.Y));

            if (radius < distance)
            {
                // The position lies outside the circle, so we need to clamp.
                // Calculate the direction from the center to our position.
                SkeletonPoint direction = new SkeletonPoint();
                direction.X = (float)(offset.X / distance);
                direction.Y = (float)(offset.Y / distance);
                direction.Z = offset.Z;

                // Calculate our new position: center + direction * radius.
                position.X = center.X + direction.X * radius;
                position.Y = center.Y + direction.Y * radius;
            }
        }

        // This method positions the ellipses on the canvas
        // according to the movements of the tracked joints.
        // IMPORTANT NOTE: Code for vector scaling was imported from the Coding4Fun Kinect Toolkit,
        // available here: http://c4fkinect.codeplex.com/
        // I only used this part to avoid adding an extra reference.
        private void SetEllipsePosition(Ellipse ellipse, Joint joint)
        {
            // Scale the joint position from skeleton space to the 640x480 canvas.
            Microsoft.Kinect.SkeletonPoint vector = new Microsoft.Kinect.SkeletonPoint();
            vector.X = ScaleVector(640, joint.Position.X);
            vector.Y = ScaleVector(480, -joint.Position.Y);
            vector.Z = joint.Position.Z;

            Joint updatedJoint = joint;
            updatedJoint.TrackingState = JointTrackingState.Tracked;
            updatedJoint.Position = vector;

            SkeletonPoint center = new SkeletonPoint();
            center.X = 0;
            center.Y = 0;

            SkeletonPoint pos = updatedJoint.Position;
            ClampPositionToCircle(center, 10.0f, ref pos);
            updatedJoint.Position = pos;

            Canvas.SetLeft(ellipse, updatedJoint.Position.X);
            Canvas.SetTop(ellipse, updatedJoint.Position.Y);
        }

        private float ScaleVector(int length, float position)
        {
            float value = (((((float)length) / 1f) / 2f) * position) + (length / 2);

            if (value > length)
            {
                return (float)length;
            }

            if (value < 0f)
            {
                return 0f;
            }

            return value;
        }

        void MainWindow_Unloaded(object sender, RoutedEventArgs e)
        {
            sensor.Stop();
        }

        void MainWindow_Loaded(object sender, RoutedEventArgs e)
        {
            sensor.SkeletonFrameReady += runtime_SkeletonFrameReady;
            sensor.ColorFrameReady += runtime_VideoFrameReady;
            sensor.Start();
        }

        void runtime_VideoFrameReady(object sender, ColorImageFrameReadyEventArgs e)
        {
            bool receivedData = false;

            using (ColorImageFrame CFrame = e.OpenColorImageFrame())
            {
                if (CFrame == null)
                {
                    // The image processing took too long. More than 2 frames behind.
                }
                else
                {
                    pixelData = new byte[CFrame.PixelDataLength];
                    CFrame.CopyPixelDataTo(pixelData);
                    receivedData = true;
                }
            }

            if (receivedData)
            {
                // Wrap the raw BGR32 pixel buffer in a BitmapSource and show it.
                BitmapSource source = BitmapSource.Create(640, 480, 96, 96,
                    PixelFormats.Bgr32, null, pixelData, 640 * 4);

                videoImage.Source = source;
            }
        }
    }
}
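The code-behind above assumes a MainWindow XAML that declares three Ellipse elements named head, leftHand, and rightHand plus an Image named videoImage. Below is a minimal sketch of such a layout; only the element names and types come from the code above, while the window size, ellipse sizes, colors, and layering are illustrative assumptions.

<Window x:Class="kinectTest.MainWindow"
        xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
        xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
        Title="MainWindow" Width="640" Height="480">
    <Grid>
        <!-- Color stream output; the code-behind assigns videoImage.Source each frame. -->
        <Image Name="videoImage" Width="640" Height="480" />

        <!-- Joint markers moved by SetEllipsePosition via Canvas.SetLeft/SetTop. -->
        <Canvas>
            <Ellipse Name="head" Width="20" Height="20" Fill="Red" />
            <Ellipse Name="leftHand" Width="20" Height="20" Fill="Blue" />
            <Ellipse Name="rightHand" Width="20" Height="20" Fill="Green" />
        </Canvas>
    </Grid>
</Window>

With a layout along these lines, the ellipses are overlaid on the color video and repositioned on every skeleton frame.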