Study notes from the MVA course Quick Start: Kinect for Windows v2 Development.
This post covers the chapter shown in the figure above.
Full code for the chapter: GitHub link (dead, to be re-uploaded).
<!-- MainPage.xaml (the Page element's x:Class and xmlns attributes are omitted here) -->
<Page>
    <Grid Background="{ThemeResource ApplicationPageBackgroundThemeBrush}">
        <!-- Sized to the Kinect v2 infrared/depth resolution of 512 x 424 -->
        <Image Name="image" Width="512" Height="424"/>
        <!-- The Canvas sits on top of the Image so joints can be drawn over the infrared view -->
        <Canvas Name="bodyCanves" Width="512" Height="424"/>
    </Grid>
</Page>
Xaml Code
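The Image and Canvas are given the Kinect v2 infrared/depth resolution (512 x 424) so that the depth-space coordinates returned by the CoordinateMapper can be used directly as Canvas positions. As a minimal alternative sketch (assuming the sensor field from the code-behind below and the same image/bodyCanves element names), the sizes could also be read from the sensor at load time instead of being hardcoded in XAML:

// Hypothetical alternative, not in the original sample: size the elements from the
// infrared FrameDescription after KinectSensor.GetDefault() has been called.
FrameDescription fd = sensor.InfraredFrameSource.FrameDescription;
image.Width = fd.Width;        // 512 on Kinect v2
image.Height = fd.Height;      // 424 on Kinect v2
bodyCanves.Width = fd.Width;
bodyCanves.Height = fd.Height;

The full code-behind for the chapter follows.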
using WindowsPreview.Kinect;
using Windows.UI.Xaml.Media.Imaging;
using Windows.UI.Xaml.Shapes;
using Windows.UI;
// added so the snippet compiles on its own
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media;
using System.Runtime.InteropServices.WindowsRuntime;   // CopyTo(IBuffer) extension

public sealed partial class MainPage : Page
{
    public MainPage()
    {
        this.InitializeComponent();
        this.Loaded += MainPage_Loaded;
    }

    KinectSensor sensor;
    InfraredFrameReader irReader;
    ushort[] irData;
    byte[] irDataConverted;
    WriteableBitmap irBitmap;
    Body[] bodies;
    MultiSourceFrameReader msfr;

    private void MainPage_Loaded(object sender, RoutedEventArgs e)
    {
        sensor = KinectSensor.GetDefault();
        irReader = sensor.InfraredFrameSource.OpenReader();
        FrameDescription fd = sensor.InfraredFrameSource.FrameDescription;

        irData = new ushort[fd.LengthInPixels];
        irDataConverted = new byte[fd.LengthInPixels * 4];   // 4 bytes per pixel (BGRA)
        irBitmap = new WriteableBitmap(fd.Width, fd.Height);
        image.Source = irBitmap;

        bodies = new Body[6];                                // Kinect v2 tracks up to 6 bodies

        msfr = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Infrared);
        msfr.MultiSourceFrameArrived += msfr_MultiSourceFrameArrived;

        sensor.Open();
        irReader.FrameArrived += irReader_frameArrived;
    }

    private void msfr_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs args)
    {
        using (MultiSourceFrame msf = args.FrameReference.AcquireFrame())
        {
            if (msf != null)
            {
                using (BodyFrame bodyFrame = msf.BodyFrameReference.AcquireFrame())
                {
                    using (InfraredFrame irFrame = msf.InfraredFrameReference.AcquireFrame())
                    {
                        if (bodyFrame != null && irFrame != null)
                        {
                            // Scale each 16-bit infrared sample down to 8 bits and write it
                            // as a gray BGRA pixel
                            irFrame.CopyFrameDataToArray(irData);
                            for (int i = 0; i < irData.Length; i++)
                            {
                                byte intensity = (byte)(irData[i] >> 8);
                                irDataConverted[i * 4] = intensity;       // B
                                irDataConverted[i * 4 + 1] = intensity;   // G
                                irDataConverted[i * 4 + 2] = intensity;   // R
                                irDataConverted[i * 4 + 3] = 255;         // A (opaque)
                            }
                            irDataConverted.CopyTo(irBitmap.PixelBuffer);
                            irBitmap.Invalidate();

                            // Refresh the body data and redraw a marker over each tracked head
                            bodyFrame.GetAndRefreshBodyData(bodies);
                            bodyCanves.Children.Clear();
                            foreach (Body body in bodies)
                            {
                                if (body.IsTracked)
                                {
                                    Joint headJoint = body.Joints[JointType.Head];
                                    if (headJoint.TrackingState == TrackingState.Tracked)
                                    {
                                        // Map the head's 3D camera-space position into 2D
                                        // depth/infrared image coordinates
                                        DepthSpacePoint dsp = sensor.CoordinateMapper.MapCameraPointToDepthSpace(headJoint.Position);
                                        // Marker size and color are assumed values (a 50 x 50 red
                                        // circle centered on the head); the original numbers were lost
                                        Ellipse headcircle = new Ellipse() { Width = 50, Height = 50, Fill = new SolidColorBrush(Color.FromArgb(255, 255, 0, 0)) };
                                        bodyCanves.Children.Add(headcircle);
                                        Canvas.SetLeft(headcircle, dsp.X - 25);
                                        Canvas.SetTop(headcircle, dsp.Y - 25);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    private void irReader_frameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
    {
        using (InfraredFrame irFrame = args.FrameReference.AcquireFrame())
        {
            if (irFrame != null)
            {
                // Same 16-bit-to-8-bit conversion as above, driven by the infrared-only reader
                irFrame.CopyFrameDataToArray(irData);
                for (int i = 0; i < irData.Length; i++)
                {
                    byte intensity = (byte)(irData[i] >> 8);
                    irDataConverted[i * 4] = intensity;       // B
                    irDataConverted[i * 4 + 1] = intensity;   // G
                    irDataConverted[i * 4 + 2] = intensity;   // R
                    irDataConverted[i * 4 + 3] = 255;         // A (opaque)
                }
                irDataConverted.CopyTo(irBitmap.PixelBuffer);
                irBitmap.Invalidate();
            }
        }
    }
}
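The sample never releases the sensor. As a hedged sketch (the handler below is not part of the original code; it assumes this.Unloaded += MainPage_Unloaded; is also added in the constructor), the readers and the sensor could be cleaned up when the page unloads:

// Hypothetical cleanup, not in the original sample.
private void MainPage_Unloaded(object sender, RoutedEventArgs e)
{
    if (msfr != null) { msfr.Dispose(); msfr = null; }
    if (irReader != null) { irReader.Dispose(); irReader = null; }
    if (sensor != null) { sensor.Close(); sensor = null; }
}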