// Stops tracking: drops the hand tracker and disposes the recognition engine.
void StopTracking()
{
    handTracker = null;
    if (recogEngine != null)
    {
        recogEngine.Dispose();
        recogEngine = null;
    }
}
// Starts hand tracking and gesture recognition.
void StartTracking()
{
    // Replay and live tracking are mutually exclusive.
    StopReplay();
    Log.Debug("Start tracking.");
    StartKinect();
    handTracker = new SimpleSkeletonHandTracker(HandInputParams.DepthWidth,
        HandInputParams.DepthHeight, kinectSensor.CoordinateMapper);
    ResetGestureEngine();
}
// Update is called once per frame.
void Update()
{
    // Advance the sensor, processor and hand tracker for this frame.
    IHandTracker.updateSensor();
    IHandTracker.updateProcessor();
    IHandTracker.updateHandTracker();

    // Only refresh the display when a new contour frame is available.
    if (!IHandTracker.QueryCurrentContourFrame())
    {
        return;
    }

    // Copy the contour pixels into the texture and show it.
    GetComponent<Renderer>().material.mainTexture = cameraFrame;
    cameraFrame.SetPixels(IHandTracker.currentContourFrame);
    cameraFrame.Apply();
}
// Stops tracking: releases the tracker and tears down the recognition engine.
void StopTracking()
{
    handTracker = null;

    if (recogEngine == null)
    {
        return;
    }

    recogEngine.Dispose();
    recogEngine = null;
}
/// <summary>
/// Tears down the underlying hand tracker environment on shutdown;
/// counterpart of the initialization done in <see cref="Awake"/>.
/// </summary>
void OnApplicationQuit()
{
    IHandTracker.destroyEnv();
}
/// <summary>
/// Initializes the underlying hand tracker environment; paired with
/// <see cref="OnApplicationQuit"/>, which destroys it.
/// </summary>
void Awake()
{
    IHandTracker.initEnv();
}
// Replays the recorded input file, sampling frames at sampleRate, runs each
// sampled frame through the hand tracker and feature processor, and collects
// the computed features plus the frame index each feature corresponds to.
void ProcessFeature()
{
    IHandTracker handTracker = null;   // created lazily from the first frame's dimensions
    Int16[] depthPixelData = null;     // reusable depth pixel buffer, allocated once
    Byte[] colorPixelData = null;      // reusable color pixel buffer, allocated once

    Log.DebugFormat("Start processing {0}...", inputFile);
    int frameCount = replayer.GetFramesCount();

    // Fractional stride supports non-integer sample rates; each step is
    // rounded to the nearest recorded frame.
    for (float i = 0; i < frameCount; i += sampleRate)
    {
        int index = (int)Math.Round(i);
        if (index >= frameCount)
        {
            // Rounding can push the index one past the last frame.
            break;
        }

        var skeletonFrame = replayer.GetSkeletonFrame(index);
        var depthFrame = replayer.GetDepthFrame(index);
        var colorFrame = replayer.GetColorFrame(index);

        // Lazily instantiate the tracker and processor via reflection so the
        // concrete types (handTrackerType / featureProcessorType) can be
        // chosen by the caller.
        if (handTracker == null)
        {
            handTracker = (IHandTracker)Activator.CreateInstance(handTrackerType,
                new Object[] { depthFrame.Width, depthFrame.Height, GetKinectParams(), bufferSize });
        }

        if (featureProcessor == null)
        {
            featureProcessor = (IFeatureProcessor)Activator.CreateInstance(
                featureProcessorType, new Object[] { sampleRate });
        }

        // Allocate the pixel buffers once, sized from the first frame seen.
        if (depthPixelData == null)
        {
            depthPixelData = new Int16[depthFrame.PixelDataLength];
        }

        if (colorPixelData == null)
        {
            colorPixelData = new Byte[colorFrame.PixelDataLength];
        }

        depthFrame.CopyPixelDataTo(depthPixelData);
        colorFrame.CopyPixelDataTo(colorPixelData);

        var skeleton = SkeletonUtil.FirstTrackedSkeleton(skeletonFrame.Skeletons);
        var result = handTracker.Update(depthPixelData, colorPixelData, skeleton);
        Option<Array> feature = featureProcessor.Compute(result);

        // The processor yields a feature only when it has enough buffered input.
        if (feature.IsSome)
        {
            if (replayerType == typeof(KinectAllFramesReplay))
            {
                frameList.Add(depthFrame.GetFrameNumber());
            }
            else
            {
                // NOTE(review): assumes the processor's output lags the input
                // by sampleRate * (bufferSize - 1) frames — confirm against
                // the IFeatureProcessor implementation.
                int curIndex = (int)Math.Round(i - sampleRate * (bufferSize - 1));
                frameList.Add(curIndex);
            }

            featureList.Add(feature.Value);
        }
    }

    Log.DebugFormat("Finished processing {0}.", inputFile);
}