private void OnEnable()
{
    // Open the default Azure Kinect sensor and start its cameras.
    this.device = Device.Open(0);
    var deviceConfig = new DeviceConfiguration
    {
        ColorResolution = ColorResolution.r720p,
        ColorFormat = ImageFormat.ColorBGRA32,
        DepthMode = DepthMode.NFOV_Unbinned
    };
    device.StartCameras(deviceConfig);

    // Build the body tracker from the calibration for the chosen depth mode / color resolution.
    var sensorCalibration = device.GetCalibration(deviceConfig.DepthMode, deviceConfig.ColorResolution);
    this.tracker = BodyTracker.Create(sensorCalibration);

    // Spawn one debug cube per joint, named after the JointId it visualizes.
    debugObjects = new GameObject[(int)JointId.Count];
    for (var jointIndex = 0; jointIndex < (int)JointId.Count; jointIndex++)
    {
        var jointCube = GameObject.CreatePrimitive(PrimitiveType.Cube);
        jointCube.name = Enum.GetName(typeof(JointId), jointIndex);
        jointCube.transform.localScale = Vector3.one * 0.4f;
        debugObjects[jointIndex] = jointCube;
    }

    handler.CreateNewModel();
    streamer.Start();
    //streamer.ListenForIncommingRequests();
    print("Created new handler model");
}
//public Renderer renderer;
private void OnEnable()
{
    // Open the default Azure Kinect sensor and start its cameras.
    this.device = Device.Open(0);
    var sensorConfig = new DeviceConfiguration
    {
        ColorResolution = ColorResolution.r720p,
        ColorFormat = ImageFormat.ColorBGRA32,
        DepthMode = DepthMode.NFOV_Unbinned
    };
    device.StartCameras(sensorConfig);

    // Create the body tracker from the device calibration, default orientation,
    // with CPU-only mode disabled.
    var sensorCalibration = device.GetCalibration(sensorConfig.DepthMode, sensorConfig.ColorResolution);
    var trackerSettings = new TrackerConfiguration
    {
        SensorOrientation = SensorOrientation.OrientationDefault,
        CpuOnlyMode = false
    };
    this.tracker = BodyTracker.Create(sensorCalibration, trackerSettings);

    // Spawn one debug cube per joint, named after the JointType it visualizes.
    debugObjects = new GameObject[(int)JointType.Count];
    for (var joint = 0; joint < (int)JointType.Count; joint++)
    {
        var marker = GameObject.CreatePrimitive(PrimitiveType.Cube);
        marker.name = Enum.GetName(typeof(JointType), joint);
        marker.transform.localScale = Vector3.one * 0.4f;
        debugObjects[joint] = marker;
    }
}
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    kinectManager = KinectManager.GetInstance;

    // Guard clause: only wire everything up when a sensor exists and is not yet open.
    if (kinectManager.Sensor == null || kinectManager.Sensor.IsOpen)
    {
        return;
    }

    kinectManager.Open();

    wsClient = new WebSocketClient();
    wsClient.InitializeConnection();

    bodyTracker = new CVision.Tracking.BodyTracker();
    frameProc = new FrameProcessor(kinectManager.Sensor);

    // Open a reader per frame source and subscribe its arrival handler.
    var sensor = kinectManager.Sensor;
    colorFrameReader = sensor.ColorFrameSource.OpenReader();
    colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;

    infraredFrameReader = sensor.InfraredFrameSource.OpenReader();
    infraredFrameReader.FrameArrived += InfraredFrameReader_FrameArrived;

    depthFrameReader = sensor.DepthFrameSource.OpenReader();
    depthFrameReader.FrameArrived += DepthFrameReader_FrameArrived;

    bodyFrameReader = sensor.BodyFrameSource.OpenReader();
    // NOTE(review): handler name is "FramedArrived" (likely a typo in the method
    // defined elsewhere in this class) — keep in sync if that method is renamed.
    bodyFrameReader.FrameArrived += BodyFrameReader_FramedArrived;
}
// TODO: unused now. Remove?
// Computes, for both this body and the other tracked body, how the velocity
// at worldPoint changed relative to the previously recorded point velocity.
public void GetContactVelocityChanges(
    Vector2 worldPoint,
    BodyTracker otherTracker,
    out Vector2 velocityChange1,
    out Vector2 velocityChange2
)
{
    // This body's delta: current point velocity minus the stored previous one.
    Vector2 before = GetPrevPointVelocity(worldPoint);
    Vector2 after = rigidbody2D.GetPointVelocity(worldPoint);
    velocityChange1 = after - before;

    // Same delta, computed on the other tracker at the same world point.
    Vector2 otherBefore = otherTracker.GetPrevPointVelocity(worldPoint);
    Vector2 otherAfter = otherTracker.rigidbody2D.GetPointVelocity(worldPoint);
    velocityChange2 = otherAfter - otherBefore;
}
void InitCamera()
{
    // Open device 0 and start the cameras with fixed capture settings.
    this.device = Device.Open(0);
    var captureConfig = new DeviceConfiguration
    {
        ColorResolution = ColorResolution.r720p,
        ColorFormat = ImageFormat.ColorBGRA32,
        DepthMode = DepthMode.NFOV_Unbinned
    };
    device.StartCameras(captureConfig);

    // The tracker is created from the calibration matching the chosen modes.
    var deviceCalibration = device.GetCalibration(captureConfig.DepthMode, captureConfig.ColorResolution);
    this.tracker = BodyTracker.Create(deviceCalibration);

    // Cache the Renderer of the scene object tagged "CameraFeed".
    GameObject cameraFeed = GameObject.FindWithTag("CameraFeed");
    renderer = cameraFeed.GetComponent<Renderer>();
}
private void InitCamera()
{
    // Open the default device and start its cameras.
    this.device = Device.Open(0);
    var kinectConfig = new DeviceConfiguration
    {
        ColorResolution = ColorResolution.r720p,
        ColorFormat = ImageFormat.ColorBGRA32,
        DepthMode = DepthMode.NFOV_Unbinned
    };
    device.StartCameras(kinectConfig);

    // Calibration for the selected depth mode / color resolution pair,
    // used to initialize the body tracker.
    var kinectCalibration = device.GetCalibration(kinectConfig.DepthMode, kinectConfig.ColorResolution);
    this.tracker = BodyTracker.Create(kinectCalibration);

    // Cache this object's Renderer, then mark initialization complete
    // (canUpdate presumably gates per-frame processing — confirm at call sites).
    renderer = GetComponent<Renderer>();
    canUpdate = true;
}
// Groups this step's contacts by the rigidbody they touch and broadcasts one
// CollisionMessageData per touching body, carrying impulse estimates derived
// from per-contact velocity changes.
private void ProcessCollisions()
{
    // Dispatch at most once per physics step: Time.fixedTime is stable within a
    // step, so an equal stored timestamp means this step was already processed.
    if (Time.fixedTime == lastDispatchTimestamp)
    {
        return;
    }
    lastDispatchTimestamp = Time.fixedTime;

    var contacts = collisionTracker.Contacts;
    if (contacts.Count != 0)
    {
        // Bucket contacts by the rigidbody they touch; a null group key means
        // the contact collider has no attached rigidbody (static obstacle).
        var contactsByBodies = contacts
            .Where(contact => contact.Collider != null) // Avoid destroyed contacts.
            .GroupBy(contact => contact.Collider.attachedRigidbody);
        // TODO: implement case when this.rigidbody2D is null.
        var thisTracker = BodyTracker.GetTracker(this.gameObject);
        foreach (var contactsOfBody in contactsByBodies)
        {
            var body = contactsOfBody.Key;
            SurfaceContact collisionContact;
            float otherImpulse;
            if (body != null) // Dynamic obstacle.
            {
                // Pick the contact where the other body's velocity changed the
                // most (by squared magnitude) as the representative contact.
                var otherTracker = BodyTracker.GetTracker(body.gameObject);
                var otherVelChanges = contactsOfBody.Select(contact => new
                {
                    VelocityChange = otherTracker.GetVelocityChange(contact.Point),
                    Contact = contact
                });
                var otherMaxVelChange = otherVelChanges.WithMax(velChange => velChange.VelocityChange.sqrMagnitude);
                var otherVelChange = otherMaxVelChange.VelocityChange;
                otherImpulse = otherVelChange.magnitude;
                if (IncomingImpulseNormalization == ImpulseNormalizationType.MultiplyByCoefficient)
                {
                    otherImpulse *= body.mass * IncomingImpulseMultiplier;
                }
                collisionContact = otherMaxVelChange.Contact;
            }
            else // Static obstacle.
            {
                // Static geometry contributes no incoming impulse; use any
                // contact of the group as the representative one.
                otherImpulse = 0;
                collisionContact = contactsOfBody.First();
            }

            // This body's impulse at the representative contact point.
            var thisVelChange = thisTracker.GetVelocityChange(collisionContact.Point);
            float thisImpulse = thisVelChange.magnitude;
            if (SelfImpulseNormalization == ImpulseNormalizationType.MultiplyByCoefficient)
            {
                // NOTE(review): this self-impulse branch multiplies by
                // IncomingImpulseMultiplier; if a separate self-impulse
                // multiplier exists, this may be a copy-paste slip — confirm.
                thisImpulse *= rigidbody2D.mass * IncomingImpulseMultiplier;
            }

            // Broadcast without requiring a receiver so bodies with no listener
            // do not raise errors.
            var messageData = new CollisionMessageData(
                thisImpulse,
                otherImpulse,
                collisionContact,
                contactsOfBody.ToList()
            );
            SendMessage(CollisionMessageData.MessageName, messageData, SendMessageOptions.DontRequireReceiver);
        }
    }
}
// Opens the Azure Kinect, then loops while this.running: shows the raw color
// image and a depth-colorized copy side by side, updates an FPS label, and
// logs each tracked body's chest joint position to the console.
// async void is acceptable here only because this is a top-level WPF event handler.
private async void Window_Loaded(object sender, RoutedEventArgs e)
{
    using (Device device = Device.Open(0))
    {
        device.StartCameras(new DeviceConfiguration
        {
            ColorFormat = ImageFormat.ColorBGRA32,
            ColorResolution = ColorResolution.R720p,
            DepthMode = DepthMode.WFOV_2x2Binned,
            SynchronizedImagesOnly = true,
            CameraFPS = FPS.FPS30,
        });

        // All working buffers are sized to the color camera's resolution.
        int colorWidth = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
        int colorHeight = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;

        // Frame counter + stopwatch drive the FPS label update below.
        Stopwatch sw = new Stopwatch();
        int frameCount = 0;
        sw.Start();

        // Allocate image buffers for us to manipulate
        using (Image transformedDepth = new Image(ImageFormat.Depth16, colorWidth, colorHeight))
        using (Image outputColorImage = new Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
        using (Transformation transform = device.GetCalibration().CreateTransformation())
        using (BodyTracker tracker = new BodyTracker(device.GetCalibration()))
        {
            tracker.SetTemporalSmoothing(0);
            while (this.running)
            {
                //if (!Environment.Is64BitProcess)
                {
                    // In 32-bit the BitmapSource memory runs out quickly and we can hit OutOfMemoryException.
                    // Force garbage collection in each loop iteration to keep memory in check.
                    // NOTE(review): the 32-bit guard above is commented out, so this
                    // collects on EVERY iteration, in 64-bit too — confirm intended.
                    GC.Collect();
                }

                // Wait for a capture on a thread pool thread
                using (Capture capture = await Task.Run(() => { return(device.GetCapture()); }).ConfigureAwait(true))
                {
                    // Create a BitmapSource for the unmodified color image.
                    // Creating the BitmapSource is slow, so do it asynchronously on another thread
                    Task<BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                    {
                        BitmapSource source = capture.Color.CreateBitmapSource();
                        // Allow the bitmap to move threads
                        source.Freeze();
                        return(source);
                    });

                    // Compute the colorized output bitmap on a thread pool thread
                    Task<BitmapSource> createOutputColorBitmapTask = Task.Run(() =>
                    {
                        // Transform the depth image to the perspective of the color camera
                        transform.DepthImageToColorCamera(capture, transformedDepth);

                        // Get Span<T> references to the pixel buffers for fast pixel access.
                        Span<ushort> depthBuffer = transformedDepth.GetPixels<ushort>().Span;
                        Span<BGRA> colorBuffer = capture.Color.GetPixels<BGRA>().Span;
                        Span<BGRA> outputColorBuffer = outputColorImage.GetPixels<BGRA>().Span;

                        // Create an output color image with data from the depth image
                        for (int i = 0; i < colorBuffer.Length; i++)
                        {
                            // The output image will be the same as the input color image,
                            // but colorized with Red where there is no depth data, and Green
                            // where there is depth data at more than 1.5 meters
                            outputColorBuffer[i] = colorBuffer[i];
                            if (depthBuffer[i] == 0)
                            {
                                outputColorBuffer[i].R = 255;
                            }
                            else if (depthBuffer[i] > 1500)
                            {
                                outputColorBuffer[i].G = 255;
                            }
                        }

                        BitmapSource source = outputColorImage.CreateBitmapSource();
                        // Allow the bitmap to move threads
                        source.Freeze();
                        return(source);
                    });

                    // Wait for both bitmaps to be ready and assign them.
                    BitmapSource inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);
                    BitmapSource outputColorBitmap = await createOutputColorBitmapTask.ConfigureAwait(true);
                    this.inputColorImageViewPane.Source = inputColorBitmap;
                    this.outputColorImageViewPane.Source = outputColorBitmap;

                    // Refresh the FPS label roughly every two seconds.
                    ++frameCount;
                    if (sw.Elapsed > TimeSpan.FromSeconds(2))
                    {
                        double framesPerSecond = (double)frameCount / sw.Elapsed.TotalSeconds;
                        this.fps.Content = $"{framesPerSecond:F2} FPS";
                        frameCount = 0;
                        sw.Restart();
                    }

                    // Feed the capture to the body tracker and log each body's chest joint.
                    tracker.EnqueueCapture(capture);
                    using (BodyFrame frame = tracker.PopFrame())
                    {
                        for (uint i = 0; i < frame.GetNumBodies(); ++i)
                        {
                            Body body = new Body()
                            {
                                id = frame.GetBodyID(i),
                                skeleton = frame.GetBodySkeleton(i)
                            };
                            Joint chest = body.skeleton.joints[(int)Joints.SPINE_CHEST];
                            Console.WriteLine($"User {body.id}: ({chest.position.X}, {chest.position.Y}, {chest.position.Z})");
                            // Project the chest joint from depth space into the 2D color image plane.
                            Vector2? convertedPos = device.GetCalibration().TransformTo2D(chest.position, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                            if (convertedPos.HasValue)
                            {
                                Console.WriteLine($"Converted: ({convertedPos.Value.X}, {convertedPos.Value.Y})");
                            }
                        }
                        // Dispose the capture reference returned by the body frame.
                        frame.GetCapture().Dispose();
                    }
                }
            }
        }
    }
}
// Cache the BodyTracker component found on this object or one of its parents.
void Start()
{
    tracker = GetComponentInParent<BodyTracker>();
}