/// <summary>
/// Handles the IR frame data arriving from the sensor.
/// The previous MAT-file export implementation lived here as one large
/// commented-out block; it was dead code and has been removed (recover it
/// from version control if the export path is ever needed again).
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void IR_Reader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // Intentionally empty: IR frame extraction is currently disabled.
}
/// <summary>
/// Copies each infrared frame straight into the display bitmap's back buffer.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (null == e.FrameReference)
    {
        return;
    }

    // If you do not dispose of the frame, you never get another one...
    using (InfraredFrame _InfraredFrame = e.FrameReference.AcquireFrame())
    {
        if (null == _InfraredFrame)
        {
            return;
        }

        // Fix: Lock/Unlock must be balanced even if the copy throws, otherwise
        // the WriteableBitmap back buffer stays locked permanently.
        BitmapToDisplay.Lock();
        try
        {
            _InfraredFrame.CopyFrameDataToIntPtr(
                BitmapToDisplay.BackBuffer,
                Convert.ToUInt32(BitmapToDisplay.BackBufferStride * BitmapToDisplay.PixelHeight));
            BitmapToDisplay.AddDirtyRect(
                new Int32Rect(
                    0,
                    0,
                    _InfraredFrame.FrameDescription.Width,
                    _InfraredFrame.FrameDescription.Height));
        }
        finally
        {
            BitmapToDisplay.Unlock();
        }
    }
}
// Forwards each infrared frame to the shared color-frame pipeline as a
// 16-bit grey image, using a pooled byte buffer to avoid per-frame allocation.
void irReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (InfraredFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            FrameDescription desc = frame.FrameDescription;

            // Describe the IR data with the color-frame event type so IR and
            // color share one consumer path; isIR distinguishes them downstream.
            KinectBase.ColorFrameEventArgs irE = new KinectBase.ColorFrameEventArgs();
            irE.bytesPerPixel = (int)desc.BytesPerPixel;
            irE.pixelFormat = PixelFormats.Gray16;
            irE.height = desc.Height;
            irE.width = desc.Width;
            irE.kinectID = kinectID;
            irE.timeStamp = frame.RelativeTime;
            irE.isIR = true;

            // Reuse a pooled buffer instead of allocating a new array per frame.
            //irE.image = new byte[desc.LengthInPixels * sizeof(UInt16)];
            irE.image = irImagePool.GetObject();

            // Pin the pooled buffer and copy the raw 16-bit samples directly in.
            unsafe
            {
                fixed(byte *ptr = irE.image)
                {
                    frame.CopyFrameDataToIntPtr((IntPtr)ptr, desc.LengthInPixels * sizeof(UInt16));
                }
            }

            OnColorFrameReceived(irE);
        }
    }
}
/// <summary>
/// Handles the infrared frame data arriving from the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool processed = false;

    // InfraredFrame is IDisposable - release it promptly so new frames keep arriving.
    using (InfraredFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // Direct buffer access is the fastest way to read the frame data.
            using (Microsoft.Kinect.KinectBuffer buffer = frame.LockImageBuffer())
            {
                bool bufferHoldsOneFrame =
                    (frame.FrameDescription.Width * frame.FrameDescription.Height)
                    == (buffer.Size / frame.FrameDescription.BytesPerPixel);
                bool matchesBitmap =
                    frame.FrameDescription.Width == this.infraredSource.PixelWidth
                    && frame.FrameDescription.Height == this.infraredSource.PixelHeight;

                // Only write to the display source when the data is the expected shape.
                if (bufferHoldsOneFrame && matchesBitmap)
                {
                    this.ProcessInfraredFrameData(buffer.UnderlyingBuffer, buffer.Size);
                    processed = true;
                }
            }
        }
    }

    if (processed)
    {
        this.RenderInfraredPixels();
    }
}
/// <summary>Relays the sensor's infrared frame event to subscribers of IRFrameReady.</summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void irreader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // ?.Invoke closes the race where the last subscriber detaches between
    // the null check and the invocation.
    this.IRFrameReady?.Invoke(sender, e);
}
/// <summary>
/// Event handler invoked when a frame arrives from the infrared sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool infraredFrameProcessed = false;

    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        // Continue only if an infrared frame is actually available.
        if (infraredFrame != null)
        {
            // Descriptor holding the arriving frame's dimensions.
            FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

            // Compare the descriptor's dimensions against the preallocated
            // buffer and target bitmap; proceed only when they match.
            if (((infraredFrameDescription.Width * infraredFrameDescription.Height) == this.infraredFrameData.Length) &&
                (infraredFrameDescription.Width == this.bitmap.PixelWidth) &&
                (infraredFrameDescription.Height == this.bitmap.PixelHeight))
            {
                // Copy the frame data into the array.
                infraredFrame.CopyFrameDataToArray(this.infraredFrameData);
                infraredFrameProcessed = true;
            }
        }
    }

    // Render based on the captured infrared data.
    if (infraredFrameProcessed)
    {
        // Convert the infrared samples to RGB values.
        this.ConvertInfraredData();

        // Draw the converted data to the screen.
        this.RenderInfraredPixels(this.infraredPixels);
    }
}
/// <summary>
/// Handles the infrared frame data arriving from the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool copied = false;

    // InfraredFrame is IDisposable - keep the using scope tight.
    using (InfraredFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            FrameDescription desc = frame.FrameDescription;
            bool dimensionsOk =
                (desc.Width * desc.Height) == this.infraredFrameData.Length
                && desc.Width == this.bitmap.PixelWidth
                && desc.Height == this.bitmap.PixelHeight;

            if (dimensionsOk)
            {
                // Stash the pixel data in the temporary array for conversion below.
                frame.CopyFrameDataToArray(this.infraredFrameData);
                copied = true;
            }
        }
    }

    // We got a frame: convert and render it.
    if (copied)
    {
        this.ConvertInfraredData();
        this.RenderInfraredPixels(this.infraredPixels);
    }
}
/// <summary>
/// Samples the infrared frame at each requested pixel position and outputs
/// the raw intensity value per position.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private unsafe void IRFrameReady(object sender, InfraredFrameArrivedEventArgs e)
{
    if (e.FrameReference != null)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            if (frame != null)
            {
                using (var buffer = frame.LockImageBuffer())
                {
                    // Fix: Kinect infrared pixels are 16-bit *unsigned*; reading
                    // them through a short* made intensities above 32767 negative.
                    ushort *data = (ushort *)buffer.UnderlyingBuffer;

                    FOutValue.SliceCount = 0;
                    int pixelX = 10;
                    int pixelY = 10;
                    foreach (var item in FInPixelPos)
                    {
                        // Clamp the requested position into the 512x424 IR frame.
                        pixelX = (int)item.X;
                        pixelY = (int)item.Y;
                        pixelX = pixelX < 0 ? 0 : pixelX;
                        pixelY = pixelY < 0 ? 0 : pixelY;
                        pixelX = pixelX > 511 ? 511 : pixelX;
                        pixelY = pixelY > 423 ? 423 : pixelY;

                        double pixel = data[pixelY * 512 + pixelX];
                        FOutValue.Add(pixel);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Processes an infrared frame (skipping re-entrant calls), rescales it and
/// hands the truncated pixels to the registered callback.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (this.processingInfraredFrame)
    {
        return;
    }

    this.processingInfraredFrame = true;
    try
    {
        bool infraredFrameProcessed = false;
        using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
        {
            if (infraredFrame != null)
            {
                // the fastest way to process the frame data is to directly
                // access the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify the buffer holds exactly one full frame
                    if ((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height)
                        == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel))
                    {
                        this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size, this.infraredFrameDescription.BytesPerPixel);
                        infraredFrameProcessed = true;
                    }
                }
            }
        }

        if (infraredFrameProcessed)
        {
            this.Rescale(this.infraredPixels, this.truncatedInfraredPixels);
            this.infraredFrameCallback(this.truncatedInfraredPixels);
        }
    }
    finally
    {
        // Fix: release the reentrancy gate even if processing throws;
        // previously an exception left the flag set and dropped all later frames.
        this.processingInfraredFrame = false;
    }
}
/// <summary>
/// Records and/or displays each arriving infrared frame depending on the
/// recorder state and the currently selected display type.
/// </summary>
private async void _infraredReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs e)
{
    bool record = _recorder != null
        && _recorder.IsStarted
        && InfraredCheckBox.IsChecked.GetValueOrDefault();
    bool display = _displayType == FrameTypes.Infrared;

    if (!record && !display)
    {
        return;
    }

    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            // Fell behind the sensor - nothing to show this round.
            display = false;
            System.Diagnostics.Debug.WriteLine("!!! FRAME SKIPPED (Infrared in MainPage)");
        }
        else
        {
            frame.CopyFrameDataToArray(_infraredData);
            if (record)
            {
                _recorder.RecordFrame(frame, _infraredData);
            }
        }
    }

    if (display)
    {
        await _infraredBitmap.UpdateAsync(_infraredData);
    }
}
/// <summary>Shows the incoming infrared frame while infrared display is active.</summary>
private void _infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (_displayType != FrameTypes.Infrared)
    {
        return;
    }

    _infraredBitmap.Update(e.FrameReference);
}
// Drains the infrared stream: the frame is acquired and immediately disposed
// so the reader keeps delivering; the body is an intentionally empty placeholder.
private void InfraredFrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    using (InfraredFrame frame = args.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
        }
    }
}
/// <summary>
/// The Infrared reader event: renders each frame into the InfraredView element.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
protected void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        InfraredView.Source = KinectUtils.ToBitmap(frame);
    }
}
// Drains the v2 IR stream: the frame is acquired and disposed so the reader
// keeps delivering; the body is an intentionally empty placeholder.
void v2IRReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
        }
    }
}
/// <summary>
/// Process the infrared frames and update UI.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void OnInfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    InfraredFrameReference reference = e.FrameReference;
    if (reference == null)
    {
        return;
    }

    InfraredFrame frame = reference.AcquireFrame();
    if (frame == null)
    {
        return;
    }

    using (frame)
    {
        FrameDescription desc = frame.FrameDescription;

        // Only render when the frame matches the buffers allocated at startup.
        bool sizesMatch = (desc.Width * desc.Height) == _infraData.Length
            && desc.Width == _infraBitmap.PixelWidth
            && desc.Height == _infraBitmap.PixelHeight;
        if (!sizesMatch)
        {
            return;
        }

        frame.CopyFrameDataToArray(_infraData);

        // Expand each 16-bit IR sample into a grey pixel, keeping the high
        // byte as intensity; the fourth (alpha) byte is left untouched.
        for (int i = 0; i < _infraData.Length; ++i)
        {
            byte intensity = (byte)(_infraData[i] >> 8);
            int p = i * 4;
            _infraPixels[p] = intensity;
            _infraPixels[p + 1] = intensity;
            _infraPixels[p + 2] = intensity;
        }

        // Copy output to bitmap.
        _infraBitmap.WritePixels(
            new Int32Rect(0, 0, desc.Width, desc.Height),
            _infraPixels,
            desc.Width * _bytePerPixel,
            0);
    }
}
/// <summary>
/// Handles infrared: drains queued infrared frame events, coordinating with the
/// other stream handlers through a shared bit-flag (bit 16 = IR slot taken;
/// mask 31 = all five streams captured for the current frame number).
/// </summary>
/// <param name="infraredCollectedCancelTokenSource">cancelTokenSource used to stop the task</param>
private static void HandleInfrared(CancellationTokenSource infraredCollectedCancelTokenSource)
{
    InfraredFrameArrivedEventArgs e = null;
    String frameNumber = String.Empty;
    InfraredFrame infraredFrame;

    // NOTE(review): this loop busy-waits when the queue is empty (no sleep or
    // blocking dequeue), pinning a CPU core - consider BlockingCollection.
    while (true)
    {
        infraredFrame = null;

        if (infraredCollectedCancelTokenSource.IsCancellationRequested)
        {
            break;
        }

        if (infraredFrameQueue.Count != 0)
        {
            lock (FramesAndPaths.allFrameInfo)
            {
                // IR slot for the current frame number already filled:
                // wait for the other streams to catch up.
                if ((FramesAndPaths.allFrameInfo.allFrameFlag & 16) != 0)
                {
                    continue;
                }

                try
                {
                    e = infraredFrameQueue.Dequeue();
                }
                catch (InvalidOperationException)
                {
                    // Queue was emptied concurrently; retry.
                    continue;
                }

                try
                {
                    infraredFrame = e.FrameReference.AcquireFrame();
                }
                catch (NullReferenceException)
                {
                    // Frame reference already released; handled by the null check below.
                }

                if (infraredFrame == null)
                {
                    continue;
                }

                frameNumber = FramesAndPaths.allFrameInfo.frameNumber;
                FramesAndPaths.allFrameInfo.allFrameFlag |= 16;

                // All five stream bits set -> frame set complete; reset and advance.
                if ((FramesAndPaths.allFrameInfo.allFrameFlag ^ 31) == 0)
                {
                    FramesAndPaths.allFrameInfo.allFrameFlag = 0;
                    FramesAndPaths.FrameNumberIncrement();
                    ++writtenCount;
                }
            }

            // NOTE(review): infraredFrame is never disposed in this method; if
            // Handle_InfraredFrame does not take ownership and dispose it, the
            // sensor will stop delivering frames - confirm ownership.
            StoreFramesData.Handle_InfraredFrame(infraredFrame, frameNumber);
        }
    }
}
/// <summary>Renders each infrared frame into the camera image element.</summary>
private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        camera.Source = frame.ToBitmap();
    }
}
// Records each infrared frame while recording is active. The #endif below closes
// a conditional-compilation block whose #if (an alternative signature) starts
// outside this chunk.
void _infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs args)
#endif
{
    using (var frame = args.FrameReference.AcquireFrame())
    {
        if (_isStarted)
        {
            // NOTE(review): frame is not null-checked here; presumably
            // RecordFrame tolerates null (e.g. logs a skipped frame) - confirm
            // against its implementation.
            RecordFrame(frame);
        }
    }
}
// Copies the latest infrared frame's raw 16-bit pixels into infraredBuffer.
// (The original comment said "color frame"; this is the infrared stream.)
private void UpdateInfraredFrame( InfraredFrameArrivedEventArgs e )
{
    // Acquire the infrared frame; disposed at the end of the using block.
    using ( var infraredFrame = e.FrameReference.AcquireFrame() )
    {
        if ( infraredFrame == null )
        {
            return;
        }

        // Copy the raw infrared samples.
        infraredFrame.CopyFrameDataToArray( infraredBuffer );
    }
}
/// <summary>
/// Copies each infrared frame into the shared frame buffer and raises
/// <c>FrameReceived</c>.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // Fix: dispose via using so the frame is released even if the copy throws;
    // a leaked frame stops the sensor from delivering further frames.
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        frame.CopyFrameDataToIntPtr(this.frameData.DataPointer, (uint)this.frameData.SizeInBytes);
    }

    // Raise after disposal, matching the original ordering.
    if (this.FrameReceived != null)
    {
        this.FrameReceived(this, new InfraredFrameDataEventArgs(this.frameData));
    }
}
// Copies the latest infrared frame's raw 16-bit pixels into infraredBuffer.
// (The original comment said "color frame"; this is the infrared stream.)
private void UpdateInfraredFrame(InfraredFrameArrivedEventArgs e)
{
    // Acquire the infrared frame; disposed at the end of the using block.
    using (var infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame == null)
        {
            return;
        }

        // Copy the raw infrared samples.
        infraredFrame.CopyFrameDataToArray(infraredBuffer);
    }
}
/// <summary>
/// Copies each infrared frame into the shared frame buffer and raises
/// <c>FrameReceived</c>.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // Fix: dispose via using so the frame is released even if the copy throws;
    // a leaked frame stops the sensor from delivering further frames.
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        frame.CopyFrameDataToIntPtr(this.frameData.DataPointer, (uint)this.frameData.SizeInBytes);
    }

    // Raise after disposal, matching the original ordering.
    if (this.FrameReceived != null)
    {
        this.FrameReceived(this, new InfraredFrameDataEventArgs(this.frameData));
    }
}
/// <summary>
/// Dumps each infrared frame's raw sample values to the console.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private static void IrReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // InfraredFrame is IDisposable
    using (InfraredFrame irFrame = e.FrameReference.AcquireFrame())
    {
        if (irFrame != null)
        {
            irFrame.CopyFrameDataToArray(irData);

            // Perf fix: one Console.Write per pixel (~217k calls per frame) is
            // orders of magnitude slower than a single batched write, while
            // producing byte-identical output.
            var dump = new System.Text.StringBuilder(irData.Length * 5);
            foreach (var data in irData)
            {
                dump.Append(data);
            }

            Console.Write(dump.ToString());
        }
    }
}
/// <summary>Processes each infrared frame directly from its underlying buffer.</summary>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (InfraredFrame infrFrame = e.FrameReference.AcquireFrame())
    {
        if (infrFrame == null)
        {
            return;
        }

        using (KinectBuffer infrBuffer = infrFrame.LockImageBuffer())
        {
            FrameDescription desc = infrFrame.FrameDescription;

            // Only process when the locked buffer holds exactly one full frame.
            if ((desc.Width * desc.Height) == (infrBuffer.Size / desc.BytesPerPixel))
            {
                this.ProcessInfraredFrameData(infrBuffer.UnderlyingBuffer, infrBuffer.Size, desc.BytesPerPixel);
            }
        }
    }
}
/// <summary>Converts each infrared frame to BGRA and shows it in InfraredImage.</summary>
private void infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        FrameDescription desc = frame.FrameDescription;
        int width = desc.Width;
        int height = desc.Height;

        // Grab the raw 16-bit samples, then convert to displayable bytes.
        ushort[] infrared = new ushort[width * height];
        frame.CopyFrameDataToArray(infrared);
        byte[] pixels = this.ConvertToByteArray(infrared);

        int stride = width * PixelFormats.Bgr32.BitsPerPixel / 8;
        InfraredImage.Source = BitmapSource.Create(
            width, height, 96, 96,
            PixelFormats.Bgr32, BitmapPalettes.Halftone256, pixels, stride);
    }
}
// Handles arrival of an infrared frame and renders it as a Gray16 bitmap.
void infraredFrameReader_FrameArrived( object sender, InfraredFrameArrivedEventArgs e )
{
    // Acquire the infrared frame; disposed automatically by the using block.
    // (Renamed the misleading "colorFrame" local - this is the IR stream.)
    using ( var infraredFrame = e.FrameReference.AcquireFrame() ) {
        if ( infraredFrame == null ) {
            return;
        }

        // Copy the raw 16-bit infrared samples.
        var infraredBuffer = new ushort[infraredFrameDesc.Width * infraredFrameDesc.Height];
        infraredFrame.CopyFrameDataToArray( infraredBuffer );

        // Wrap them in a bitmap and display.
        ImageColor.Source = BitmapSource.Create(
            infraredFrameDesc.Width, infraredFrameDesc.Height, 96, 96,
            PixelFormats.Gray16, null, infraredBuffer,
            infraredFrameDesc.Width * (int)infraredFrameDesc.BytesPerPixel );
    }
}
// Copies each infrared frame into the preallocated buffer, blits it to the
// display bitmap, and refreshes the FPS readout.
void infraredFrameReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // Acquire the infrared frame; disposed by the using block.
    using (var infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame == null)
        {
            return;
        }

        // Copy the raw infrared pixels and write them to the bitmap.
        infraredFrame.CopyFrameDataToArray(infraredBuffer);
        infraredBitmap.WritePixels(infraredRect, infraredBuffer, infraredStride, 0);

        // Update the frame-rate display.
        TextFps.Text = counter.Update().ToString();
    }
}
/// <summary>Validates and processes each infrared frame straight from its buffer.</summary>
private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs args)
{
    using (var frame = args.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        using (Microsoft.Kinect.KinectBuffer infraredBuffer = frame.LockImageBuffer())
        {
            bool bufferHoldsOneFrame =
                (_infraredFrameDescription.Width * _infraredFrameDescription.Height)
                == (infraredBuffer.Size / _infraredFrameDescription.BytesPerPixel);
            bool matchesBitmap =
                _infraredFrameDescription.Width == _infraredBitmap.PixelWidth
                && _infraredFrameDescription.Height == _infraredBitmap.PixelHeight;

            // Write to the display bitmap only when the data has the expected shape.
            if (bufferHoldsOneFrame && matchesBitmap)
            {
                this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
            }
        }
    }
}
// Renders each infrared frame as a Gray16 BitmapSource.
void infraredFrameReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // Acquire the infrared frame; the using block disposes it.
    // (Renamed the misleading "colorFrame" local - this is the IR stream.)
    using (var infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame == null)
        {
            return;
        }

        // Pull the raw 16-bit infrared samples.
        int pixelCount = infraredFrameDesc.Width * infraredFrameDesc.Height;
        var infraredBuffer = new ushort[pixelCount];
        infraredFrame.CopyFrameDataToArray(infraredBuffer);

        // Wrap them in a bitmap for display.
        ImageColor.Source = BitmapSource.Create(
            infraredFrameDesc.Width,
            infraredFrameDesc.Height,
            96,
            96,
            PixelFormats.Gray16,
            null,
            infraredBuffer,
            infraredFrameDesc.Width * (int)infraredFrameDesc.BytesPerPixel);
    }
}
// Infrared display / blob-tracking handler: in Infrared mode, either runs blob
// detection (publishing each detected blob over the websocket) or simply
// renders the IR frame.
private void InfraredFrameReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (InfraredFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        if (Mode == CameraMode.Infrared)
        {
            if (blobDetect)
            {
                // Detect blobs using depth + IR data; the pair's key is the
                // rendered bitmap, its value the blob list with camera-space points.
                KeyValuePair<BitmapSource, List<KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>>> kvp =
                    frameProc.processBlobs(depthFrameReader, frame, BlobTreshold);
                List<KeyValuePair<Emgu.CV.Cvb.CvBlob, CameraSpacePoint>> blobs = kvp.Value;

                if (kvp.Key == null || blobs == null)
                {
                    return;
                }

                camera.Source = kvp.Key;
                blobCamera.Source = camera.Source;

                if (blobs.Count > 0)
                {
                    foreach (var blob in blobs)
                    {
                        // Convert the blob position (metres -> cm) and publish
                        // position + area over the websocket.
                        BlobX = m2cm(blob.Value.X);
                        BlobY = m2cm(blob.Value.Y);
                        BlobZ = m2cm(blob.Value.Z);
                        var blobEventModel = new BlobEventModel(blob.Value, blob.Key.Area);
                        wsClient.SendData(BlobSerializer.SerializeBlob(blobEventModel));
                    }
                }
            }
            else
            {
                // Plain IR rendering path (no blob detection).
                camera.Source = frameProc.processIRFrame(frame);
            }
        }
    }
}
/// <summary>
/// Performs triangle detection on the infrared stream.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // check if infrared frame processing is enabled by the user
    if (!infraredRadioButton.IsChecked ?? false)
    {
        return;
    }

    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        // Fix: a null frame previously still ran detection and imshow on an
        // uninitialized Mat; skip the frame entirely instead.
        if (infraredFrame == null)
        {
            return;
        }

        var infraredFrameDescription = this.infraredFrameReader.InfraredFrameSource.FrameDescription;

        using (Mat infraredMat = new Mat(infraredFrameDescription.Height, infraredFrameDescription.Width, DepthType.Cv16U, 1))
        {
            // convert to Emgu.CV.Mat (2 bytes per 16-bit infrared pixel)
            infraredFrame.CopyFrameDataToIntPtr(infraredMat.DataPointer, infraredFrameDescription.LengthInPixels * 2);

            // main processing
            TriangleFromInfrared(infraredMat);

            // display
            CvInvoke.Imshow("infrared", infraredMat);
        }
    }
}
/// <summary>Copies each valid infrared frame, then converts and renders it.</summary>
private void _IFReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    bool haveNewData = false;

    using (InfraredFrame infraredFrame = args.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            FrameDescription desc = infraredFrame.FrameDescription;

            // Copy only when the frame matches the preallocated buffer and bitmap.
            if ((desc.Width * desc.Height) == this._infraredFrameData.Length
                && desc.Width == this._bitmap.PixelWidth
                && desc.Height == this._bitmap.PixelHeight)
            {
                infraredFrame.CopyFrameDataToArray(this._infraredFrameData);
                haveNewData = true;
            }
        }
    }

    if (haveNewData == true)
    {
        // Convert the raw IR samples and push them to the screen.
        convertFrameDataToPixels();
        renderPixelArray(this._infraredPixels);
    }
}
// Converts each infrared frame to opaque grey BGRA and refreshes the bitmap.
void irReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    using (InfraredFrame irFrame = args.FrameReference.AcquireFrame())
    {
        if (irFrame == null)
        {
            return;
        }

        irFrame.CopyFrameDataToArray(irData);

        // Keep only the high byte of each 16-bit sample as the grey level.
        int outIndex = 0;
        foreach (ushort sample in irData)
        {
            byte intensity = (byte)(sample >> 8);
            irDataConverted[outIndex++] = intensity;
            irDataConverted[outIndex++] = intensity;
            irDataConverted[outIndex++] = intensity;
            irDataConverted[outIndex++] = 255;
        }

        irDataConverted.CopyTo(irBitmap.PixelBuffer);
        irBitmap.Invalidate();
    }
}
// Converts each infrared frame into grey BGRA pixels and invalidates the bitmap.
void irReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    using (InfraredFrame irFrame = args.FrameReference.AcquireFrame())
    {
        if (irFrame != null)
        {
            irFrame.CopyFrameDataToArray(irData);

            // High byte of each 16-bit sample -> B, G, R; alpha fully opaque.
            for (int i = 0; i < irData.Length; i++)
            {
                byte level = (byte)(irData[i] >> 8);
                int baseIndex = 4 * i;
                irDataConverted[baseIndex + 0] = level;
                irDataConverted[baseIndex + 1] = level;
                irDataConverted[baseIndex + 2] = level;
                irDataConverted[baseIndex + 3] = 255;
            }

            irDataConverted.CopyTo(irBitmap.PixelBuffer);
            irBitmap.Invalidate();
        }
    }
}
/// <summary>
/// Handles the infrared frame data arriving from the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // InfraredFrame is IDisposable
    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            // the fastest way to process the infrared frame data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                // Removed the leftover unused debug locals (var t / var tt).
                // verify data and write the new infrared frame data to the display bitmap
                if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
                    (this.infraredFrameDescription.Width == this.infraredBitmap.PixelWidth) &&
                    (this.infraredFrameDescription.Height == this.infraredBitmap.PixelHeight))
                {
                    this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                }
            }
        }
    }
}
/// <summary>Wraps infrared frame event args for cross-component messaging.</summary>
/// <param name="infraredFrame">The event args captured from the sensor reader.</param>
internal MessageInfraredFrameArrived(InfraredFrameArrivedEventArgs infraredFrame)
{
    // Simply retain the args; consumers read them via InfraredFrameArgs.
    this.InfraredFrameArgs = infraredFrame;
}
// Forwards each infrared frame to the shared color-frame pipeline as a
// 16-bit grey image, using a pooled byte buffer to avoid per-frame allocation.
void irReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (InfraredFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            FrameDescription desc = frame.FrameDescription;

            // Describe the IR data with the color-frame event type so IR and
            // color share one consumer path; isIR distinguishes them downstream.
            KinectBase.ColorFrameEventArgs irE = new KinectBase.ColorFrameEventArgs();
            irE.bytesPerPixel = (int)desc.BytesPerPixel;
            irE.pixelFormat = PixelFormats.Gray16;
            irE.height = desc.Height;
            irE.width = desc.Width;
            irE.kinectID = kinectID;
            irE.timeStamp = frame.RelativeTime;
            irE.isIR = true;

            // Reuse a pooled buffer instead of allocating a new array per frame.
            //irE.image = new byte[desc.LengthInPixels * sizeof(UInt16)];
            irE.image = irImagePool.GetObject();

            // Pin the pooled buffer and copy the raw 16-bit samples directly in.
            unsafe
            {
                fixed (byte* ptr = irE.image)
                {
                    frame.CopyFrameDataToIntPtr((IntPtr)ptr, desc.LengthInPixels * sizeof(UInt16));
                }
            }

            OnColorFrameReceived(irE);
        }
    }
}
// Copies each 512x424 16-bit frame into the write buffer and swaps the
// read/write pointers (double buffering), under the depth lock.
private void DepthFrameReady(object sender, InfraredFrameArrivedEventArgs e)
{
    var frame = e.FrameReference.AcquireFrame();
    if (frame != null)
    {
        using (frame)
        {
            lock (m_depthlock)
            {
                // NOTE(review): a sibling handler in this file calls
                // CopyFrameDataToIntPtr(ptr, size); confirm this overload really
                // takes its arguments in (size, buffer) order.
                frame.CopyFrameDataToBuffer(512 * 424 * 2, this.depthwrite);

                // Swap read/write buffers.
                IntPtr swap = this.depthread;
                this.depthread = this.depthwrite;
                this.depthwrite = swap;
            }

            // Signal the consumer side that fresh data is available.
            this.FInvalidate = true;
        }
    }
}
/// <summary>Displays each infrared frame as a Bgr32 bitmap in InfraredImage.</summary>
private void infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // Copy the raw 16-bit infrared samples out of the frame.
            int w = frame.FrameDescription.Width;
            int h = frame.FrameDescription.Height;
            var raw = new ushort[w * h];
            frame.CopyFrameDataToArray(raw);

            // Convert to displayable bytes and hand them to the image control.
            byte[] converted = this.ConvertToByteArray(raw);
            InfraredImage.Source = BitmapSource.Create(
                w, h, 96, 96,
                PixelFormats.Bgr32, BitmapPalettes.Halftone256,
                converted, w * PixelFormats.Bgr32.BitsPerPixel / 8);
        }
    }
}
// Dispatches each infrared frame event: refresh the pixel buffer, then redraw.
void infraredFrameReader_FrameArrived( object sender, InfraredFrameArrivedEventArgs e )
{
    UpdateInfraredFrame( e );
    DrawInfraredFrame();
}
/// <summary>
/// Processes every second infrared frame (halving the rate for performance)
/// into a Gray32Float WriteableBitmap, recreating the bitmap on size/format change.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (_needInfrared)
    {
        // lowers fps to improve performance
        if (_frameCount % 2 == 0)
        {
            // InfraredFrame is IDisposable
            using (InfraredFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    FrameDescription frameDescription = this.GetFrameDescriptionForMode(BackgroundMode.Infrared);
                    using (KinectBuffer buffer = frame.LockImageBuffer())
                    {
                        // (Re)create the target bitmap if size or format no longer matches.
                        if (this._source == null ||
                            (frameDescription.Width != this._source.PixelWidth) ||
                            (frameDescription.Height != this._source.PixelHeight) ||
                            this._source.Format != PixelFormats.Gray32Float)
                        {
                            this._source = new WriteableBitmap(frameDescription.Width, frameDescription.Height, 96.0, 96.0, PixelFormats.Gray32Float, null);
                        }

                        // Fix: balance Lock/Unlock even if processing throws,
                        // otherwise the bitmap back buffer stays locked forever.
                        this._source.Lock();
                        try
                        {
                            this.ProcessInfraredFrameData(buffer.UnderlyingBuffer, buffer.Size);
                            _gaussianSource = this._source;
                            this.Update();
                        }
                        finally
                        {
                            this._source.Unlock();
                        }
                    }
                }
            }
        }

        _frameCount++;
    }
}
/// <summary>Runs the rule analysis matching the current application state.</summary>
void frameReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (myState == States.freestyle)
    {
        // Freestyle: run the improved analyzer and honour pause requests.
        rulesAnalyzerImproved.AnalyseRules();
        freestyleMode.checkPause();
    }
    else if (myState == States.individual)
    {
        // Individual skills: analyze only once the module reports ready.
        if (individualSkills.ready)
        {
            individualSkills.analyze();
        }
    }
}
/// <summary>
/// Handles the infrared frame data arriving from the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool haveFrame = false;

    // Dispose the frame promptly; the sensor will not deliver another one otherwise.
    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            // Direct buffer access is the fastest way to read the frame data.
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                if ((infraredFrame.FrameDescription.Width * infraredFrame.FrameDescription.Height)
                        == (infraredBuffer.Size / infraredFrame.FrameDescription.BytesPerPixel)
                    && infraredFrame.FrameDescription.Width == this.infraredSource.PixelWidth
                    && infraredFrame.FrameDescription.Height == this.infraredSource.PixelHeight)
                {
                    this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                    haveFrame = true;
                }
            }
        }
    }

    if (haveFrame)
    {
        this.RenderInfraredPixels();
    }
}
// Copies each 512x424 16-bit frame into the write buffer, swaps the
// read/write pointers (double buffering) under the lock, then records the
// frame timestamp.
private void DepthFrameReady(object sender, InfraredFrameArrivedEventArgs e)
{
    var frame = e.FrameReference.AcquireFrame();
    if (frame != null)
    {
        using (frame)
        {
            lock (m_lock)
            {
                // 512 * 424 pixels, 2 bytes each.
                frame.CopyFrameDataToIntPtr(this.depthwrite, 512 * 424 * 2);

                // Swap read/write pointers.
                IntPtr swap = this.depthread;
                this.depthread = this.depthwrite;
                this.depthwrite = swap;
            }

            // Mark the data dirty and remember the frame timestamp (ticks).
            this.FInvalidate = true;
            this.frameindex = frame.RelativeTime.Ticks;
        }
    }
}
// Heart-rate estimation from the IR stream: converts each frame to grey pixel
// bytes, feeds the detector in Utils, and displays its output bitmap together
// with the computed heart-beat value.
private void OnInfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // Reference to infrared frame
    var refer = e.FrameReference;

    // Get infrared frame
    var frame = refer.AcquireFrame();
    if (frame == null)
        return;

    // Process it
    using (frame)
    {
        // Get the description and bail out unless it matches the preallocated buffers.
        var frameDesc = frame.FrameDescription;
        if (((frameDesc.Width*frameDesc.Height) != _infraData.Length) ||
            (frameDesc.Width != _infraBitmap.PixelWidth) ||
            (frameDesc.Height != _infraBitmap.PixelHeight))
            return;

        // Copy data
        frame.CopyFrameDataToArray(_infraData);

        // Expand each sample into B/G/R bytes. Note: the LOW byte of the
        // ushort is used here ((byte)ir), unlike the high-byte (>> 8)
        // conversion used by other handlers in this file.
        var colorPixelIndex = 0;
        foreach (var intensity in _infraData.Select(ir => (byte)(ir)))
        {
            // Assign infrared intensity (alpha byte is skipped).
            InfraPixels[colorPixelIndex++] = intensity;
            InfraPixels[colorPixelIndex++] = intensity;
            InfraPixels[colorPixelIndex++] = intensity;
            colorPixelIndex++;
        }

        // NOTE(review): presumably shifts the detector's sample window before
        // processing - confirm against Utils' implementation.
        Utils.OldBytes = Utils.BufBytes;
        decimal heartBeat = Utils.ProcessInfraredData();

        // Zero means no stable reading yet - skip display for this frame.
        if (heartBeat == 0)
            return;

        // Copy output to bitmap. The detector's buffer (Utils.BufBytes) is
        // rendered, not the InfraPixels filled above.
        _infraBitmap.WritePixels(
            new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height),
            Utils.BufBytes,
            frameDesc.Width * _bytePerPixel,
            0);
        image1.Source = _infraBitmap;
        label.Content = heartBeat;
    }
}
/// <summary>
/// Enqueues infrared frames for recording, throttled by a countdown counter
/// decremented by the configured infrared framerate.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void _infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (_isStarted && infraredFpsCounter > 0)
        {
            if (frame != null)
            {
                // Queue the frame for the recorder thread and update stats.
                _recordInfraredQueue.Enqueue(new RecordInfraredFrame(frame));
                TimeCheckInfrared();
                infraredFrames++;
                infraredCounter++;
            }

            // Fix: removed a stray empty statement ("; ;") and the surrounding
            // commented-out stopwatch debug code.
            infraredFpsCounter -= InfraredFramerate;
        }
        else if (infraredFpsCounter == -50 || infraredFpsCounter == -125)
        {
            // NOTE(review): the -50/-125 sentinels appear to defer the counter
            // reset for two specific phases - confirm their intent before changing.
            infraredFpsCounter -= InfraredFramerate;
        }
        else
        {
            infraredFpsCounter = 100;
        }
    }
}
/// <summary>Updates the infrared bitmap when infrared is the active display type.</summary>
private void _infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool showInfrared = _displayType == FrameTypes.Infrared;
    if (showInfrared)
    {
        _infraredBitmap.Update(e.FrameReference);
    }
}
/// <summary>Relays the sensor's infrared frame event to subscribers of IRFrameReady.</summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void irreader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // ?.Invoke closes the race where the last subscriber detaches between
    // the null check and the invocation.
    this.IRFrameReady?.Invoke(sender, e);
}
/// <summary>
/// Handles the infrared frame data arriving from the sensor: converts the
/// frame into an Emgu image and advances the gait-analysis state machine.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // InfraredFrame is IDisposable
    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            // The IR frame is used for gait analysis because it is cleaner
            // than the default (lighter) color image.
            // NOTE(review): this triple conversion (BitmapSource -> Bitmap ->
            // Image<Bgr,byte>) allocates heavily on every frame - consider a
            // direct buffer copy if performance becomes an issue.
            imgEmguFormat = new Image<Bgr, Byte>(new Bitmap(ToBitmap(ToBitmapSource(infraredFrame))));
            GaitAnalysisStates();

            // the fastest way to process the infrared frame data is to directly access
            // the underlying buffer
            //using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            //{
            //    // verify data and write the new infrared frame data to the display bitmap
            //    if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
            //        (this.infraredFrameDescription.Width == this.infraredBitmap.PixelWidth) && (this.infraredFrameDescription.Height == this.infraredBitmap.PixelHeight))
            //    {
            //        this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
            //    }
            //}
        }
    }
}
// Records each infrared frame while recording is active; the frame is always
// acquired (and disposed by the using block) so the reader keeps delivering.
void _infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs args)
{
    using (var frame = args.FrameReference.AcquireFrame())
    {
        // NOTE(review): frame may be null here; presumably RecordFrame
        // tolerates null (e.g. records a skipped frame) - confirm against its
        // implementation.
        if (_isStarted)
            RecordFrame(frame);
    }
}
/// <summary>
/// Records and/or displays arriving infrared frames, depending on the
/// recorder state and the selected display type.
/// </summary>
private async void _infraredReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs e)
{
    bool recordingWanted =
        _recorder != null &&
        _recorder.IsStarted &&
        InfraredCheckBox.IsChecked.GetValueOrDefault();
    bool displayWanted = _displayType == FrameTypes.Infrared;

    if (recordingWanted || displayWanted)
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            if (frame != null)
            {
                frame.CopyFrameDataToArray(_infraredData);
                if (recordingWanted)
                {
                    _recorder.RecordFrame(frame, _infraredData);
                }
            }
            else
            {
                // A null frame means we fell behind; skip display this round.
                displayWanted = false;
                System.Diagnostics.Debug.WriteLine("!!! FRAME SKIPPED (Infrared in MainPage)");
            }
        }

        if (displayWanted)
        {
            await _infraredBitmap.UpdateAsync(_infraredData);
        }
    }
}
/// <summary>Displays the infrared frame and, while recording, the recorded-frame count.</summary>
private void _infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool infraredActive = _displayType == FrameTypes.Infrared;
    if (infraredActive)
    {
        _infraredBitmap.Update(e.FrameReference);
    }

    if (_isStarted)
    {
        infraredFps.Text = _recorder.DisplayInfraredFrames.ToString();
    }
}
/// <summary>Converts each infrared frame to grey BGRA pixels and refreshes the bitmap.</summary>
private void IR_Reader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
{
    using (var irFrame = args.FrameReference.AcquireFrame())
    {
        if (irFrame == null)
        {
            return;
        }

        irFrame.CopyFrameDataToArray(IR_DataArray);

        // Map each 16-bit IR sample to an opaque grey pixel (high byte only).
        int o = 0;
        for (var i = 0; i < IR_DataArray.Length; i++)
        {
            var level = (byte)(IR_DataArray[i] >> 8);
            IR_ConvertedData[o++] = level;
            IR_ConvertedData[o++] = level;
            IR_ConvertedData[o++] = level;
            IR_ConvertedData[o++] = 255;
        }

        IR_ConvertedData.CopyTo(IRwBmp.PixelBuffer);
        IRwBmp.Invalidate();
    }
}
// Dispatches each infrared frame event: refresh the pixel buffer, then redraw.
void infraredFrameReader_FrameArrived( InfraredFrameReader sender, InfraredFrameArrivedEventArgs args )
{
    UpdateInfraredFrame( args );
    DrawInfraredFrame();
}
/// <summary>
/// Called when an infrared frame has arrived from the sensor.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The <see cref="InfraredFrameArrivedEventArgs"/> instance containing the event data.</param>
private void OnReaderInfraredFrame_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    // Notify flowrate occurrence for performance monitoring.
    m_performanceAnalyzer.NotifyFlowRateOccurrence(
        Constants.KINECT_PERF_FLOWRATE_INFRARED_FRAME);

    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        this.AquiredInfraredFrames = this.AquiredInfraredFrames + 1;

        // Publish event to whole application.
        // NOTE(review): the frame acquired above is disposed when this using
        // block exits, and `e` is forwarded as-is; confirm subscribers never
        // need to acquire the frame data from the published args.
        m_Messenger.Publish(
            new MessageInfraredFrameArrived(e));
    }
}
/// <summary>Copies each valid infrared frame, then converts and renders it.</summary>
private void Reader_InfraredFrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    bool gotFrame = false;

    // InfraredFrame is IDisposable - release it before rendering.
    using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            FrameDescription desc = infraredFrame.FrameDescription;
            bool matchesBuffers =
                (desc.Width * desc.Height) == this.infraredFrameData.Length
                && desc.Width == this.bitmap.PixelWidth
                && desc.Height == this.bitmap.PixelHeight;

            if (matchesBuffers)
            {
                // Copy the pixel data into the temporary array.
                infraredFrame.CopyFrameDataToArray(this.infraredFrameData);
                gotFrame = true;
            }
        }
    }

    // We got a frame: convert and render it.
    if (gotFrame)
    {
        ConvertInfraredDataToPixels();
        RenderPixelArray(this.infraredPixels);
    }
}
void _infraredReader_FrameArrived(InfraredFrameReader sender, InfraredFrameArrivedEventArgs args)
/// <summary>
/// Processes an infrared frame (skipping re-entrant calls) and forwards the
/// converted pixels to the registered callback.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (this.processingInfraredFrame)
    {
        return;
    }

    this.processingInfraredFrame = true;
    try
    {
        bool infraredFrameProcessed = false;
        using (InfraredFrame infraredFrame = e.FrameReference.AcquireFrame())
        {
            if (infraredFrame != null)
            {
                // the fastest way to process the frame data is to directly
                // access the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify the buffer holds exactly one full frame
                    if ((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height)
                        == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel))
                    {
                        this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size, this.infraredFrameDescription.BytesPerPixel);
                        infraredFrameProcessed = true;
                    }
                }
            }
        }

        if (infraredFrameProcessed)
        {
            this.infraredFrameCallback(this.infraredPixels);
        }
    }
    finally
    {
        // Fix: release the reentrancy gate even if processing throws;
        // previously an exception left the flag set and dropped all later frames.
        this.processingInfraredFrame = false;
    }
}
/// <summary>Shows the infrared stream on cameraOne while infrared visualization is selected.</summary>
private void _infraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    if (_mode != EVisualization.Infrared)
    {
        return;
    }

    _infraredBitmap.GetBitmap(e.FrameReference);
    cameraOne.Source = _infraredBitmap.Bitmap;
}
/// <summary>Converts each arriving infrared frame to a bitmap for display.</summary>
private void InfraredReader_FrameArrived(object sender, InfraredFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        camera.Source = frame.ToBitmap();
    }
}