/// <summary>
/// Compiles the adaptive temporal filter OpenCL program (from the embedded
/// AdaptiveTemporalFilter resource) and allocates the depth/uv buffers and
/// kernels sized to the primary device's depth frame.
/// </summary>
/// <param name="capture">Application supplying camera devices and frame dimensions.</param>
public void Initialize(BaseCameraApplication capture)
{
    DepthCameraFrame frame = capture.GetDevices()[0].GetDepthImage();
    try
    {
        // 'using' guarantees the reader (and the underlying MemoryStream) is
        // disposed even when Compile throws; the original leaked it on failure.
        using (StreamReader reader = new StreamReader(new MemoryStream(Perceptual.Foundation.Properties.Resources.AdaptiveTemporalFilter)))
        {
            string text = reader.ReadToEnd();
            CLCalc.Program.Compile(capture.GetPrimaryDevice().GetPreprocessCode() + "\n#define HISTORY_SIZE 0\n" + text);
        }
    }
    catch (Exception ex)
    {
        System.Console.WriteLine(ex.Message);
        // Fixed: the old message referenced "DoNothingFilter.cl", but this
        // method loads the AdaptiveTemporalFilter resource.
        System.Console.WriteLine("Could not load or compile AdaptiveTemporalFilter.cl");
        Environment.Exit(1);
    }
    updateBuffer = new CLCalc.Program.Kernel("UpdateFilter");
    copyToTemporalBuffer = new CLCalc.Program.Kernel("CopyToTemporalBuffer");
    // 4 floats per pixel for every image-sized buffer below.
    depthBuffer = CLCalc.Program.Variable.Create(new ComputeBuffer<float>(CLCalc.Program.Context, ComputeMemoryFlags.ReadWrite, 4 * frame.Width * frame.Height));
    depthCopyBuffer = new CLCalc.Program.Variable(new float[4 * frame.Width * frame.Height]);
    depthImage = new CLCalc.Program.Image2D(new float[frame.Height * frame.Width * 4], frame.Width, frame.Height);
    uvImage = new CLCalc.Program.Image2D(new float[frame.Height * frame.Width * 4], frame.Width, frame.Height);
    kernelCopyImage = new CLCalc.Program.Kernel("CopyImage");
}
/// <summary>
/// Compiles the point-cloud OpenCL program and allocates host-mapped point
/// and color buffers (4 floats per pixel) sized to the depth frame.
/// </summary>
/// <param name="app">Application supplying the primary device and preprocess code.</param>
public void Initialize(BaseCameraApplication app)
{
    try
    {
        CLCalc.Program.Compile(app.GetPrimaryDevice().GetPreprocessCode() + src);
    }
    catch (BuildProgramFailureComputeException ex)
    {
        System.Console.WriteLine(ex.Message);
        Environment.Exit(1);
    }
    // Fetch the depth frame once instead of querying the device separately
    // for width and height (the original called GetDepthImage() twice).
    DepthCameraFrame depthFrame = app.GetPrimaryDevice().GetDepthImage();
    int width = depthFrame.Width;
    int height = depthFrame.Height;
    // CopyHostPointer maps the freshly allocated host arrays into the CL buffers.
    pointBuffer = CLCalc.Program.Variable.Create(new ComputeBuffer<float>(CLCalc.Program.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.CopyHostPointer, points = new float[width * height * 4]));
    colorBuffer = CLCalc.Program.Variable.Create(new ComputeBuffer<float>(CLCalc.Program.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.CopyHostPointer, colors = new float[width * height * 4]));
    // NOTE(review): "CopyToPoinCloud" (sic) must match the kernel name inside
    // 'src' exactly — do not fix the spelling here without changing the CL source.
    kernelCopyImage = new CLCalc.Program.Kernel("CopyToPoinCloud");
}
/// <summary>
/// Copies the current depth and texture frames into the filter's OpenCL
/// images and caches the color frame's memory object for later use.
/// </summary>
/// <param name="capture">Application providing the current camera frames.</param>
public void Process(BaseCameraApplication capture)
{
    var device = capture.GetDevices()[0];
    DepthCameraFrame depth = device.GetDepthImage();
    TextureMapFrame texture = device.GetTextureImage();
    this.rgbImage = (CLCalc.Program.Image2D)device.GetColorImage().GetMemoryObject();
    CLCalc.Program.MemoryObject[] args = { depth.GetMemoryObject(), texture.GetMemoryObject(), uvImage, depthImage };
    kernelCopyImage.Execute(args, new int[] { depth.Width, depth.Height });
}
/// <summary>
/// Redraws every overlay message into the text bitmap each frame, and
/// re-uploads the bitmap as a GL texture only when at least one message
/// reports itself dirty.
/// </summary>
/// <param name="capture">Unused here; part of the component interface.</param>
public void Process(BaseCameraApplication capture)
{
    GL.BindTexture(TextureTarget.Texture2D, textTextureId);
    bool needsUpload = false;
    using (Graphics gfx = Graphics.FromImage(textImage))
    {
        gfx.Clear(Color.Transparent);
        foreach (Message message in Messages)
        {
            // isDirty() is queried for every message; drawing happens regardless.
            needsUpload |= message.isDirty();
            message.Draw(gfx);
        }
    }
    if (needsUpload)
    {
        Rectangle bounds = new Rectangle(0, 0, textImage.Width, textImage.Height);
        BitmapData data = textImage.LockBits(bounds, ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
        // GDI+ 32bppArgb is laid out as BGRA in memory, hence the Bgra source format.
        GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, textImage.Width, textImage.Height, 0, OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);
        textImage.UnlockBits(data);
    }
    GL.BindTexture(TextureTarget.Texture2D, 0);
}
/// <summary>
/// Creates the overlay bitmap (sized to the GL control) plus a linear-filtered
/// RGBA texture for it, and uploads the initially transparent contents once.
/// </summary>
/// <param name="capture">Unused here; part of the component interface.</param>
/// <param name="glw">Render wrapper whose control size the bitmap mirrors.</param>
public virtual void Initialize(BaseCameraApplication capture, GLAdvancedRender glw)
{
    // Bitmap matches the window so text coordinates map 1:1 to screen pixels.
    textImage = new Bitmap(glw.GLCtrl.Width, glw.GLCtrl.Height);
    textTextureId = GL.GenTexture();
    GL.BindTexture(TextureTarget.Texture2D, textTextureId);
    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Linear);
    GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Linear);
    using (Graphics gfx = Graphics.FromImage(textImage))
    {
        gfx.Clear(Color.Transparent);
    }
    Rectangle bounds = new Rectangle(0, 0, textImage.Width, textImage.Height);
    BitmapData data = textImage.LockBits(bounds, ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    // GDI+ stores 32bppArgb as BGRA in memory, hence the Bgra source format.
    GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, textImage.Width, textImage.Height, 0, OpenTK.Graphics.OpenGL.PixelFormat.Bgra, PixelType.UnsignedByte, data.Scan0);
    textImage.UnlockBits(data);
    GL.BindTexture(TextureTarget.Texture2D, 0);
}
/// <summary>
/// Per-frame face/eye tracking. Copies the IR image out of OpenCL, runs an
/// OpenCV Haar cascade on it, isolates eye candidates by blob analysis inside
/// the face rectangle, then hands the 2D landmarks to an OpenCL kernel that
/// locates the bridge/nose using the depth image.
/// </summary>
/// <param name="capture">Application providing the current camera frames.</param>
public void Process(BaseCameraApplication capture)
{
    // Haar detector scale step between image-pyramid levels.
    const double ScaleFactor = 1.0850;
    DepthCameraFrame depthFrame = capture.GetPrimaryDevice().GetDepthImage();
    ColorCameraFrame rgbFrame = capture.GetPrimaryDevice().GetColorImage();
    TextureMapFrame uvFrame = capture.GetPrimaryDevice().GetTextureImage();
    // Extract the IR intensity image into a byte buffer on the device...
    kernelCopyIRImage.Execute(new CLCalc.Program.MemoryObject[] { depthFrame.GetMemoryObject(), irImageBuffer }, width * height);
    // ...then read it back synchronously (blocking=true) into OpenCV image 'gray'.
    CLCalc.Program.CommQueues[CLCalc.Program.DefaultCQ].Read<byte>(((ComputeBuffer<byte>)irImageBuffer.VarPointer), true, 0, width * height, gray.ImageData, null);
    storage.Clear();
    //Use OpenCV for face tracking in IR image. SDK has its own face tracker, but it only operates in RGB. Either could be used for this example.
    CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(gray, faceCascade, storage, ScaleFactor, 2, 0, new CvSize(40, 40));
    if (faces.Total > 0)
    {
        // Only the first detection is used.
        CvRect face = faces[0].Value.Rect;
        // Restrict all further processing to the face rectangle.
        Cv.SetImageROI(gray, face);
        Cv.SetImageROI(dilate, face);
        Cv.SetImageROI(erode, face);
        Cv.SetImageROI(tmp, face);
        //Filter the image to enhance contrast between eyes/face.
        Cv.Dilate(gray, tmp);
        Cv.Dilate(tmp, dilate);
        Cv.Threshold(gray, tmp, 0, 1, ThresholdType.Binary);
        Cv.Erode(gray, erode);
        // Morphological gradient (gray - erode), masked by the 0/1 threshold image.
        Cv.Sub(gray, erode, gray);
        Cv.Mul(gray, tmp, gray);
        Cv.SetImageROI(mask, face);
        Cv.SetImageROI(imgLabel, face);
        //Threshold out peaks.
        Cv.Threshold(gray, mask, 128, 255, ThresholdType.Binary);
        blobs.Clear();
        uint result = blobs.Label(mask, imgLabel);
        double minDistLeft = 1E10;
        double minDistRight = 1E10;
        // Expected eye centerline: horizontal middle, 35% down the face box.
        int xCenter = face.Width / 2;
        int yCenter = (int)((face.Height) * 0.35);
        CvPoint center = new CvPoint(xCenter, yCenter);
        CvPoint right = new CvPoint(-1, -1);
        CvPoint left = new CvPoint(-1, -1);
        //Assign blobs to eyes.
        // Blobs left of center are assigned to 'right' and vice versa —
        // presumably because the camera view is mirrored relative to the
        // subject (TODO confirm). Nearest blob to the expected center wins.
        foreach (KeyValuePair<uint, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            double d = CvPoint.Distance(b.Centroid, center);
            if (b.Centroid.X < center.X)
            {
                if (d < minDistLeft) { minDistLeft = d; right = b.Centroid; }
            }
            else
            {
                if (d < minDistRight) { minDistRight = d; left = b.Centroid; }
            }
        }
        if (right.X >= 0 && left.X >= 0)
        {
            // Convert ROI-relative eye positions back to full-image coordinates.
            rightEye2D = new int2(right.X + face.X, right.Y + face.Y);
            leftEye2D = new int2(left.X + face.X, left.Y + face.Y);
            boundingBox2D = new int4(face.X, face.Y, face.Width, face.Height);
            //Find bridge and nose. This was done in opencl to leverage read_imagef.
            kernelFindFaceLandmarks.Execute(new CLCalc.Program.MemoryObject[] { rightEye2D, leftEye2D, boundingBox2D, faceDetectionBuffer, filter.GetDepthImage() }, 1);
            ReadFaceLandmarksFromBuffer();
            foundFace = true;
        }
        else
        {
            foundFace = false;
        }
        // Undo the ROIs so the next frame starts from the whole image.
        // NOTE(review): mask/imgLabel ROIs are not reset here — they appear to
        // be re-set before each use above, but verify.
        Cv.ResetImageROI(gray);
        Cv.ResetImageROI(erode);
        Cv.ResetImageROI(dilate);
        Cv.ResetImageROI(tmp);
    }
    else
    {
        foundFace = false;
        // Presumably pushes a "no face" state to the landmark buffer — confirm.
        WriteFaceLandmarksToBuffer();
    }
}
/// <summary>
/// Compiles the point-cloud program and creates the GL color/position VBOs
/// (4 floats per pixel) that the CL kernel streams into each frame.
/// </summary>
/// <param name="capture">Application supplying devices and preprocess code.</param>
/// <param name="glw">GL render wrapper (unused directly here).</param>
public void Initialize(BaseCameraApplication capture, GLAdvancedRender glw)
{
    try
    {
        CLCalc.Program.Compile(capture.GetPrimaryDevice().GetPreprocessCode() + src);
    }
    catch (BuildProgramFailureComputeException ex)
    {
        System.Console.WriteLine(ex.Message);
        Environment.Exit(1);
    }
    DepthCameraFrame frame = capture.GetDevices()[0].GetDepthImage();
    kernelCopyBmp = new CLCalc.Program.Kernel("CopyImageToPointCloud");
    int pixelCount = frame.Width * frame.Height;
    // NOTE(review): array holds 4 ids but only 2 buffers are generated/used here.
    bufs = new int[4];
    ColorData = new float[4 * pixelCount];
    PositionData = new float[4 * pixelCount];
    GL.GenBuffers(2, bufs);
    // StreamDraw: both VBOs are rewritten by the CL kernel every frame.
    GL.BindBuffer(BufferTarget.ArrayBuffer, bufs[0]);
    GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(ColorData.Length * sizeof(float)), ColorData, BufferUsageHint.StreamDraw);
    GL.BindBuffer(BufferTarget.ArrayBuffer, bufs[1]);
    GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(PositionData.Length * sizeof(float)), PositionData, BufferUsageHint.StreamDraw);
    GL.Enable(EnableCap.PointSmooth);
    GL.PointSize(4.0f);
    // Wrap the GL buffers as CL variables for interop.
    positions = new CLCalc.Program.Variable(bufs[1], typeof(float));
    colors = new CLCalc.Program.Variable(bufs[0], typeof(float));
}
/// <summary>
/// No GL-specific initialization is required for this component; the method
/// exists only to satisfy the component interface.
/// </summary>
public void Initialize(BaseCameraApplication app, OpenTKWrapper.CLGLInterop.GLAdvancedRender glw) { }
/// <summary>
/// Records the render wrapper and the GL control's initial dimensions for
/// later use by this component.
/// </summary>
/// <param name="app">Unused here; part of the component interface.</param>
/// <param name="glw">Render wrapper whose control size is captured.</param>
public virtual void Initialize(BaseCameraApplication app, GLAdvancedRender glw)
{
    this.glw = glw;
    initScreenWidth = glw.GLCtrl.Width;
    initScreenHeight = glw.GLCtrl.Height;
}
/// <summary>
/// Streams the filtered depth/texture/color data into the mesh's GL buffers
/// via CL/GL interop, but only while the mesh is visible or wireframed.
/// </summary>
/// <param name="app">Application providing the current frames and filter.</param>
public void Process(BaseCameraApplication app)
{
    if (!Visible && !WireFrame)
    {
        return; // nothing to update when the mesh is not rendered
    }
    DepthCameraFrame depthFrame = app.GetPrimaryDevice().GetDepthImage();
    ColorCameraFrame colorFrame = app.GetPrimaryDevice().GetColorImage();
    TextureMapFrame textureFrame = app.GetPrimaryDevice().GetTextureImage();
    CameraDataFilter filter = (CameraDataFilter)app.GetImageFilter();
    // Same trio is acquired and released around the kernel call.
    CLCalc.Program.MemoryObject[] glBuffers = { positionBuffer, colorBuffer, normalBuffer };
    CLGLInteropFunctions.AcquireGLElements(glBuffers);
    CLCalc.Program.MemoryObject[] args =
    {
        app.GetPrimaryDevice().GetBoundingBox(),
        filter.GetDepthImage(),
        filter.GetTextureImage(),
        colorFrame.GetMemoryObject(),
        positionBuffer,
        colorBuffer,
        normalBuffer
    };
    kernelCopyImage.Execute(args, new int[] { depthFrame.Width, depthFrame.Height });
    CLGLInteropFunctions.ReleaseGLElements(glBuffers);
}
/// <summary>
/// Compiles the mesh-copy OpenCL program and allocates the GL vertex buffers
/// (color/position/normal) for a quad mesh covering the depth image, wrapping
/// them as OpenCL variables for CL/GL interop.
/// </summary>
/// <param name="app">Application supplying the primary device and preprocess code.</param>
/// <param name="glw">Render wrapper (unused directly here).</param>
public void Initialize(BaseCameraApplication app, OpenTKWrapper.CLGLInterop.GLAdvancedRender glw)
{
    try
    {
        CLCalc.Program.Compile(app.GetPrimaryDevice().GetPreprocessCode() + src);
    }
    catch (BuildProgramFailureComputeException ex)
    {
        System.Console.WriteLine(ex.Message);
        Environment.Exit(1);
    }
    kernelCopyImage = new CLCalc.Program.Kernel("CopyImageToMesh");
    // NOTE(review): bbox is fetched but never used in this method.
    BoundingBox bbox = app.GetPrimaryDevice().GetBoundingBox();
    int w = app.GetPrimaryDevice().GetDepthImage().Width;
    int h = app.GetPrimaryDevice().GetDepthImage().Height;
    int size = w * h;
    // 16 floats per pixel — presumably 4 quad vertices x 4 components, and
    // 12 for normals (4 vertices x 3 components). TODO confirm against the
    // CopyImageToMesh kernel's expected layout.
    ColorData = new float[16 * size];
    PositionData = new float[16 * size];
    NormalData = new float[12 * size];
    // Placeholder initial positions: only the first 4*size floats are set,
    // and the "i % 7" height term looks like a debug pattern that the kernel
    // overwrites each frame — NOTE(review): verify it is never rendered as-is.
    for (int i = 0; i < size; i++)
    {
        PositionData[4 * i] = (i / w) - w / 2;
        PositionData[4 * i + 2] = i % w - h / 2;
        PositionData[4 * i + 1] = i % 7;
        PositionData[4 * i + 3] = 1.0f;
    }
    GL.GenBuffers(3, QuadMeshBufs);
    GL.BindBuffer(BufferTarget.ArrayBuffer, QuadMeshBufs[0]);
    GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(ColorData.Length * sizeof(float)), ColorData, BufferUsageHint.StreamDraw);
    GL.BindBuffer(BufferTarget.ArrayBuffer, QuadMeshBufs[1]);
    GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(PositionData.Length * sizeof(float)), PositionData, BufferUsageHint.StreamDraw);//Notice STREAM DRAW
    GL.BindBuffer(BufferTarget.ArrayBuffer, QuadMeshBufs[2]);
    GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(NormalData.Length * sizeof(float)), NormalData, BufferUsageHint.StreamDraw);//Notice STREAM DRAW
    // Wrap the GL buffer ids as OpenCL variables for interop.
    colorBuffer = new CLCalc.Program.Variable(QuadMeshBufs[0], typeof(float));
    positionBuffer = new CLCalc.Program.Variable(QuadMeshBufs[1], typeof(float));
    normalBuffer = new CLCalc.Program.Variable(QuadMeshBufs[2], typeof(float));
    GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
    GL.Enable(EnableCap.Blend);
}
/// <summary>
/// No initialization is required for this component; the method exists only
/// to satisfy the component interface.
/// </summary>
public void Initialize(BaseCameraApplication capture, GLAdvancedRender glw) { }
/// <summary>
/// Refreshes this component's cached ground-plane estimate from the primary
/// device once per frame.
/// </summary>
/// <param name="app">Application providing the primary device.</param>
public void Process(BaseCameraApplication app)
{
    var device = app.GetPrimaryDevice();
    this.GroundPlane = device.GetGroundPlane();
}
/// <summary>
/// Caches the device's depth range and bounding box plus the GL control's
/// dimensions for use during rendering.
/// </summary>
/// <param name="app">Application providing the primary device.</param>
/// <param name="glw">Render wrapper whose control size is captured.</param>
public void Initialize(BaseCameraApplication app, GLAdvancedRender glw)
{
    var device = app.GetPrimaryDevice();
    minDepth = device.GetMinDepth();
    maxDepth = device.GetMaxDepth();
    bbox = device.GetBoundingBox();
    screenWidth = glw.GLCtrl.Width;
    screenHeight = glw.GLCtrl.Height;
}
/// <summary>
/// Copies the current filtered frame into the point/color buffers, reads both
/// back to the host, and appends every pixel to a numbered .xyz point-cloud
/// file. Points outside the device's depth range or with low confidence
/// (w &lt;= 100) are written as zeroed placeholders so the file keeps a fixed
/// width*height layout.
/// </summary>
/// <param name="app">Application providing the primary device and image filter.</param>
public void Process(BaseCameraApplication app)
{
    CameraDataFilter filter = app.GetImageFilter();
    kernelCopyImage.Execute(new CLCalc.Program.MemoryObject[] { app.GetPrimaryDevice().GetDepthImage().GetMemoryObject(), filter.GetTextureImage(), filter.GetColorImage(), pointBuffer, colorBuffer }, new int[] { filter.GetDepthImage().Width, filter.GetDepthImage().Height });
    pointBuffer.ReadFromDeviceTo(points);
    colorBuffer.ReadFromDeviceTo(colors);
    // Every pixel is emitted (valid or placeholder), so the vertex count is
    // fixed at one per pixel. (A stale commented-out pre-filter count loop
    // was removed here.)
    int vertexCount = points.Length / 4;
    string path = outputDir + "pointcloud" + counter.ToString("0000") + ".xyz";
    MeshReaderWriter writer = new MeshReaderWriter(vertexCount, 0, path);
    float minDepth = app.GetPrimaryDevice().GetMinDepth();
    float maxDepth = app.GetPrimaryDevice().GetMaxDepth();
    for (int i = 0; i < points.Length; i += 4)
    {
        float4 pt = new float4(points[i], points[i + 1], points[i + 2], points[i + 3]);
        float4 rgb = new float4(colors[i], colors[i + 1], colors[i + 2], colors[i + 3]);
        if (pt.z > minDepth && pt.z < maxDepth && pt.w > 100.0f)
        {
            writer.AddPoint(pt, rgb);
        }
        else
        {
            // Out-of-range or low-confidence sample: keep the grid layout
            // with a zeroed point/color pair.
            writer.AddPoint(new float4(), new float4());
        }
    }
    writer.Close();
    counter++;
}
/// <summary>
/// Per-frame depth filtering: median-smooths the raw depth in place, runs a
/// second median + morphology pipeline on a preserved copy, folds the result
/// into a temporal history ring buffer, then publishes the filtered depth and
/// texture coordinates as the OpenCL images consumed by later stages.
/// </summary>
/// <param name="capture">Application providing the current camera frames.</param>
public void Process(BaseCameraApplication capture)
{
    DepthCameraFrame depthFrame = capture.GetDevices()[0].GetDepthImage();
    TextureMapFrame textureFrame = capture.GetDevices()[0].GetTextureImage();
    // Ping-pong pair: 'input'/'output' swap after every kernel pass.
    CLCalc.Program.MemoryObject input = depthFrame.GetMemoryObject();
    CLCalc.Program.MemoryObject output = depthBuffer;
    CLCalc.Program.MemoryObject tmp;
    this.rgbImage = (CLCalc.Program.Image2D)capture.GetDevices()[0].GetColorImage().GetMemoryObject();
    // Preserve the unfiltered depth for the second pipeline below.
    kernelCopyBuffer.Execute(new CLCalc.Program.MemoryObject[] { input, depthCopyBuffer }, new int[] { depthFrame.Width * depthFrame.Height });
    // Pipeline 1: median smoothing applied to the frame's own buffer.
    for (int cycle = 0; cycle < smoothIterations; cycle++)
    {
        kernelMedianFilter1.Execute(new CLCalc.Program.MemoryObject[] { input, output }, new int[] { depthFrame.Width, depthFrame.Height });
        tmp = input; input = output; output = tmp;
    }
    // Sanity check: smoothIterations must be even so the result lands back in
    // the frame's buffer after the swaps; otherwise abort loudly.
    if (input != depthFrame.GetMemoryObject())
    {
        System.Console.WriteLine("Wrong Buffer!");
        Environment.Exit(1);
    }
    // Pipeline 2: median + erode + dilate applied to the preserved raw copy.
    input = depthCopyBuffer;
    output = depthBuffer;
    for (int cycle = 0; cycle < smoothIterations2; cycle++)
    {
        kernelMedianFilter2.Execute(new CLCalc.Program.MemoryObject[] { input, output }, new int[] { depthFrame.Width, depthFrame.Height });
        tmp = input; input = output; output = tmp;
    }
    for (int cycle = 0; cycle < erodeIterations; cycle++)
    {
        kernelErodeFilter.Execute(new CLCalc.Program.MemoryObject[] { input, output }, new int[] { depthFrame.Width, depthFrame.Height });
        tmp = input; input = output; output = tmp;
    }
    for (int cycle = 0; cycle < dilateIterations; cycle++)
    {
        kernelDilateFilter.Execute(new CLCalc.Program.MemoryObject[] { input, output }, new int[] { depthFrame.Width, depthFrame.Height });
        tmp = input; input = output; output = tmp;
    }
    // Sanity check: smoothIterations2 + erodeIterations + dilateIterations
    // must total an even count so the result ends in depthCopyBuffer.
    if (input != depthCopyBuffer)
    {
        System.Console.WriteLine("Wrong Buffer!");
        Environment.Exit(1);
    }
    if (once)
    {
        // History still filling (first pass through the ring buffer):
        // presumably seeds slots rather than blending — confirm in the kernel.
        copyToTemporalBuffer.Execute(new CLCalc.Program.MemoryObject[] { historyIndex, depthCopyBuffer, depthFrame.GetMemoryObject(), depthTemporalBuffer }, new int[] { depthFrame.Width * depthFrame.Height });
    }
    else
    {
        // Steady state: fold the current frame into the temporal history.
        updateBuffer.Execute(new CLCalc.Program.MemoryObject[] { historyIndex, depthCopyBuffer, depthFrame.GetMemoryObject(), depthTemporalBuffer }, new int[] { depthFrame.Width * depthFrame.Height });
    }
    // Advance the ring-buffer cursor; once it wraps, the history is full.
    historyIndex.value--;
    if (historyIndex.value < 0)
    {
        historyIndex.value = historySize - 1;
        once = false;
    }
    // Publish the filtered depth + texture coordinates as OpenCL images.
    kernelCopyImage.Execute(new CLCalc.Program.MemoryObject[] { depthFrame.GetMemoryObject(), textureFrame.GetMemoryObject(), uvImage, depthImage }, new int[] { depthFrame.Width, depthFrame.Height });
}
/// <summary>
/// Runs the base initialization, then this component's parameterless setup,
/// and finally registers itself as a color mixer. Order matters: the base
/// call must complete before the component-specific Initialize().
/// </summary>
/// <param name="app">Application forwarded to the base initializer.</param>
/// <param name="glw">Render wrapper forwarded to the base initializer.</param>
public override void Initialize(BaseCameraApplication app, GLAdvancedRender glw)
{
    base.Initialize(app, glw);
    Initialize(); // component-specific parameterless overload
    AppendColorMixer(this);
}
/// <summary>
/// Per-frame processing hook; the default implementation does nothing.
/// Subclasses override this to do their per-frame work.
/// </summary>
public virtual void Process(BaseCameraApplication app) { }
/// <summary>
/// Allocates the OpenCV scratch images and the OpenCL buffers/kernels used
/// by the IR face tracker, all sized to the primary device's depth frame.
/// </summary>
/// <param name="capture">Application supplying the device, filter, and preprocess code.</param>
public void Initialize(BaseCameraApplication capture)
{
    DepthCameraFrame depthFrame = capture.GetPrimaryDevice().GetDepthImage();
    this.width = depthFrame.Width;
    this.height = depthFrame.Height;
    this.filter = ((AdaptiveTemporalFilter)capture.GetImageFilter());
    // Single-channel OpenCV scratch images at depth-frame resolution.
    CvSize frameSize = new CvSize(depthFrame.Width, depthFrame.Height);
    gray = new IplImage(frameSize, BitDepth.U8, 1);
    erode = new IplImage(frameSize, BitDepth.U8, 1);
    dilate = new IplImage(frameSize, BitDepth.U8, 1);
    tmp = new IplImage(frameSize, BitDepth.U8, 1);
    mask = new IplImage(frameSize, BitDepth.U8, 1);
    imgLabel = new IplImage(frameSize, BitDepth.F32, 1); // float depth for blob labels
    // One-element buffer the landmark kernel writes its result into.
    faceDetectionBuffer = CLCalc.Program.Variable.Create(new ComputeBuffer<FaceLandmarks>(CLCalc.Program.Context, ComputeMemoryFlags.ReadWrite, 1));
    try
    {
        CLCalc.Program.Compile(capture.GetPrimaryDevice().GetPreprocessCode() + src);
    }
    catch (BuildProgramFailureComputeException ex)
    {
        System.Console.WriteLine(ex.Message);
        Environment.Exit(1);
    }
    // Host-visible IR byte image shared with OpenCL via CopyHostPointer.
    irImageBuffer = CLCalc.Program.Variable.Create(new ComputeBuffer<byte>(CLCalc.Program.Context, ComputeMemoryFlags.ReadWrite | ComputeMemoryFlags.CopyHostPointer, ir = new byte[width * height]));
    kernelCopyIRImage = new CLCalc.Program.Kernel("CopyIRImage");
    kernelFindFaceLandmarks = new CLCalc.Program.Kernel("FindFaceLandmarks");
}
/// <summary>
/// Refreshes the cached bounding box and ground plane each frame, inverting
/// the plane after fetching it (as the renderer expects).
/// </summary>
/// <param name="app">Application providing the primary device.</param>
public void Process(BaseCameraApplication app)
{
    var device = app.GetPrimaryDevice();
    bbox = device.GetBoundingBox();
    GroundPlane = device.GetGroundPlane();
    GroundPlane.Invert();
}
/// <summary>
/// Streams the current depth/texture/color frames into the point-cloud GL
/// buffers via CL/GL interop, but only while this component is visible and
/// both depth and color frames are available.
/// </summary>
/// <param name="capture">Application providing the current camera frames.</param>
public void Process(BaseCameraApplication capture)
{
    // NOTE: "Visisble" is the field's actual (misspelled) name declared
    // elsewhere in this class; it cannot be renamed here.
    if (!Visisble)
    {
        return;
    }
    var device = capture.GetPrimaryDevice();
    DepthCameraFrame depth = device.GetDepthImage();
    ColorCameraFrame color = device.GetColorImage();
    TextureMapFrame texture = device.GetTextureImage();
    if (depth == null || color == null)
    {
        return; // frames not ready yet
    }
    CLCalc.Program.MemoryObject[] args = { depth.GetMemoryObject(), texture.GetMemoryObject(), color.GetMemoryObject(), positions, colors };
    CLGLInteropFunctions.AcquireGLElements(args);
    kernelCopyBmp.Execute(args, new int[] { depth.Width, depth.Height });
    CLGLInteropFunctions.ReleaseGLElements(args);
}