public FaceLandmarks(int4 boundingBox)
 {
     this.boundingBox2D = boundingBox;
     rightEye = new float4(0, 0, 0, -1);
     leftEye = new float4(0, 0, 0, -1);
     noseBridge = new float4(0, 0, 0, -1);
     noseTip = new float4(0, 0, 0, -1);
 }
Example #2
 /// <summary>
 /// Returns true iff this equals rhs component-wise.
 /// </summary>
 public static bool Equals(int4 v, int4 rhs) => v.Equals(rhs);
Example #3
 /// <summary>
 /// Returns a string representation of this vector using a provided separator and a format and format provider for each component.
 /// </summary>
 public static string ToString(int4 v, string sep, string format, IFormatProvider provider) => v.ToString(sep, format, provider);
Example #4
 /// <summary>
 /// Returns a string representation of this vector using a provided separator.
 /// </summary>
 public static string ToString(int4 v, string sep) => v.ToString(sep);
Example #5
 /// <summary>
 /// Returns an int4 with independent and identically distributed uniform integer values between minValue (inclusive) and maxValue (exclusive). (minValue == maxValue is allowed and returns minValue. Negative values are allowed.)
 /// </summary>
 public static int4 RandomUniform(Random random, int4 minValue, int4 maxValue) => int4.RandomUniform(random, minValue, maxValue);
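A minimal usage sketch for the wrapper above; the seed, bounds, and vector values are illustrative only.
 var rng = new Random(42);                        // System.Random; seed is illustrative
 var min = new int4(0, 0, 0, 0);
 var max = new int4(10, 10, 10, 10);
 int4 sample = int4.RandomUniform(rng, min, max); // each component drawn from [0, 10)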
Example #6
 /// <summary>
 /// Returns a vector pointing in the same direction as another (faceforward orients a vector to point away from a surface as defined by its normal: if dot(Nref, I) is negative, faceforward returns N, otherwise it returns -N).
 /// </summary>
 public static int4 FaceForward(int4 N, int4 I, int4 Nref) => int4.FaceForward(N, I, Nref);
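A small worked example of the rule quoted in the summary above (returns N when dot(Nref, I) is negative, otherwise -N); the concrete vectors are illustrative.
 var N    = new int4(0, 0, 1, 0);
 var I    = new int4(0, 0, -1, 0);  // incident direction pointing "into" the surface
 var Nref = new int4(0, 0, 1, 0);
 // dot(Nref, I) = -1 < 0, so the expected result is N itself: (0, 0, 1, 0).
 int4 oriented = int4.FaceForward(N, I, Nref);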
Example #7
 /// <summary>
 /// Calculate the reflection direction for an incident vector (N should be normalized in order to achieve the desired result).
 /// </summary>
 public static int4 Reflect(int4 I, int4 N) => int4.Reflect(I, N);
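A worked sketch assuming the usual GLSL-style reflection formula I - 2 * dot(N, I) * N; the vectors are illustrative.
 var I = new int4(1, -1, 0, 0);  // incident vector
 var N = new int4(0, 1, 0, 0);   // unit-length normal along y
 // dot(N, I) = -1, so the expected reflection is I + 2*N = (1, 1, 0, 0).
 int4 R = int4.Reflect(I, N);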
Example #8
        public void Process(BaseCameraApplication capture)
        {
            const double ScaleFactor = 1.0850;
            DepthCameraFrame depthFrame = capture.GetPrimaryDevice().GetDepthImage();
            ColorCameraFrame rgbFrame = capture.GetPrimaryDevice().GetColorImage();
            TextureMapFrame uvFrame = capture.GetPrimaryDevice().GetTextureImage();
            kernelCopyIRImage.Execute(new CLCalc.Program.MemoryObject[] { depthFrame.GetMemoryObject(), irImageBuffer }, width * height);
            CLCalc.Program.CommQueues[CLCalc.Program.DefaultCQ].Read<byte>(((ComputeBuffer<byte>)irImageBuffer.VarPointer), true, 0, width * height, gray.ImageData, null);
            storage.Clear();
            //Use OpenCV for face tracking in the IR image. The SDK has its own face tracker, but it only operates on RGB; either could be used for this example.
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(gray, faceCascade, storage, ScaleFactor, 2, 0, new CvSize(40, 40));
            if (faces.Total > 0)
            {
                CvRect face = faces[0].Value.Rect;
                Cv.SetImageROI(gray, face);
                Cv.SetImageROI(dilate, face);
                Cv.SetImageROI(erode, face);
                Cv.SetImageROI(tmp, face);
                //Filter the image to enhance contrast between eyes/face.
                Cv.Dilate(gray, tmp);
                Cv.Dilate(tmp, dilate);
                Cv.Threshold(gray, tmp, 0, 1, ThresholdType.Binary);
                Cv.Erode(gray, erode);
                Cv.Sub(gray, erode, gray);
                Cv.Mul(gray, tmp, gray);
                Cv.SetImageROI(mask, face);
                Cv.SetImageROI(imgLabel, face);
                //Threshold out peaks.
                Cv.Threshold(gray, mask, 128, 255, ThresholdType.Binary);
                blobs.Clear();
                uint result = blobs.Label(mask, imgLabel);
                double minDistLeft = 1E10;
                double minDistRight = 1E10;
                int xCenter = face.Width / 2;
                int yCenter = (int)((face.Height) * 0.35);
                CvPoint center = new CvPoint(xCenter, yCenter);
                CvPoint right = new CvPoint(-1, -1);
                CvPoint left = new CvPoint(-1, -1);

                //Assign blobs to eyes. The camera faces the subject, so blobs left of the face center map to the subject's right eye and vice versa.
                foreach (KeyValuePair<uint, CvBlob> item in blobs)
                {
                    CvBlob b = item.Value;
                    double d = CvPoint.Distance(b.Centroid, center);
                    if (b.Centroid.X < center.X)
                    {
                        if (d < minDistLeft)
                        {
                            minDistLeft = d;
                            right = b.Centroid;
                        }
                    }
                    else
                    {
                        if (d < minDistRight)
                        {
                            minDistRight = d;
                            left = b.Centroid;
                        }
                    }
                }
                if (right.X >= 0 && left.X >= 0)
                {
                    rightEye2D = new int2(right.X + face.X, right.Y + face.Y);
                    leftEye2D = new int2(left.X + face.X, left.Y + face.Y);
                    boundingBox2D = new int4(face.X, face.Y, face.Width, face.Height);
                    //Find the nose bridge and nose tip. This was done in OpenCL to leverage read_imagef.
                    kernelFindFaceLandmarks.Execute(new CLCalc.Program.MemoryObject[] { rightEye2D, leftEye2D, boundingBox2D, faceDetectionBuffer, filter.GetDepthImage() }, 1);
                    ReadFaceLandmarksFromBuffer();
                    foundFace = true;
                }
                else
                {
                    foundFace = false;
                }
                Cv.ResetImageROI(gray);
                Cv.ResetImageROI(erode);
                Cv.ResetImageROI(dilate);
                Cv.ResetImageROI(tmp);
            }
            else
            {
                foundFace = false;
                WriteFaceLandmarksToBuffer();
            }
        }
Example #9
 /// <summary>
 /// Returns the minimal component of this vector.
 /// </summary>
 public static int MinElement(int4 v) => v.MinElement;
Example #10
 /// <summary>
 /// Returns an array with all values.
 /// </summary>
 public static int[] Values(int4 v) => v.Values;
Example #11
 /// <summary>
 /// Returns an int4 from component-wise application of RightShift (lhs &gt;&gt; rhs).
 /// </summary>
 public static int4 RightShift(int4 lhs, int4 rhs) => int4.RightShift(lhs, rhs);
Example #12
 /// <summary>
 /// Returns an int4 from component-wise application of LeftShift (lhs &lt;&lt; rhs).
 /// </summary>
 public static int4 LeftShift(int4 lhs, int4 rhs) => int4.LeftShift(lhs, rhs);
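A worked example of the two component-wise shifts above; the operands are illustrative.
 var lhs = new int4(1, 2, 3, 4);
 var one = new int4(1, 1, 1, 1);
 int4 shiftedLeft  = int4.LeftShift(lhs, one);          // (2, 4, 6, 8)
 int4 shiftedRight = int4.RightShift(shiftedLeft, one); // back to (1, 2, 3, 4)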
Example #13
	public static extern int mono_return_int4 (int4 s, int addend);
Example #14
 /// <summary>
 /// Returns a hash code for this instance.
 /// </summary>
 public static int GetHashCode(int4 v) => v.GetHashCode();
Example #15
 /// <summary>
 /// Returns the maximal component of this vector.
 /// </summary>
 public static int MaxElement(int4 v) => v.MaxElement;
Example #16
 /// <summary>
 /// Returns a bool4 from component-wise application of NotEqual (lhs != rhs).
 /// </summary>
 public static bool4 NotEqual(int4 lhs, int4 rhs) => int4.NotEqual(lhs, rhs);
Example #17
 /// <summary>
 /// Returns the euclidean length of this vector.
 /// </summary>
 public static float Length(int4 v) => v.Length;
Example #18
 /// <summary>
 /// Returns the squared euclidean distance between the two vectors.
 /// </summary>
 public static float DistanceSqr(int4 lhs, int4 rhs) => int4.DistanceSqr(lhs, rhs);
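A worked example, assuming the standard squared Euclidean distance over the four components; the vectors are illustrative.
 var a = new int4(1, 2, 3, 4);
 var b = new int4(0, 0, 0, 0);
 float dSqr = int4.DistanceSqr(a, b);   // 1 + 4 + 9 + 16 = 30
 float d    = (float)Math.Sqrt(dSqr);   // ≈ 5.477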
Example #19
 /// <summary>
 /// Returns the squared euclidean length of this vector.
 /// </summary>
 public static float LengthSqr(int4 v) => v.LengthSqr;
Example #20
 /// <summary>
 /// Calculate the refraction direction for an incident vector (The input parameters I and N should be normalized in order to achieve the desired result).
 /// </summary>
 public static int4 Refract(int4 I, int4 N, int eta) => int4.Refract(I, N, eta);
Example #21
 /// <summary>
 /// Returns the sum of all components.
 /// </summary>
 public static int Sum(int4 v) => v.Sum;
Example #22
 /// <summary>
 /// Returns an int4 with independent and identically distributed uniform integer values between 0 (inclusive) and maxValue (exclusive). (A maxValue of 0 is allowed and returns 0.)
 /// </summary>
 public static int4 Random(Random random, int4 maxValue) => int4.Random(random, maxValue);
Example #23
 /// <summary>
 /// Returns the one-norm of this vector.
 /// </summary>
 public static float Norm1(int4 v) => v.Norm1;
Example #24
 /// <summary>
 /// Returns a string representation of this vector using ', ' as a separator.
 /// </summary>
 public static string ToString(int4 v) => v.ToString();
Example #25
 /// <summary>
 /// Returns the two-norm (euclidean length) of this vector.
 /// </summary>
 public static float Norm2(int4 v) => v.Norm2;
Example #26
 /// <summary>
 /// Returns a string representation of this vector using a provided separator and a format for each component.
 /// </summary>
 public static string ToString(int4 v, string sep, string format) => v.ToString(sep, format);
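An illustrative sketch of the ToString overloads above; the expected outputs assume that the separator is placed between components and that the format string is applied to each component's standard numeric formatting.
 var v = new int4(10, 20, 30, 40);
 string plain = v.ToString(", ");       // expected: "10, 20, 30, 40"
 string hex   = v.ToString(" ", "X2");  // expected: "0A 14 1E 28"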
Example #27
 /// <summary>
 /// Returns the max-norm of this vector.
 /// </summary>
 public static float NormMax(int4 v) => v.NormMax;
Example #28
 /// <summary>
 /// Returns the number of components (4).
 /// </summary>
 public static int Count(int4 v) => v.Count;
Example #29
 /// <summary>
 /// Returns the p-norm of this vector.
 /// </summary>
 public static double NormP(int4 v, double p) => v.NormP(p);
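A worked example relating the norm helpers above, assuming the usual p-norm definitions over the four components.
 var v = new int4(1, -2, 3, -4);
 float  n1   = v.Norm1;       // |1| + |-2| + |3| + |-4| = 10
 float  n2   = v.Norm2;       // sqrt(1 + 4 + 9 + 16) ≈ 5.477
 float  nMax = v.NormMax;     // 4
 double n3   = v.NormP(3.0);  // (1 + 8 + 27 + 64)^(1/3) ≈ 4.64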
Example #30
 /// <summary>
 /// Returns true iff this equals rhs type- and component-wise.
 /// </summary>
 public static bool Equals(int4 v, object obj) => v.Equals(obj);
Example #31
 /// <summary>
 /// Returns an enumerator that iterates through all components.
 /// </summary>
 public static IEnumerator<int> GetEnumerator(int4 v) => v.GetEnumerator();
Example #32
 /// <summary>
 /// Returns a bool4 from component-wise application of Equal (lhs == rhs).
 /// </summary>
 public static bool4 Equal(int4 lhs, int4 rhs) => int4.Equal(lhs, rhs);
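A worked example of the component-wise comparisons (Equal above, NotEqual in an earlier example); the operands are illustrative.
 var lhs = new int4(1, 2, 3, 4);
 var rhs = new int4(1, 0, 3, 0);
 bool4 eq = int4.Equal(lhs, rhs);     // (true, false, true, false)
 bool4 ne = int4.NotEqual(lhs, rhs);  // (false, true, false, true)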
Example #33
 /// <summary>
 /// Returns the inner product (dot product, scalar product) of the two vectors.
 /// </summary>
 public static int Dot(int4 lhs, int4 rhs) => int4.Dot(lhs, rhs);
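A worked example: the inner product is the sum of the component-wise products.
 var lhs = new int4(1, 2, 3, 4);
 var rhs = new int4(4, 3, 2, 1);
 int dot = int4.Dot(lhs, rhs);  // 1*4 + 2*3 + 3*2 + 4*1 = 20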
Example #34
 public static extern CUResult cuMemcpyDtoH_v2(ref int4 dstHost, CUdeviceptr srcDevice, SizeT ByteCount);
Example #35
        public byte[] FilterGPU(byte[] bgra, ushort[] depth, DepthSpacePoint[] depthSpaceData,
            int nearThresh, int farThresh, int haloSize)
        {
            if (computeShader == null)
            {
                return new byte[0];
            }

            // Initialize last frame with current color frame, if it was reset
            if (bLastFrameReset)
            {
                lastFramePixels = bgra;
                bLastFrameReset = false;
            }

            // -- Create halo array: a diamond-shaped (L1-ball) set of pixel offsets used as the sampling neighborhood --

            List<int2> halo = new List<int2>();

            int s = haloSize;
            int xd = s;
            int yd = s / 2;
            int S = (xd + yd) / 2;
            int x0 = -xd;
            int x1 = +xd;
            int y0 = -yd;
            int y1 = +yd;
            int actualHaloSize = 0;
            for (int y = y0; y < y1; ++y)
            {
                for (int x = x0; x < x1; ++x)
                {
                    if (Math.Abs(x) + Math.Abs(y) <= S)
                    {
                        halo.Add(new int2(x, y));
                        ++actualHaloSize;
                    }
                }
            }

            // --

            // -- Perform data transformations so the arrays can be passed to the GPU --

            var bgraDataTransformed = new int4[1920 * 1080];
            for (int i = 0, j = 0; i < bgra.Length; i += 4, ++j)
            {
                bgraDataTransformed[j] = new int4(bgra[i], bgra[i + 1], bgra[i + 2], bgra[i + 3]);
            }

            var lastFrameDataTransformed = new int4[1920 * 1080];
            for (int i = 0, j = 0; i < bgra.Length; i += 4, ++j)
            {
                lastFrameDataTransformed[j] = new int4(lastFramePixels[i], lastFramePixels[i + 1], lastFramePixels[i + 2], lastFramePixels[i + 3]);
            }

            // --

            //var sw = Stopwatch.StartNew();

            // Create a constant buffer to pass the filter configuration
            var cbuffer = GPGPUHelper.CreateConstantBuffer(device, new int[] { nearThresh, farThresh, haloSize });

            // -- Create GPULists using the immediate context and pass the data --

            GPUList<int4> bgraData = new GPUList<int4>(device.ImmediateContext);
            bgraData.AddRange(bgraDataTransformed);

            GPUList<uint> depthData = new GPUList<uint>(device.ImmediateContext);
            depthData.AddRange(depth.Select(d => (uint)d));

            GPUList<DepthSpacePoint> depthSpacePointData = new GPUList<DepthSpacePoint>(device.ImmediateContext, depthSpaceData);
            //depthSpacePointData.AddRange(depthSpaceData.Select(dsp => {

            //    if (dsp.X == float.NegativeInfinity || dsp.Y == -float.NegativeInfinity)
            //    {
            //        return new DepthSpacePoint() { X = -1, Y = -1 };
            //    }
            //    else
            //    {
            //        return dsp;
            //    }
            //}));

            GPUList<int4> lastFrameData = new GPUList<int4>(device.ImmediateContext);
            lastFrameData.AddRange(lastFrameDataTransformed);

            var resultArray = new int4[1920 * 1080];
            GPUList<int4> resultData = new GPUList<int4>(device.ImmediateContext, resultArray);

            GPUList<int2> haloData = new GPUList<int2>(device.ImmediateContext, halo);

            // --

            var sw = Stopwatch.StartNew();

            // Set the buffers and uavs
            device.ImmediateContext.ComputeShader.Set(computeShader);
            device.ImmediateContext.ComputeShader.SetConstantBuffer(cbuffer, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(bgraData.UnorderedAccess, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(depthData.UnorderedAccess, 1);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(depthSpacePointData.UnorderedAccess, 2);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(lastFrameData.UnorderedAccess, 3);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(resultData.UnorderedAccess, 4);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(haloData.UnorderedAccess, 5);

            // Run the compute shader
            device.ImmediateContext.Dispatch(1920 * 1080 / 256, 1, 1);

            // Get the result. This call blocks until the result has been calculated,
            // because the underlying MapSubresource call waits.
            var result = resultData.ToArray();

            sw.Stop();

            // -- Clean up --

            device.ImmediateContext.ComputeShader.SetConstantBuffer(null, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 1);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 2);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 3);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 4);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 5);

            cbuffer.Dispose();
            bgraData.Dispose();
            depthData.Dispose();
            depthSpacePointData.Dispose();
            lastFrameData.Dispose();
            resultData.Dispose();
            haloData.Dispose();

            // --

            Debug.WriteLine($"Filtering took {sw.ElapsedMilliseconds} ms");

            var resultBytes = new byte[1920 * 1080 * 4];

            for (int i = 0, j = 0; i < resultBytes.Length; i += 4, ++j)
            {
                resultBytes[i] = (byte)result[j].x;
                resultBytes[i+1] = (byte)result[j].y;
                resultBytes[i+2] = (byte)result[j].z;
                resultBytes[i+3] = (byte)result[j].a;
            }

            lastFramePixels = resultBytes;

            return resultBytes;
        }
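A hypothetical call site for FilterGPU, sketched under assumptions: `filter` stands in for an instance of the surrounding class, the color buffer is 1920x1080 BGRA as the buffers above imply, and the depth-frame length and threshold values are illustrative rather than taken from the original code.
        var bgra             = new byte[1920 * 1080 * 4];          // BGRA color frame
        var depth            = new ushort[512 * 424];               // depth frame (length assumed)
        var depthSpacePoints = new DepthSpacePoint[1920 * 1080];    // color-to-depth mapping, one entry per color pixel
        byte[] filtered = filter.FilterGPU(bgra, depth, depthSpacePoints,
                                           nearThresh: 500, farThresh: 4500, haloSize: 8);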