Example #1
    internal void ProcessFrames(FrameSet frames, FrameSource src, FramesReleaser releaser, Action <Frame> handleFrame, Action <FrameSet> handleFrameSet)
    {
        var pbs = _processingBlocks.OrderBy(i => i.Order).Where(i => i.Enabled).ToList();

        foreach (var vpb in pbs)
        {
            FrameSet processedFrames = frames;
            switch (vpb.ProcessingType)
            {
            case ProcessingBlockType.Single:
                processedFrames = HandleSingleFrameProcessingBlocks(frames, src, releaser, vpb, handleFrame);
                break;

            case ProcessingBlockType.Multi:
                processedFrames = HandleMultiFramesProcessingBlocks(frames, src, releaser, vpb, handleFrameSet);
                break;
            }
            frames = processedFrames;
        }

        handleFrameSet(frames);
        foreach (var fr in frames)
        {
            using (fr)
                handleFrame(fr);
        }
    }
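The loop above threads each enabled block's output into the next block, ordered by `Order`. A minimal self-contained sketch of that chaining pattern, using a hypothetical generic `IProcessingBlock<T>` interface rather than the library's own types:

using System.Collections.Generic;
using System.Linq;

// Hypothetical interface illustrating the chaining pattern above;
// the real RsProcessingBlock type has more members.
public interface IProcessingBlock<T>
{
    int Order { get; }       // position in the chain
    bool Enabled { get; }    // disabled blocks are skipped
    T Process(T input);
}

public static class ProcessingChain
{
    // Fold the input through every enabled block, lowest Order first.
    public static T Run<T>(T input, IEnumerable<IProcessingBlock<T>> blocks)
    {
        foreach (var block in blocks.OrderBy(b => b.Order).Where(b => b.Enabled))
        {
            input = block.Process(input); // output feeds the next block
        }
        return input;
    }
}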
Example #2
    private Frame ApplyFilter(Frame frame, FrameSource frameSource, FramesReleaser framesReleaser, RsProcessingBlock vpb, Action <Frame> handleFrame)
    {
        if (!vpb.CanProcess(frame))
        {
            return(frame);
        }

        // run the processing block.
        var processedFrame = vpb.Process(frame, frameSource, framesReleaser);

        // in case a fork is requested, notify on the new frame and keep the original frame for the new frameset.
        if (vpb.Fork())
        {
            handleFrame(processedFrame);
            processedFrame.Dispose();
            return(frame);
        }

        // avoid disposing the frame in case the filter returns the original frame.
        if (processedFrame == frame)
        {
            return(frame);
        }

        // replace the current frame with the processed one to be used as the input to the next iteration (next filter)
        frame.Dispose();
        return(processedFrame);
    }
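ApplyFilter follows one ownership rule: exactly one live frame is returned, and whichever of the input/output pair is not returned gets disposed (the fork case additionally publishes the processed frame before disposing it). A compact restatement of the non-fork part of that rule as a hypothetical helper, using the example's Frame type:

// Hypothetical helper restating the ownership rule above: the caller
// always receives one live frame and never tracks which one survived.
static Frame KeepOne(Frame original, Frame processed)
{
    if (ReferenceEquals(original, processed))
    {
        return original;    // filter passed the frame through untouched
    }
    original.Dispose();     // the processed frame replaced it
    return processed;
}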
Example #3
        public void SuperResolutionTest()
        {
            Mat frame  = new Mat(); // input video frame
            Mat result = new Mat(); // output superresolution image

            //FrameSource _frameSource = new FrameSource(0); // input frames are obtained from WebCam or USB Camera
            FrameSource _frameSource = new FrameSource(@"C:\Users\Samantha\Documents\University\Fourth Year\METR4810\Data\Backlight\vi_0004_20180430_015632.mp4", false); // input frames are read from a file

            _frameSource.NextFrame(frame); // grab the first frame from the file source

            try
            {
                for (int i = 0; i < 5; i++)
                {
                    frame.Save(@"C:\Users\Samantha\Documents\University\Fourth Year\METR4810\Data\SuperresTest\In" + i.ToString("00i") + ".png");

                    SuperResolution _superResolution = new SuperResolution(Emgu.CV.Superres.SuperResolution.OpticalFlowType.Btvl, _frameSource);
                    _superResolution.NextFrame(result); // output super resolution image

                    result.Save(@"C:\Users\Samantha\Documents\University\Fourth Year\METR4810\Data\SuperresTest\Out" + i.ToString("00i") + ".png");
                }
            }
            catch (Exception ex)
            {
                Console.Write(ex);
            }
        }
Example #4
        public override void RunTest()
        {
            var capture = new VideoCapture();

            capture.Set(VideoCaptureProperties.FrameWidth, 640);
            capture.Set(VideoCaptureProperties.FrameHeight, 480);
            capture.Open(-1);
            if (!capture.IsOpened())
            {
                throw new Exception("capture initialization failed");
            }

            var fs = FrameSource.CreateFrameSource_Camera(-1);
            var sr = SuperResolution.CreateBTVL1();

            sr.SetInput(fs);

            using var normalWindow = new Window("normal");
            using var srWindow     = new Window("super resolution");
            var normalFrame = new Mat();
            var srFrame     = new Mat();

            while (true)
            {
                capture.Read(normalFrame);
                sr.NextFrame(srFrame);
                if (normalFrame.Empty() || srFrame.Empty())
                {
                    break;
                }
                normalWindow.ShowImage(normalFrame);
                srWindow.ShowImage(srFrame);
                Cv2.WaitKey(100);
            }
        }
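The same superres pipeline can read from a file instead of a camera. A short sketch, assuming this OpenCvSharp version exposes a video factory alongside the `CreateFrameSource_Camera` call used above; the file name is a placeholder:

// Assumption: FrameSource.CreateFrameSource_Video mirrors the camera factory.
var fileSource = FrameSource.CreateFrameSource_Video("input.mp4");
var superRes   = SuperResolution.CreateBTVL1();
superRes.SetInput(fileSource);

var upscaled = new Mat();
superRes.NextFrame(upscaled); // first upscaled frame; an empty Mat signals end of stream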
Example #5
    Frame ApplyFilter(DepthFrame depth, FrameSource frameSource)
    {
        using (var p = depth.Profile)
        {
            var count = depth.Width * depth.Height;
            if (depthData == null || depthData.Length != count)
            {
                depthData = new ushort[count];
            }

            depth.CopyTo(depthData);

            for (int i = 0; i < count; i++)
            {
                if (depthData[i] > Distance)
                {
                    depthData[i] = 0;
                }
            }

            var v = frameSource.AllocateVideoFrame <DepthFrame>(p, depth, depth.BitsPerPixel, depth.Width, depth.Height, depth.Stride, Extension.DepthFrame);
            v.CopyFrom(depthData);

            return(v);
        }
    }
 public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
 {
     lock (_lock)
     {
         using (var frameset = FrameSet.FromFrame(frame))
         {
             using (var depth = frameset.DepthFrame)
             {
                 using (var texture = frameset.FirstOrDefault <VideoFrame>(_textureStream))
                 {
                     _videoStreamFilter[depth.Profile.Stream].CopyProfile(depth);
                     _videoStreamFilter[texture.Profile.Stream].CopyProfile(texture);
                     if (_currVideoStreamFilter.Count == 0 ||
                         !_currVideoStreamFilter[depth.Profile.Stream].Equals(_videoStreamFilter[depth.Profile.Stream]) ||
                         !_currVideoStreamFilter[texture.Profile.Stream].Equals(_videoStreamFilter[texture.Profile.Stream]))
                     {
                         ResetProcessingBlock();
                         _currVideoStreamFilter[depth.Profile.Stream]   = new RsVideoStreamRequest(depth);
                         _currVideoStreamFilter[texture.Profile.Stream] = new RsVideoStreamRequest(texture);
                     }
                     var points = _pb.Calculate(depth, releaser);
                     _pb.MapTexture(texture);
                     return(frameSource.AllocateCompositeFrame(releaser, depth, points).AsFrame());
                 }
             }
         }
     }
 }
Example #7
    private FrameSet HandleSingleFrameProcessingBlocks(FrameSet frameSet, FrameSource frameSource, FramesReleaser framesReleaser, RsProcessingBlock videoProcessingBlock, Action <Frame> handleFrame)
    {
        // single frame filters
        List <Frame> processedFrames = new List <Frame>();

        foreach (var frame in frameSet)
        {
            var currFrame = ApplyFilter(frame, frameSource, framesReleaser, videoProcessingBlock, handleFrame);

            // cache the processed frame
            processedFrames.Add(currFrame);
            if (frame != currFrame)
            {
                frame.Dispose();
            }
        }

        // Combine the frames into a single frameset
        var newFrameSet = frameSource.AllocateCompositeFrame(framesReleaser, processedFrames.ToArray());

        foreach (var f in processedFrames)
        {
            f.Dispose();
        }

        return(newFrameSet);
    }
Example #8
        private void UserControl_Unloaded(object sender, RoutedEventArgs e)
        {
            timer.Stop();
            timer.Tick -= timer1_Tick;


            _source = null;
        }
Example #9
//	// Pings the server
//	[DllImportAttribute(@"Kinect2UnityClient.dll")]
//	public static extern int PingKinect2Server();
//
//	// Initializes the default Kinect sensor
//	[DllImportAttribute(@"Kinect2UnityClient.dll")]
//	public static extern int InitDefaultKinectSensor(FrameSource dwFlags, int iColorWidth, int iColorHeight);
//
//	// Shuts down the opened Kinect2 sensor
//	[DllImportAttribute(@"Kinect2UnityClient.dll")]
//	public static extern void ShutdownKinectSensor();

//	// Returns the maximum number of the bodies
//	[DllImportAttribute(@"Kinect2UnityClient.dll")]
//	public static extern int GetBodyCount();
//
//	// Returns the latest body frame data available
//	[DllImportAttribute(@"Kinect2UnityClient.dll")]
//	public static extern int GetBodyFrameData(ref BodyFrameData pBodyFrame, bool bGetOrientations, bool bGetHandStates);


    // opens the default Kinect sensor and needed readers
    public static SensorData OpenDefaultKinectSensor(FrameSource dwFlags)
    {
        SensorData sensorData = new SensorData();

        sensorData.kinectSensor = KinectSensor.GetDefault();
        if (sensorData.kinectSensor == null)
        {
            return(null);
        }

        sensorData.coordMapper = sensorData.kinectSensor.CoordinateMapper;
        sensorData.bodyCount   = sensorData.kinectSensor.BodyFrameSource.BodyCount;

        if ((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
        {
            sensorData.bodyFrameReader = sensorData.kinectSensor.BodyFrameSource.OpenReader();
            sensorData.bodyData        = new Body[sensorData.bodyCount];
        }

        var frameDesc = sensorData.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);

        sensorData.colorImageWidth  = frameDesc.Width;
        sensorData.colorImageHeight = frameDesc.Height;

        if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            sensorData.colorFrameReader = sensorData.kinectSensor.ColorFrameSource.OpenReader();
            sensorData.colorImage       = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
        }

        sensorData.depthImageWidth  = sensorData.kinectSensor.DepthFrameSource.FrameDescription.Width;
        sensorData.depthImageHeight = sensorData.kinectSensor.DepthFrameSource.FrameDescription.Height;

        if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            sensorData.depthFrameReader = sensorData.kinectSensor.DepthFrameSource.OpenReader();
            sensorData.depthImage       = new ushort[sensorData.kinectSensor.DepthFrameSource.FrameDescription.LengthInPixels];
        }

        if ((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
        {
            sensorData.bodyIndexFrameReader = sensorData.kinectSensor.BodyIndexFrameSource.OpenReader();
            sensorData.bodyIndexImage       = new byte[sensorData.kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
        }

        if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            sensorData.infraredFrameReader = sensorData.kinectSensor.InfraredFrameSource.OpenReader();
            sensorData.infraredImage       = new ushort[sensorData.kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
        }

        if (!sensorData.kinectSensor.IsOpen)
        {
            sensorData.kinectSensor.Open();
        }

        return(sensorData);
    }
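Since `FrameSource` here is a flags enum, callers pick which readers to open by OR-ing flags together. A hypothetical call site (not part of the example):

// Open only the body and color streams; the depth, body-index and
// infrared readers stay null because their flags are not set.
SensorData data = OpenDefaultKinectSensor(
    KinectInterop.FrameSource.TypeBody | KinectInterop.FrameSource.TypeColor);

if (data != null && data.bodyFrameReader != null)
{
    // TypeBody was requested, so the body frame reader is open
}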
Example #10
        public void ShowGame(FrameSource source)
        {
            _source          = source;
            this.DataContext = _source;

            var frame = _source.NextFrame();

            controlScene.Show(frame);
        }
Example #11
    public override Frame Process(Frame frame, FrameSource frameSource)
    {
        if (_pb == null || _alignTo != _currAlignTo)
        {
            Init();
        }

        return(_pb.Process(frame));
    }
    public override Frame Process(Frame frame, FrameSource frameSource)
    {
        if (_pb == null)
        {
            Init();
        }

        UpdateOptions();

        return(_pb.Process(frame));
    }
Example #13
        public SourceUSBCam()
        {
            InitializeComponent();
            var camList = FrameSource.GetAllConnectedCameras();

            cameraList.ItemsSource = camList;
            if (camList.Count > 0)
            {
                var lastOpen = Datastore.GeneralGetValue("usb_cam_last");
                cameraList.SelectedItem = camList.FirstOrDefault(i => i.Name.Equals(lastOpen)) ?? camList[0];
            }
        }
Example #14
        public AddFrameForm()
        {
            InitializeComponent();

            FrameType = FrameSource.None;

            if (Globals.WorkingFrames == null)
            {
                MessageBox.Show("Cannot add a frame to a Frames not being editted!", "Error", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                DialogResult = DialogResult.Cancel;
                Close();
            }
        }
    public override Frame Process(Frame frame, FrameSource frameSource)
    {
        lock (_lock)
        {
            if (_pb == null)
            {
                Init();
            }
        }

        UpdateOptions(frame.IsComposite);

        return(_pb.Process(frame));
    }
Example #16
    internal void ProcessFrame(Frame frame, FrameSource src)
    {
        try
        {
            lock (_locker)
            {
                if (!Streaming)
                {
                    return;
                }

                Frame f = frame;

                if (profile != null)
                {
                    var filters = profile.ToArray();

                    foreach (var pb in filters)
                    {
                        if (pb == null || !pb.Enabled)
                        {
                            continue;
                        }

                        var r = pb.Process(f, src);
                        if (r != f)
                        {
                            // Prevent the original frame from being disposed during post-processing
                            if (f != frame)
                            {
                                f.Dispose();
                            }
                            f = r;
                        }
                    }
                }

                src.FrameReady(f);

                if (f != frame)
                {
                    f.Dispose();
                }
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
        }
    }
Example #17
 public Streamer(FrameSource source, HttpListenerResponse response)
 {
     frameSource          = source;
     queue                = new BlockingCollection <StreamWatcher.InfoEvent>(new ConcurrentQueue <StreamWatcher.InfoEvent>());
     thread               = new Thread(WorkerThread);
     thread.Name          = "Web Streamer " + frameSource.Description;
     response.ContentType = "video/mp4";
     response.AddHeader("Cache-Control", "no-cache, no-store, must-revalidate");
     response.AddHeader("Pragma", "no-cache");
     response.AddHeader("Expires", "0");
     stream = response.OutputStream;
     thread.Start();
     frameSource.OnFrameInfo += HandleFrameInfo;
 }
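A sketch of how such a Streamer might be attached to incoming HTTP connections; the HttpListener setup is an assumption and not part of the example, and frameSource is taken to be an existing FrameSource:

using System.Net;

var listener = new HttpListener();
listener.Prefixes.Add("http://localhost:8080/stream/");
listener.Start();

while (true)
{
    // One Streamer per client: its constructor sets the MP4 content type,
    // disables caching, and starts a worker thread on the response stream.
    HttpListenerContext context = listener.GetContext();
    var streamer = new Streamer(frameSource, context.Response);
}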
Example #18
        public static void Run()
        {
            Mat image = new Mat();

            frameSource = Cv2.CreateFrameSource_Camera(0);
            while (true)
            {
                //Grab the current frame
                frameSource.NextFrame(image);

                Mat gray = new Mat();
                Cv2.CvtColor(image, gray, ColorConversionCodes.BGR2GRAY);

                Mat blur = new Mat();
                Cv2.GaussianBlur(gray, blur, new Size(19, 19), 0);

                Mat    threshImg = new Mat();
                double thresh    = Cv2.Threshold(blur, threshImg, 150, 255, ThresholdTypes.Otsu); // Otsu threshold on the blurred image

                Mat[] contours;

                Mat hierarchy = new Mat();
                Cv2.FindContours(threshImg, out contours, hierarchy, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);
                double maxArea     = 100;
                Mat    handContour = contours.OrderByDescending(x => x.ContourArea()).First();
                //foreach (var item in contours)
                //{
                //    if(item.ContourArea() > maxArea)
                //    {
                //        handContour = item;
                //        break;
                //    }
                //}

                Mat hull = new Mat();
                // ConvexityDefects expects the hull as point indices, not points
                Cv2.ConvexHull(handContour, hull, clockwise: false, returnPoints: false);

                Mat defects = new Mat();
                Cv2.ConvexityDefects(handContour, hull, defects);

                Cv2.ImShow("frame", hull);
                //Cv2.WaitKey(0);
                if (Cv2.WaitKey(1) == (int)ConsoleKey.Enter)
                {
                    break;
                }
            }
        }
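To visualize the detection itself rather than just the raw frame, the largest contour can be drawn onto the image before it is shown; a short sketch using the standard OpenCvSharp DrawContours overload for Mat contours:

// Overlay the detected hand contour on the live frame for inspection.
Cv2.DrawContours(image, new[] { handContour }, -1, Scalar.Red, thickness: 2);
Cv2.ImShow("frame", image);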
Example #19
    // opens the default sensor and needed readers
    /// <summary>
    /// Opens the default sensor and the readers it needs
    /// </summary>
    /// <param name="listInterfaces">list of available sensor interfaces</param>
    /// <param name="dwFlags">frame-source flags selecting which streams to open</param>
    /// <param name="sensorAngle">sensor tilt angle</param>
    /// <param name="bUseMultiSource">whether to use a multi-source reader</param>
    /// <returns>the opened sensor's data, or null if no sensor could be opened</returns>
    public static SensorData OpenDefaultSensor(List <DepthSensorInterface> listInterfaces,
                                               FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
    {
        SensorData sensorData = null;

        if (listInterfaces == null)
        {
            return(sensorData);
        }

        // iterate over the available sensor interfaces
        foreach (DepthSensorInterface sensorInt in listInterfaces)
        {
            try
            {
                if (sensorData == null)
                {
                    // update the sensor data
                    sensorData = sensorInt.OpenDefaultSensor(dwFlags, sensorAngle, bUseMultiSource);

                    if (sensorData != null)
                    {
                        sensorData.sensorInterface = sensorInt;
                        //Debug.Log("Interface used: " + sensorInt.GetType().Name);
                    }
                }
                else
                {
                    sensorInt.FreeSensorInterface();                        // free the interface resources
                }
            }
            catch (Exception ex)
            {
                //Debug.LogError("Initialization of sensor failed.");
                Debug.LogError(ex.ToString());

                try
                {
                    sensorInt.FreeSensorInterface();                        // free the interface resources
                }
                catch (Exception)
                {
                    // do nothing
                }
            }
        }

        return(sensorData);
    }
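A possible call site for the helper above; the concrete backend type is hypothetical, since real projects populate the list from whatever sensor interfaces they ship:

// Hypothetical usage: try each available backend until one opens.
var interfaces = new List<DepthSensorInterface>
{
    new Kinect2Interface()   // hypothetical backend implementation
};

SensorData sensor = OpenDefaultSensor(
    interfaces,
    KinectInterop.FrameSource.TypeDepth | KinectInterop.FrameSource.TypeBody,
    0f, false);

if (sensor == null)
{
    Debug.LogError("No depth sensor could be opened.");
}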
Example #20
 public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
 {
     lock (_lock)
     {
         using (var frameset = FrameSet.FromFrame(frame))
         {
             using (var depth = frameset.DepthFrame)
                 using (var color = frameset.ColorFrame)
                      if (_profilesIds.Count == 0 || !_profilesIds.ContainsValue(color.Profile.UniqueID) || !_profilesIds.ContainsValue(depth.Profile.UniqueID))
                     {
                         ResetAligner();
                         _profilesIds[Stream.Depth] = depth.Profile.UniqueID;
                         _profilesIds[Stream.Color] = color.Profile.UniqueID;
                     }
             return((_enabled ? _pb.Process(frameset, releaser) : frameset).AsFrame());
         }
     }
 }
Example #21
        public _frameItem GetItemUnder(int x, int y, FrameSource sourceflags)
        {
            foreach (_frameItem fi in ListFrameItems)
            {
                if ((x >= fi.x) && (x <= fi.x + fi.width) &&
                    (y >= fi.y) && (y <= fi.y + fi.height))
                {
                    foreach (_frameName fn in fi.names)
                    {
                        if ((sourceflags & fn.source) > 0)
                        {
                            return(fi);
                        }
                    }
                }
            }

            return(null);
        }
    // opens the default sensor and needed readers
    public static SensorData OpenDefaultSensor(DepthSensorInterface sensorInt, FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
    {
        SensorData sensorData = null;

        if (sensorInt == null)
        {
            return(sensorData);
        }

        try {
            if (sensorData == null)
            {
                sensorData = sensorInt.OpenDefaultSensor(dwFlags, sensorAngle, bUseMultiSource);

                if (sensorData != null)
                {
                    sensorData.sensorInterface   = sensorInt;
                    sensorData.sensorIntPlatform = sensorInt.GetSensorPlatform();
                    Debug.Log("Interface used: " + sensorInt.GetType().Name);

                    Debug.Log("Shader level: " + SystemInfo.graphicsShaderLevel);
                }
            }
            else
            {
                sensorInt.FreeSensorInterface(false);
            }
        } catch (Exception ex) {
            Debug.LogError("Initialization of the sensor failed.");
            Debug.LogError(ex.ToString());

            try {
                sensorInt.FreeSensorInterface(false);
            } catch (Exception) {
                // do nothing
            }
        }


        return(sensorData);
    }
Example #23
    public override Frame Process(Frame frame, FrameSource frameSource)
    {
        lock (_lock)
        {
            if (currMode != Mode)
            {
                if (_pb != null)
                {
                    _pb.Dispose();
                    _pb = null;
                }
            }

            if (_pb == null)
            {
                Init();
            }

            return(_pb.Process(frame));
        }
    }
Example #24
 private FrameSet HandleMultiFramesProcessingBlocks(FrameSet frameSet, FrameSource frameSource, FramesReleaser framesReleaser, RsProcessingBlock videoProcessingBlock, Action <FrameSet> handleFrameSet)
 {
     using (var frame = frameSet.AsFrame())
     {
         if (videoProcessingBlock.CanProcess(frame))
         {
             using (var f = videoProcessingBlock.Process(frame, frameSource, framesReleaser))
             {
                 if (videoProcessingBlock.Fork())
                 {
                     handleFrameSet(FrameSet.FromFrame(f, framesReleaser));
                 }
                 else
                 {
                     return(FrameSet.FromFrame(f, framesReleaser));
                 }
             }
         }
     }
     return(frameSet);
 }
Example #25
        void FrameShowFilterChanged(object o, EventArgs e)
        {
            FrameSource flag = FrameSource.None;
            bool        bOn  = false;

            if (o == cbShowGrid)
            {
                flag = FrameSource.Grid;
                bOn  = cbShowGrid.Checked;
            }
            else if (o == cbShowList)
            {
                flag = FrameSource.List;
                bOn  = cbShowList.Checked;
            }
            else if (o == cbShowNames)
            {
                flag = FrameSource.Name;
                bOn  = cbShowNames.Checked;
            }
            else if (o == cbShowAliases)
            {
                flag = FrameSource.Alias;
                bOn  = cbShowAliases.Checked;
            }

            if (bOn)
            {
                ValidFramesFlag |= flag;
            }
            else
            {
                if ((ValidFramesFlag & flag) > 0)
                {
                    ValidFramesFlag -= flag;
                }
            }

            Invalidate();
        }
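Removing a flag by subtraction only works because the code first checks that the flag is set; the usual idiom clears it unconditionally with a bitwise mask and needs no guard:

// Equivalent to the guarded subtraction above, but safe even when
// the flag is not currently set.
ValidFramesFlag &= ~flag;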
Example #26
        public void Run()
        {
            var capture = new VideoCapture();

            //var capture = new VideoCapture("rtsp://*****:*****@192.168.0.69:554/Streaming/Channels/101");

            capture.Set(CaptureProperty.FrameWidth, 640);
            capture.Set(CaptureProperty.FrameHeight, 480);
            //capture.Open(-1);
            capture.Open(-1);

            if (!capture.IsOpened())
            {
                throw new Exception("capture initialization failed");
            }

            var fs = FrameSource.CreateCameraSource(-1);
            var sr = SuperResolution.CreateBTVL1();

            sr.SetInput(fs);

            using (var normalWindow = new Window("normal"))
                using (var srWindow = new Window("super resolution"))
                {
                    var normalFrame = new Mat();
                    var srFrame     = new Mat();
                    while (true)
                    {
                        capture.Read(normalFrame);
                        sr.NextFrame(srFrame);
                        if (normalFrame.Empty() || srFrame.Empty())
                        {
                            break;
                        }
                        normalWindow.ShowImage(normalFrame);
                        srWindow.ShowImage(srFrame);
                        Cv2.WaitKey(100);
                    }
                }
        }
Example #27
    internal void ProcessFrame(Frame frame, FrameSource src)
    {
        try
        {
            Frame f = frame;

            if (profile != null)
            {
                var filters = profile.ToArray();
                // foreach (var pb in profile)
                foreach (var pb in filters)
                {
                    if (pb == null || !pb.Enabled)
                    {
                        continue;
                    }

                    var r = pb.Process(f, src);
                    if (r != f)
                    {
                        f.Dispose();
                        f = r;
                    }
                }
            }

            src.FrameReady(f);

            if (f != frame)
            {
                f.Dispose();
            }
        }
        catch (Exception e)
        {
            Debug.LogException(e);
        }
    }
Example #28
    public override Frame Process(Frame frame, FrameSource frameSource)
    {
        if (frame.IsComposite)
        {
            using (var fs = FrameSet.FromFrame(frame))
                using (var depth = fs.DepthFrame)
                {
                    var v = ApplyFilter(depth, frameSource);
                    // return v;

                    // find and remove the original depth frame
                    var frames = new List <Frame>();
                    foreach (var f in fs)
                    {
                        using (var p1 = f.Profile)
                            if (p1.Stream == Stream.Depth && p1.Format == Format.Z16)
                            {
                                f.Dispose();
                                continue;
                            }
                        frames.Add(f);
                    }
                    frames.Add(v);

                    var res = frameSource.AllocateCompositeFrame(frames);
                    frames.ForEach(f => f.Dispose());
                    using (res)
                        return(res.AsFrame());
                }
        }

        if (frame is DepthFrame)
        {
            return(ApplyFilter(frame as DepthFrame, frameSource));
        }

        return(frame);
    }
    public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
    {
        if (!_enabled)
        {
            return(frame);
        }
        var org      = frame as VideoFrame;
        var stride   = org.Width * org.BitsPerPixel / 8;
        var newFrame = frameSource.AllocateVideoFrame(org.Profile, org, org.BitsPerPixel, org.Width, org.Height, stride, Extension.DepthFrame);

        if (_pixels == null || org.Profile.UniqueID != _uniqueID)
        {
            InitPixels(org);
        }
        Marshal.Copy(org.Data, _pixels, 0, _pixels.Length);
        for (int i = 0; i < _pixels.Length; i++)
        {
            _pixels[i] = (short)(_pixels[i] >> _depthResolution);
            _pixels[i] = (short)(_pixels[i] << _depthResolution);
        }
        Marshal.Copy(_pixels, 0, newFrame.Data, _pixels.Length);
        return(newFrame);
    }
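The shift pair in the loop above zeroes the low `_depthResolution` bits of each depth sample, coarsening its resolution. The same trick in isolation:

// Zero the low n bits of a value, quantizing it to multiples of 2^n.
static short Quantize(short value, int n)
{
    return (short)((value >> n) << n);
}
// Quantize(1023, 4) == 1008: the low four bits are dropped.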
Example #30
        public void UpdateFrameShowButtons(bool bShow)
        {
            cbShowGrid.Visible    = bShow;
            cbShowList.Visible    = bShow;
            cbShowNames.Visible   = bShow;
            cbShowAliases.Visible = bShow;

            if (!bShow)
            {
                ValidFramesFlag = FrameSource.None;
            }
            else
            {
                ValidFramesFlag    = FrameSource.None;
                cbShowGrid.Checked = Globals.WorkingFrames.frameGrid != null;
                if (cbShowGrid.Checked)
                {
                    ValidFramesFlag |= FrameSource.Grid;
                }
                cbShowList.Checked = Globals.WorkingFrames.frameList != null;
                if (cbShowList.Checked)
                {
                    ValidFramesFlag |= FrameSource.List;
                }
                cbShowNames.Checked = cbShowGrid.Checked && (Globals.WorkingFrames.frameGrid.names != null) && (Globals.WorkingFrames.frameGrid.names.Count > 0);
                if (cbShowNames.Checked)
                {
                    ValidFramesFlag |= FrameSource.Name;
                }
                cbShowAliases.Checked = Globals.WorkingFrames.aliases != null;
                if (cbShowAliases.Checked)
                {
                    ValidFramesFlag |= FrameSource.Alias;
                }
            }
        }
Example #31
    //    // Pings the server
    //    [DllImportAttribute(@"Kinect2UnityClient.dll")]
    //    public static extern int PingKinect2Server();
    //
    //    // Initializes the default Kinect sensor
    //    [DllImportAttribute(@"Kinect2UnityClient.dll")]
    //    public static extern int InitDefaultKinectSensor(FrameSource dwFlags, int iColorWidth, int iColorHeight);
    //    
    //    // Shuts down the opened Kinect2 sensor
    //    [DllImportAttribute(@"Kinect2UnityClient.dll")]
    //    public static extern void ShutdownKinectSensor();
    //    // Returns the maximum number of the bodies
    //    [DllImportAttribute(@"Kinect2UnityClient.dll")]
    //    public static extern int GetBodyCount();
    //    
    //    // Returns the latest body frame data available
    //    [DllImportAttribute(@"Kinect2UnityClient.dll")]
    //    public static extern int GetBodyFrameData(ref BodyFrameData pBodyFrame, bool bGetOrientations, bool bGetHandStates);
    // opens the default Kinect sensor and needed readers
    public static SensorData OpenDefaultKinectSensor(FrameSource dwFlags)
    {
        SensorData sensorData = new SensorData();

        sensorData.kinectSensor = KinectSensor.GetDefault();
        if(sensorData.kinectSensor == null)
            return null;

        sensorData.coordMapper = sensorData.kinectSensor.CoordinateMapper;
        sensorData.bodyCount = sensorData.kinectSensor.BodyFrameSource.BodyCount;

        if((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
        {
            sensorData.bodyFrameReader = sensorData.kinectSensor.BodyFrameSource.OpenReader();
            sensorData.bodyData = new Body[sensorData.bodyCount];
        }

        var frameDesc = sensorData.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        sensorData.colorImageWidth = frameDesc.Width;
        sensorData.colorImageHeight = frameDesc.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            sensorData.colorFrameReader = sensorData.kinectSensor.ColorFrameSource.OpenReader();
            sensorData.colorImage = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
        }

        sensorData.depthImageWidth = sensorData.kinectSensor.DepthFrameSource.FrameDescription.Width;
        sensorData.depthImageHeight = sensorData.kinectSensor.DepthFrameSource.FrameDescription.Height;

        if((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            sensorData.depthFrameReader = sensorData.kinectSensor.DepthFrameSource.OpenReader();
            sensorData.depthImage = new ushort[sensorData.kinectSensor.DepthFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
        {
            sensorData.bodyIndexFrameReader = sensorData.kinectSensor.BodyIndexFrameSource.OpenReader();
            sensorData.bodyIndexImage = new byte[sensorData.kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
        }

        if((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            sensorData.infraredFrameReader = sensorData.kinectSensor.InfraredFrameSource.OpenReader();
            sensorData.infraredImage = new ushort[sensorData.kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
        }

        if(!sensorData.kinectSensor.IsOpen)
        {
            sensorData.kinectSensor.Open();
        }

        return sensorData;
    }
 public static extern int InitDefaultKinectSensor(FrameSource dwFlags, int iColorWidth, int iColorHeight);
 public static extern int PollImageFrameData(FrameSource dwFlags);
    // opens the default sensor and needed readers
    public static SensorData OpenDefaultSensor(List<DepthSensorInterface> listInterfaces, FrameSource dwFlags, float sensorAngle, bool bUseMultiSource,  
		KinectManager.UserMapType userMapType, BackgroundRemovalManager brManager)
    {
        SensorData sensorData = null;
        if(listInterfaces == null)
            return sensorData;

        foreach(DepthSensorInterface sensorInt in listInterfaces)
        {
            try
            {
                if(sensorData == null)
                {
                    sensorData = sensorInt.OpenDefaultSensor(dwFlags, sensorAngle, bUseMultiSource);

                    if(sensorData != null)
                    {
                        sensorData.sensorInterface = sensorInt;
                        sensorData.sensorIntPlatform = sensorInt.GetSensorPlatform();
                        Debug.Log("Interface used: " + sensorInt.GetType().Name);

                        Debug.Log("Shader level: " + SystemInfo.graphicsShaderLevel);
                        if(sensorData.bodyIndexImage != null && IsDirectX11Available())
                        {
                            Shader bodyIndexShader = Shader.Find("Kinect/BodyShader");

                            if(bodyIndexShader != null)
                            {
                                sensorData.bodyIndexTexture = new RenderTexture(sensorData.depthImageWidth, sensorData.depthImageHeight, 0);
                                sensorData.bodyIndexTexture.wrapMode = TextureWrapMode.Clamp;
                                sensorData.bodyIndexTexture.filterMode = FilterMode.Point;
                                //Debug.Log(sensorData.bodyIndexTexture.format);

                                sensorData.bodyIndexMaterial = new Material(bodyIndexShader);

                                sensorData.bodyIndexMaterial.SetFloat("_TexResX", (float)sensorData.depthImageWidth);
                                sensorData.bodyIndexMaterial.SetFloat("_TexResY", (float)sensorData.depthImageHeight);

                                sensorData.bodyIndexBuffer = new ComputeBuffer(sensorData.bodyIndexImage.Length, sizeof(float));
                                sensorData.bodyIndexMaterial.SetBuffer("_BodyIndexBuffer", sensorData.bodyIndexBuffer);
                            }
                        }

                        if(sensorData.depthImage != null && IsDirectX11Available() &&
                            userMapType == KinectManager.UserMapType.UserTexture)
                        {
                            Shader depthImageShader = Shader.Find("Kinect/DepthShader");

                            if(depthImageShader != null)
                            {
                                sensorData.depthImageTexture = new RenderTexture(sensorData.depthImageWidth, sensorData.depthImageHeight, 0);
                                sensorData.depthImageTexture.wrapMode = TextureWrapMode.Clamp;
                                sensorData.depthImageTexture.filterMode = FilterMode.Point;

                                sensorData.depthImageMaterial = new Material(depthImageShader);

                                sensorData.depthImageMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture);

                                sensorData.depthImageMaterial.SetFloat("_TexResX", (float)sensorData.depthImageWidth);
                                sensorData.depthImageMaterial.SetFloat("_TexResY", (float)sensorData.depthImageHeight);

                                sensorData.depthImageBuffer = new ComputeBuffer(sensorData.depthImage.Length, sizeof(float));
                                sensorData.depthImageMaterial.SetBuffer("_DepthBuffer", sensorData.depthImageBuffer);

                                sensorData.depthHistBuffer = new ComputeBuffer(5001, sizeof(float));
                                sensorData.depthImageMaterial.SetBuffer("_HistBuffer", sensorData.depthHistBuffer);

                                // use body index buffer to overcome the linear color correction
                                sensorData.depthImageMaterial.SetBuffer("_BodyIndexBuffer", sensorData.bodyIndexBuffer);
                            }
                        }

                        if(sensorData.colorImage != null)
                        {
                            sensorData.colorImageTexture = new Texture2D(sensorData.colorImageWidth, sensorData.colorImageHeight, TextureFormat.RGBA32, false);
                        }

                        // check if background removal requires cut-out image
                        bool bBrRequiresCutOut = brManager && (!brManager.colorCameraResolution || !sensorInt.IsBRHiResSupported());

                        if(sensorData.bodyIndexImage != null && sensorData.colorImage != null && IsDirectX11Available() &&
                           (userMapType == KinectManager.UserMapType.CutOutTexture || bBrRequiresCutOut))
                        {
                            Shader depth2ColorShader = Shader.Find("Kinect/Depth2ColorShader");

                            if(depth2ColorShader)
                            {
                                sensorData.depth2ColorTexture = new RenderTexture(sensorData.depthImageWidth, sensorData.depthImageHeight, 0);
                                sensorData.depth2ColorTexture.wrapMode = TextureWrapMode.Clamp;
                                //sensorData.depth2ColorTexture.filterMode = FilterMode.Point;

                                sensorData.depth2ColorMaterial = new Material(depth2ColorShader);

                                sensorData.depth2ColorMaterial.SetFloat("_ColorResX", (float)sensorData.colorImageWidth);
                                sensorData.depth2ColorMaterial.SetFloat("_ColorResY", (float)sensorData.colorImageHeight);
                                sensorData.depth2ColorMaterial.SetFloat("_DepthResX", (float)sensorData.depthImageWidth);
                                sensorData.depth2ColorMaterial.SetFloat("_DepthResY", (float)sensorData.depthImageHeight);

                                sensorData.depth2ColorBuffer = new ComputeBuffer(sensorData.depthImage.Length, sizeof(float) * 2);
                                sensorData.depth2ColorMaterial.SetBuffer("_ColorCoords", sensorData.depth2ColorBuffer);

                                sensorData.depth2ColorCoords = new Vector2[sensorData.depthImage.Length];
                            }
                        }

        //						if(sensorData.bodyIndexImage != null && sensorData.colorImage != null && IsDirectX11Available() &&
        //						   (userMapType == KinectManager.UserMapType.CutOutTexture || bBrRequiresCutOut))
        //						{
        //							sensorData.depth2ColorCoords = new Vector2[sensorData.depthImage.Length];
        //						}

                    }
                }
                else
                {
                    sensorInt.FreeSensorInterface(false);
                }
            }
            catch (Exception ex)
            {
                Debug.LogError("Initialization of the sensor failed.");
                Debug.LogError(ex.ToString());

                try
                {
                    sensorInt.FreeSensorInterface(false);
                }
                catch (Exception)
                {
                    // do nothing
                }
            }
        }

        return sensorData;
    }
	// opens the default sensor and needed readers
	public static SensorData OpenDefaultSensor(List<DepthSensorInterface> listInterfaces, 
	                                           FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
	{
		SensorData sensorData = null;
		if(listInterfaces == null)
			return sensorData;

		foreach(DepthSensorInterface sensorInt in listInterfaces)
		{
			try 
			{
				if(sensorData == null)
				{
					sensorData = sensorInt.OpenDefaultSensor(dwFlags, sensorAngle, bUseMultiSource);

					if(sensorData != null)
					{
						sensorData.sensorInterface = sensorInt;
						Debug.Log("Interface used: " + sensorInt.GetType().Name);
					}
				}
				else
				{
					sensorInt.FreeSensorInterface();
				}
			} 
			catch (Exception ex) 
			{
				Debug.LogError("Initialization of sensor failed.");
				Debug.LogError(ex.ToString());

				try 
				{
					sensorInt.FreeSensorInterface();
				} 
				catch (Exception) 
				{
					// do nothing
				}
			}
		}

		return sensorData;
	}
Example #36
    /// <summary>
    /// Callback that gives the framegrabber a chance to grab the frame
    /// </summary>
    /// <param name="width"></param>
    /// <param name="height"></param>
    /// <param name="arWidth"></param>
    /// <param name="arHeight"></param>
    /// <param name="pSurface"></param>
    public void OnFrame(Int16 width, Int16 height, Int16 arWidth, Int16 arHeight, uint pSurface, FrameSource FrameSource)
    {
      // MP1-4248 :Start* Line Code for Ambilight System Capture (Atmolight)
      if (OnNewFrame != null)
      {
        try
        {
          // raise the event to any subscribers of NewFrameHandler
          OnNewFrame(width, height, arWidth, arHeight, pSurface, FrameSource);
        }
        catch (Exception)
        {
        }
      }
      // MP1-4248 :End* Ambilight Capture code

      // Don't pass GUI frames to GetCurrentImage() -> VideoModeSwitcher is using it
      if (FrameSource == FrameGrabber.FrameSource.GUI) return;

      // Is GetCurrentImage() requesting a frame grab?
      if (!grabSample || width == 0 || height == 0)
      {
        return;
      }

      //Log.Debug("PlaneScene: grabSample is true");
      try
      {
        // if we haven't already allocated a surface, or the surface dimensions don't match,
        // allocate a new surface to store the grabbed frame in
        if (rgbSurface == null || rgbSurface.Disposed || rgbSurface.Description.Height != height ||
            rgbSurface.Description.Width != width)
        {
          Log.Debug("FrameGrabber: Creating new frame grabbing surface");

          if (GUIGraphicsContext.VideoRenderer != GUIGraphicsContext.VideoRendererType.madVR)
          {
            rgbSurface = GUIGraphicsContext.DX9Device.CreateRenderTarget(width, height, Format.A8R8G8B8,
                                                             MultiSampleType.None, 0, true);
          }
          else if (GUIGraphicsContext.VideoRenderer == GUIGraphicsContext.VideoRendererType.madVR && GUIGraphicsContext.Vmr9Active)
          {
            if (GUIGraphicsContext.DX9DeviceMadVr != null)
            {
              rgbSurface = GUIGraphicsContext.DX9DeviceMadVr.CreateRenderTarget(width, height, Format.A8R8G8B8,
                                                                 MultiSampleType.None, 0, true);
            }
          }
          else
          {
            rgbSurface = GUIGraphicsContext.DX9Device.CreateRenderTarget(width, height, Format.A8R8G8B8,
                                                                         MultiSampleType.None, 0, true);
          }
        }
        unsafe
        {
          // copy the YUV video surface to our managed ARGB surface
          // Log.Debug("Calling VideoSurfaceToRGBSurface");
          if (rgbSurface != null)
          {
            VideoSurfaceToRGBSurface(new IntPtr(pSurface), (IntPtr) rgbSurface.UnmanagedComPointer);
          }
          lock (grabNotifier)
          {
            grabSample = false;
            grabSucceeded = true;
            Monitor.Pulse(grabNotifier);
          }
        }
      }
        // The loss of the D3DX device or similar can cause exceptions, catch any such
        // exception and report failure to GetCurrentImage
      catch (Exception e)
      {
        if (rgbSurface != null)
        {
          rgbSurface.SafeDispose(); // get rid of rgbSurface just to make sure
          rgbSurface = null;
        }
        lock (grabNotifier)
        {
          grabSucceeded = false;
          Monitor.Pulse(grabNotifier);
        }
        Log.Error(e.ToString());
      }
    }
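The Monitor.Pulse handshake above implies a consumer that sets grabSample and then blocks on grabNotifier until OnFrame signals. A hedged sketch of that counterpart; the field names come from the example, while the return type and timeout value are assumptions:

// Hypothetical consumer side of the grab handshake in OnFrame.
public Surface GetCurrentImage()
{
    lock (grabNotifier)
    {
        grabSucceeded = false;
        grabSample = true;                     // ask OnFrame to capture the next frame
        if (!Monitor.Wait(grabNotifier, 500))  // released by Monitor.Pulse, or timed out
        {
            return null;
        }
        return grabSucceeded ? rgbSurface : null;
    }
}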