示例#1
0
 // Form teardown: stop the media writer first, then shut down audio capture.
 private void MediaOutputForm_FormClosed(object sender, FormClosedEventArgs e)
 {
     var output = mediaOutput;
     if (output.IsOpen)
     {
         output.Close();
     }
     // Audio output is released unconditionally.
     OeipHelper.closeAudioOutput();
 }
示例#2
0
 // Close button: finish the recording (if any), then release audio output.
 private void btn_close_Click(object sender, EventArgs e)
 {
     bool recording = mediaOutput.IsOpen;
     if (recording)
     {
         mediaOutput.Close();
     }
     OeipHelper.closeAudioOutput();
 }
示例#3
0
        // Writes one camera parametr by index, treating the parametr struct as a
        // flat array of Parametr fields starting at Brightness, then pushes the
        // whole struct to the device.
        // NOTE(review): assumes index stays within the struct's Parametr fields —
        // confirm against the control's index range.
        private unsafe void CameraParametrControl_OnParametrEvent(Parametr parametr, int index)
        {
            fixed (Parametr* basePtr = &camParametrs.Brightness)
            {
                basePtr[index] = parametr;
            }

            OeipHelper.setDeviceParametrs(cameraId, ref camParametrs);
        }
示例#4
0
        /// <summary>
        /// Configures the pipe input for the given video format: picks the
        /// conversion layer (YUV or channel remap) and the raw-buffer
        /// dimensions/data type that match the source layout.
        /// </summary>
        /// <param name="videoType">Source video pixel format.</param>
        /// <param name="width">Visible image width in pixels.</param>
        /// <param name="height">Visible image height in pixels.</param>
        public void SetVideoFormat(OeipVideoType videoType, int width, int height)
        {
            // Resolve the YUV layout implied by the video type.
            var yuvType = OeipHelper.getVideoYUV(videoType);
            var yuvParamet = new YUV2RGBAParamet { yuvType = yuvType };
            Pipe.UpdateParamet(Yuv2Rgba, yuvParamet);

            int inputWidth  = width;
            int inputHeight = height;

            // Default: YUV conversion on, channel remap off.
            Pipe.SetEnableLayer(Yuv2Rgba, true);
            Pipe.SetEnableLayer(MapChannel, false);
            //Pipe.SetEnableLayer(ResizeIndex, false);
            OeipDataType dataType = OeipDataType.OEIP_CU8C1;

            switch (yuvType)
            {
                case OeipYUVFMT.OEIP_YUVFMT_OTHER:
                    // Not a YUV source: skip the YUV conversion layer.
                    Pipe.SetEnableLayer(Yuv2Rgba, false);
                    if (videoType == OeipVideoType.OEIP_VIDEO_BGRA32)
                    {
                        // BGRA input: swap red/blue via the map-channel layer.
                        Pipe.SetEnableLayer(MapChannel, true);
                        var mapChannelParamet = new MapChannelParamet
                        {
                            red   = 2,
                            green = 1,
                            blue  = 0,
                            alpha = 3,
                        };
                        Pipe.UpdateParamet(MapChannel, mapChannelParamet);
                    }
                    else if (videoType == OeipVideoType.OEIP_VIDEO_RGB24)
                    {
                        dataType = OeipDataType.OEIP_CU8C3;
                    }
                    break;

                case OeipYUVFMT.OEIP_YUVFMT_YUV420SP:
                case OeipYUVFMT.OEIP_YUVFMT_YUV420P:
                case OeipYUVFMT.OEIP_YUVFMT_YUY2P:
                    // Planar/semi-planar data arrives as one single-channel plane
                    // taller than the visible image (3/2 for 4:2:0, 2x for YUY2P).
                    dataType    = OeipDataType.OEIP_CU8C1;
                    inputHeight = yuvType == OeipYUVFMT.OEIP_YUVFMT_YUY2P
                        ? height * 2
                        : height * 3 / 2;
                    break;

                case OeipYUVFMT.OEIP_YUVFMT_UYVYI:
                case OeipYUVFMT.OEIP_YUVFMT_YUY2I:
                case OeipYUVFMT.OEIP_YUVFMT_YVYUI:
                    // Interleaved 4:2:2: two pixels pack into one 4-channel texel,
                    // so the input buffer is half as wide.
                    dataType   = OeipDataType.OEIP_CU8C4;
                    inputWidth = width / 2;
                    break;
            }
            Pipe.SetInput(InputIndex, inputWidth, inputHeight, dataType);
        }
示例#5
0
 // Fills in the darknet (YOLOv3-tiny) configuration, then hands it to the
 // video pipe from a worker thread.
 private void OnLoadNet()
 {
     // All model assets live under the ThirdParty directory next to the project.
     string thirdParty = Path.Combine(Application.dataPath, "../../ThirdParty");

     darknetParamet.bLoad      = 1;
     darknetParamet.confile    = Path.GetFullPath(Path.Combine(thirdParty, "yolov3-tiny-test.cfg"));
     darknetParamet.weightfile = Path.GetFullPath(Path.Combine(thirdParty, "yolov3-tiny_745000.weights"));
     darknetParamet.thresh     = 0.3f;
     darknetParamet.nms        = 0.3f;
     darknetParamet.bDraw      = 1;
     darknetParamet.drawColor  = OeipHelper.getColor(1.0f, 0.1f, 0.1f, 0.8f);
     // Loading the network is slow; keep it off the main thread.
     Loom.RunAsync(() => cameraView.VideoPipe.UpdateDarknetParamet(ref darknetParamet));
 }
示例#6
0
 // Pipeline callback: forwards frames from the YUV output layer to the encoder.
 private void Pipe_OnProcessEvent(int layerIndex, IntPtr data, int width, int height, int outputIndex)
 {
     // Drop frames while no recording is open.
     if (!mediaOutput.IsOpen)
     {
         return;
     }
     // Only the YUV output layer feeds the media writer.
     if (layerIndex != cameraControl1.VideoPipe.OutYuvIndex)
     {
         return;
     }
     OeipHelper.setVideoFrame(data, width, height, cameraControl1.VideoPipe.YUVFMT, ref videoFrame);
     // Timestamp in milliseconds relative to when the output was opened.
     videoFrame.timestamp = DateTime.Now.Ticks / 10000 - openTime;
     LogHelper.LogMessage("video time:" + videoFrame.timestamp);
     mediaOutput.PushVideoFrame(ref videoFrame);
 }
示例#7
0
 // Feeds one live frame through the pipeline, rebuilding the pipe first if
 // the incoming geometry or YUV format changed since the last frame.
 public void RunLivePipe(ref OeipVideoFrame videoFrame)
 {
     bool formatChanged = VideoFormat.width != videoFrame.width
                          || VideoFormat.height != videoFrame.height
                          || yuvfmt != videoFrame.fmt;
     if (formatChanged)
     {
         VideoFormat.fps       = 30;
         VideoFormat.width     = (int)videoFrame.width;
         VideoFormat.height    = (int)videoFrame.height;
         yuvfmt                = videoFrame.fmt;
         VideoFormat.videoType = OeipVideoType.OEIP_VIDEO_RGBA32;
         ResetPipe();
     }
     // Copy the packed frame into the staging buffer, then run the pipeline.
     OeipHelper.getVideoFrameData(yuvData, ref videoFrame);
     Pipe.UpdateInput(InputIndex, yuvData);
     Pipe.RunPipe();
 }
示例#8
0
 /// <summary>
 /// Packs the raw picture into a video frame struct and pushes it to the live
 /// stream. Returns false when not logged in (frame is dropped).
 /// </summary>
 /// <param name="index">Stream index; 1 selects the auxiliary stream, any other value the main stream.</param>
 /// <param name="data">Pointer to the raw pixel data.</param>
 /// <param name="width">Frame width in pixels.</param>
 /// <param name="height">Frame height in pixels.</param>
 /// <param name="fmt">YUV layout of the data.</param>
 public bool PushVideoFrame(int index, IntPtr data, int width, int height, OeipYUVFMT fmt)
 {
     // Serializes pushes and guards bLogin against concurrent changes.
     lock (obj)
     {
         if (!bLogin)
         {
             return(false);
         }
         // Select the frame struct for the requested stream; the ref local
         // avoids copying the struct.
         ref OeipVideoFrame videoFrame = ref mainVideoFrame;
         if (index == 1)
         {
             videoFrame = ref auxVideoFrame;
         }
         OeipHelper.setVideoFrame(data, width, height, fmt, ref videoFrame);
         return(OeipLiveHelper.pushVideoFrame(index, ref videoFrame));
     }
示例#9
0
 // Unity entry point: wires up UI bindings, pipeline callbacks, and the two
 // button actions (network load, GrabCut toggle).
 void Start()
 {
     setting = SettingManager.Instance.Setting;
     OeipManager.Instance.OnLogEvent += Instance_OnLogEvent;

     // Camera selection UI bindings.
     objBindCamera.Bind(setting.cameraSetting, cameraSelectPanel);
     objBindCamera.GetComponent <DropdownComponent>("CameraIndex").SetFillOptions(true, OeipManagerU3D.Instance.GetCameras);
     objBindCamera.GetComponent <DropdownComponent>("FormatIndex").SetFillOptions(true, OeipManagerU3D.Instance.GetFormats);
     objBindCamera.OnChangeEvent += ObjBindCamera_OnChangeEvent;

     // Processed-frame callback from the camera view's pipeline.
     cameraView.VideoPipe.Pipe.OnProcessEvent += Pipe_OnProcessEvent;

     // GrabCut settings UI bindings.
     objBindGrabcut.Bind(setting.videoParamet, grabCutPanel);
     cameraView.VideoPipe.UpdateVideoParamet(setting.videoParamet);
     objBindGrabcut.OnChangeEvent += ObjBindGrabcut_OnChangeEvent;

     btnLoadNet.onClick.AddListener(LoadDarknet);
     btnGrabcut.onClick.AddListener(ToggleGrabcut);

     // Configures and loads the YOLOv3-tiny network on a worker thread.
     void LoadDarknet() => Loom.RunAsync(() =>
     {
         darknetParamet.bLoad      = 1;
         darknetParamet.confile    = Path.GetFullPath(Path.Combine(Application.dataPath, "../../ThirdParty/yolov3-tiny-test.cfg"));
         darknetParamet.weightfile = Path.GetFullPath(Path.Combine(Application.dataPath, "../../ThirdParty/yolov3-tiny_745000.weights"));
         darknetParamet.thresh     = 0.3f;
         darknetParamet.nms        = 0.3f;
         darknetParamet.bDraw      = 1;
         darknetParamet.drawColor  = OeipHelper.getColor(1.0f, 0.1f, 0.1f, 0.8f);
         cameraView.VideoPipe.UpdateDarknetParamet(ref darknetParamet);
     });

     // Flips GrabCut draw mode, seeding with the first detected person box.
     void ToggleGrabcut() => Loom.RunAsync(() =>
     {
         bDrawMode = !bDrawMode;
         var seedRect = new OeipRect();
         if (personBox != null && personBox.Length > 0)
         {
             seedRect = personBox[0].rect;
         }
         cameraView.VideoPipe.ChangeGrabcutMode(bDrawMode, ref seedRect);
     });
 }
示例#10
0
        /// <summary>
        /// Starts a recording: recreates the target file, configures the audio
        /// and video encoders from the current capture settings, optionally
        /// starts audio capture, then opens the media output.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            // A target path is required.
            if (string.IsNullOrEmpty(textBox1.Text))
            {
                return;
            }
            // Close any previous recording before reconfiguring.
            if (mediaOutput.IsOpen)
            {
                mediaOutput.Close();
            }
            // Audio encoder settings come from the current audio description.
            var audioEncoder = new OeipAudioEncoder
            {
                bitrate   = 12800,
                channel   = audioDesc.channel,
                frequency = audioDesc.sampleRate,
            };
            mediaOutput.SetAudioEncoder(audioEncoder);
            // Video encoder settings come from the selected camera format.
            var videoEncoder = new OeipVideoEncoder
            {
                bitrate = 4000000,
                fps     = cameraControl1.Format.fps,
                width   = cameraControl1.Format.width,
                height  = cameraControl1.Format.height,
                yuvType = cameraControl1.VideoPipe.YUVFMT,
            };
            mediaOutput.SetVideoEncoder(videoEncoder);
            // Truncate (or create) the output file so recording starts clean.
            using (var file = File.Open(textBox1.Text, FileMode.Create))
            {
                LogHelper.LogMessage("create file:" + textBox1.Text + "");
            }
            bool bMic      = this.checkBox1.Checked;
            bool bLoopback = this.checkBox2.Checked;
            bool hasAudio  = bMic || bLoopback;
            // Start audio capture only when mic and/or loopback is requested.
            if (hasAudio)
            {
                OeipHelper.setAudioOutputAction(onAudioDataDelegate, null);
                OeipHelper.startAudioOutput(bMic, bLoopback, audioDesc);
            }
            // Millisecond reference point for frame timestamps.
            openTime = DateTime.Now.Ticks / 10000;
            mediaOutput.Open(this.textBox1.Text, this.checkBox3.Checked, hasAudio);
            if (!mediaOutput.IsOpen)
            {
                LogHelper.LogMessage("file:" + this.textBox1.Text + " not open");
            }
        }
示例#11
0
        /// <summary>
        /// Builds the video processing pipeline on the given pipe:
        /// input -> (yuv2rgba | map channel) -> out map -> outputs, plus a
        /// second YUV output stream for streaming, and — depending on the GPGPU
        /// backend — an extra matting output (DX11) or darknet/grabcut/guided
        /// filter layers with a matting output (CUDA).
        /// </summary>
        /// <param name="pipe">Pipe that owns and connects the created layers.</param>
        public OeipVideoPipe(OeipPipe pipe)
        {
            this.Pipe = pipe;
            // Add the input layer.
            InputIndex = pipe.AddLayer("input", OeipLayerType.OEIP_INPUT_LAYER);
            // Conversion layer used when the input format is YUV.
            Yuv2Rgba = pipe.AddLayer("yuv2rgba", OeipLayerType.OEIP_YUV2RGBA_LAYER);
            // Map-channel layer used when the input format is BGR.
            MapChannel = pipe.AddLayer("map channel", OeipLayerType.OEIP_MAPCHANNEL_LAYER);
            // mapChannel is a sibling of yuv2rgba: both consume the input layer.
            pipe.ConnectLayer(MapChannel, InputIndex);
            // Optional resize stage (currently disabled).
            //ResizeIndex = pipe.AddLayer("resize", OeipLayerType.OEIP_RESIZE_LAYER);
            //ResizeParamet rp = new ResizeParamet();
            //rp.width = 1920;
            //rp.height = 1080;
            //pipe.UpdateParamet(ResizeIndex, rp);

            // Output remap for display targets that expect BGR ordering.
            OutMap = pipe.AddLayer("out map channel", OeipLayerType.OEIP_MAPCHANNEL_LAYER);
            // Primary output layer; GPU/CPU readback decided by SetOutput.
            OutIndex = pipe.AddLayer("out put", OeipLayerType.OEIP_OUTPUT_LAYER);
            SetOutput(IsGpu, IsCpu);
            OutputParamet outputParamet = new OutputParamet();

            // Second output stream in YUV, used for pushing/streaming data.
            Rgba2Yuv = pipe.AddLayer("rgba2yuv", OeipLayerType.OEIP_RGBA2YUV_LAYER);
            RGBA2YUVParamet yuvParamet = new RGBA2YUVParamet();

            yuvParamet.yuvType = YUVFMT;
            Pipe.UpdateParamet(Rgba2Yuv, yuvParamet);
            // The YUV branch hangs off the out-map layer.
            pipe.ConnectLayer(Rgba2Yuv, OutMap);
            OutYuvIndex        = pipe.AddLayer("out put yuv", OeipLayerType.OEIP_OUTPUT_LAYER);
            outputParamet.bGpu = 0;
            // The YUV stream is read back on the CPU for the encoder/streamer.
            outputParamet.bCpu = 1;
            Pipe.UpdateParamet(OutYuvIndex, outputParamet);

            InputParamet input = new InputParamet();

            // Input data is uploaded from CPU memory.
            input.bCpu = 1;
            input.bGpu = 0;
            Pipe.UpdateParamet(InputIndex, input);
            if (pipe.GpgpuType == OeipGpgpuType.OEIP_DX11)
            {
                // DX11 path: extra YUV->RGBA stage feeding a GPU-only matting output.
                int             yuv2rgba2 = pipe.AddLayer("yuv2rgba 2", OeipLayerType.OEIP_YUV2RGBA_LAYER);
                YUV2RGBAParamet yparamet  = new YUV2RGBAParamet();
                yparamet.yuvType = YUVFMT;
                Pipe.UpdateParamet(yuv2rgba2, yparamet);
                MattingOutIndex    = pipe.AddLayer("matting out put", OeipLayerType.OEIP_OUTPUT_LAYER);
                outputParamet.bGpu = 1;
                outputParamet.bCpu = 0;
                Pipe.UpdateParamet(MattingOutIndex, outputParamet);
            }

            if (pipe.GpgpuType == OeipGpgpuType.OEIP_CUDA)
            {
                // Neural-network (darknet) layer, branching off the out-map layer.
                DarknetIndex = pipe.AddLayer("darknet", OeipLayerType.OEIP_DARKNET_LAYER);
                pipe.ConnectLayer(DarknetIndex, OutMap);
                darknetParamet.bLoad      = 1;
                // NOTE(review): relative paths — resolution depends on the process
                // working directory; other call sites resolve via Path.GetFullPath.
                darknetParamet.confile    = "../../ThirdParty/yolov3-tiny-test.cfg";
                darknetParamet.weightfile = "../../ThirdParty/yolov3-tiny_745000.weights";
                darknetParamet.thresh     = 0.3f;
                darknetParamet.nms        = 0.3f;
                darknetParamet.bDraw      = 1;
                darknetParamet.drawColor  = OeipHelper.getColor(0.1f, 1.0f, 0.1f, 0.1f);
                // NOTE(review): UpdateParametStruct here vs UpdateParamet elsewhere
                // — confirm both apply parametrs the same way.
                Pipe.UpdateParametStruct(DarknetIndex, darknetParamet);
                // GrabCut matting layer.
                GrabcutIndex             = pipe.AddLayer("grab cut", OeipLayerType.OEIP_GRABCUT_LAYER);
                grabcutParamet.bDrawSeed = 0;
                grabcutParamet.iterCount = 1;
                grabcutParamet.seedCount = 1000;
                grabcutParamet.count     = 250;
                grabcutParamet.gamma     = 90.0f;
                grabcutParamet.lambda    = 450.0f;
                grabcutParamet.rect      = new OeipRect();
                Pipe.UpdateParamet(GrabcutIndex, grabcutParamet);
                // Guided-filter layer.
                GuiderFilterIndex             = pipe.AddLayer("guider filter", OeipLayerType.OEIP_GUIDEDFILTER_LAYER);
                guidedFilterParamet.zoom      = 8;
                guidedFilterParamet.softness  = 5;
                guidedFilterParamet.eps       = 0.000001f;
                guidedFilterParamet.intensity = 0.2f;
                Pipe.UpdateParamet(GuiderFilterIndex, guidedFilterParamet);
                // Third output stream: the network-processed (matting) result.
                MattingOutIndex    = pipe.AddLayer("matting out put", OeipLayerType.OEIP_OUTPUT_LAYER);
                outputParamet.bGpu = 1;
                outputParamet.bCpu = 0;
                Pipe.UpdateParamet(MattingOutIndex, outputParamet);
            }
        }