Пример #1
0
        /// <summary>
        /// Configures the Direct3D device and the NyARToolkit marker system,
        /// then feeds one still image (TEST_IMAGE) into the sensor.
        /// NOTE(review): i_cap is never used — this variant runs from a static
        /// test image rather than live capture; confirm that is intentional.
        /// </summary>
        /// <param name="i_cap">Capture device (unused here).</param>
        public override void setup(CaptureDevice i_cap)
        {
            Device d3d = this.size(SCREEN_WIDTH, SCREEN_HEIGHT);
            INyARMarkerSystemConfig cf = new NyARMarkerSystemConfig(SCREEN_WIDTH, SCREEN_HEIGHT);

            // Depth test on, no lighting, counter-clockwise culling.
            d3d.RenderState.ZBufferEnable = true;
            d3d.RenderState.Lighting      = false;
            d3d.RenderState.CullMode      = Cull.CounterClockwise;
            this._ms = new NyARD3dMarkerSystem(cf);
            this._ss = new NyARSensor(cf.getScreenSize());
            this._rs = new NyARD3dRender(d3d, this._ms);
            this.mid = this._ms.addARMarker(AR_CODE_FILE, 16, 25, 80);
            //set View matrix
            this._rs.loadARViewMatrix(d3d);
            //set Viewport matrix
            this._rs.loadARViewPort(d3d);
            //set D3d projection matrix
            this._rs.loadARProjectionMatrix(d3d);
            // Convert the test image to 32bpp RGB and hand it to the sensor.
            Bitmap src   = new Bitmap(TEST_IMAGE);
            Bitmap input = new Bitmap(src.Width, src.Height, System.Drawing.Imaging.PixelFormat.Format32bppRgb);

            using (Graphics g = Graphics.FromImage(input))
            {
                g.DrawImage(src, 0, 0);
            }
            this._ss.update(new NyARBitmapRaster(input));
        }
Пример #2
0
        /// <summary>
        /// Instantiates every sub-component of this object in one place.
        /// NOTE(review): most constructors receive <c>this</c> while later
        /// fields are still null, so a constructor that reads a sibling
        /// component depends on this assignment order — preserve it.
        /// </summary>
        private void Declare()
        {
            Action     = new MainMenu.Action(this);
            Button     = new ButtonItem(this);
            CreateMain = new Create(this);
            Keyboard   = new Keyboard(this);
            MainMenu   = new MainMenu.Menu(this);
            Mouse      = new Mouse(this);
            Nav        = new Navigation(this);
            Shutter    = new Shutter(this);
            SubAction  = new SubMenu.Action(this);
            SubMenu    = new SubMenu.Menu(this);

            SubSelectMenu = new SubSelectMenu.Menu(this);
            SubSelectVar  = new SubSelectMenu.Var();
            SubNav        = new SubSelectMenu.Navigation(this);

            Var         = new Variables();
            Data        = new Interaction();
            Fps         = new FrameCount();
            Gamepad     = new Gamepad(this);
            User        = new User();
            DrawGui     = new DrawGraph();
            Display     = new Display(this);
            VideoDevice = new CaptureDevice(this);
            Profiles    = new Profiles(this);

            RemapMenu    = new Remap.Remap(this);
            RemapGamepad = new Remap.Gamepad(this);
            RemapNav     = new Remap.Navigation(this);

            ConfigVr     = new VR.Config(this);
            ResizeVr     = new Resize(this);
            RepositionVr = new Reposition(this);
        }
Пример #3
0
        /* Asynchronous event handler.
         * Handles buffer events from the CaptureDevice and updates the AR
         * buffer and the background texture.
         */
        public void OnBuffer(CaptureDevice i_sender, double i_sample_time, IntPtr i_buffer, int i_buffer_len)
        {
            int w = i_sender.video_width;
            int h = i_sender.video_height;
            int s = w * (i_sender.video_bit_count / 8);   // stride in bytes; h and s are currently unused
            NyARDoubleMatrix44 nyar_transmat = this.__OnBuffer_nyar_transmat;

            // Copy the frame into the texture under the lock.
            // NOTE(review): lock(this) is discouraged (external code could take
            // the same lock); a private lock object would be safer — confirm callers.
            lock (this)
            {
                // Copy the camera frame into the AR raster.
                this._raster.setBuffer(i_buffer, i_buffer_len, i_sender.video_vertical_flip);

                // Was a marker found? (binarization threshold 110)
                bool is_marker_enable = this._ar.detectMarkerLite(this._raster, 110);
                if (is_marker_enable)
                {
                    // If so, compute the transform matrix for rendering.
                    this._ar.getTransmationMatrix(nyar_transmat);
                    NyARD3dUtil.toD3dCameraView(nyar_transmat, 1f, ref this._trans_mat);
                }
                this._is_marker_enable = is_marker_enable;
                // Update the texture contents.
                this._surface.setRaster(this._raster);
            }
            return;
        }
Пример #4
0
 /// <summary>
 /// Stops recording and releases the capture device.
 /// Idempotent: the original threw NullReferenceException when Dispose was
 /// called twice, because CaptureDevice was already null.
 /// </summary>
 public void Dispose()
 {
     StopRecording();
     if (CaptureDevice != null)
     {
         CaptureDevice.DataAvailable -= OnDataAvailable;
         CaptureDevice.Dispose();
         CaptureDevice = null;
     }
 }
Пример #5
0
        /// <summary>
        /// Selects the first available video input device and opens it as the
        /// video source. Shows an error dialog when no device is available.
        /// </summary>
        private void setCaptureDevice()
        {
            // Set capture device
            try
            {
                FilterCollection filters = new FilterCollection(FilterCategory.VideoInputDevice);

                // Check the count BEFORE indexing: the original read
                // filters[0].MonikerString first, which threw before the
                // intended ApplicationException could ever be raised.
                if (filters.Count == 0)
                {
                    throw new ApplicationException();
                }

                string device = filters[0].MonikerString;

                // create video source
                CaptureDevice localSource = new CaptureDevice();
                localSource.VideoSource = device;

                // open it
                OpenVideoSource(localSource);
            }
            catch (ApplicationException e)
            {
                MessageBox.Show("No video source available!");
                Console.WriteLine(e.Message);
            }
        }
Пример #6
0
 /// <summary>
 /// Finalizes and releases the current wave file writer, then stops the
 /// capture device.
 /// </summary>
 void StopRecording()
 {
     // Detach the writer first so no callback can race on a disposed instance.
     var writer = _writer;
     _writer = null;
     if (writer != null)
     {
         writer.Close();
         writer.Dispose();
     }
     CaptureDevice.StopRecording();
 }
Пример #7
0
 /// <summary>
 /// Starts the camera, the preview control, and the motion detector.
 /// Fixes: the device list was queried twice, and a Camera (with a
 /// subscribed CameraStopped handler) was created even when no device
 /// existed, leaving an orphaned object on the early-return path.
 /// </summary>
 private void startButton_Click(object sender, EventArgs e)
 {
     exitToolStripMenuItem.Enabled = false;
     startButton.Enabled           = false;

     // Query the device list once and bail out before creating anything.
     var deviceNames = CaptureDevice.GetDeviceNames();
     if (deviceNames.Count == 0)
     {
         MessageBox.Show("Please connect a camera!");
         startButton.Enabled           = true;
         exitToolStripMenuItem.Enabled = true;
         return;
     }

     camera = new Camera();
     camera.CameraStopped += new EventHandler(HandleCameraStopped);
     camera.DeviceName  = deviceNames[0];
     camera.ImageWidth  = width;
     camera.ImageHeight = height;
     camera.FrameRate   = frameRate;
     camera.Start();
     cameraViewControl.SetCamera(camera);
     cameraViewControl.Start();
     motionDetector = new GaussianExponentialAveraging();
     motionDetectionControl.SetMotionDetector(motionDetector);
     motionDetector.SetCamera(camera);
     motionDetector.Start();
     motionDetectionControl.Start();
     mainTabControl.Enabled = true;
     stopButton.Enabled     = true;
     previousWidth          = this.Width;
     previousHeight         = this.Height;
     //   saveThread = new Thread(new ThreadStart(() => SaveLoop()));
     //   saveThread.Start();
 }
Пример #8
0
        /// <summary>
        /// Sets up the first capture device (800x600 @ 30 fps), the D3D marker
        /// system with its detection threshold, and registers five pattern markers.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            CaptureDeviceList deviceList = new CaptureDeviceList();

            MyCaptureDevice = deviceList[0];
            MyCaptureDevice.SetCaptureListener(this);
            MyCaptureDevice.PrepareCapture(800, 600, 30.0f);

            INyARMarkerSystemConfig markerConfig = new NyARMarkerSystemConfig(800, 600);

            this.ARMarkerSystem = new NyARD3dMarkerSystem(markerConfig);
            this.ARMarkerSystem.ThresholdValue = Threshold;
            this.ARCameraSensor = new NyARDirectShowCamera(MyCaptureDevice);

            //public int addARMarker(INyARRgbRaster i_raster, int i_patt_resolution, int i_patt_edge_percentage, double i_marker_size)
            this.MarkerID_Hiro  = this.ARMarkerSystem.addARMarker(AR_CODE_FILE1, 16, 25, 80);
            this.MarkerID_KanJi = this.ARMarkerSystem.addARMarker(AR_CODE_FILE2, 16, 25, 80);
            this.MarkerID_VTT   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE3, 16, 25, 80);
            this.MarkerID_ABB   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE4, 16, 25, 80);
            this.Marker_Hello   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE5, 16, 25, 80);

            this.Loaded += MainWindow_Loaded;

            this.BtnPauseZm.Click += BtnPauseZm_Click;
            this.BtnStartZm.Click += BtnStartZm_Click;
            this.BtnClearZm.Click += BtnClearZm_Click;
        }
Пример #9
0
        /// <summary>
        /// On load: defaults the ID card number when empty, then opens the
        /// configured camera as the video source. Failures disable the
        /// save/shoot buttons.
        /// </summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            // Fall back to a fixed test ID when none was supplied.
            if (string.IsNullOrEmpty(this.IDCardNo))
            {
                this.IDCardNo = "370921198606073310";
            }

            try
            {
                FilterCollection videoFilters = new FilterCollection(FilterCategory.VideoInputDevice);
                if (videoFilters.Count != 0)
                {
                    CaptureDevice videoSource = new CaptureDevice();
                    videoSource.VideoSource = videoFilters[cameraId].MonikerString;
                    this.OpenVideoSource(videoSource);
                }
            }
            catch (ApplicationException exception)
            {
                this.btnSave.Enabled  = false;
                this.btnShoot.Enabled = false;
                MessageBox.Show(exception.Message);
            }
            catch (Exception)
            {
                // Any other failure: disable shooting, but show no dialog.
                this.btnSave.Enabled  = false;
                this.btnShoot.Enabled = false;
            }
        }
Пример #10
0
        /// <summary>
        /// Sets up the first capture device (800x600 @ 30 fps), the D3D marker
        /// system, and registers five pattern markers before wiring UI events.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            CaptureDeviceList captureDevices = new CaptureDeviceList();

            MyCaptureDevice = captureDevices[0];
            MyCaptureDevice.SetCaptureListener(this);
            MyCaptureDevice.PrepareCapture(800, 600, 30.0f);

            INyARMarkerSystemConfig systemConfig = new NyARMarkerSystemConfig(800, 600);

            this.ARMarkerSystem = new NyARD3dMarkerSystem(systemConfig);
            this.ARCameraSensor = new NyARDirectShowCamera(MyCaptureDevice);

            this.MarkerID_Hiro  = this.ARMarkerSystem.addARMarker(AR_CODE_FILE1, 16, 25, 80);
            this.MarkerID_KanJi = this.ARMarkerSystem.addARMarker(AR_CODE_FILE2, 16, 25, 80);
            this.MarkerID_VTT   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE3, 16, 25, 80);
            this.MarkerID_ABB   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE4, 16, 25, 80);
            this.Marker_Hello   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE5, 16, 25, 80);

            this.Loaded += MainWindow_Loaded;

            this.BtnPauseZm.Click += BtnPauseZm_Click;
            this.BtnStartZm.Click += BtnStartZm_Click;
            this.BtnClearZm.Click += BtnClearZm_Click;
        }
Пример #11
0
        /// <summary>
        /// Builds the item list for the loopback capture devices: the default
        /// device first (under the bare base URI), then every other device
        /// under "baseUri?id=&lt;position&gt;".
        /// </summary>
        public void Init(string baseUri)
        {
            items = new List <Item>();

            CaptureDevice        defaultDevice = WasapiLoopbackCapture2.GetDefaultLoopbackCaptureDevice();
            List <CaptureDevice> devices       = WasapiLoopbackCapture2.GetLoopbackCaptureDevices();

            int itemId = 0;

            items.Add(new Item {
                Uri        = baseUri,
                Definition = DidlUtil.GenerateCaptureDeviceItem(++itemId, defaultDevice, baseUri)
            });

            // The id reflects the position in the device list, including the
            // skipped slot of the default device.
            for (int deviceId = 0; deviceId < devices.Count; deviceId++)
            {
                CaptureDevice captureDevice = devices[deviceId];
                if (captureDevice == defaultDevice)
                {
                    continue;
                }

                string uri = baseUri + "?id=" + deviceId;
                items.Add(new Item {
                    Uri        = uri,
                    Definition = DidlUtil.GenerateCaptureDeviceItem(++itemId, captureDevice, uri)
                });
            }
        }
Пример #12
0
        /// <summary>
        /// Stops any running camera and starts the one identified by
        /// <paramref name="cameraID"/>. Errors are logged and reported to the user.
        /// </summary>
        public void StartCamera(string cameraID)
        {
            try
            {
                // Blank the preview while the new camera spins up.
                BoothState   = BoothStates.WaitingForCamera;
                _latestFrame = null;
                pictureBoxDisplay.Invalidate();

                StopCamera();

                CameraDeviceID = cameraID;

                // Create and open the new video source.
                CaptureDevice videoSource = new CaptureDevice
                {
                    VideoSource = CameraDeviceID
                };
                OpenVideoSource(videoSource);
            }
            catch (Exception ex)
            {
                Setup.Log("StartCamera Exception", ex);
                Setup.LogStat(StatTypes.Error, "StartCamera Exception");
                MessageBox.Show(this, "Could not start camera!  Please send us your Log.txt file for troubleshooting", "Error", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }
        }
Пример #13
0
        /// <summary>
        /// Configures the D3D device and the marker system for live capture:
        /// prepares the camera, wires a DirectShow sensor to the marker system,
        /// registers one pattern marker, and starts the capture stream.
        /// </summary>
        /// <param name="i_cap">Capture device used as the AR video source.</param>
        public override void setup(CaptureDevice i_cap)
        {
            Device d3d = this.size(SCREEN_WIDTH, SCREEN_HEIGHT);

            i_cap.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30.0f);
            INyARMarkerSystemConfig cf = new NyARMarkerSystemConfig(SCREEN_WIDTH, SCREEN_HEIGHT);

            // Depth test on, no lighting, counter-clockwise culling.
            d3d.RenderState.ZBufferEnable = true;
            d3d.RenderState.Lighting      = false;
            d3d.RenderState.CullMode      = Cull.CounterClockwise;
            this._ms = new NyARD3dMarkerSystem(cf);
            //recommended be NyARBufferType.BYTE1D_B8G8R8X8_32 or NyARBufferType.CS_BITMAP
            this._ss = new NyARDirectShowCamera(i_cap, NyARBufferType.OBJECT_CS_Bitmap);
            this._rs = new NyARD3dRender(d3d, this._ms);
            this.mid = this._ms.addARMarker(AR_CODE_FILE, 16, 25, 80);

            //set View matrix
            this._rs.loadARViewMatrix(d3d);
            //set Viewport matrix
            this._rs.loadARViewPort(d3d);
            //set D3d projection matrix
            this._rs.loadARProjectionMatrix(d3d);
            this._ss.start();
            //should be NyARBufferType.BYTE1D_B8G8R8X8_32 or NyARBufferType.CS_BITMAP
            this._raster = new NyARBitmapRaster(64, 64, NyARBufferType.OBJECT_CS_Bitmap);
        }
Пример #14
0
        /// <summary>
        /// Accepts an incoming connection and switches the UI from the
        /// "outgoing call" state to showing the remote video stream.
        /// async void is acceptable here only because this is an event handler.
        /// </summary>
        async void Device_IncomingConnectionArrived(object sender, IncomingConnectionEventArgs e)
        {
            e.Accept();
            // First dispatch: swap the visible media element.
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, (() =>
            {
                OutgoingCall.Visibility = Visibility.Collapsed;
                OutgoingCall.Stop();
                RemoteVideo.Visibility = Visibility.Visible;
            }));


            // Second dispatch: wire up call state for this endpoint.
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, (() =>
            {
                activated = true;
                var remoteAddress = e.RemoteUrl;

                btnEndConsult.IsEnabled = true;
                Interlocked.Exchange(ref isTerminator, 0);

                if (!((bool)roleIsActive))
                {
                    // Passive client
                    RemoteVideo.Source = new Uri(remoteAddress);
                    device = new CaptureDevice();
                    HostNameTextbox.IsEnabled = btnInitConsult.IsEnabled = false;
                }

                // NOTE(review): the stripped address is never used afterwards —
                // this assignment looks like dead code; confirm before removing.
                remoteAddress = remoteAddress.Replace("stsp://", "");
            }));
        }
Пример #15
0
        /// <summary>
        /// Capture callback: mirrors the raw camera frame into the preview
        /// image (flipped on both axes), runs marker detection, and redraws
        /// the overlay for each marker currently visible.
        /// </summary>
        void CaptureListener.OnBuffer(CaptureDevice oCaptureDevice, double i_sample_time, IntPtr i_buffer,
                                      int i_buffer_len)
        {
            // Marshal all UI and AR work onto the dispatcher thread.
            Dispatcher.Invoke(new Action(delegate()
            {
                TransformedBitmap b = new TransformedBitmap();
                b.BeginInit();
                // Wrap the raw Bgr32 buffer; stride = width * bytes-per-pixel.
                b.Source = BitmapSource.Create(oCaptureDevice.video_width, oCaptureDevice.video_height, 96.0, 96.0,
                                               PixelFormats.Bgr32, BitmapPalettes.WebPalette, i_buffer,
                                               i_buffer_len, oCaptureDevice.video_width * (oCaptureDevice.video_bit_count / 8));
                // Flip horizontally and vertically via a (-1,-1) scale.
                b.SetValue(TransformedBitmap.TransformProperty, new ScaleTransform(-1, -1));
                b.EndInit();
                this.ImgCameraZm.SetValue(Image.SourceProperty, b);

                // Run detection, then rebuild the overlay canvas from scratch.
                this.ARMarkerSystem.update(this.ARCameraSensor);
                this.CvMainZm.Children.Clear();

                if (this.ARMarkerSystem.isExistMarker(this.MarkerID_VTT))
                {
                    this.DrawARDetectInfo(this.MarkerID_VTT, "VTT");
                }
                if (this.ARMarkerSystem.isExistMarker(this.MarkerID_ABB))
                {
                    this.DrawARDetectInfo(this.MarkerID_ABB, "ABB");
                }
                if (this.ARMarkerSystem.isExistMarker(this.Marker_Hello))
                {
                    this.DrawARDetectInfo(this.Marker_Hello, "Hello");
                }
            }));
        }
Пример #16
0
        /// <summary>
        /// On load: binds the first available video input device, starts it,
        /// and wires up the keyboard and context-recognition handlers.
        /// </summary>
        private void VisionCapture_Load(object sender, EventArgs e)
        {
            if (!Program.Started)
            {
                return;
            }

            try
            {
                FilterCollection videoFilters = new FilterCollection(FilterCategory.VideoInputDevice);

                // Bind the first enumerated device and open it.
                CaptureDevice videoSource = new CaptureDevice();
                videoSource.VideoSource = videoFilters[0].MonikerString;

                OpenVideoSource(videoSource);
            }
            catch (ApplicationException ex)
            {
                // TODO:  Implement in UIDebugger
                throw new TeaException(ex.Message, ex);
            }

            // Do UI Stuff
            _offset = new Point(11, 58);

            CenterMousePosition();
            this.ParentForm.KeyDown += Form_KeyDown;

            // Create EventHandler
            Program.Engine.ExecutiveFunctions.ContextRecognition += ExecutiveFunctions_ContextRecognition;
        }
Пример #17
0
 /// <summary>
 /// Starts frame capture: default camera when CaptureDevice is the empty
 /// string, a local camera index for CaptureType.Local, or a stream URL for
 /// CaptureType.IP. Sets IsCapturing on success.
 /// NOTE(review): CaptureDevice.Equals("") throws NullReferenceException when
 /// the property is null, and the empty catch below swallows every failure
 /// silently (IsCapturing stays false) — consider at least logging; confirm
 /// the silent best-effort behavior is intended before changing it.
 /// </summary>
 public void StartCapturing()
 {
     try
     {
         if (CaptureDevice.Equals(""))
         {
             _capture = new Capture();
         }
         else
         {
             if (capturingType.Equals(CaptureType.Local))
             {
                 // Local device: CaptureDevice holds a numeric camera index.
                 _capture = new Capture(Convert.ToInt32(CaptureDevice));
             }
             else if (capturingType.Equals(CaptureType.IP))
             {
                 // IP camera: CaptureDevice holds the stream address.
                 _capture = new Capture(CaptureDevice);
             }
         }
         // 40 fps, 1280x720 capture.
         _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FPS, 40);
         _capture.ImageGrabbed += ProcessFrame;
         _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, 720);
         _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, 1280);
         _capture.Start();
         IsCapturing = true;
     }
     catch (Exception) { }
 }
Пример #18
0
        /// <summary>
        /// On load: resolves the photo save path (configuration value or a
        /// fixed default), then opens the configured camera. Failures disable
        /// the save/shoot buttons and show the error message.
        /// </summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            try
            {
                // Default the photo folder from configuration when not preset.
                if (SavePath == "")
                {
                    SavePath = ConfigurationManager.AppSettings["PhotosPath"] ?? @"D:\QCSoft\photos\";
                }

                FilterCollection videoFilters = new FilterCollection(FilterCategory.VideoInputDevice);

                if (videoFilters.Count != 0)
                {
                    CaptureDevice videoSource = new CaptureDevice();
                    videoSource.VideoSource = videoFilters[cameraId].MonikerString;
                    this.OpenVideoSource(videoSource);
                }
            }
            catch (ApplicationException exception)
            {
                this.btnSave.Enabled  = false;
                this.btnShoot.Enabled = false;
                MessageBox.Show(exception.Message);
            }
            catch (Exception ex)
            {
                this.btnSave.Enabled  = false;
                this.btnShoot.Enabled = false;
                MessageBox.Show(ex.Message);
            }
        }
Пример #19
0
        /// <summary>
        /// Initializes capture (first device, cameraResX x cameraResY @ 30 fps),
        /// loads the camera parameters and one 16x16 marker pattern, and builds
        /// a single-marker detector for an 80 mm marker.
        /// NOTE(review): the raster is created as BYTE1D_R8G8B8_24 while the
        /// detector is given BYTE1D_B8G8R8_24 — the R/B channel order differs;
        /// confirm which one matches the actual capture format.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            CaptureDeviceList cl = new CaptureDeviceList();

            m_cap = cl[0];
            m_cap.SetCaptureListener(this);
            m_cap.PrepareCapture(cameraResX, cameraResY, 30); // 800x600 resolution, 30 fps

            NyARParam ap = new NyARParam();

            ap.loadARParamFromFile(AR_CAMERA_FILE);
            ap.changeScreenSize(cameraResX, cameraResY);

            this.m_raster = new NyARRgbRaster(m_cap.video_width, m_cap.video_height, NyARBufferType.BYTE1D_R8G8B8_24,
                                              false);

            NyARCode code = new NyARCode(16, 16); // 16 pixels to detect within black box

            code.loadARPattFromFile(AR_CODE_FILE);
            this.m_ar = new NyARDetectMarker(ap, new NyARCode[] { code }, new double[] { 80.0 }, 1,
                                             NyARBufferType.BYTE1D_B8G8R8_24);
            this.m_ar.setContinueMode(false);

            this.Loaded += MainWindow_Loaded;
        }
Пример #20
0
        /// <summary>
        /// One-time application setup: prepares the capture device, the AR
        /// raster, the Direct3D device, and the background surface.
        /// Always returns true.
        /// </summary>
        /// <param name="topLevelForm">Form hosting the D3D device.</param>
        /// <param name="i_cap_device">Capture device (SCREEN_WIDTH x SCREEN_HEIGHT, 30 fps).</param>
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            // Set up capture at SCREEN_WIDTH x SCREEN_HEIGHT, 30 fps.
            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;

            // AR configuration.

            // Create the AR raster (DirectShow capture layout);
            // stride = width * bytes-per-pixel.
            this._raster = new DsBGRX32Raster(i_cap_device.video_width, i_cap_device.video_height, i_cap_device.video_width * i_cap_device.video_bit_count / 8);



            // Prepare the 3D device.
            this._device = PrepareD3dDevice(topLevelForm);

            // Disable lighting.
            this._device.RenderState.Lighting = false;

            // Create the background surface.
            this._surface = new NyARSurface_XRGB32(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            return(true);
        }
Пример #21
0
        /// <summary>
        /// Configures the D3D device and the marker system for live capture,
        /// registers one pattern marker, and starts the DirectShow sensor.
        /// </summary>
        /// <param name="i_cap">Capture device used as the AR video source.</param>
        public override void setup(CaptureDevice i_cap)
        {
            Device d3d = this.size(SCREEN_WIDTH, SCREEN_HEIGHT);

            i_cap.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30.0f);
            INyARMarkerSystemConfig cf = new NyARMarkerSystemConfig(SCREEN_WIDTH, SCREEN_HEIGHT);

            // Depth test on, no lighting, counter-clockwise culling.
            d3d.RenderState.ZBufferEnable = true;
            d3d.RenderState.Lighting      = false;
            d3d.RenderState.CullMode      = Cull.CounterClockwise;
            this._ms = new NyARD3dMarkerSystem(cf);
            this._ss = new NyARDirectShowCamera(i_cap);
            this._rs = new NyARD3dRender(d3d, this._ms);
            this.mid = this._ms.addARMarker(AR_CODE_FILE, 16, 25, 80);
            // Alternative marker registrations, kept for reference:
//            this.mid = this._ms.addPsARPlayCard(1,80);
//            this.mid = this._ms.addARMarker(new Bitmap("../../../../../data/hiro.png"), 16, 25, 80); // you can use PNG style marker too.
            //this.mid = this._ms.addNyIdMarker(0, 80);// For Id  marker #0

            //set View matrix
            this._rs.loadARViewMatrix(d3d);
            //set Viewport matrix
            this._rs.loadARViewPort(d3d);
            //set D3d projection matrix
            this._rs.loadARProjectionMatrix(d3d);
            this._ss.start();
        }
Пример #22
0
        /// <summary>
        /// Starts a new recording into a timestamped .wav file inside
        /// OutPutFolder. Ensures the folder exists first — WaveFileWriter
        /// throws if the directory is missing.
        /// </summary>
        public void Start()
        {
            // Millisecond-resolution timestamp so successive recordings never collide.
            var fileName = Path.Combine(OutPutFolder, DateTime.Now.ToString("yyyyMMddHHmmssfff") + ".wav");

            // No-op when the folder already exists.
            Directory.CreateDirectory(OutPutFolder);

            _writer = new WaveFileWriter(fileName, CaptureDevice.WaveFormat);
            CaptureDevice.StartRecording();
        }
Пример #23
0
        /// <summary>
        /// Initializes capture at the panel's own size (30 fps), builds the
        /// D3D marker system, and registers five pattern markers.
        /// </summary>
        public CameraPnl1()
        {
            InitializeComponent();

            // Capture at the panel's current dimensions.
            this.CameraWidth  = this.Width;
            this.CameraHeight = this.Height;

            int captureW = (int)this.CameraWidth;
            int captureH = (int)this.CameraHeight;

            CaptureDeviceList deviceList = new CaptureDeviceList();

            MyCaptureDevice = deviceList[0];
            MyCaptureDevice.SetCaptureListener(this);
            MyCaptureDevice.PrepareCapture(captureW, captureH, 30.0f);

            INyARMarkerSystemConfig markerConfig = new NyARMarkerSystemConfig(captureW, captureH);

            this.ARMarkerSystem = new NyARD3dMarkerSystem(markerConfig);
            this.ARCameraSensor = new NyARDirectShowCamera(MyCaptureDevice);

            this.MarkerID_Hiro  = this.ARMarkerSystem.addARMarker(AR_CODE_FILE1, 16, 25, 80);
            this.MarkerID_KanJi = this.ARMarkerSystem.addARMarker(AR_CODE_FILE2, 16, 25, 80);
            this.MarkerID_VTT   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE3, 16, 25, 80);
            this.MarkerID_ABB   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE4, 16, 25, 80);
            this.Marker_Hello   = this.ARMarkerSystem.addARMarker(AR_CODE_FILE5, 16, 25, 80);

            this.Loaded += CameraPnl1_Loaded;
        }
Пример #24
0
        /// <summary>
        /// Wakes the client: resets the sleep state, aborts any pending
        /// wake-up thread, then lets the user pick a capture device and opens
        /// it as the video source.
        /// Fix: the CaptureDeviceForm is now disposed deterministically —
        /// ShowDialog does not dispose the form, so the original leaked it.
        /// </summary>
        public void wake()
        {
            isAsleep       = false;
            btn_sleep.Text = "Sleep";
            displayLine("woken by server");
            // NOTE(review): Thread.Abort is deprecated and throws on .NET
            // Core+; a cooperative cancellation flag would be safer — confirm
            // the target framework before changing this.
            if (wakeUp != null)
            {
                if (wakeUp.IsAlive)
                {
                    wakeUp.Abort();
                }
            }

            using (CaptureDeviceForm form = new CaptureDeviceForm())
            {
                if (form.ShowDialog(this) == DialogResult.OK)
                {
                    // create video source
                    CaptureDevice localSource = new CaptureDevice();
                    localSource.VideoSource = form.Device;

                    // open it
                    OpenVideoSource(localSource);

                    // enable camera window
                    cameraWindow.Enabled = true;
                }
            }
        }
Пример #25
0
        /// <summary>
        /// Main entry point of the class.
        /// Shows the camera preview form and lets the user take a snapshot.
        /// Returns true when a snapshot was taken and saved.
        /// </summary>
        public bool StartVideo()
        {
            if (CaptureDevice == null || !IsCameraConfigured)
            {
                MessageBox.Show(@"Камера не инициализирована. Запуск не возможен");
                return(false);
            }

            // Start the camera if it is not already streaming.
            if (!CaptureDevice.IsRunning)
            {
                CaptureDevice.Start();
            }

            _webCamForm = new WebCamForm(this);
            var dlgResult = _webCamForm.ShowDialog();
            var result    = dlgResult == DialogResult.OK;

            StopVideo();

            // Fetch the image the user selected on the form.
            // NOTE(review): _webCamForm is a field and is not disposed here —
            // presumably reused or disposed elsewhere; confirm before adding a using.
            if (result)
            {
                CameraFrame = _webCamForm.GetBitmap();
            }

            return(result);
        }
Пример #26
0
        /// <summary>
        /// AR setup: loads the camera parameters and one 16x16 marker pattern,
        /// prepares capture device #0 at 320x240 @ 30 fps, and builds a
        /// single-marker detector for an 80 mm marker.
        /// Fix: the FileStreams passed to the loaders are now disposed — the
        /// original leaked both. The unused local result_mat was removed.
        /// </summary>
        public Form1()
        {
            InitializeComponent();
            // AR configuration.
            // Load the AR camera parameter file.
            NyARParam ap;
            using (var apStream = File.OpenRead(AR_CAMERA_FILE))
            {
                ap = NyARParam.loadFromARParamFile(apStream, 320, 240);
            }

            // Load the AR pattern code.
            NyARCode code;
            using (var codeStream = File.OpenRead(AR_CODE_FILE))
            {
                code = NyARCode.loadFromARPattFile(codeStream, 16, 16);
            }

            /**************************************************
            *  This code uses capture device #0 (the first device found).
            *  On systems with multiple capture devices it may not pick the
            *  one you want. To use the n-th device, change the 0 in
            *  "CaptureDevice cap=cl[0];" below. For manual selection, see
            *  the SimpleLiteDirect3D sample.
            **************************************************/
            CaptureDeviceList cl  = new CaptureDeviceList();
            CaptureDevice     cap = cl[0];

            cap.SetCaptureListener(this);
            cap.PrepareCapture(320, 240, 30);
            this.m_cap = cap;
            // Create the raster backing the capture buffer.
            this.m_raster = new DsRgbRaster(cap.video_width, cap.video_height);
            // Detector that tracks exactly one marker pattern (80 mm).
            this.m_ar = NyARSingleDetectMarker.createInstance(ap, code, 80.0);
            this.m_ar.setContinueMode(false);
        }
Пример #27
0
 /// <summary>
 /// Opens the sound capture device and begins streaming captured audio
 /// bytes into a temporary file. Returns true when recording started,
 /// false when a recording is already in progress or the device is invalid.
 /// Fix: the byte-by-byte copy from the native buffer is replaced with a
 /// single Marshal.Copy bulk copy.
 /// </summary>
 public unsafe bool startRecording()
 {
     if (captureDevice != null)
     {
         // Already recording (or the device was left open).
         return(false);
     }

     hasRecording  = true;
     captureDevice = SoundPluginInterface.Instance.SoundManager.openCaptureDevice();
     if (!captureDevice.Valid)
     {
         captureDevice.Dispose();
         captureDevice = null;
         return(false);
     }

     writeStream = File.Open(tempFilePath, FileMode.Create, FileAccess.Write);
     captureDevice.start((byte *buffer, int length) =>
     {
         // Bulk-copy the native buffer into managed memory, then append it.
         byte[] byteBuffer = new byte[length];
         System.Runtime.InteropServices.Marshal.Copy((System.IntPtr)buffer, byteBuffer, 0, length);
         writeStream.Write(byteBuffer, 0, length);
     });
     return(true);
 }
Пример #28
0
        /// <summary>
        /// Remembers the newly selected device and refreshes the resolution
        /// combo box with the resolutions that device supports.
        /// </summary>
        private void cbDevices_SelectedIndexChanged(object sender, EventArgs e)
        {
            int selectedIndex = cbDevices.SelectedIndex;

            mSelectedDevice    = mDevices[selectedIndex];
            mDeviceResolutions = QuickCam.GetAvailableResolutions(mSelectedDevice);

            cbResolutions.Items.Clear();
            cbResolutions.Items.AddRange(mDeviceResolutions);
        }
Пример #29
0
        /// <summary>
        /// One-time application setup: capture device, AR raster, marker
        /// processor (two 16x16 patterns, 80 mm markers), Direct3D device,
        /// viewport, projection/view matrices, and the background surface.
        /// Always returns true.
        /// </summary>
        public bool InitializeApplication(Form1 topLevelForm, CaptureDevice i_cap_device)
        {
            topLevelForm.ClientSize = new Size(SCREEN_WIDTH, SCREEN_HEIGHT);
            // Set up capture at SCREEN_WIDTH x SCREEN_HEIGHT, 30 fps.
            i_cap_device.SetCaptureListener(this);
            i_cap_device.PrepareCapture(SCREEN_WIDTH, SCREEN_HEIGHT, 30);
            this._cap = i_cap_device;

            // Create the AR raster (DirectShow capture layout).
            this._raster = new DsRgbRaster(i_cap_device.video_width, i_cap_device.video_height);

            // Load and apply the AR camera parameter file.
            // NOTE(review): the FileStreams opened here and below are never
            // disposed — confirm whether the loaders take ownership.
            NyARParam ap = NyARParam.loadFromARParamFile(File.OpenRead(AR_CAMERA_FILE), SCREEN_WIDTH, SCREEN_HEIGHT);

            // Direct3D utility preparation.

            // Prepare the marker processor with two pattern codes.
            this._processor = new MarkerProcessor(ap, this._raster.getBufferType());
            NyARCode[] codes = new NyARCode[2];
            codes[0] = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE1), 16, 16);
            codes[1] = NyARCode.loadFromARPattFile(File.OpenRead(AR_CODE_FILE2), 16, 16);
            this._processor.setARCodeTable(codes, 16, 80.0);


            // Prepare the 3D device: depth test on, no lighting, CCW culling.
            this._device = PrepareD3dDevice(topLevelForm);
            this._device.RenderState.ZBufferEnable = true;
            this._device.RenderState.Lighting      = false;
            this._device.RenderState.CullMode      = Cull.CounterClockwise;

            Viewport vp = new Viewport();

            vp.X      = 0;
            vp.Y      = 0;
            vp.Height = ap.getScreenSize().h;
            vp.Width  = ap.getScreenSize().w;
            vp.MaxZ   = 1.0f;
            // Apply the viewport.
            this._device.Viewport = vp;

            this._text = new TextPanel(this._device, 1);
            // Configure the camera projection from the AR parameters.
            Matrix tmp = new Matrix();

            NyARD3dUtil.toCameraFrustumRH(ap.getPerspectiveProjectionMatrix(), ap.getScreenSize(), 1, 10, 10000, ref tmp);
            this._device.Transform.Projection = tmp;

            // Set the view transform (left-handed coordinates):
            // eye at the origin looking down +Z with +Y up.
            this._device.Transform.View = Matrix.LookAtLH(
                new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 1.0f), new Vector3(0.0f, 1.0f, 0.0f));

            // Create the background surface.
            this._surface = new NyARD3dSurface(this._device, SCREEN_WIDTH, SCREEN_HEIGHT);

            return(true);
        }
Пример #30
0
        /// <summary>
        /// Application entry point: lets the user pick a capture device, then
        /// runs the capture/render loop until the main form is closed.
        /// </summary>
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);


            // Enumerate the available capture devices.
            CaptureDeviceList capture_device_list = new CaptureDeviceList();

            if (capture_device_list.count < 1)
            {
                MessageBox.Show("キャプチャデバイスが見つかりませんでした。");
                return;
            }
            // Let the user choose a capture device.
            int cdevice_number = 0;

            using (Form2 frm2 = new Form2())
            {
                frm2.ShowDialog(capture_device_list, out cdevice_number);
            }
            using (CaptureDevice capture_device = capture_device_list[cdevice_number])
            {
                // Create the main form and the sample class.
                using (Form1 frm = new Form1())
                    using (ABGRAcapture sample = new ABGRAcapture())
                    {
                        frm.ref_cap = sample;
                        // Initialize the application.
                        if (sample.InitializeApplication(frm, capture_device))
                        {
                            // Show the main form.
                            frm.Show();
                            // Start capturing.
                            sample.StartCap();
                            // Loop for as long as the form exists.
                            while (frm.Created)
                            {
                                // Run one iteration of the main loop.
                                sample.MainLoop();

                                // Yield the thread.
                                Thread.Sleep(1);

                                // Process pending window messages.
                                Application.DoEvents();
                            }
                            // Stop capturing.
                            sample.StopCap();
                        }
                        else
                        {
                            // Initialization failed.
                        }
                    }
            }
        }
        /// <summary>
        /// Tears down the call state when navigating away: stops the remote
        /// video if a call was active and cleans up the capture device.
        /// Fix: guard against a null device — OnNavigatedTo never creates one
        /// when no camera is found, and the original then threw
        /// NullReferenceException here.
        /// </summary>
        protected async override void OnNavigatingFrom(NavigatingCancelEventArgs e)
        {
            base.OnNavigatingFrom(e);

            if (activated)
            {
                RemoteVideo.Stop();
                RemoteVideo.Source = null;
            }

            if (device != null)
            {
                await device.CleanUpAsync();
                device = null;
            }
        }
Пример #32
0
        // Create video source object
        /// <summary>
        /// Builds an IVideoSource from a LocalConfiguration, or returns null
        /// for a null/incompatible config.
        /// Fix: the original used a hard cast, so the following null check was
        /// dead (a hard cast never yields null for a non-null argument) and a
        /// wrong config type threw InvalidCastException instead of returning
        /// null. A soft cast restores the intended contract.
        /// </summary>
        public IVideoSource CreateVideoSource(object config)
        {
            LocalConfiguration cfg = config as LocalConfiguration;

            if (cfg != null)
            {
                CaptureDevice source = new CaptureDevice();

                source.VideoSource	= cfg.source;

                return (IVideoSource) source;
            }
            return null;
        }
Пример #33
0
 /// <summary>
 /// Logs configuration warnings on Unix hosts: requesting a USB camera or
 /// binding against plain OpenCV (instead of raspicamcv) is suspicious there.
 /// No-op on non-Unix systems.
 /// </summary>
 private static void EmitWarnings(CaptureDevice requestedDevice)
 {
     if (!EnvironmentService.IsUnix)
     {
         return;
     }

     if (requestedDevice == CaptureDevice.Usb)
     {
         Log.Warn("You are in Unix but aren't requesting a Pi camera? Whatever you say boss...");
     }

     if (CvInvokeRaspiCamCV.CVLibrary.Contains("opencv"))
     {
         Log.Warn("You are in Unix but trying to bind to opencv libraries - not raspicamcv");
     }
 }
        /// <summary>
        /// Invoked when this page is about to be displayed in a Frame.
        /// Creates the capture device and wires up the connection/failure
        /// handlers, or reports an error when no recording device is available.
        /// </summary>
        /// <param name="e">Event data that describes how this page was reached.  The Parameter
        /// property is typically used to configure the page.</param>
        protected async override void OnNavigatedTo(NavigationEventArgs e)
        {
            var cameraFound = await CaptureDevice.CheckForRecordingDeviceAsync();

            if (cameraFound)
            {
                device = new CaptureDevice();
                await InitializeAsync();
                device.IncomingConnectionArrived += device_IncomingConnectionArrived;
                device.CaptureFailed += device_CaptureFailed;
                RemoteVideo.MediaFailed += RemoteVideo_MediaFailed;
            }
            else
            {
                rootPage.NotifyUser("A machine with a camera and a microphone is required to run this sample.", NotifyType.ErrorMessage);
            }
        }
Пример #35
0
        /// <summary>
        /// Initializes video capture from a camera.
        /// On Windows two camera interfaces are available: Video for Windows (VFW) and Matrox Imaging Library (MIL).
        /// On Linux two are available: Video for Linux (V4L) and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Allocates and initialized the CvCapture structure for reading a video stream from the camera. 
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394). 
        /// </summary>
        /// <param name="device">Device type</param>
        /// <returns></returns>
#endif
        // NOTE(review): the matching #if for the #else above precedes this excerpt
        // (presumably a doc-localization symbol such as LANG_JP) - confirm.
        public CvCapture(CaptureDevice device)
            : this((int)device)
        {
            // Delegates to the int-based constructor; the enum's numeric value is
            // passed straight through as the capture-device id.
        }
Пример #36
0
        /// <summary>
        /// Opens video capture from a camera (localized documentation slot; the
        /// selecting #if precedes this excerpt).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <param name="index">Index of the camera; -1 may be acceptable when only one camera exists or the choice does not matter.</param>
        /// <returns>A capture object bound to the requested camera.</returns>
#else
        /// <summary>
        /// Allocates and initializes a CvCapture structure for reading a video stream from a camera.
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and
        /// Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Device type</param>
        /// <param name="index">Index of the camera to be used. If there is only one camera or it does not matter what camera to use -1 may be passed.</param>
        /// <returns>A capture object bound to the requested camera.</returns>
#endif
        public static CvCapture FromCamera(CaptureDevice device, int index)
        {
            // The enum's numeric value acts as a base id; adding the camera index
            // produces the concrete device id expected by the int constructor.
            int deviceId = (int)device + index;
            return new CvCapture(deviceId);
        }
Пример #37
0
        /// <summary>
        /// Opens video capture from a camera (localized documentation slot; the
        /// selecting #if precedes this excerpt).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <returns>A capture object bound to the requested camera type.</returns>
#else
        /// <summary>
        /// Allocates and initializes a CvCapture structure for reading a video stream from a camera.
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and
        /// Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Device type</param>
        /// <returns>A capture object bound to the requested camera type.</returns>
#endif
        public static CvCapture FromCamera(CaptureDevice device)
        {
            // Factory wrapper over the CaptureDevice constructor overload.
            CvCapture capture = new CvCapture(device);
            return capture;
        }
Пример #38
0
        /// <summary>
        /// Initializes video capture from a camera.
        /// On Windows two camera interfaces are available: Video for Windows (VFW) and Matrox Imaging Library (MIL).
        /// On Linux two are available: Video for Linux (V4L) and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <param name="index">Index of the camera to use. When only one camera is used, or when it does not matter which camera is used, -1 may be acceptable.</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Allocates and initialized the CvCapture structure for reading a video stream from the camera. 
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394). 
        /// </summary>
        /// <param name="device">Device type</param>
        /// <param name="index">Index of the camera to be used. If there is only one camera or it does not matter what camera to use -1 may be passed. </param>
        /// <returns></returns>
#endif
        // NOTE(review): the matching #if for the #else above precedes this excerpt
        // (presumably a doc-localization symbol such as LANG_JP) - confirm.
        public CvCapture(CaptureDevice device, int index)
            : this((int)device + index)
        {
            // Delegates to the int-based constructor: (int)device supplies the base
            // id and index selects the camera within it (per the docs above).
        }
Пример #39
0
        // Core of the dispose pattern: releases capture resources exactly once.
        // disposing == true means called from a deterministic Dispose() path, so
        // managed members may be touched; false presumably means a finalizer path
        // (no finalizer is visible in this excerpt - confirm).
        protected void Dispose(bool disposing)
        {
            // Idempotence guard: repeated calls are no-ops.
            if (this.disposed)
                return;

            OpenAL.DebugFormat ("Freeing OpenALCaptureProvider. Disposing: {0}", disposing);

            if (disposing)
            {
                // Stop any in-flight capture before releasing the device.
                if (IsCapturing)
                    EndCapture();

                if (this.device != null)
                    this.device.Dispose();
            }

            // Cleared on both paths so the stale reference cannot be reused.
            this.device = null;
            this.disposed = true;
        }
Пример #40
0
 // Constructor: stores a back-reference to the owning CaptureDevice.
 // No other initialization is performed here.
 public Grabber(CaptureDevice parent)
 {
     this.parent = parent;
 }
Пример #41
0
        /// <summary>
        /// Opens video capture from a camera (localized documentation slot; the
        /// selecting #if precedes this excerpt).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <param name="index">Index of the camera; -1 may be acceptable when only one camera exists or the choice does not matter.</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Allocates and initializes the CvCapture structure for reading a video stream from a camera.
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and
        /// Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Device type</param>
        /// <param name="index">Index of the camera to be used. If there is only one camera or it does not matter what camera to use -1 may be passed.</param>
        /// <returns></returns>
#endif
        public void Open(CaptureDevice device, int index)
        {
            // The enum's numeric value acts as a base id; adding the camera index
            // produces the concrete device id expected by the int overload.
            int deviceId = (int)device + index;
            Open(deviceId);
        }
Пример #42
0
        /// <summary>
        /// Opens video capture from a camera (localized documentation slot; the
        /// selecting #if precedes this excerpt).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Allocates and initializes the CvCapture structure for reading a video stream from a camera.
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and
        /// Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Device type</param>
        /// <returns></returns>
#endif
        public void Open(CaptureDevice device)
        {
            // Forward the enum's numeric value to the int overload.
            int deviceId = (int)device;
            Open(deviceId);
        }
Пример #43
0
        /// <summary>
        /// Opens video capture from a camera (localized documentation slot; the
        /// selecting #if precedes this excerpt).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <returns>A VideoCapture bound to the requested camera type.</returns>
#else
        /// <summary>
        /// Allocates and initializes a capture object for reading a video stream from a camera.
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and
        /// Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Device type</param>
        /// <returns>A VideoCapture bound to the requested camera type.</returns>
#endif
        public static VideoCapture FromCamera(CaptureDevice device)
        {
            // Factory wrapper over the CaptureDevice constructor overload.
            VideoCapture capture = new VideoCapture(device);
            return capture;
        }
Пример #44
0
        /// <summary>
        /// Initializes video capture from a camera.
        /// On Windows two camera interfaces are available: Video for Windows (VFW) and Matrox Imaging Library (MIL).
        /// On Linux two are available: Video for Linux (V4L) and FireWire (IEEE1394).
        /// </summary>
        /// <param name="device">Type of camera to use</param>
        /// <returns></returns>
#else
        /// <summary>
        /// Allocates and initialized the CvCapture structure for reading a video stream from the camera. 
        /// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394). 
        /// </summary>
        /// <param name="device">Device type</param>
        /// <returns></returns>
#endif
        // NOTE(review): the matching #if for the #else above precedes this excerpt
        // (presumably a doc-localization symbol such as LANG_JP) - confirm.
        public VideoCapture(CaptureDevice device)
            : this((int)device)
        {
            // Delegates to the int-based constructor; the enum's numeric value is
            // passed straight through as the capture-device id.
        }
        /// <summary>
        /// Initializes the scenario: creates the capture device, resets the UI to its
        /// pre-preview state, and selects a preferred preview stream format
        /// (>= 480 px wide, YUY2 subtype).
        /// </summary>
        /// <param name="cancel">Cancellation token; currently not forwarded to the awaited calls.</param>
        /// <returns>A task that completes when preview setup has finished.</returns>
        private async Task InitializeAsync(CancellationToken cancel = default(CancellationToken))
        {
            // Criteria used to pick a preview stream setting below.
            var streamFilteringCriteria = new
            {
                //AspectRatio = 1.333333333333333,
                HorizontalResolution = (uint)480,
                SubType = "YUY2"
            };
            currentState = State.Initializing;
            device = new CaptureDevice();

            // Reset the UI to the "not previewing" state.
            CameraPreview.Visibility = Visibility.Collapsed;
            PreviewPoster.Visibility = Visibility.Visible;
            Preview.Content = "Start Preview";
            LoopbackClient.IsEnabled = false;

            mode = defaultMode;
            LatencyModeToggle.IsOn = (mode == LatencyMode.LowLatency);
            LatencyModeToggle.IsEnabled = false;

            await device.InitializeAsync();
            var setting = await device.SelectPreferredCameraStreamSettingAsync(MediaStreamType.VideoPreview, ((x) =>
            {
                // BUG FIX: the previous code dereferenced the result of the 'as' cast
                // without a null check, throwing NullReferenceException for any stream
                // whose properties are not VideoEncodingProperties. Such streams are
                // now filtered out instead of crashing the selection.
                var previewStreamEncodingProperty = x as Windows.Media.MediaProperties.VideoEncodingProperties;

                return previewStreamEncodingProperty != null &&
                    previewStreamEncodingProperty.Width >= streamFilteringCriteria.HorizontalResolution &&
                    previewStreamEncodingProperty.Subtype == streamFilteringCriteria.SubType;
            }));

            previewEncodingProperties = setting as VideoEncodingProperties;

            PreviewSetupCompleted();
        }