GrapImg() public method

Allocates the frame buffer and installs the frame-grab callback (SetCallBack); once called, the FrameEvent2 handler is invoked for each captured frame.
public void GrapImg()
Returns: void
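All of the examples below follow the same call sequence: enumerate devices with Filters, construct a Capture from a video input device, configure FrameSize, FrameRate and PreviewWindow, subscribe a Capture.HeFrame handler to FrameEvent2, and finally call GrapImg() to start receiving frames. The following minimal sketch illustrates that pattern; the form, panel and handler names are placeholders and not part of the library.

using System;
using System.Drawing;
using System.Windows.Forms;
using DirectX.Capture;

public class CapturePreviewForm : Form
{
    // placeholder names: CapturePreviewForm, previewPanel and OnFrame are not part of DirectX.Capture
    private readonly Panel previewPanel = new Panel();
    private Filters filters;
    private Capture capture;

    public CapturePreviewForm()
    {
        previewPanel.Size = new Size(640, 480);
        Controls.Add(previewPanel);
        Load += OnLoad;
        FormClosing += OnClosing;
    }

    private void OnLoad(object sender, EventArgs e)
    {
        filters = new Filters();                                    // enumerate capture devices
        capture = new Capture(filters.VideoInputDevices[0], null);  // first video device, no audio

        capture.FrameSize = new Size(640, 480);    // configure before starting
        capture.FrameRate = 30;
        capture.PreviewWindow = previewPanel;      // control that hosts the live preview

        capture.FrameEvent2 += new Capture.HeFrame(OnFrame);  // per-frame callback
        capture.GrapImg();                                     // allocate the buffer and install the callback
    }

    private void OnFrame(Bitmap frame)
    {
        // frame is the latest captured image; clone it if it must outlive this call
    }

    private void OnClosing(object sender, FormClosingEventArgs e)
    {
        if (capture != null)
        {
            capture.Stop();
            capture.Dispose();
        }
    }
}

When the device is no longer needed (or before reopening it), Stop() and Dispose() release it, as Examples #2 and #4 show.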
Example #1
        private void Form1_Load(object sender, EventArgs e)
        {
            capture = new Capture(cameraFilters.VideoInputDevices[0], cameraFilters.AudioInputDevices[0]);

            VideoCapabilities vc = capture.VideoCaps;
            capture.FrameSize = new Size(640, 480);
            capture.PreviewWindow = cam;

            var referenceCards = sql.GetCards();

            capture.FrameEvent2 += new Capture.HeFrame((Bitmap bitmap) => {
                var magicCards = Utilities.DetectCardArt(bitmap);
                foreach (var card in magicCards)
                {
                    camWindow.Image = bitmap;
                    image_output.Image = card.GetDrawnCorners();
                    cardArtImage.Image = card.CardArtBitmap;

                    var bestMatch = Utilities.MatchCard(card, referenceCards);
                    using (Graphics g = Graphics.FromImage(bitmap))
                    using (var font = new Font("Tahoma", 25))
                    {
                        // draw the matched card name with a simple drop shadow
                        g.DrawString(bestMatch.name, font, Brushes.Black, new PointF(card.Corners[0].X - 29, card.Corners[0].Y - 39));
                        g.DrawString(bestMatch.name, font, Brushes.Yellow, new PointF(card.Corners[0].X - 30, card.Corners[0].Y - 40));
                    }
                    image_output.Image = bitmap;
                }
            });

            capture.GrapImg();
        }
Example #2
        void InitCapture(string Quality, int videoIndex)
        {
            try
            {
                if (capture != null)
                {
                    capture.Stop();
                    capture.Dispose();
                }

                capture = new DirectX.Capture.Capture(filters.VideoInputDevices[videoIndex], null);

                #region comment out these lines when using a professional camera
                if (Quality == "High")
                {
                    capture.FrameSize = new System.Drawing.Size(capture.VideoCaps.MaxFrameSize.Width, capture.VideoCaps.MaxFrameSize.Height);
                }
                else if (Quality == "Low")
                {
                    capture.FrameSize = new System.Drawing.Size(160, 120);
                }
                capture.FrameRate = 30;
                #endregion

                double dblPicWidth = 0.0, dblPicHeight = 0.0;

                // read the preview control's current size on the UI thread
                this.Dispatcher.BeginInvoke(DispatcherPriority.Background, new DispatcherOperationCallback(delegate(Object state)
                {
                    dblPicWidth = picUserVideo.ActualWidth;
                    dblPicHeight = picUserVideo.ActualHeight;
                    return null;
                }), null);

                Point pt = picUserVideo.TranslatePoint(new Point(), Application.Current.MainWindow);
                picUserVideo.Tag = pt;

                capture.PreviewWindow = cnvUserVideo;

                disptgetVideoWind.Start();   // start the timer that fetches the video window

                if (VMuktiAPI.VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.BootStrap || VMuktiAPI.VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.SuperNode || VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.NodeWithNetP2P)
                {
                    capture.FrameEvent2 += new DirectX.Capture.Capture.HeFrame(capture_FrameEventNetP2P);
                }
                else
                {
                    capture.FrameEvent2 += new DirectX.Capture.Capture.HeFrame(capture_FrameEventHttp);
                }
                capture.GrapImg();
            }
            catch (Exception ex)
            {
                VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "InitCapture", "ctlUserVideo.xaml.cs");
            }
        }
Example #3
        private void LoadCamera()
        {
            _capturer = new Capture(_cameraFilters.VideoInputDevices[0], _cameraFilters.AudioInputDevices[0])
            {
                FrameSize = new System.Drawing.Size(640, 480),
                PreviewWindow = _cam
            };
            _capturer.FrameEvent2 += CaptureDone;
            _capturer.GrapImg();
        }
Example #4
 void LblCaptureLinkClicked(object sender, LinkLabelLinkClickedEventArgs e)
 {
     if (!hasDevice) return;
     if (cboSelectDevide.Items.Count == 0) return;
     if (cboSelectDevide.SelectedIndex < 0) return;

     if (isStarted)
     {
         // stop and release the running device
         if (captureDevice != null)
         {
             if (captureDevice.Capturing) captureDevice.Stop();
             captureDevice.Dispose();
         }
         this.lblCaptureStart.Text = MSG_START;
         isStarted = false;
         return;
     }
     else
     {
         // start the selected device
         captureDevice = new Capture(filters.VideoInputDevices[cboSelectDevide.SelectedIndex], null);
         captureDevice.FrameSize = new Size(640, 480);
         captureDevice.FrameRate = 30; // 30 frames per second
         captureDevice.FrameEvent2 += new Capture.HeFrame(captureDevice_FrameEvent2);
         captureDevice.PreviewWindow = picPreview;
         //captureDevice.Cue();
         captureDevice.GrapImg();
         //captureDevice.Start();
         isStarted = true;
         this.lblCaptureStart.Text = MSG_STOP;
     }
 }
Example #5
        /// <summary>
        /// Starts video capture using the first available device.
        /// </summary>
        private void StartVideoCapture()
        {
            if (this.filters.VideoInputDevices.Count == 0)
                throw new Exception("No video capture devices available!");

            videoCapture = new Capture(filters.VideoInputDevices[0], null);
            videoCapture.FrameSize = new Size(320, 240);
            videoCapture.FrameRate = 30.0;
            videoCapture.PreviewWindow = videoWindow;
            videoCapture.FrameEvent2 += new Capture.HeFrame(CapturedFrame);
            videoCapture.GrapImg();

            // pause to allow the camera to finish initializing
            Thread.Sleep(1000);
        }
Example #6
        public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution, 
            ImageFormat format, bool grayscale)
        {
            if (cameraInitialized)
                return;

            this.resolution = resolution;
            this.grayscale = grayscale;
            this.frameRate = framerate;
            this.videoDeviceID = videoDeviceID;
            this.format = format;

            switch (resolution)
            {
                case Resolution._160x120:
                    cameraWidth = 160;
                    cameraHeight = 120;
                    break;
                case Resolution._320x240:
                    cameraWidth = 320;
                    cameraHeight = 240;
                    break;
                case Resolution._640x480:
                    cameraWidth = 640;
                    cameraHeight = 480;
                    break;
                case Resolution._800x600:
                    cameraWidth = 800;
                    cameraHeight = 600;
                    break;
                case Resolution._1024x768:
                    cameraWidth = 1024;
                    cameraHeight = 768;
                    break;
                case Resolution._1280x1024:
                    cameraWidth = 1280;
                    cameraHeight = 1024;
                    break;
                case Resolution._1600x1200:
                    cameraWidth = 1600;
                    cameraHeight = 1200;
                    break;
            }

            Filters filters = null;
            Filter videoDevice, audioDevice = null;
            try
            {
                filters = new Filters();
            }
            catch (Exception exp)
            {
                throw new GoblinException("No video capturing devices are found");
            }

            try
            {
                videoDevice = (videoDeviceID >= 0) ? filters.VideoInputDevices[videoDeviceID] : null;
            }
            catch (Exception exp)
            {
                String suggestion = "Try the following device IDs:";
                for(int i = 0; i < filters.VideoInputDevices.Count; i++)
                {
                    suggestion += " " + i + ":" + filters.VideoInputDevices[i].Name + ", ";
                }
                throw new GoblinException("VideoDeviceID " + videoDeviceID + " is out of the range. "
                    + suggestion);
            }

            selectedVideoDeviceName = filters.VideoInputDevices[videoDeviceID].Name;
            
            capture = new DCapture(videoDevice, audioDevice);

            double frame_rate = 0;
            switch (frameRate)
            {
                case FrameRate._15Hz: frame_rate = 15; break;
                case FrameRate._30Hz: frame_rate = 30; break;
                case FrameRate._50Hz: frame_rate = 50; break;
                case FrameRate._60Hz: frame_rate = 60; break;
                case FrameRate._120Hz: frame_rate = 120; break;
                case FrameRate._240Hz: frame_rate = 240; break;
            }

            if (videoDevice != null)
            {
                // Using MPEG compressor
                //capture.VideoCompressor = filters.VideoCompressors[2]; 
                capture.FrameRate = frame_rate;
                try
                {
                    capture.FrameSize = new Size(cameraWidth, cameraHeight);
                }
                catch(Exception exp)
                {
                    throw new GoblinException("Resolution._" + cameraWidth + "x" + cameraHeight +
                        " is not supported for " + selectedVideoDeviceName + 
                        ". Maximum resolution supported is " + 
                        capture.VideoCaps.MaxFrameSize);
                }
            }

            if (capture.FrameSize.Width != cameraWidth || capture.FrameSize.Height != cameraHeight)
                throw new GoblinException("Failed to set the resolution to " + cameraWidth + "x" + cameraHeight);

            tmpPanel = new Panel();
            tmpPanel.Size = new Size(cameraWidth, cameraHeight);

            try
            {
                capture.PreviewWindow = tmpPanel;
            }
            catch (Exception exp)
            {
                throw new GoblinException("Specified framerate or/and resolution is/are not supported " +
                    "for " + selectedVideoDeviceName);
            }

            capture.FrameEvent2 += new DCapture.HeFrame(CaptureDone);
            capture.GrapImg();

            cameraInitialized = true;
        }
Example #7
        private void Form1_Load(object sender, EventArgs e)
        {
            cameraBitmap = new Bitmap(640, 480);
            capture = new Capture(cameraFilters.VideoInputDevices[0], cameraFilters.AudioInputDevices[0]);
            VideoCapabilities vc = capture.VideoCaps;
            capture.FrameSize = new Size(640, 480);
            capture.PreviewWindow = cam;
            capture.FrameEvent2 += new Capture.HeFrame(CaptureDone);
            capture.GrapImg();

            loadSourceCards();
        }