/// <summary>
/// Opens the crossbar property page for the current video source, if one is available.
/// </summary>
/// <param name="sender">The control that raised the event.</param>
/// <param name="e">Event data (unused).</param>
private void DisplayCrossbarProperty_Click(object sender, EventArgs e)
{
    // Nothing to do until a device has been selected.
    if (videoSource == null)
    {
        return;
    }

    if (!videoSource.CheckIfCrossbarAvailable())
    {
        MessageBox.Show("Crossbar property page is not available.");
        return;
    }

    videoSource.DisplayCrossbarPropertyPage(this.Handle);
}
/// <summary>
/// Handles switching the video capture device: creates a new
/// <c>VideoCaptureDevice</c> for the selected moniker, syncs the crossbar
/// button state, and repopulates the list of supported resolutions.
/// </summary>
/// <param name="sender">The combo box that raised the event.</param>
/// <param name="e">Event data (unused).</param>
private void cmbSource_SelectedIndexChanged(object sender, EventArgs e)
{
    if (cmbSource.SelectedIndex < 0)
    {
        return;
    }

    videoSource = new VideoCaptureDevice(videoDevices[cmbSource.SelectedIndex].MonikerString);

    // BUG FIX: the original only ever disabled the button, so once a
    // crossbar-less device was selected the button stayed disabled even
    // after switching to a device that does have a crossbar.
    this.button2.Enabled = videoSource.CheckIfCrossbarAvailable();

    // Enumerate the device's capabilities to rebuild the resolution list.
    VideoCapabilities[] vc = videoSource.VideoCapabilities;
    this.cmbCapability.Items.Clear();
    for (int i = 0; i < vc.Length; i++)
    {
        this.cmbCapability.Items.Add(new VideoCapabilitiesItem(vc[i]));
    }

    // BUG FIX: guard against devices that report no capabilities —
    // setting SelectedIndex = 0 on an empty item list throws
    // ArgumentOutOfRangeException.
    if (this.cmbCapability.Items.Count > 0)
    {
        this.cmbCapability.SelectedIndex = 0;
    }
}
/// <summary>
/// Starts this camera's video source according to the configured source type
/// (JPEG URL, MJPEG stream, capture device, desktop, or Kinect) and marks the
/// camera object active. Sets <c>_processing</c> for the duration of the call.
/// </summary>
public void Enable()
{
    _processing = true;

    // Choose the concrete video source implementation from the configured index.
    switch (Camobject.settings.sourceindex)
    {
        case 0:
            // JPEG snapshot URL source.
            var jpegSource = new JPEGStream(Camobject.settings.videosourcestring);
            if (Camobject.settings.frameinterval != 0)
            {
                jpegSource.FrameInterval = Camobject.settings.frameinterval;
            }
            // Only apply credentials when a login has actually been configured.
            if (Camobject.settings.login != "")
            {
                jpegSource.Login = Camobject.settings.login;
                jpegSource.Password = Camobject.settings.password;
            }
            //jpegSource.SeparateConnectionGroup = true;
            jpegSource.RequestTimeout = iSpyServer.Default.IPCameraTimeout;
            OpenVideoSource(jpegSource, false);
            break;
        case 1:
            // MJPEG HTTP stream source.
            var mjpegSource = new MJPEGStream(Camobject.settings.videosourcestring)
            {
                Login = Camobject.settings.login,
                Password = Camobject.settings.password,
                RequestTimeout = iSpyServer.Default.IPCameraTimeout,
                HttpUserAgent = Camobject.settings.useragent
            };
            //mjpegSource.SeparateConnectionGroup = true;
            OpenVideoSource(mjpegSource, false);
            break;
        case 2:
            // AVI file playback — intentionally disabled in this build.
            //var fileSource = new AVIFileVideoSource(Camobject.settings.videosourcestring);
            //OpenVideoSource(fileSource, true);
            break;
        case 3:
            // Local capture device, identified by its DirectShow moniker string.
            string moniker = Camobject.settings.videosourcestring;
            var videoSource = new VideoCaptureDevice(moniker);
            // Camobject.resolution is stored as "WxH"; match it against the
            // device's reported capabilities.
            string[] wh = Camobject.resolution.Split('x');
            var sz = new Size(Convert.ToInt32(wh[0]), Convert.ToInt32(wh[1]));
            var vc = videoSource.VideoCapabilities.Where(p => p.FrameSize == sz).ToList();
            if (vc.Count > 0)
            {
                // Prefer the capability matching the configured frame rate;
                // otherwise fall back to the first capability at this size.
                var vc2 = vc.FirstOrDefault(p => p.AverageFrameRate == Camobject.settings.framerate) ??
                          vc.FirstOrDefault();
                videoSource.VideoResolution = vc2;
            }
            // Restore the previously saved crossbar input, when one was saved
            // (-1 means "none saved") and the device exposes a crossbar.
            if (Camobject.settings.crossbarindex != -1 && videoSource.CheckIfCrossbarAvailable())
            {
                var cbi = videoSource.AvailableCrossbarVideoInputs.FirstOrDefault(
                    p => p.Index == Camobject.settings.crossbarindex);
                if (cbi != null)
                {
                    videoSource.CrossbarVideoInput = cbi;
                }
            }
            OpenVideoSource(videoSource, true);
            break;
        case 4:
            // Desktop capture; desktoparea, when set, is "x,y,w,h".
            Rectangle area = Rectangle.Empty;
            if (!String.IsNullOrEmpty(Camobject.settings.desktoparea))
            {
                var i = Array.ConvertAll(Camobject.settings.desktoparea.Split(','), int.Parse);
                area = new Rectangle(i[0], i[1], i[2], i[3]);
            }
            var desktopSource = new DesktopStream(Convert.ToInt32(Camobject.settings.videosourcestring), area)
            {
                MousePointer = Camobject.settings.desktopmouse
            };
            if (Camobject.settings.frameinterval != 0)
            {
                desktopSource.FrameInterval = Camobject.settings.frameinterval;
            }
            OpenVideoSource(desktopSource, false);
            break;
        case 5:
            // Kinect source. NOTE(review): Nv() presumably looks up a named
            // setting value — confirm against its definition.
            var ks = new KinectStream(Nv("UniqueKinectId"), Convert.ToBoolean(Nv("KinectSkeleton")));
            OpenVideoSource(ks, true);
            break;
    }

    if (Camera != null)
    {
        if (!Camera.IsRunning)
        {
            Camera.Start();
        }
        Camobject.settings.active = true;
        // Apply the mask image if the configured file exists.
        // NOTE(review): Image.FromFile keeps the file locked until the image
        // is disposed — verify the mask is disposed elsewhere.
        if (File.Exists(Camobject.settings.maskimage))
        {
            Camera.Mask = Image.FromFile(Camobject.settings.maskimage);
        }
    }

    // Reset counters and error state, and flag related subsystems for refresh.
    _frameCount = 0;
    VideoSourceErrorState = false;
    VideoSourceErrorMessage = "";
    Camobject.ftp.ready = true;
    MainForm.NeedsSync = true;
    Invalidate();
    _lastRun = DateTime.Now.Ticks;
    _processing = false;
}
/// <summary>
/// Initializes the form, attaches to a video capture device, shows its
/// crossbar property page when available, and configures the exposure
/// track bar from the camera's reported property range.
/// </summary>
public Form1()
{
    InitializeComponent();

    FilterInfoCollection col = new FilterInfoCollection(FilterCategory.VideoInputDevice);

    // BUG FIX: the original hard-coded col[1], which throws
    // ArgumentOutOfRangeException on machines with fewer than two capture
    // devices. Keep the original preference for the second device but fall
    // back to the first, and fail with a clear message when none exist.
    if (col.Count == 0)
    {
        throw new InvalidOperationException("No video input devices found.");
    }
    dev = new VideoCaptureDevice(col[col.Count > 1 ? 1 : 0].MonikerString);

    dev.NewFrame += Dev_NewFrame1;

    if (dev.CheckIfCrossbarAvailable())
    {
        dev.DisplayCrossbarPropertyPage(Handle);
    }

    // Configure the exposure slider from the camera's advertised range,
    // starting it at the camera's default value.
    int minVal;
    int maxVal;
    int defVal;
    int step;
    CameraControlFlags flag;
    dev.GetCameraPropertyRange(CameraControlProperty.Exposure,
        out minVal, out maxVal, out step, out defVal, out flag);
    trackBar1.Minimum = minVal;
    trackBar1.Maximum = maxVal;
    trackBar1.SmallChange = step;
    trackBar1.Value = defVal;

    dev.Start();

    // NOTE(review): a large block of commented-out ArduCam experimentation
    // (ArduCam_scan/ArduCam_open/ArduCam_readImage) was removed here;
    // recover it from version control if it is ever needed again.
}