// Open video source: tears down the previous file, wires a new camera and
// restarts the statistics counters.
private void OpenVideoSource(IVideoSource source)
{
    // busy cursor while switching sources
    this.Cursor = Cursors.WaitCursor;

    // close previous file
    CloseFile();

    // motion alarm follows the menu item's checked state
    if (detector != null)
    {
        detector.MotionLevelCalculation = motionAlarmItem.Checked;
    }

    // wrap the source (plus detector) in a Camera and begin capturing
    Camera camera = new Camera(source, detector);
    camera.Start();

    // display the running camera
    cameraWindow.Camera = camera;

    // reset FPS statistics
    statIndex = statReady = 0;

    // frame / alarm notifications (method-group syntax, same delegates)
    camera.NewFrame += camera_NewFrame;
    camera.Alarm += camera_Alarm;

    // begin gathering statistics
    timer.Start();

    this.Cursor = Cursors.Default;
}
// Capture the whole virtual desktop: the union of every monitor's bounds
// becomes the screen-capture rectangle.
private void StartCamera()
{
    var captureArea = new Rectangle();
    foreach (var screen in System.Windows.Forms.Screen.AllScreens)
    {
        captureArea = Rectangle.Union(captureArea, screen.Bounds);
    }

    videoSource = new ScreenCaptureStream(captureArea);
    videoSource.NewFrame += capturedWebCamNewFrame;
    videoSource.Start();
}
// Open video source; returns true on success, false when authentication
// fails or the source cannot be started (the status label shows why).
private bool OpenVideoSource(IVideoSource source)
{
    try
    {
        // tear down the previous source first
        CloseFile();

        // reject unauthenticated cameras
        if (!checkCamPassword(source))
        {
            toolStripStatusLabel3.Text = "Invalid Username Or Password.";
            return false;
        }

        // busy cursor while the camera spins up
        this.Cursor = Cursors.WaitCursor;

        if (detector != null)
        {
            detector.MotionLevelCalculation = true;
        }

        // with motion detection enabled the camera also wraps the detector
        camera = EnableMotionDetector ? new Camera(source, detector) : new Camera(source);

        camera.Start();
        cameraWindow.Camera = camera;

        // frame / alarm notifications
        camera.NewFrame += camera_NewFrame;
        camera.Alarm += camera_Alarm;

        timer.Start();

        this.Cursor = Cursors.Default;
        return true;
    }
    catch (Exception er)
    {
        // surface the failure in the status bar instead of crashing
        toolStripStatusLabel3.Text = er.Message;
        return false;
    }
}
// On timer event - gather statistics: computes frames-per-second from the
// number of frames received since the previous tick.
private void timer_Tick(object sender, EventArgs e)
{
    IVideoSource videoSource = videoSourcePlayer.VideoSource;

    if (videoSource != null)
    {
        // number of frames since the last timer tick
        int framesReceived = videoSource.FramesReceived;

        if (stopWatch == null)
        {
            // first tick: just start measuring
            stopWatch = new Stopwatch();
            stopWatch.Start();
        }
        else
        {
            stopWatch.Stop();

            // guard against a zero elapsed interval: the original float
            // division would produce Infinity (or NaN for 0/0) and show a
            // garbage value in the label
            long elapsed = stopWatch.ElapsedMilliseconds;
            if (elapsed > 0)
            {
                float fps = 1000.0f * framesReceived / elapsed;
                fpsLabel.Text = fps.ToString("F2") + " fps";
            }

            stopWatch.Reset();
            stopWatch.Start();
        }
    }
}
// Switch the player to a new video source, resetting the glyph processor
// state first.
private void OpenVideoSource(IVideoSource source)
{
    // busy cursor while switching
    this.Cursor = Cursors.WaitCursor;

    // clear glyph-recognition state left over from the previous stream
    lock (_sync)
    {
        _imageProcessor.Reset();
    }

    // make sure the old source has fully stopped before replacing it
    videoSourcePlayer.SignalToStop();
    videoSourcePlayer.WaitForStop();

    // async wrapper keeps frame processing off the capture thread
    videoSourcePlayer.VideoSource = new AsyncVideoSource(source);
    videoSourcePlayer.Start();

    this.Cursor = Cursors.Default;
}
// Form shown: pick the default capture device, start it, and create the
// motion detector. Logs progress and exits the application when no camera
// can be opened.
private void MotionForm_Shown(object sender, EventArgs e)
{
    AppendLog("程序已加载...");
    try
    {
        AppendLog("正在读取摄像头设备列表...");
        // enumerate available capture devices
        VideoDevicesList = new FilterInfoCollection(FilterCategory.VideoInputDevice);
        AppendLog("使用默认摄像头设备...");
        // use the first (default) device; an empty list throws here and is
        // handled by the catch below
        VideoSource = new VideoCaptureDevice(VideoDevicesList[0].MonikerString);
        // subscribe the alert handler before starting the stream
        AppendLog("注册警报事件");
        VideoSource.NewFrame += new NewFrameEventHandler(Alert);
        AppendLog("正在启动摄像头...");
        // start capturing
        VideoSource.Start();
    }
    catch (Exception ex)
    {
        AppendLog("无法连接或启动摄像头,程序即将退出...\n{0}", ex.Message);
        MessageBox.Show(string.Format("无法连接摄像头:\n{0}", ex.Message));
        Application.Exit();
        // BUG FIX: Application.Exit only posts a close request - without
        // this return, execution continued below and built a detector on
        // top of the failed camera.
        return;
    }
    AppendLog("设备初始化完成!开始监视...");
    // motion surveillance pipeline
    LunchDetector = new MotionDetector(motionDetector, motionProcessing);
    AppendLog("运行监视创建完成...");
    AppendLog("————————————");
}
// On timer event - gather statistics.
// Maintains a small ring buffer (statCount) of per-second frame counts and
// shows the average across the filled portion of the buffer as FPS.
private void timer_Tick(object sender, EventArgs e)
{
    IVideoSource videoSource = videoSourcePlayer.VideoSource;

    if (videoSource != null)
    {
        // get number of frames for the last second
        // (FramesReceived resets its counter on read)
        statCount[statIndex] = videoSource.FramesReceived;

        // increment indexes: statIndex wraps around the ring buffer,
        // statReady grows until the buffer is fully populated
        if (++statIndex >= statLength)
        {
            statIndex = 0;
        }
        if (statReady < statLength)
        {
            statReady++;
        }

        float fps = 0;

        // calculate average value over the slots filled so far
        // (statReady is at least 1 here, so the division below is safe)
        for (int i = 0; i < statReady; i++)
        {
            fps += statCount[i];
        }
        fps /= statReady;

        // clear the slot that will receive the next sample so a partially
        // filled buffer never averages in stale data
        statCount[statIndex] = 0;

        fpsLabel.Text = fps.ToString("F2") + " fps";
    }
}
// Main window: enumerate capture devices into the combo box and prepare
// (but do not start) the first available camera.
public MainApp(RestClient client)
{
    InitializeComponent();
    this.client = client;

    #region Set up video settings
    videoDeviceList = new FilterInfoCollection(FilterCategory.VideoInputDevice);

    // list every device by name
    foreach (FilterInfo videoDevice in videoDeviceList)
    {
        cmbVideoSource.Items.Add(videoDevice.Name);
    }

    // bail out when there is nothing to capture from
    if (cmbVideoSource.Items.Count == 0)
    {
        MessageBox.Show("No video sources found", "Error",
                        MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    cmbVideoSource.SelectedIndex = 0;

    videoSource = new VideoCaptureDevice(videoDeviceList[cmbVideoSource.SelectedIndex].MonikerString);
    videoSource.NewFrame += video_NewFrame;
    #endregion
}
// Start a new video source, restart the statistics/alarm timers, and set
// up fixed motion-detection zones for both detectors.
private void OpenVideoSource(IVideoSource source)
{
    // busy cursor while switching
    this.Cursor = Cursors.WaitCursor;

    // stop and release the previous source
    CloseVideoSource();

    // play the new source through an async wrapper
    videoSourcePlayer.VideoSource = new AsyncVideoSource(source);
    videoSourcePlayer.Start();

    // reset FPS statistics
    statIndex = statReady = 0;

    // restart statistics and alarm timers
    timer.Start();
    alarmTimer.Start();

    videoSource = source;
    this.Cursor = Cursors.Default;

    // fixed 150x150 detection zones: one near the top-right for the main
    // detector, one near the top-left for the TL detector
    detector.MotionZones = new Rectangle[]
    {
        new Rectangle { X = 400, Y = 50, Width = 150, Height = 150 }
    };
    detectorTL.MotionZones = new Rectangle[]
    {
        new Rectangle { X = 50, Y = 50, Width = 150, Height = 150 }
    };
}
// List the available capture devices on the console, let the user pick one
// by index, then start capturing from it.
public void Instantiate_Camera()
{
    int cam_count = 0;
    videoDevicesList = new FilterInfoCollection(FilterCategory.VideoInputDevice);

    foreach (FilterInfo videoDevice in videoDevicesList)
    {
        Console.WriteLine("Kamera[" + cam_count + "]: " + videoDevice.Name);
        cam_count++;
    }

    if (videoDevicesList.Count > 0)
    {
        Console.WriteLine("mamy kamerek: " + videoDevicesList.Count);
    }
    else
    {
        Console.WriteLine("Nie mamy kamerek :/");
        // nothing to select from - don't prompt for an index
        return;
    }

    Console.WriteLine("Wpisz 0 lub 1 i zatwierdz enterem zeby wybrac kamerke");

    // BUG FIX: the original Int32.Parse + unchecked index threw on any
    // non-numeric or out-of-range input; re-prompt until valid instead
    int camera;
    while (!Int32.TryParse(Console.ReadLine(), out camera) ||
           camera < 0 || camera >= videoDevicesList.Count)
    {
        Console.WriteLine("Wpisz 0 lub 1 i zatwierdz enterem zeby wybrac kamerke");
    }

    videoSource = new VideoCaptureDevice(videoDevicesList[camera].MonikerString);
    videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);
    videoSource.Start();

    Console.WriteLine("kamerka wybrana: " + videoDevicesList[camera].Name);
}
// Attach the given source to the player and begin playback.
private void OpenVideoSource(IVideoSource source)
{
    vPlayer.VideoSource = source;
    vPlayer.Start();
}
// Build a platform file capturer bound to the native form of the given
// video source.
public IFileVideoCapturer CreateFileCapturer(IVideoSource videoSource, string file)
{
    var capturer = new FileVideoCapturer(file);
    var nativeSource = videoSource.ToNative<VideoSource>();
    return new FileVideoCapturerNative(capturer, _context, nativeSource, EglBaseContext);
}
// Create a camera that pairs a video source with a motion detector and
// routes frames and source errors to this camera's handlers.
// NOTE(review): "motionDetecotor" looks like a typo of "motionDetector";
// the field is declared elsewhere in the class, so it is left untouched
// here - renaming would require changing every other use.
public Camera( IVideoSource source, IMotionDetector detector )
{
    this.videoSource = source;
    this.motionDetecotor = detector;

    // forward frames and errors from the underlying source
    videoSource.NewFrame += new NewFrameEventHandler( video_NewFrame );
    videoSource.VideoSourceError += new VideoSourceErrorEventHandler( video_VideoSourceError );
}
/// <summary>
/// Opens the video source and prepares the surveillance system.
/// </summary>
/// <param name="source">The video source.</param>
/// <param name="surveillance">The surveillance system.</param>
/// <param name="conf">The configuration for the image process.</param>
/// <param name="output">The output.</param>
private void OpenVideoSource(IVideoSource source, ISurveillanceSystem surveillance,
                             IDictionary<string, object> conf, IOutput output)
{
    // busy cursor while the camera object is constructed
    Cursor = Cursors.WaitCursor;

    // the camera connects the source to the surveillance image process,
    // the output sink and the preview picture box
    camera = new Camera(source, surveillance.GetImageProcess(conf), output,
                        surveillance.GraphicalOutput, pictureBox1);

    Cursor = Cursors.Default;
}
// Replace the current camera stream with the given source (WPF cursors).
private void OpenVideoSource(IVideoSource source)
{
    this.Cursor = Cursors.Wait;

    // stop and release whatever was playing
    CloseVideoSource();

    // async wrapper keeps rendering from stalling capture
    CameraZm.VideoSource = new AsyncVideoSource(source);
    CameraZm.Start();

    this.Cursor = Cursors.Arrow;
}
// Start streaming JPEG snapshots from the Axis camera at the entered IP.
private void btn_Start_Click(object sender, EventArgs e)
{
    // FIX: stop a previously started stream so repeated clicks do not
    // leak running sources (each click created and started a new
    // JPEGStream while the old one kept running)
    if (videoSource != null)
    {
        videoSource.SignalToStop();
    }

    string ConnectionString = "http://" + cameraIp.Text + "/axis-cgi/jpg/image.cgi";
    videoSource = new JPEGStream(ConnectionString);
    videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);
    videoSource.Start();
}
// Wire the manager to its collaborators; every dependency is mandatory
// and a null argument is rejected immediately.
public VideoManager(IVideoSource source, IAudioSource audioSource,
                    INetwork network, ILogger<VideoManager> logger)
{
    _videoSource = source ?? throw new ArgumentNullException(nameof(source));
    _audioSource = audioSource ?? throw new ArgumentNullException(nameof(audioSource));
    _network = network ?? throw new ArgumentNullException(nameof(network));

    // let the network layer push video-related traffic to this manager
    _network.RegisterVideoManager(this);

    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
// Common test setup: substitute every dependency, then build the unit
// under test from the fakes.
public void Setup()
{
    _barcodeReader = Substitute.For<IBarcodeReader>();
    _timer = Substitute.For<ITimer<Timer>>();
    _video = Substitute.For<IVideoSource>();
    _output = Substitute.For<IOutput>();

    // system under test wired with the substitutes above
    _uut = new CameraConnection(_timer, _barcodeReader, _video, _output);
}
// Wrap the source in a Camera and begin capturing.
private void OpenVideoSource(IVideoSource source)
{
    // create camera
    camera = new Camera(source);

    // FIX: subscribe before starting so the very first frames are not
    // missed (the original attached the handler after Start, leaving a
    // window in which frames could arrive with no listener)
    camera.NewFrame += new EventHandler(camera_NewFrame);

    // start camera
    camera.Start();
}
// Bind a video source and a motion detector together as one camera unit.
public Kamera(IVideoSource ivideo, IMotionDetector mdetector)
{
    this.ivideo = ivideo;
    this.mdetector = mdetector;

    // method-group subscriptions: frames and source errors
    ivideo.NewFrame += Kamera_YeniFrame;
    ivideo.VideoSourceError += Kamera_VideoError;
}
// Start streaming JPEG snapshots from the Axis camera at the entered address.
private void BtnStart_Click(object sender, RoutedEventArgs e)
{
    // same result as "http://" + ipInfo.Text + "/axis-cgi/jpg/image.cgi"
    string ConnectionString = $"http://{ipInfo.Text}/axis-cgi/jpg/image.cgi";

    _videoSource = new JPEGStream(ConnectionString);
    _videoSource.NewFrame += video_NewFrame;
    _videoSource.Start();
}
// Named camera that records frames from the source into a Video backed by
// the given store.
public Camera(string deviceName, IVideoSource videoSource, VideoStore videoStore)
{
    this.deviceName = deviceName;

    // no frame seen yet
    this.lastFrameTime = DateTime.MinValue;

    this.video = new Video(videoStore);
    this.videoSource = videoSource;
    this.videoSource.NewFrame += videoSource_NewFrame;
}
// Wrap an existing video source; 再生速度 (playback speed) defaults to 1.0.
public Video(IVideoSource videoSource, double 再生速度 = 1.0)
    : this()
{
    this._VideoSource = videoSource;
    this.再生速度 = 再生速度;

    // this instance was not created from a file
    this._ファイルから生成した = false;
}
// Hook a frame source to a freshly built background model and start it.
// frames/pixels/verschil parameterize the background's sensitivity.
public VideoInput(IVideoSource source, int frames, int pixels, int verschil)
{
    aFrames = frames;
    bgFrame = new Background(frames, pixels, verschil);

    this.source = source;
    this.source.NewFrame += new CameraEventHandler(source_NewFrame);
    this.source.Start();
}
// Switch the player to the given source and restart the FPS timer.
private void OpenVideoSource(IVideoSource source)
{
    this.Cursor = Cursors.WaitCursor;

    // async wrapper keeps frame processing off the capture thread
    videoSourcePlayer.VideoSource = new AsyncVideoSource(source);
    videoSourcePlayer.Start();

    fpsTimer.Start();
    videoSource = source;

    this.Cursor = Cursors.Default;
}
// Ask the current video source (if any) to refresh playback.
public void RefreshPlay()
{
    GetVideoSource()?.RefreshPlay();
}
// Local (DirectShow) camera: pair a capture device with a motion detector.
public Camera(VideoCaptureDevice source, MotionDetector detector, string _name)
{
    // this overload is for local devices, not IP streams
    ipCamera = false;
    cameraName = _name;
    pubFrame = null;

    this.videoSource = source;
    this.motionDetector = detector;
    videoSource.NewFrame += video_NewFrame;
}
// IP camera: pair an MJPEG stream with a motion detector.
public Camera(MJPEGStream source, MotionDetector detector, string _name)
{
    // this overload is for network streams
    ipCamera = true;
    cameraName = _name;
    pubFrame = null;

    this.videoSource = source;
    this.motionDetector = detector;
    videoSource.NewFrame += video_NewFrame;
}
// Start capturing from the device currently selected in the combo box.
private void StartCamera()
{
    var device = (FilterInfo)cbDevices.SelectedItem;
    if (device != null)
    {
        videoSource = new VideoCaptureDevice(device.MonikerString);
        videoSource.NewFrame += video_NewFrame;
        videoSource.Start();
    }
}
// Start capturing from the currently selected device, if there is one.
private void StartCamera()
{
    if (CurrentDevice == null)
    {
        return;
    }

    _videoSource = new VideoCaptureDevice(CurrentDevice.MonikerString);
    _videoSource.NewFrame += video_NewFrame;
    _videoSource.Start();
}
// Create video source via the provider; returns true when one was created
// and its frame handler hooked up.
public bool CreateVideoSource()
{
    if (provider == null)
    {
        return false;
    }

    videoSource = provider.CreateVideoSource(configuration);
    if (videoSource == null)
    {
        return false;
    }

    // deliver frames to the camera handler
    videoSource.NewFrame += new CameraEventHandler(video_NewFrame);
    return true;
}
// Stop whatever is playing, then start the new source in the frame player.
private void OpenVideoSource(IVideoSource source)
{
    CloseCurrentVideoSource();

    framePlayer.VideoSource = source;
    framePlayer.Start();
}
// Test setup: fake the boundary dependencies (video source, output), use a
// real timer and barcode reader, and build the system under test.
public void Setup()
{
    _fakeVideoSource = Substitute.For<IVideoSource>();
    _fakeOutput = Substitute.For<IOutput>();

    // real collaborators: a 100 ms clock and the actual barcode reader
    _timer = new TimerClock(100);
    _barcodeReader = new ReadBarcode();

    _sut = new CameraConnection(_timer, _barcodeReader, _fakeVideoSource, _fakeOutput);
}
// Wrap the given source and start it immediately.
private VideoFeed(IVideoSource source)
{
    videoSource = source;
    videoSource.Start();
}
// Put the editor into edit mode for an existing video source: swap in a
// fresh editable model, re-resolve its path, and rewire validation.
public void SetSource(IVideoSource videoSource)
{
    // detach validation from the model being replaced
    EditableVideoSource.ErrorsChanged -= RaiseCanExecuteChanged;

    videoSourceCache = videoSource;

    // resolve the source's path id against the known paths
    // (First throws if the id is unknown - same as the original behavior)
    var resolvedPath = existingSourcePaths.First(p => p.Id == videoSource.PathId).Path;

    var editable = EditableVideoSource.FromIVideoSource(videoSource, resolvedPath);
    EditableVideoSource = editable;
    EditableVideoSource.ErrorsChanged += RaiseCanExecuteChanged;

    EditSourceMode = true;
}
// Build an editable copy of a video source, substituting the resolved path.
public static EditableVideoSource FromIVideoSource(IVideoSource videoSource, string path)
{
    var editable = new EditableVideoSource
    {
        Path = path,
        Name = videoSource.Name,
        ContentType = videoSource.ContentType,
        InfoSource = videoSource.InfoSource,
        NoUpdate = videoSource.NoUpdate
    };
    return editable;
}
/*
 * Start playing video using selected video device
 */
private void StartVideo(String url)
{
    // lazily create the stream so repeated connect/disconnect cycles
    // reuse the same MJPEGStream instance
    if (_stream == null)
    {
        _stream = new MJPEGStream(url);
        _videoPlayer.VideoSource = _stream;
    }

    _stream.Start();
}
/// <summary>
/// Sets the device.
/// </summary>
/// <param name="monikerString">The moniker string.</param>
/// <remarks>Stops any running capture first. Null or empty moniker strings
/// are ignored - the original only rejected "" and would have passed null
/// straight into <see cref="VideoCaptureDevice"/>.</remarks>
public void SetDevice(string monikerString)
{
    // FIX: reject null as well as "" - both produce an unusable device
    if (string.IsNullOrEmpty(monikerString))
        return;

    Stop();

    MonkierString = monikerString;
    Source = new VideoCaptureDevice(MonkierString);

    // apply the preferred capture format
    VideoCaptureDevice s = (VideoCaptureDevice)Source;
    s.DesiredFrameRate = 30;
    s.DesiredFrameSize = new Size(640, 480);
}
// Return the camera registered for the device, creating and caching a new
// one (with its own video-store folder) on first use.
public Camera RegisterNew(string deviceName, IVideoSource videoSource)
{
    var dir = CheckAndGetDeviceFolder(deviceName);

    Camera camera;
    if (!cameraMap.TryGetValue(deviceName, out camera))
    {
        // store path: <deviceFolder>\<deviceName>
        var storePath = string.Format(@"{0}\{1}", dir, deviceName);
        camera = new Camera(deviceName, videoSource, new VideoStore(storePath));
        cameraMap[deviceName] = camera;
    }

    return camera;
}
// A camera requires a profile, a configuration and a video source; null
// arguments are rejected up front.
public Camera(CameraProfile profile, CameraConfig config, IVideoSource videoSource)
{
    if (profile == null)
        throw new ArgumentNullException("profile");
    if (config == null)
        throw new ArgumentNullException("config");
    if (videoSource == null)
        throw new ArgumentNullException("videoSource");

    _profile = profile;
    _config = config;
    _videoSource = videoSource;
}
/// <summary>
/// Sets the device.
/// </summary>
/// <param name="videoDeviceNum">The video device number.</param>
/// <remarks>Any running capture is stopped first; out-of-range indices are
/// ignored.</remarks>
public void SetDevice(int videoDeviceNum)
{
    Stop();

    // ignore indices outside the enumerated device list
    if (videoDeviceNum < 0 || Devices.Count <= videoDeviceNum)
        return;

    MonkierString = Devices[videoDeviceNum].MonikerString;
    Source = new VideoCaptureDevice(MonkierString);

    // apply the preferred capture format
    VideoCaptureDevice s = (VideoCaptureDevice)Source;
    s.DesiredFrameRate = 30;
    s.DesiredFrameSize = new Size(640, 480);
}
// Close the video source and release the cached frame.
public void Close()
{
    if (videoSource != null)
    {
        // BUG FIX: the guard tested "videoSource" but the call went to a
        // different field, "_videoSource" - close the same field that is
        // tested and cleared
        videoSource.Close();
        videoSource = null;
    }

    // dispose old frame
    if (lastFrame != null)
    {
        lastFrame.Dispose();
        lastFrame = null;
    }
}
// Motion detector built on simple background modeling plus blob counting,
// fed by an async wrapper around the given source. The low/medium/high
// thresholds classify motion magnitude; a 50-frame ring buffer backs the
// video recorder.
public YAMDDetector(IVideoSource source, Magnitude low, Magnitude medium, Magnitude high)
{
    detector = new MotionDetector(
        new SimpleBackgroundModelingDetector(),
        new BlobCountingObjectsProcessing(true));

    // async video source processes images in a separate thread and uses
    // the NewFrame event
    inputStream = new AsyncVideoSource(source);
    inputStream.NewFrame += inputStream_NewFrame;

    this.low = low;
    this.medium = medium;
    this.high = high;

    timer = new Stopwatch();
    stoptimer = new Stopwatch();
    videoRecorder = new VideoFileWriter();
    Running = false;

    // keep at most the last 50 frames for pre-alarm recording
    buffer = new FixedSizeQueue<Bitmap> { Limit = 50 };
    magnitudes = new Queue<int>();
}
/// <summary>
/// Initializes a new instance of the <see cref="AsyncVideoSource"/> class.
/// </summary>
///
/// <param name="nestedVideoSource">Nested video source which is the target for asynchronous processing.</param>
/// <param name="skipFramesIfBusy">When set, frames that arrive while the previous
/// frame is still being processed are dropped instead of queued.</param>
///
public AsyncVideoSource(IVideoSource nestedVideoSource, bool skipFramesIfBusy)
{
    this.skipFramesIfBusy = skipFramesIfBusy;
    this.nestedVideoSource = nestedVideoSource;
}
/// <summary>
/// Initializes a new instance of the <see cref="AsyncVideoSource"/> class,
/// wrapping the given source for asynchronous frame processing.
/// </summary>
///
/// <param name="nestedVideoSource">Nested video source which is the target for asynchronous processing.</param>
///
public AsyncVideoSource(IVideoSource nestedVideoSource)
{
    this.nestedVideoSource = nestedVideoSource;
}
//------------------------------------------------------------------------------------------------------------------------
#endregion

#region Constructor
//------------------------------------------------------------------------------------------------------------------------
// Subscribe to the source's captured-frame event for the lifetime of this client.
public YVideoClient(IVideoSource videosource)
{
    this.videosource = videosource;
    this.videosource.OnFrameCaptured += Videosource_OnFrameCaptured;
}
// Clone an existing source.
public CloneStream(IVideoSource source)
{
    _source = source;
}
// No source yet; one must be assigned before use.
public CloneStream()
    : this(null)
{
}
// Camera without motion detection: just forward frames from the source.
public Camera(IVideoSource source)
{
    _motionDetector = null;
    VideoSource = source;
    VideoSource.NewFrame += VideoNewFrame;
}
// Load a profile: tear down the current one, reset tracked state, then
// build the identification service, tracker and video source for it.
public void LoadProfile(Profile profile)
{
    CloseCurrentProfile();

    // forget everything tracked under the previous profile
    CurrentTrackedObjects.Clear();
    ArchivedObjects.Clear();

    Profile = profile;

    // identification pipeline for this profile
    var identifier = CreateIdentifier();
    _identificationService = CreateIdService(identifier);

    _objectTracker = new ObjectTracker(profile.TrackerSettings);

    // start receiving frames from the profile's configured source
    VideoSource = VideoSourceFactory.Create(profile);
    VideoSource.NewFrame += VideoSourceOnNewFrame;
}
// Camera with motion detection enabled.
public Camera(IVideoSource source, MotionDetector detector)
{
    _motionDetector = detector;
    VideoSource = source;
    VideoSource.NewFrame += VideoNewFrame;
}
// Open a video source for this camera window. Skips the switch when the
// same source is already attached (unless @override is set), wires the
// per-stream-type audio/alert plumbing, then wraps it in a Camera.
private void OpenVideoSource(IVideoSource source, bool @override)
{
    // already showing this source - nothing to do
    if (!@override && Camera != null && Camera.VideoSource != null &&
        Camera.VideoSource.Source == source.Source)
    {
        return;
    }

    if (Camera != null && Camera.IsRunning)
    {
        Disable();
    }

    // FIX: the original tested "source is VlcStream" twice with separate
    // casts; the two branches are merged here (same configuration, one
    // check) and pattern variables remove the repeated casts
    if (source is VlcStream vlc)
    {
        vlc.FormatWidth = Camobject.settings.desktopresizewidth;
        vlc.FormatHeight = Camobject.settings.desktopresizeheight;
        vlc.HasAudioStream += VideoSourceHasAudioStream;
    }

    if (source is FFMPEGStream ffmpeg)
    {
        ffmpeg.HasAudioStream += VideoSourceHasAudioStream;
    }

    if (source is KinectStream kinect)
    {
        kinect.HasAudioStream += VideoSourceHasAudioStream;
        kinect.InitTripWires(Camobject.alerts.pluginconfig);
        kinect.TripWire += CameraAlarm;
    }

    if (source is KinectNetworkStream kinectNetwork)
    {
        kinectNetwork.HasAudioStream += VideoSourceHasAudioStream;
        kinectNetwork.AlertHandler += CameraWindow_AlertHandler;
    }

    source.PlayingFinished += SourcePlayingFinished;
    source.VideoSourceError += SourceVideoSourceError;

    Camera = new Camera(source);
}
// Public entry point that forwards to the private open routine.
public void InjectVideoSource(IVideoSource source, bool skipFramesAllowed = true)
    => OpenVideoSource(source, skipFramesAllowed);
// Open video source: reset recognition state, swap the player to an async
// wrapper around the new source, and restart the statistics timer.
private void OpenVideoSource(IVideoSource source, bool skippingFramesAllowed = true)
{
    // busy cursor while switching
    this.Cursor = Cursors.WaitCursor;

    // drop glyph-recognition state from the previous stream
    imageProcessor.Reset();

    // make sure the old source has fully stopped
    videoSourcePlayer.SignalToStop();
    videoSourcePlayer.WaitForStop();

    // async wrapper; optionally drops frames while one is being processed
    videoSourcePlayer.VideoSource = new AsyncVideoSource(source, skippingFramesAllowed);
    videoSourcePlayer.Start();

    // restart FPS measurement from scratch
    stopWatch = null;
    timer.Start();

    this.Cursor = Cursors.Default;
}
// Tear down the active profile: detach and dispose the video source, the
// object tracker and the identification service, clearing each reference.
private void CloseCurrentProfile()
{
    if (VideoSource == null)
        return;

    VideoSource.NewFrame -= VideoSourceOnNewFrame;
    VideoSource.Dispose();
    VideoSource = null;

    _objectTracker.Dispose();
    _objectTracker = null;

    _identificationService.Dispose();
    _identificationService = null;
}
// Idempotent teardown: releases drawing resources, detaches every event
// subscriber and resets the motion detector. The body runs under _sync
// because frame-processing callbacks touch the same state.
public void Dispose()
{
    // double-dispose / re-entrancy guard
    if (_disposing)
        return;

    _disposing = true;

    lock (_sync)
    {
        ClearMotionZones();
        ForeBrush.Dispose();
        BackBrush.Dispose();
        DrawFont.Dispose();
        _framerates?.Clear();
        if (Mask != null)
        {
            Mask.Dispose();
            Mask = null;
        }
        // nulling the delegate fields detaches all remaining subscribers
        Alarm = null;
        NewFrame = null;
        PlayingFinished = null;
        Plugin = null;
        VideoSource = null;
        if (MotionDetector != null)
        {
            // Reset can throw inside the detector; report the error but
            // keep disposing
            try
            {
                MotionDetector.Reset();
            }
            catch (Exception ex)
            {
                ErrorHandler?.Invoke(ex.Message);
            }
            MotionDetector = null;
        }
    }
}
/// <summary>
/// Create new Camera object.
/// </summary>
/// <param name="source">Video source.</param>
/// <param name="imageProcess">The image process system.</param>
/// <param name="output">To where output results.</param>
/// <param name="graphicalOutput">How to change the graphical output.</param>
/// <param name="pictureBox">Where to display the final image.</param>
public Camera(IVideoSource source, IImageProcess imageProcess, IOutput output,
              GraphicalOutputDelegate graphicalOutput, PictureBox pictureBox)
{
    // every collaborator is mandatory
    if (source == null)
        throw new ArgumentNullException("source");
    if (imageProcess == null)
        throw new ArgumentNullException("imageProcess");
    if (output == null)
        throw new ArgumentNullException("output");
    if (graphicalOutput == null)
        throw new ArgumentNullException("graphicalOutput");
    if (pictureBox == null)
        throw new ArgumentNullException("pictureBox");

    videoSource = source;
    videoSource.NewFrame += video_NewFrame;

    this.imageProcess = imageProcess;
    this.output = output;
    this.graphicalOutput = graphicalOutput;
    this.pictureBox = pictureBox;
}
// Used when no background needs to be established: delegates to the main
// constructor with defaults (1 frame, pixel threshold 10, difference 255).
public VideoInput(IVideoSource source)
    : this(source, 1,10,255)
{
}