/// <summary>
/// Capture-timer tick: grabs one frame of the selected screen region together with the
/// system cursor, records it as a new project frame and queues the bitmap for saving.
/// </summary>
private void Cursor_Elapsed(object sender, EventArgs e)
{
    //Actual position on the screen.
    //Window coordinates are rounded, offset and scaled to device pixels on the UI thread.
    var lefttop = Dispatcher.Invoke(() =>
    {
        var left = Math.Round((Math.Round(Left, MidpointRounding.AwayFromZero) + Constants.LeftOffset) * _scale);
        var top = Math.Round((Math.Round(Top, MidpointRounding.AwayFromZero) + Constants.TopOffset) * _scale);
        return (new Point((int)left, (int)top));
    });

    int cursorPosX, cursorPosY;

    //Captures the region and the cursor position in a single native call.
    var bt = Native.CaptureWithCursor(_size, lefttop.X, lefttop.Y, out cursorPosX, out cursorPosY);

    //Nothing to record if the capture failed or the window is gone.
    if (bt == null || !IsLoaded)
    {
        return;
    }

    string fileName = $"{Project.FullPath}{FrameCount}.png";

    //Frame metadata: delay, cursor position, click state, and a snapshot of the keys
    //pressed since the last tick (copied because _keyList is cleared right after).
    Project.Frames.Add(new FrameInfo(fileName, FrameRate.GetMilliseconds(_snapDelay), cursorPosX, cursorPosY,
        _recordClicked || Mouse.LeftButton == MouseButtonState.Pressed, new List<SimpleKeyGesture>(_keyList)));

    _keyList.Clear();

    //The bitmap is copied so 'bt' can be released before the worker runs.
    ThreadPool.QueueUserWorkItem(delegate { AddFrames(fileName, new Bitmap(bt)); });

    FrameCount++;
}
/// <summary>
/// Reads the track list from a CSV input file and stores it ordered by track number.
/// </summary>
/// <param name="inputFile">Path of the CSV file to read.</param>
/// <param name="frameRate">Frame rate used by the timecode converter when parsing.</param>
/// <returns>True when the tracks were read successfully.</returns>
/// <exception cref="ArgumentException">Thrown when no input file is given.</exception>
/// <exception cref="FileNotFoundException">Thrown when the input file does not exist.</exception>
public bool ReadFrom(string inputFile, FrameRate frameRate)
{
    if (string.IsNullOrEmpty(inputFile))
    {
        throw new ArgumentException("Input is null or empty, we need an input file to process! Check appsettings.json for valid configuration");
    }

    Log.Information("Attempting to read input file {@File} for tracks", inputFile);

    try
    {
        using (var reader = new StreamReader(inputFile))
        using (var csv = new CsvReader(reader, _csvConfiguration))
        {
            ToTimeCodeConverter.FrameRate = frameRate; // hacky fix TODO

            //Materialize AFTER ordering: the old code assigned a deferred OrderBy,
            //which re-sorted the list on every enumeration (including Count()).
            _tracks = csv.GetRecords<Track>().OrderBy(track => track.Number).ToList();

            Log.Information("Read {@Count} tracks from input file", _tracks.Count());
            return (true);
        }
    }
    catch (FileNotFoundException e)
    {
        Log.Debug(e, "Input file not found");
        throw new FileNotFoundException($"Input file '{inputFile}' not found", e);
    }
    catch (Exception e)
    {
        Log.Debug(e, "Unhandled exception");
        throw;
    }
}
/// <summary>
/// Async capture-timer tick: screenshots the selected region on a background task,
/// records the frame (no cursor data) and queues the bitmap for saving.
/// </summary>
private async void NormalAsync_Elapsed(object sender, EventArgs e)
{
    //Actual position on the screen.
    //Window coordinates are rounded, offset and scaled to device pixels on the UI thread.
    var lefttop = Dispatcher.Invoke(() =>
    {
        var left = Math.Round((Math.Round(Left, MidpointRounding.AwayFromZero) + Constants.LeftOffset) * _scale);
        var top = Math.Round((Math.Round(Top, MidpointRounding.AwayFromZero) + Constants.TopOffset) * _scale);
        return (new Point((int)left, (int)top));
    });

    //Take a screenshot of the area.
    _captureTask = Task.Factory.StartNew(() => Native.Capture(_size, lefttop.X, lefttop.Y));
    var bt = await _captureTask;

    //Nothing to record if the capture failed or the window is gone.
    if (bt == null || !IsLoaded)
    {
        return;
    }

    string fileName = $"{Project.FullPath}{FrameCount}.png";

    //Snapshot of the keys pressed since the last tick (copied because _keyList is cleared next).
    Project.Frames.Add(new FrameInfo(fileName, FrameRate.GetMilliseconds(_snapDelay), new List<SimpleKeyGesture>(_keyList)));

    _keyList.Clear();

    //The bitmap is copied so 'bt' can be released before the worker runs.
    ThreadPool.QueueUserWorkItem(delegate { AddFrames(fileName, new Bitmap(bt)); });

    FrameCount++;
}
/// <summary>
/// Rejects drop-frame timecode for any rate other than the NTSC rates that define it.
/// </summary>
/// <exception cref="ArgumentException">Drop-frame requested for a non-NTSC rate.</exception>
private static void FrameRateSanityCheck(FrameRate frameRate, bool isDropFrame)
{
    if (!isDropFrame)
    {
        return;
    }

    if (frameRate == FrameRate.fps29_97 || frameRate == FrameRate.fps59_94)
    {
        return;
    }

    throw new ArgumentException("Dropframe is supported with 29.97 or 59.94 fps.", nameof(isDropFrame));
}
/// <summary>
/// Runs the frame loop: for every frame, fires PreProcess, ProcessFrame and
/// PostProcess in order while collecting frame-rate statistics.
/// </summary>
/// <exception cref="ArgumentException">Control has not been assigned before starting.</exception>
public void RunLoop()
{
    if (Control == null)
    {
        throw new ArgumentException("Control cannot be null on loop start");
    }

    //Lazily create the statistics tracker on first run.
    frameRate = frameRate ?? new FrameRate();
    frameRate.Start();

    while (NextFrame())
    {
        frameRate.StartFrame();

        PreProcess?.Invoke(this, new EventArgs());
        ProcessFrame?.Invoke(this, new EventArgs());
        PostProcess?.Invoke(this, new EventArgs());

        frameRate.EndFrame();
    }

    frameRate.Stop();
}
/// <summary>
/// Maps a FrameRate enum value to its exact frames-per-second value.
/// NTSC rates are the exact rationals (x * 1000 / 1001), not rounded decimals.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Unknown enum value.</exception>
public static double ToDouble(this FrameRate frameRate)
{
    if (frameRate == FrameRate.fps23_98) return 24000.0 / 1001.0;
    if (frameRate == FrameRate.fps24) return 24;
    if (frameRate == FrameRate.fps25) return 25;
    if (frameRate == FrameRate.fps29_97) return 30000.0 / 1001.0;
    if (frameRate == FrameRate.fps30) return 30;
    if (frameRate == FrameRate.fps50) return 50;
    if (frameRate == FrameRate.fps59_94) return 60000.0 / 1001.0;
    if (frameRate == FrameRate.fps60) return 60;
    if (frameRate == FrameRate.msec) return 1;

    throw new ArgumentOutOfRangeException(nameof(frameRate), frameRate, null);
}
/// <summary>
/// Parses a timecode string (HH:MM:SS:FF style, per TimeCodePattern) into a TimeCode.
/// </summary>
/// <param name="input">Timecode text to parse.</param>
/// <param name="frameRate">Frame rate of the resulting timecode.</param>
/// <param name="isDropFrame">Whether drop-frame counting applies (NTSC rates only).</param>
/// <returns>The parsed TimeCode with its total-frame count updated.</returns>
/// <exception cref="ArgumentNullException">Input is null or empty.</exception>
/// <exception cref="ArgumentException">Input does not match the timecode pattern,
/// or drop-frame is requested for a non-NTSC rate.</exception>
public static TimeCode FromString(string input, FrameRate frameRate, bool isDropFrame)
{
    if (string.IsNullOrEmpty(input))
    {
        throw new ArgumentNullException(nameof(input));
    }

    FrameRateSanityCheck(frameRate, isDropFrame);

    //Use the static Regex.Match so the parsed pattern is reused from the Regex
    //cache instead of being rebuilt with 'new Regex(...)' on every call.
    var match = Regex.Match(input, TimeCodePattern);
    if (!match.Success)
    {
        throw new ArgumentException("Input text was not in valid timecode format.", nameof(input));
    }

    var tc = new TimeCode(frameRate, isDropFrame)
    {
        Hours = int.Parse(match.Groups["hours"].Value),
        Minutes = int.Parse(match.Groups["minutes"].Value),
        Seconds = int.Parse(match.Groups["seconds"].Value),
        Frames = int.Parse(match.Groups["frames"].Value)
    };

    tc.UpdateTotalFrames();
    return (tc);
}
/// <summary>
/// Maps a FrameRate enum value to its nominal integer rate; NTSC rates round up
/// to their integer neighbour (23.98→24, 29.97→30, 59.94→60), msec maps to 1000.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">Unknown enum value.</exception>
public static int ToInt(this FrameRate frameRate)
{
    if (frameRate == FrameRate.fps23_98 || frameRate == FrameRate.fps24) return 24;
    if (frameRate == FrameRate.fps25) return 25;
    if (frameRate == FrameRate.fps29_97 || frameRate == FrameRate.fps30) return 30;
    if (frameRate == FrameRate.fps50) return 50;
    if (frameRate == FrameRate.fps59_94 || frameRate == FrameRate.fps60) return 60;
    if (frameRate == FrameRate.msec) return 1000;

    throw new ArgumentOutOfRangeException(nameof(frameRate), frameRate, null);
}
/// <summary>
/// Converts a camera FrameRate enum value to Hz; returns 0 for unrecognized values.
/// </summary>
static public float GetFramerate(FrameRate rate)
{
    if (rate == FrameRate.FrameRate240) return 240f;
    if (rate == FrameRate.FrameRate120) return 120f;
    if (rate == FrameRate.FrameRate60) return 60f;
    if (rate == FrameRate.FrameRate30) return 30f;
    if (rate == FrameRate.FrameRate15) return 15f;
    if (rate == FrameRate.FrameRate7_5) return 7.5f;
    if (rate == FrameRate.FrameRate3_75) return 3.75f;
    if (rate == FrameRate.FrameRate1_875) return 1.875f;

    //Unknown rate: fall back to 0 rather than throwing.
    return 0;
}
/// <summary>
/// Async capture-timer tick: waits for any in-flight capture, screenshots the region
/// on a background task, and records the frame together with a cursor snapshot.
/// </summary>
private async void CursorAsync_Elapsed(object sender, EventArgs e)
{
    //Get the actual position of the form.
    var lefttop = Dispatcher.Invoke(() => new Point((int)((Left + Constants.LeftOffset) * _scale), (int)((Top + Constants.TopOffset) * _scale)));

    //Serialize captures: block until the previous capture task finishes.
    if (_captureTask != null && !_captureTask.IsCompleted)
    {
        _captureTask.Wait();
    }

    //var bt = await Task.Factory.StartNew(() => Native.CaptureWindow(_thisWindow, _scale));
    _captureTask = Task.Factory.StartNew(() => Native.Capture(_size, lefttop.X, lefttop.Y), TaskCreationOptions.PreferFairness);
    var bt = await _captureTask;

    //Nothing to record if the capture failed or the window is gone.
    if (bt == null || !IsLoaded)
    {
        return;
    }

    string fileName = $"{_pathTemp}{FrameCount}.png";

    //Skip the frame when the capture surface is hidden.
    if (!OutterGrid.IsVisible)
    {
        return;
    }

    //Frame metadata includes a cursor image, its position relative to OutterGrid,
    //and whether a click was registered since the last tick.
    ListFrames.Add(new FrameInfo(fileName, FrameRate.GetMilliseconds(_snapDelay),
        new CursorInfo(Native.CaptureImageCursor(ref _posCursor), OutterGrid.PointFromScreen(_posCursor),
            _recordClicked || Mouse.LeftButton == MouseButtonState.Pressed, _scale)));

    //The bitmap is copied so 'bt' can be released before the worker runs.
    ThreadPool.QueueUserWorkItem(delegate { AddFrames(fileName, new Bitmap(bt)); });

    FrameCount++;
}
/// <summary>
/// Deserializes the camera's settings block from a binary stream.
/// The read order below mirrors the device's wire format exactly — do not reorder.
/// Note: disposing the BinaryReader also closes the supplied stream.
/// </summary>
protected internal override void Update(Stream stream)
{
    using (var binReader = new BinaryReader(stream))
    {
        //First byte is skipped — presumably a message/type marker; TODO confirm.
        binReader.ReadByte();
        base.FillSettings(binReader);
        HlsSegmentSize = binReader.ReadByte();
        BurstRate = binReader.ReadEnum<BurstRate>();
        ContinuousShot = binReader.ReadEnum<ContinuousShot>();
        WhiteBalance = binReader.ReadEnum<WhiteBalance>();
        BracketingMode = binReader.ReadByte();
        PhotoInVideo = binReader.ReadEnum<PhotoInVideo>();
        LoopingVideoMode = binReader.ReadEnum<LoopingVideo>();
        SlideshowSettings = binReader.ReadByte();
        BroadcastSettings = binReader.ReadByte();
        TimeLapseStyle = binReader.ReadByte();
        VideoLoopCounter = binReader.ReadInt32();
        ExternalBattery = binReader.ReadByte();
        //One status byte packs four boolean flags, highest bit first.
        var field = binReader.ReadByte();
        IsBombieAttached = (byte) (field & 0x8) > 0;
        IsLcdAttached = (byte) (field & 0x4) > 0;
        IsBroadcasting = (byte) (field & 0x2) > 0;
        IsUploading = (byte) (field & 0x1) > 0;
        LcdVolume = binReader.ReadByte();
        LcdBrightness = binReader.ReadByte();
        LcdSleepTimer = binReader.ReadByte();
        VideoResolution = binReader.ReadEnum<VideoResolution>();
        FrameRate = binReader.ReadEnum<FrameRate>();
    }
}
/// <summary>
/// Reads the settings payload for this camera model from the stream.
/// Field order matches the binary layout on the wire and must be preserved.
/// The BinaryReader takes ownership of (and closes) the stream.
/// </summary>
protected internal override void Update(Stream stream)
{
    using (var binReader = new BinaryReader(stream))
    {
        //Leading byte is discarded — appears to be a header/marker byte; TODO confirm.
        binReader.ReadByte();
        base.FillSettings(binReader);
        HlsSegmentSize = binReader.ReadByte();
        BurstRate = binReader.ReadEnum<BurstRate>();
        ContinuousShot = binReader.ReadEnum<ContinuousShot>();
        WhiteBalance = binReader.ReadEnum<WhiteBalance>();
        BracketingMode = binReader.ReadByte();
        PhotoInVideo = binReader.ReadEnum<PhotoInVideo>();
        LoopingVideoMode = binReader.ReadEnum<LoopingVideo>();
        SlideshowSettings = binReader.ReadByte();
        BroadcastSettings = binReader.ReadByte();
        TimeLapseStyle = binReader.ReadByte();
        VideoLoopCounter = binReader.ReadInt32();
        ExternalBattery = binReader.ReadByte();
        //Bit-packed attachment/activity flags: 0x8 bombie, 0x4 LCD, 0x2 broadcast, 0x1 upload.
        var field = binReader.ReadByte();
        IsBombieAttached = (byte)(field & 0x8) > 0;
        IsLcdAttached = (byte)(field & 0x4) > 0;
        IsBroadcasting = (byte)(field & 0x2) > 0;
        IsUploading = (byte)(field & 0x1) > 0;
        LcdVolume = binReader.ReadByte();
        LcdBrightness = binReader.ReadByte();
        LcdSleepTimer = binReader.ReadByte();
        VideoResolution = binReader.ReadEnum<VideoResolution>();
        FrameRate = binReader.ReadEnum<FrameRate>();
    }
}
/// <summary>
/// Applies a preset frame rate, or the custom value when FPSCustom is chosen.
/// Unrecognized presets leave the current rate untouched.
/// </summary>
/// <param name="fRate">Preset selector.</param>
/// <param name="_frameRate">Rate used only for FPSCustom (default 60).</param>
public void SetFrameRate(FrameRate fRate, int _frameRate = 60)
{
    if (fRate == FrameRate.FPS24)
    {
        SetFrameRate(24);
    }
    else if (fRate == FrameRate.FPS30)
    {
        SetFrameRate(30);
    }
    else if (fRate == FrameRate.FPS60)
    {
        SetFrameRate(60);
    }
    else if (fRate == FrameRate.FPS90)
    {
        SetFrameRate(90);
    }
    else if (fRate == FrameRate.FPS120)
    {
        SetFrameRate(120);
    }
    else if (fRate == FrameRate.FPSCustom)
    {
        SetFrameRate(_frameRate);
    }
}
/// <summary>
/// Connects to the camera identified by FGuid, reads its current video mode and
/// frame rate, and starts the capture callback.
/// </summary>
/// <returns>True on success; false on failure, with Status carrying the error message.</returns>
public override bool Open()
{
    try
    {
        if (FGuid == null)
        {
            throw new Exception("No Camera GUID specified");
        }

        FCamera.Connect(FGuid);

        //Query the device's active mode/rate and mirror them into our fields.
        VideoMode mode = new VideoMode();
        FrameRate rate = new FrameRate();
        FCamera.GetVideoModeAndFrameRate(ref mode, ref rate);
        FMode = mode.ToString();
        FFramerate = Utils.GetFramerate(rate);

        FRunning = true;
        FCamera.StartCapture(CaptureCallback);

        //Re-create buffers for the (possibly new) mode.
        ReAllocate();

        Status = "OK";
        return (true);
    }
    catch (Exception e)
    {
        //Any failure (including the missing-GUID throw above) is reported via Status.
        FRunning = false;
        Status = e.Message;
        return (false);
    }
}
/// <summary>
/// Stops the recording or the Pre-Start countdown.
/// When frames were captured, signals the recorder to finish and closes the dialog;
/// when stopping during pre-start/snapping with no frames, restores the idle UI.
/// </summary>
private async void Stop()
{
    try
    {
        _capture.Stop();
        FrameRate.Stop();

        if (Stage != Stage.Stopped && Stage != Stage.PreStarting && Project.Any)
        {
            #region Stop

            //Ask the async recorder to drain before closing.
            if (UserSettings.All.AsyncRecording)
            {
                _stopRequested = true;
            }

            //Give in-flight captures a moment to land.
            await Task.Delay(100);

            ExitArg = ExitAction.Recorded;
            DialogResult = false;

            #endregion
        }
        else if ((Stage == Stage.PreStarting || Stage == Stage.Snapping) && !Project.Any)
        {
            #region if Pre-Starting or in Snapmode and no Frames, Stops

            //Only returns to the stopped stage if it was recording.
            Stage = Stage == Stage.Snapping ? Stage.Snapping : Stage.Stopped;

            //Enables the controls that are disabled while recording;
            FpsIntegerUpDown.IsEnabled = true;
            RecordPauseButton.IsEnabled = true;
            HeightIntegerBox.IsEnabled = true;
            WidthIntegerBox.IsEnabled = true;

            IsRecording = false;
            Topmost = true;

            Title = "Screen To Gif";
            AutoFitButtons();

            #endregion
        }
    }
    catch (NullReferenceException nll)
    {
        LogWriter.Log(nll, "NullPointer on the Stop function");
        ErrorDialog.Ok("ScreenToGif", "Error while stopping", nll.Message, nll);
    }
    catch (Exception ex)
    {
        LogWriter.Log(ex, "Error on the Stop function");
        ErrorDialog.Ok("ScreenToGif", "Error while stopping", ex.Message, ex);
    }
}
/// <summary>
/// Initializes Kinect color-video capture. Only 640x480 at 30 Hz is supported;
/// anything else throws a GoblinException before touching the hardware.
/// </summary>
/// <param name="videoDeviceID">Index into the connected Kinect sensors.</param>
/// <param name="framerate">Must be FrameRate._30Hz.</param>
/// <param name="resolution">Must be Resolution._640x480.</param>
/// <param name="format">Requested image format (stored for later use).</param>
/// <param name="grayscale">Whether to treat frames as grayscale (stored for later use).</param>
/// <exception cref="GoblinException">Unsupported resolution/framerate, or no connected sensor at the given index.</exception>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    if (cameraInitialized)
    {
        return;
    }

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    ColorImageFormat colorFormat = ColorImageFormat.Undefined;

    switch (resolution)
    {
        case Resolution._640x480:
            cameraWidth = 640;
            cameraHeight = 480;
            colorFormat = ColorImageFormat.RgbResolution640x480Fps30;
            break;

        default:
            throw new GoblinException(resolution.ToString() + " is not supported by Kinect video. The only " +
                "supported resolution is 640x480");
    }

    if (framerate != FrameRate._30Hz)
    {
        throw new GoblinException(framerate.ToString() + " is not supported by Kinect video. The only supported " +
            "frame rate is 30 Hz");
    }

    sensor = (from sensorToCheck in KinectSensor.KinectSensors
              where sensorToCheck.Status == KinectStatus.Connected
              select sensorToCheck).ElementAtOrDefault(videoDeviceID);

    //FIX: ElementAtOrDefault returns null when the index is out of range; fail with
    //a meaningful error instead of a NullReferenceException on the next line.
    if (sensor == null)
    {
        throw new GoblinException("No connected Kinect sensor found for videoDeviceID " + videoDeviceID);
    }

    sensor.ColorStream.Enable(colorFormat);
    sensor.Start();

    //Depth and color share one event when depth is on; otherwise subscribe to color only.
    if (depthStreamEnabled)
    {
        sensor.AllFramesReady += new EventHandler<AllFramesReadyEventArgs>(AllImagesReady);
    }
    else
    {
        sensor.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(VideoImageReady);
    }

    if (UsedForCalibration)
    {
        videoData = new int[cameraWidth * cameraHeight];
    }

    cameraInitialized = true;
}
/// <summary>
/// Builds a timecode from clock components plus a frame index, converting the
/// frame into whole milliseconds at the given frame rate.
/// </summary>
public TimeCode(int hours, int minutes, int seconds, int frame = 0, FrameRate frameRate = FrameRate.Fps24)
{
    //Milliseconds contributed by the frame index: (1000 / fps) * frame, rounded away from zero.
    var framesPerSecond = frameRate.ToDouble();
    var frameMilliseconds = (int)Math.Round(MillisInt / framesPerSecond * frame, MidpointRounding.AwayFromZero);

    TimeSpan = new TimeSpan(0, hours, minutes, seconds, frameMilliseconds);
}
//Update interval value; 16 corresponds to ~60 FPS.
private int properties_UpdateRate = 16; //60 FPS is the default

#endregion Fields

#region Constructors

/// <summary>
/// Wires the game loop to its host form, creates the controller, stores the
/// statistics screen and applies the requested update rate.
/// </summary>
public GameLoop(Form1 form, FrameRate updateRate, StatisticsScreen statScreen)
{
    game_Form = form;
    game_Controller = new GameController(form, this);
    game_StatScreen = statScreen;
    SetUpdateRate(updateRate);
}
/// <summary>
/// An unconfigured FrameRate must refuse serialization; a configured one must
/// serialize to the ffmpeg "-r" argument.
/// </summary>
public void FrameRate_Verify()
{
    // Arrange
    var uninitialized = new FrameRate();
    var configured = new FrameRate(29.97);

    // Act / Assert
    Assert.Throws<InvalidOperationException>(() => { SettingSerializer.Serialize(uninitialized); });
    Assert.Equal("-r 29.97", SettingSerializer.Serialize(configured));
}
/// <summary>
/// Forwards a frame-rate update to the foreground channel, resolving the
/// channel from the container lazily on first use.
/// </summary>
public void OnUpdateFrameRate(FrameRate frameRate)
{
    _foregroundChannel = _foregroundChannel ?? _container.Resolve<IForegroundChannel>();
    _foregroundChannel.OnUpdateFrameRate(frameRate);
}
/// <summary>
/// Stops the recording or the Pre-Start countdown.
/// With frames recorded, closes (or ends the dialog for) the board window;
/// otherwise restores the idle UI state.
/// </summary>
private void Stop()
{
    try
    {
        FrameCount = 0;

        _capture.Stop();
        FrameRate.Stop();

        if (Stage != Stage.Stopped && Stage != Stage.PreStarting && Project.Any)
        {
            #region Stop

            ExitArg = ExitAction.Recorded;

            //Dialog mode ends via DialogResult; standalone mode closes the window.
            if (IsDialog)
            {
                DialogResult = false;
            }
            else
            {
                Close();
            }

            #endregion
        }
        else if ((Stage == Stage.PreStarting || Stage == Stage.Snapping) && !Project.Any)
        {
            #region if Pre-Starting or in Snapmode and no Frames, Stops

            Stage = Stage.Stopped;

            //Enables the controls that are disabled while recording;
            FpsNumericUpDown.IsEnabled = true;
            HeightIntegerBox.IsEnabled = true;
            WidthIntegerBox.IsEnabled = true;

            IsRecording = false;
            Topmost = true;

            //NOTE(review): the title suffix below looks mis-encoded (mojibake); kept
            //byte-identical since it is a runtime string — confirm intended glyph.
            Title = FindResource("Board.Title") as string + " â– ";
            AutoFitButtons();

            #endregion
        }
    }
    catch (NullReferenceException nll)
    {
        ErrorDialog.Ok(FindResource("Board.Title") as string, "Error while stopping", nll.Message, nll);
        LogWriter.Log(nll, "NullPointer on the Stop function");
    }
    catch (Exception ex)
    {
        ErrorDialog.Ok(FindResource("Board.Title") as string, "Error while stopping", ex.Message, ex);
        LogWriter.Log(ex, "Error on the Stop function");
    }
}
//Marshals the FrameRate argument into a raw parameter block and fires the
//target object's SetFrameRate event through the engine's ProcessEvent thunk.
//The 2-long buffer is the parameter scratch space; FrameRate is written at offset 0.
internal static unsafe void Invoke(IntPtr obj, FrameRate FrameRate)
{
    long *p = stackalloc long[] { 0L, 0L };
    byte *b = (byte *)p;
    *((FrameRate *)(b + 0)) = FrameRate;
    Main.GetProcessEvent(obj, SetFrameRate_ptr, new IntPtr(p));;
}
}
/// <summary>
/// An unconfigured FrameRate must throw on validation; a configured one must
/// validate cleanly and render the ffmpeg "-r" argument.
/// </summary>
public void FrameRate_Verify()
{
    var settingWrong1 = new FrameRate();
    var setting = new FrameRate(29.97);

    Assert.Throws<InvalidOperationException>(() => { var s = settingWrong1.GetAndValidateString(); });
    Assert.DoesNotThrow(() => { var s = setting.GetAndValidateString(); });

    //FIX: Assert.Equal takes (expected, actual) — the arguments were swapped,
    //which produces a misleading failure message.
    Assert.Equal("-r 29.97", setting.GetAndValidateString());
}
/// <summary>
/// Builds a timecode from clock components and a frame index at the given rate,
/// caching the total milliseconds.
/// </summary>
public TimeCode(byte hour, byte minute, byte second, byte frame, FrameRate frmRate)
{
    _h = hour;
    _m = minute;
    _s = second;
    _f = frame;

    //Resolves Numerator/Demoniater [sic] from the rate enum — TODO confirm: the
    //formula below implies fps = Numerator/Demoniater, so frame-to-ms is
    //frame * 1000 * Demoniater / Numerator.
    GetFrameRateFromFrameRate(frmRate);

    _Miliseconds = hour * 3600000 + minute * 60000 + second * 1000 + (frame * 1000 * Demoniater) / Numerator;
}
/// <summary>
/// Rejects integral values cast into FrameRate that are not defined enum members.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">The value is not a defined FrameRate member.</exception>
private static void Validate(FrameRate frameRate)
{
    if (!Enum.IsDefined(typeof(FrameRate), frameRate))
    {
        //FIX: the first argument is the parameter NAME; the old code passed the
        //value's string there. Pass the offending value via actualValue instead.
        throw new ArgumentOutOfRangeException(
            nameof(frameRate),
            frameRate,
            "Value should be defined in the FrameRate enum.");
    }
}
/// <summary>
/// Stops the recording or the Pre-Start countdown.
/// With frames recorded, ends the dialog; otherwise restores the idle
/// record-button UI and window state.
/// </summary>
private void Stop()
{
    try
    {
        _frameCount = 0;

        _capture.Stop();
        FrameRate.Stop();

        if (Stage != Stage.Stopped && Stage != Stage.PreStarting && ListFrames.Any())
        {
            #region Stop

            ExitArg = ExitAction.Recorded;
            DialogResult = false;

            #endregion
        }
        else if ((Stage == Stage.PreStarting || Stage == Stage.Snapping) && !ListFrames.Any())
        {
            #region if Pre-Starting or in Snapmode and no Frames, Stops

            Stage = Stage.Stopped;

            //Enables the controls that are disabled while recording;
            FpsNumericUpDown.IsEnabled = true;
            RecordPauseButton.IsEnabled = true;
            HeightTextBox.IsEnabled = true;
            WidthTextBox.IsEnabled = true;

            IsRecording(false);
            Topmost = true;

            //Reset the record button back to its "Record" appearance.
            RecordPauseButton.Text = Properties.Resources.btnRecordPause_Record;
            RecordPauseButton.Content = (Canvas)FindResource("Vector.Record.Dark");
            RecordPauseButton.HorizontalContentAlignment = HorizontalAlignment.Left;

            Title = Properties.Resources.TitleStoped;
            AutoFitButtons();

            #endregion
        }
    }
    catch (NullReferenceException nll)
    {
        var errorViewer = new ExceptionViewer(nll);
        errorViewer.ShowDialog();
        LogWriter.Log(nll, "NullPointer on the Stop function");
    }
    catch (Exception ex)
    {
        var errorViewer = new ExceptionViewer(ex);
        errorViewer.ShowDialog();
        LogWriter.Log(ex, "Error on the Stop function");
    }
}
/// <summary>
/// Appends one JPEG frame to the VDB data file and updates the VDI index file,
/// rolling over to a new VDB partition when the file exceeds ~1 GB.
/// </summary>
/// <param name="jpeg">Encoded frame to store.</param>
/// <param name="FPS">Frame-rate value; its integer cast is used directly as the
/// per-frame sleep in milliseconds — NOTE(review): that treats the enum's numeric
/// value as a delay, confirm the enum is defined that way.</param>
public void RecordFrame(byte[] jpeg, FrameRate FPS)
{
    //Create both backing files on first use.
    if (!File.Exists(RecordVDB))
    {
        CreateFile(RecordVDB, TypeofFile.VDB);
        if (!File.Exists(RecordVDI))
        {
            CreateFile(RecordVDI, TypeofFile.VDI);
        }
    }

    try
    {
        using (FileStream fileStreamVDB = File.OpenWrite(RecordVDB))
        {
            //Seek to the recorded write offset (start of file on the first frame).
            if (Offsets.Count == 0)
            {
                fileStreamVDB.Position = 0;
            }
            else
            {
                fileStreamVDB.Position = Offsets[CurrentPosition];
            }

            ArVDB = new ArchiveSerialization.Archive(fileStreamVDB, ArchiveSerialization.ArchiveOp.store);
            VDBSerialize(jpeg, ArVDB, fileStreamVDB.Length);

            //Partition rollover once the data file passes ~1 GB (1074279092 bytes).
            if (fileStreamVDB.Length > 1074279092)
            {
                this.NumberOfPartition++;
                RecordVDB = GenerateVDBName(RecordVDI, NumberOfPartition, 1);
                Offsets[CurrentPosition] = 0;
            }
        }
    }
    //NOTE(review): failures are silently swallowed here — a write error drops the frame
    //without any trace; consider at least logging.
    catch { }

    try
    {
        using (FileStream fileStreamVDI = File.OpenWrite(RecordVDI))
        {
            //Index file uses the previous position's offset — TODO confirm the -1 is
            //intentional (it indexes OffsetsVDI[CurrentPosition - 1]).
            if (OffsetsVDI.Count == 0)
            {
                fileStreamVDI.Position = 0;
            }
            else
            {
                fileStreamVDI.Position = OffsetsVDI[CurrentPosition - 1];
            }

            ArVDI = new ArchiveSerialization.Archive(fileStreamVDI, ArchiveSerialization.ArchiveOp.store);
            VDISerialize(ArVDI, fileStreamVDI.Length);
        }
    }
    //NOTE(review): index-write failures are also swallowed silently.
    catch { }

    //Throttle recording by sleeping the integer value of FPS (milliseconds).
    System.Threading.Thread.Sleep((int)FPS);
}
//Calls the engine's IsValid_Framerate blueprint function: the FrameRate argument
//is written at offset 0 of a raw parameter block, the event is fired through
//ProcessEvent, and the bool result is read back from offset 8.
internal static unsafe bool Invoke(FrameRate InFrameRate)
{
    long *p = stackalloc long[] { 0L, 0L, 0L };
    byte *b = (byte *)p;
    *((FrameRate *)(b + 0)) = InFrameRate;
    Main.GetProcessEvent(TimeManagementBlueprintLibrary.DefaultObject, IsValid_Framerate_ptr, new IntPtr(p));;
    return(*((bool *)(b + 8)));
}
}
//Calls the engine's Conv_FrameRateToSeconds blueprint function: FrameRate goes in
//at offset 0 of the parameter block, and the float result comes back at offset 8.
internal static unsafe float Invoke(FrameRate InFrameRate)
{
    long *p = stackalloc long[] { 0L, 0L, 0L };
    byte *b = (byte *)p;
    *((FrameRate *)(b + 0)) = InFrameRate;
    Main.GetProcessEvent(TimeManagementBlueprintLibrary.DefaultObject, Conv_FrameRateToSeconds_ptr, new IntPtr(p));;
    return(*((float *)(b + 8)));
}
}
/// <summary>
/// Feeds one frame's elapsed time into the FPS and frame-time trackers.
/// Intervals under 1 ms are ignored to avoid divide-by-near-zero spikes.
/// </summary>
public void ComputeFPS(double elapsedRealTime)
{
    if (elapsedRealTime >= 0.001)
    {
        FrameRate.Update(elapsedRealTime, 1.0 / elapsedRealTime);
        FrameTime.Update(elapsedRealTime, elapsedRealTime);
    }
}
/// <summary>
/// Convert timecode to a timespan
/// </summary>
/// <param name="frameRate">Frame rate used to decompose the total frame count.</param>
/// <returns>Time span object</returns>
public TimeSpan ToTimeSpan(FrameRate frameRate)
{
    //Re-derive H/M/S/F fields from the shared total-frame count at the given rate.
    var timecode = new Timecode(frameRate);
    timecode.TotalFrames = this.TotalFrames;
    timecode.UpdateByTotalFrames();

    return new TimeSpan(0, timecode.Hours, timecode.Minutes, timecode.Seconds, timecode.Frames);
}
/// <summary>
/// Stops the recording or the Pre-Start countdown.
/// With frames recorded, ends the dialog; otherwise restores the idle UI.
/// </summary>
private async void Stop()
{
    try
    {
        _capture.Stop();
        FrameRate.Stop();

        //Give in-flight captures a moment to land before resetting the counter.
        await Task.Delay(100);

        FrameCount = 0;

        if (Stage != Stage.Stopped && Stage != Stage.PreStarting && ListFrames.Any())
        {
            #region Stop

            ExitArg = ExitAction.Recorded;
            DialogResult = false;

            #endregion
        }
        else if ((Stage == Stage.PreStarting || Stage == Stage.Snapping) && !ListFrames.Any())
        {
            #region if Pre-Starting or in Snapmode and no Frames, Stops

            Stage = Stage.Stopped;

            //Enables the controls that are disabled while recording;
            FpsNumericUpDown.IsEnabled = true;
            RecordPauseButton.IsEnabled = true;
            HeightIntegerBox.IsEnabled = true;
            WidthIntegerBox.IsEnabled = true;

            IsRecording = false;
            Topmost = true;

            Title = "Screen To Gif";
            AutoFitButtons();

            #endregion
        }
    }
    catch (NullReferenceException nll)
    {
        var errorViewer = new Other.ExceptionViewer(nll);
        errorViewer.ShowDialog();
        LogWriter.Log(nll, "NullPointer on the Stop function");
    }
    catch (Exception ex)
    {
        var errorViewer = new Other.ExceptionViewer(ex);
        errorViewer.ShowDialog();
        LogWriter.Log(ex, "Error on the Stop function");
    }
}
/// <summary>
/// Translates a camera FrameRate enum member into its rate in Hz.
/// Falls back to 0 when the value is not recognized.
/// </summary>
static public float GetFramerate(FrameRate rate)
{
    if (rate == FrameRate.FrameRate240)
        return 240f;
    if (rate == FrameRate.FrameRate120)
        return 120f;
    if (rate == FrameRate.FrameRate60)
        return 60f;
    if (rate == FrameRate.FrameRate30)
        return 30f;
    if (rate == FrameRate.FrameRate15)
        return 15f;
    if (rate == FrameRate.FrameRate7_5)
        return 7.5f;
    if (rate == FrameRate.FrameRate3_75)
        return 3.75f;
    if (rate == FrameRate.FrameRate1_875)
        return 1.875f;

    return 0;
}
/// <summary>
/// Builds the 6-byte Blu-ray video stream attribute record:
/// [length=5][stream type][format high-nibble | rate low-nibble][3 reserved zero bytes].
/// Only VC-1, MPEG-2 and H.264 stream types are accepted.
/// </summary>
/// <exception cref="FormatException">Unsupported stream type.</exception>
private byte[] BuildVideoStreamAttributes(byte type, VideoFormat vf, FrameRate fr)
{
    bool supported =
        type == (byte)ElementaryStreamTypes.VIDEO_STREAM_VC1 ||
        type == (byte)ElementaryStreamTypes.VIDEO_STREAM_MPEG2 ||
        type == (byte)ElementaryStreamTypes.VIDEO_STREAM_H264;

    if (!supported)
        throw new FormatException(String.Format("Video stream of type {0} is not supported by Blu Ray", type));

    byte[] attributes = new byte[6];
    attributes[0] = 0x05;
    attributes[1] = type;
    //Pack format into the high nibble and frame rate into the low nibble.
    attributes[2] = (byte)((((byte)vf << 4) & 0xf0) | ((byte)fr & 0x0f));
    attributes[3] = 0;
    attributes[4] = 0;
    attributes[5] = 0;
    return attributes;
}
/// <summary>
/// Stores the requested update rate as its underlying integer value.
/// </summary>
public void SetUpdateRate(FrameRate update) => properties_UpdateRate = (int)update;
/// <summary>
/// Initializes video capture through the phone's PhotoCamera, mapping the
/// Resolution enum to pixel dimensions and binding the camera to a VideoBrush.
/// NOTE(review): the 'framerate' parameter is not read anywhere in this overload,
/// and 'resolution' is only used for the dimension lookup — confirm intended.
/// </summary>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    if (cameraInitialized)
        return;

    this.videoDeviceID = videoDeviceID;
    this.format = format;
    this.grayscale = grayscale;

    //Translate the Resolution enum into pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120:
            cameraWidth = 160;
            cameraHeight = 120;
            break;
        case Resolution._320x240:
            cameraWidth = 320;
            cameraHeight = 240;
            break;
        case Resolution._640x480:
            cameraWidth = 640;
            cameraHeight = 480;
            break;
        case Resolution._800x600:
            cameraWidth = 800;
            cameraHeight = 600;
            break;
        case Resolution._1024x768:
            cameraWidth = 1024;
            cameraHeight = 768;
            break;
        case Resolution._1280x1024:
            cameraWidth = 1280;
            cameraHeight = 1024;
            break;
        case Resolution._1600x1200:
            cameraWidth = 1600;
            cameraHeight = 1200;
            break;
    }

    camera = new PhotoCamera();
    camera.Initialized += new EventHandler<Microsoft.Devices.CameraOperationCompletedEventArgs>(CameraInitialized);

    //Lazily create the shared VideoBrush and point it at this camera.
    if (videoBrush == null)
        videoBrush = new VideoBrush();
    videoBrush.SetSource(camera);

    cameraInitialized = true;
}
/// <summary>
/// Parses a transport-stream stream-info record starting at <paramref name="index"/>:
/// a 5-byte header whose last 12 bits give the descriptor length, followed by the
/// descriptors. Copies header + descriptors into mData and resets the attribute
/// fields to their Reserved defaults.
/// </summary>
/// <exception cref="ArgumentException">Null data, truncated record, or oversized descriptors.</exception>
public StreamInfo(byte[] data, int index)
{
    if (null == data)
        throw new ArgumentException("stream data is null");

    //FIX: must check the bytes REMAINING from 'index' (Length - index), not
    //Length + index — the old test could never fail for any non-trivial buffer
    //and allowed out-of-range reads below.
    if (data.Length - index < 5)
        throw new ArgumentException("stream data too short");

    //Descriptor length: low 4 bits of byte 3 and all of byte 4 (12-bit value).
    uint descLength = (uint)((data[3 + index] & 0x0f) << 8) + data[4 + index];
    if (descLength > Constants.TS_SIZE)
        throw new ArgumentException("descriptors data too long");
    if (5 + descLength > data.Length - index)
        throw new ArgumentException("stream data too short");

    //Copy header + descriptors into our own buffer.
    mData = new byte[5 + descLength];
    Array.Copy(data, index, mData, 0, mData.Length);

    //Attributes start out Reserved until decoded elsewhere.
    mVideoFormat = VideoFormat.Reserved;
    mAspectRatio = AspectRatio.Reserved;
    mFrameRate = FrameRate.Reserved;
    mAudioPresentationType = AudioPresentationType.Reserved;
    mSamplingFrequency = SamplingFrequency.Reserved;
}
/// <summary>
/// Initializes a Point Grey (PGRFly) camera: records the requested settings, maps
/// them onto the FlyCapture frame-rate / video-mode enums, and starts the driver.
/// </summary>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    if (cameraInitialized)
        return;

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    //Translate the Resolution enum into pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120:
            cameraWidth = 160;
            cameraHeight = 120;
            break;
        case Resolution._320x240:
            cameraWidth = 320;
            cameraHeight = 240;
            break;
        case Resolution._640x480:
            cameraWidth = 640;
            cameraHeight = 480;
            break;
        case Resolution._800x600:
            cameraWidth = 800;
            cameraHeight = 600;
            break;
        case Resolution._1024x768:
            cameraWidth = 1024;
            cameraHeight = 768;
            break;
        case Resolution._1280x1024:
            cameraWidth = 1280;
            //NOTE(review): height is 960 (not 1024) — this matches the
            //FLYCAPTURE_VIDEOMODE_1280x960Y8 mode chosen below; confirm intentional.
            cameraHeight = 960;
            break;
        case Resolution._1600x1200:
            cameraWidth = 1600;
            cameraHeight = 1200;
            break;
    }

    flyCapture = new PGRFlyCapture();

    //Map our FrameRate enum onto the FlyCapture enum (ANY when unmatched).
    PGRFlyModule.FlyCaptureFrameRate flyFrameRate =
        PGRFlyModule.FlyCaptureFrameRate.FLYCAPTURE_FRAMERATE_ANY;
    switch (frameRate)
    {
        case FrameRate._15Hz:
            flyFrameRate = PGRFlyModule.FlyCaptureFrameRate.FLYCAPTURE_FRAMERATE_15;
            break;
        case FrameRate._30Hz:
            flyFrameRate = PGRFlyModule.FlyCaptureFrameRate.FLYCAPTURE_FRAMERATE_30;
            break;
        case FrameRate._50Hz:
            flyFrameRate = PGRFlyModule.FlyCaptureFrameRate.FLYCAPTURE_FRAMERATE_50;
            break;
        case FrameRate._60Hz:
            flyFrameRate = PGRFlyModule.FlyCaptureFrameRate.FLYCAPTURE_FRAMERATE_60;
            break;
        case FrameRate._120Hz:
            flyFrameRate = PGRFlyModule.FlyCaptureFrameRate.FLYCAPTURE_FRAMERATE_120;
            break;
        case FrameRate._240Hz:
            flyFrameRate = PGRFlyModule.FlyCaptureFrameRate.FLYCAPTURE_FRAMERATE_240;
            break;
    }

    //Pick a concrete video mode only if one wasn't preset elsewhere.
    if (flyVideoMode.Equals(PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_ANY))
    {
        switch (resolution)
        {
            case Resolution._160x120:
                flyVideoMode = PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_160x120YUV444;
                break;
            case Resolution._320x240:
                flyVideoMode = PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_320x240YUV422;
                break;
            case Resolution._640x480:
                flyVideoMode = PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_640x480Y8;
                break;
            case Resolution._800x600:
                flyVideoMode = PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_800x600Y8;
                break;
            case Resolution._1024x768:
                flyVideoMode = PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_1024x768Y8;
                break;
            case Resolution._1280x1024:
                flyVideoMode = PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_1280x960Y8;
                break;
            case Resolution._1600x1200:
                flyVideoMode = PGRFlyModule.FlyCaptureVideoMode.FLYCAPTURE_VIDEOMODE_1600x1200Y8;
                break;
        }
    }

    flyCapture.Initialize(videoDeviceID, flyFrameRate, flyVideoMode, grayscale);

    cameraInitialized = true;
}
/// <summary>
/// Initializes DirectShow-based video capture: resolves the device by ID, applies
/// the requested frame rate and frame size, binds a hidden preview panel, and
/// subscribes to the frame callback.
/// </summary>
/// <exception cref="GoblinException">No devices, bad device ID, or unsupported framerate/resolution.</exception>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    if (cameraInitialized)
        return;

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    //Translate the Resolution enum into pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120:
            cameraWidth = 160;
            cameraHeight = 120;
            break;
        case Resolution._320x240:
            cameraWidth = 320;
            cameraHeight = 240;
            break;
        case Resolution._640x480:
            cameraWidth = 640;
            cameraHeight = 480;
            break;
        case Resolution._800x600:
            cameraWidth = 800;
            cameraHeight = 600;
            break;
        case Resolution._1024x768:
            cameraWidth = 1024;
            cameraHeight = 768;
            break;
        case Resolution._1280x1024:
            cameraWidth = 1280;
            cameraHeight = 1024;
            break;
        case Resolution._1600x1200:
            cameraWidth = 1600;
            cameraHeight = 1200;
            break;
    }

    Filters filters = null;
    Filter videoDevice, audioDevice = null;

    //Enumerating filters throws when no capture hardware exists at all.
    try
    {
        filters = new Filters();
    }
    catch (Exception exp)
    {
        throw new GoblinException("No video capturing devices are found");
    }

    //An out-of-range device index surfaces as an exception from the indexer;
    //rethrow with the list of valid IDs to help the caller.
    try
    {
        videoDevice = (videoDeviceID >= 0) ? filters.VideoInputDevices[videoDeviceID] : null;
    }
    catch (Exception exp)
    {
        String suggestion = "Try the following device IDs:";
        for (int i = 0; i < filters.VideoInputDevices.Count; i++)
        {
            suggestion += " " + i + ":" + filters.VideoInputDevices[i].Name + ", ";
        }
        throw new GoblinException("VideoDeviceID " + videoDeviceID + " is out of the range. " + suggestion);
    }

    selectedVideoDeviceName = filters.VideoInputDevices[videoDeviceID].Name;

    capture = new DCapture(videoDevice, audioDevice);

    //Map the FrameRate enum onto a numeric rate for DirectShow.
    double frame_rate = 0;
    switch (frameRate)
    {
        case FrameRate._15Hz:
            frame_rate = 15;
            break;
        case FrameRate._30Hz:
            frame_rate = 30;
            break;
        case FrameRate._50Hz:
            frame_rate = 50;
            break;
        case FrameRate._60Hz:
            frame_rate = 60;
            break;
        case FrameRate._120Hz:
            frame_rate = 120;
            break;
        case FrameRate._240Hz:
            frame_rate = 240;
            break;
    }

    if (videoDevice != null)
    {
        // Using MPEG compressor
        //capture.VideoCompressor = filters.VideoCompressors[2];
        capture.FrameRate = frame_rate;
        try
        {
            capture.FrameSize = new Size(cameraWidth, cameraHeight);
        }
        catch (Exception exp)
        {
            throw new GoblinException("Resolution._" + cameraWidth + "x" + cameraHeight +
                " is not supported for " + selectedVideoDeviceName +
                ". Maximum resolution supported is " + capture.VideoCaps.MaxFrameSize);
        }
    }

    //The driver may silently clamp the size; verify it actually applied.
    if (capture.FrameSize.Width != cameraWidth || capture.FrameSize.Height != cameraHeight)
        throw new GoblinException("Failed to set the resolution to " + cameraWidth + "x" + cameraHeight);

    //DirectShow needs a preview window to run the graph; a throwaway panel serves.
    tmpPanel = new Panel();
    tmpPanel.Size = new Size(cameraWidth, cameraHeight);

    try
    {
        capture.PreviewWindow = tmpPanel;
    }
    catch (Exception exp)
    {
        throw new GoblinException("Specified framerate or/and resolution is/are not supported " +
            "for " + selectedVideoDeviceName);
    }

    capture.FrameEvent2 += new DCapture.HeFrame(CaptureDone);
    capture.GrapImg();

    cameraInitialized = true;
}
/// <summary>
/// Builds the 22-byte video stream coding-info record:
/// [tag 0x15][stream type][format high-nibble | rate low-nibble][aspect high-nibble][18 reserved zero bytes].
/// </summary>
byte[] BuildVideoStreamCodingInfo(ElementaryStreamTypes type, VideoFormat format, FrameRate rate, AspectRatio ratio)
{
    byte[] info = new byte[22];
    info[0] = 0x15;
    info[1] = (byte)type;
    info[2] = (byte)((((byte)format) << 4) | (byte)rate);
    info[3] = (byte)(((byte)(ratio)) << 4);
    //Bytes 4..21 remain zero (reserved) — array elements are zero-initialized.
    return info;
}
/// <summary>
/// Initializes OpenCV-based capture via cvCaptureFromCAM: applies the requested
/// dimensions and FPS as capture properties and verifies the resolution by
/// grabbing one frame.
/// </summary>
/// <exception cref="GoblinException">Bad device ID, or the device rejected the resolution.</exception>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    if (cameraInitialized)
        return;

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    //Translate the Resolution enum into pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120:
            cameraWidth = 160;
            cameraHeight = 120;
            break;
        case Resolution._320x240:
            cameraWidth = 320;
            cameraHeight = 240;
            break;
        case Resolution._640x480:
            cameraWidth = 640;
            cameraHeight = 480;
            break;
        case Resolution._800x600:
            cameraWidth = 800;
            cameraHeight = 600;
            break;
        case Resolution._1024x768:
            cameraWidth = 1024;
            cameraHeight = 768;
            break;
        case Resolution._1280x1024:
            cameraWidth = 1280;
            cameraHeight = 1024;
            break;
        case Resolution._1600x1200:
            cameraWidth = 1600;
            cameraHeight = 1200;
            break;
    }

    //cvCaptureFromCAM returns a null handle for an invalid device index.
    capture = OpenCVWrapper.cvCaptureFromCAM(videoDeviceID);
    if (capture == IntPtr.Zero)
        throw new GoblinException("VideoDeviceID " + videoDeviceID + " is out of the range.");

    OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FRAME_WIDTH, cameraWidth);
    OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FRAME_HEIGHT, cameraHeight);

    //Map the FrameRate enum onto a numeric FPS value for OpenCV.
    double frame_rate = 0;
    switch (frameRate)
    {
        case FrameRate._15Hz:
            frame_rate = 15;
            break;
        case FrameRate._30Hz:
            frame_rate = 30;
            break;
        case FrameRate._50Hz:
            frame_rate = 50;
            break;
        case FrameRate._60Hz:
            frame_rate = 60;
            break;
        case FrameRate._120Hz:
            frame_rate = 120;
            break;
        case FrameRate._240Hz:
            frame_rate = 240;
            break;
    }

    OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FPS, frame_rate);

    // Grab the video image to see if resolution is correct
    if (OpenCVWrapper.cvGrabFrame(capture) != 0)
    {
        IntPtr ptr = OpenCVWrapper.cvRetrieveFrame(capture);
        //Marshal the native IplImage header so we can inspect the actual dimensions.
        OpenCVWrapper.IplImage videoImage =
            (OpenCVWrapper.IplImage)Marshal.PtrToStructure(ptr, typeof(OpenCVWrapper.IplImage));
        if (videoImage.width != cameraWidth || videoImage.height != cameraHeight)
            throw new GoblinException("Resolution " + cameraWidth + "x" + cameraHeight + " is not supported");
    }

    cameraInitialized = true;
}
/// <summary>
/// Drives the frame loop until NextFrame() reports completion, raising the
/// pre/process/post events per frame and tracking frame-rate statistics.
/// </summary>
/// <exception cref="ArgumentException">Control has not been assigned before starting.</exception>
public void RunLoop()
{
    if (Control == null)
    {
        throw new ArgumentException("Control cannot be null on loop start");
    }

    if (frameRate == null)
    {
        frameRate = new FrameRate();
    }

    frameRate.Start();

    while (NextFrame())
    {
        frameRate.StartFrame();

        PreProcess?.Invoke(this, new EventArgs());
        ProcessFrame?.Invoke(this, new EventArgs());
        PostProcess?.Invoke(this, new EventArgs());

        frameRate.EndFrame();
    }

    frameRate.Stop();
}
//Intentionally empty stub — frame-rate generation is not implemented here.
//NOTE(review): the null default implies FrameRate is a reference type in this
//context — confirm before relying on it.
public void GenerateFrameRate(FrameRate fr = null)
{
}