/// <summary>
/// Creates the video stream on <c>_writer</c>, picking the encoder from the codec's FOURCC.
/// </summary>
void CreateVideoStream(int Width, int Height)
{
    if (_codec == AviCodec.Uncompressed)
    {
        // Raw frames; no encoder involved.
        _videoStream = _writer.AddUncompressedVideoStream(Width, Height);
    }
    else if (_codec == AviCodec.MotionJpeg)
    {
        _videoStream = _writer.AddMotionJpegVideoStream(Width, Height, _codec.Quality);
    }
    else
    {
        // MPEG-4 via VfW. All tested MPEG-4 VfW codecs ignore the quality-affecting
        // parameters passed through the VfW API (they only respect their own
        // configuration dialogs, and Mpeg4VideoEncoder has no support for those).
        // Most VfW codecs also expect single-threaded use, so the final 'true'
        // wraps the encoder so all calls happen on a single thread while encoding
        // and writing still run asynchronously.
        _videoStream = _writer.AddMpeg4VideoStream(
            Width, Height,
            (double)_writer.FramesPerSecond,
            0,
            _codec.Quality,
            _codec.FourCC,
            true);
    }

    _videoStream.Name = "ScrenaVideo";
}
/// <summary>
/// Creates the AVI writer and video stream from <paramref name="Params"/> and starts
/// the capture loop on a background thread.
/// </summary>
/// <param name="Params">Recorder configuration (writer/stream factories).</param>
/// <param name="NameOfTheStream">Name assigned to the video stream.</param>
public Recorder(RecorderParams Params, string NameOfTheStream)
{
    // BUG FIX: the original wrapped this body in try { ... } catch (Exception exp)
    // { throw exp; }, which only destroyed the stack trace. The wrapper added
    // nothing, so it has been removed; exceptions now propagate unchanged.
    this.Params = Params;

    // Create AVI writer (FPS is configured inside CreateAviWriter).
    writer = Params.CreateAviWriter();

    // Create video stream. Its other properties are set while creating it, either
    // explicitly by arguments or implicitly by the encoder used.
    videoStream = Params.CreateVideoStream(writer);
    videoStream.Name = NameOfTheStream;

    // Capture runs on a dedicated background thread so it never blocks the caller.
    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };
    screenThread.Start();
}
/// <summary>
/// Configures a raw (uncompressed, 32 bpp) stream sized to the primary screen's working area.
/// </summary>
private void ConfigureStream(IAviVideoStream stream)
{
    var area = Screen.PrimaryScreen.WorkingArea;
    stream.Width = area.Width;
    stream.Height = area.Height;
    stream.Codec = KnownFourCCs.Codecs.Uncompressed;
    stream.BitsPerPixel = BitsPerPixel.Bpp32;
}
/// <summary>
/// Records this instance.
/// </summary>
/// <exception cref="RedCell.Research.ResearchException">
/// Filename must be set before recording.
/// or
/// A recording has already started.
/// </exception>
/// <exception cref="System.InvalidOperationException">Filename must be set before recording.</exception>
public void StartRecord()
{
    // Fall back to a timestamped filename when none was supplied.
    if (string.IsNullOrWhiteSpace(Filename))
    {
        Filename = "Video_" + DateTime.Now.ToString("yyyyMMddHHmmss");
    }

    if (_writer != null)
    {
        throw new ResearchException("A recording has already started.");
    }

    // Pull video details (size / framerate) from the camera when not preset.
    if (StreamSetting == null)
    {
        StreamSetting = Camera.StreamSetting;
    }

    var encoder = new Mpeg4VideoEncoderVcm(
        StreamSetting.Width, StreamSetting.Height, StreamSetting.Framerate,
        0, 50, KnownFourCCs.Codecs.X264);

    _writer = new AviWriter(Filename);
    _video = _writer.AddEncodingVideoStream(encoder, true, StreamSetting.Width, StreamSetting.Height);

    // Frames are written as the camera delivers them.
    Camera.FrameAvailable += Camera_FrameAvailable;
}
/// <summary>
/// Sets up screen + optional microphone recording: a 10 FPS AVI writer, a video
/// stream, and (when <paramref name="audioSourceIndex"/> >= 0) a WaveIn audio
/// stream, then starts the capture thread.
/// </summary>
public Recorder(string fileName, FourCC codec, int quality,
    int audioSourceIndex, SupportedWaveFormat audioWaveFormat, bool encodeAudio, int audioBitRate)
{
    // Screen size in device pixels, derived from the WPF device transform.
    System.Windows.Media.Matrix toDevice;
    using (var source = new HwndSource(new HwndSourceParameters()))
    {
        toDevice = source.CompositionTarget.TransformToDevice;
    }
    screenWidth = (int)Math.Round(SystemParameters.PrimaryScreenWidth * toDevice.M11);
    screenHeight = (int)Math.Round(SystemParameters.PrimaryScreenHeight * toDevice.M22);

    // AVI writer at 10 FPS; the v1 index improves player compatibility.
    writer = new AviWriter(fileName)
    {
        FramesPerSecond = 10,
        EmitIndex1 = true,
    };

    // Video stream; remaining properties are set during creation, either
    // explicitly by arguments or implicitly by the encoder used.
    videoStream = CreateVideoStream(codec, quality);
    videoStream.Name = "Screencast";

    if (audioSourceIndex >= 0)
    {
        var waveFormat = ToWaveFormat(audioWaveFormat);

        audioStream = CreateAudioStream(waveFormat, encodeAudio, audioBitRate);
        audioStream.Name = "Voice";

        audioSource = new WaveInEvent
        {
            DeviceNumber = audioSourceIndex,
            WaveFormat = waveFormat,
            // Buffer sized to hold roughly one video frame's duration of audio.
            BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
            NumberOfBuffers = 3,
        };
        audioSource.DataAvailable += audioSource_DataAvailable;
    }

    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };

    if (audioSource != null)
    {
        // Let the first video frame through, then start audio capture.
        videoFrameWritten.Set();
        audioBlockWritten.Reset();
        audioSource.StartRecording();
    }
    screenThread.Start();
}
/// <summary>
/// Records the screen.
/// </summary>
/// <param name="fileName">Output file name.</param>
/// <param name="codec">Video codec.</param>
/// <param name="quality">Recording quality.</param>
/// <param name="zoom">Scale factor applied to the captured screen.</param>
public Recorder(string fileName, FourCC codec, int quality = 70, float zoom = 1.0F)
{
    // Scaled capture dimensions.
    zoomHeight = (int)Math.Floor(System.Windows.Forms.Screen.PrimaryScreen.Bounds.Height * zoom);
    zoomWidth = (int)Math.Floor(System.Windows.Forms.Screen.PrimaryScreen.Bounds.Width * zoom);
    this.zoom = zoom;

    // AVI writer; EmitIndex1 improves player compatibility.
    writer = new AviWriter(fileName)
    {
        FramesPerSecond = 10,
        EmitIndex1 = true,
    };

    // Video stream.
    videoStream = CreateVideoStream(codec, quality);
    videoStream.Name = "Screencast";

    // Dedicated background thread for the capture loop.
    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };

    // A low-level mouse hook was used here to detect clicks; kept for reference.
    // hook = WinHook.SetWindowsHookEx(HookType.WH_MOUSE_LL, WinHook.hookProc += MouseHook, Win32Api.GetModuleHandle(Process.GetCurrentProcess().MainModule.ModuleName), 0);

    screenThread.Start();
}
/// <summary>Starts screen recording to the given AVI file on a background thread.</summary>
public void StartRecord(string fileName, FourCC codec, int quality)
{
    stopThread = new ManualResetEvent(false);

    // AVI writer at 10 FPS; the v1 index improves player compatibility.
    writer = new AviWriter(fileName)
    {
        FramesPerSecond = 10,
        EmitIndex1 = true,
    };

    // Other stream properties are set during creation, either explicitly by
    // arguments or implicitly by the encoder used.
    videoStream = CreateVideoStream(codec, quality);
    videoStream.Name = "Screencast";

    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(RecorderVideo).Name + ".RecordScreen",
        IsBackground = true
    };
    screenThread.Start();

    stopwatchGlobal = Stopwatch.StartNew();
}
/// <summary>
/// Binds the writer state and derives the frame interval from the writer's FPS.
/// </summary>
public TimedFrameWriter(IAviVideoStream videoStream, AviWriter writer, RecorderParams @params)
{
    VideoStream = videoStream;
    Writer = writer;
    Params = @params;

    // Milliseconds per frame, from the writer's frame rate.
    msPerFrame = (1.0 / (double)writer.FramesPerSecond) * 1000;
}
/// <summary>
/// Creates the video stream on <c>_writer</c>, choosing the encoder from the codec's FOURCC.
/// </summary>
private void CreateVideoStream(int width, int height)
{
    if (_codec == AviCodec.Uncompressed)
    {
        _videoStream = _writer.AddUncompressedVideoStream(width, height);
    }
    else if (_codec == AviCodec.MotionJpeg)
    {
        // The MJPEG encoder allocates multiple WriteableBitmaps per thread, so run
        // it behind a single-threaded wrapper to reduce allocation.
        var encoderFactory = new Func<IVideoEncoder>(() => new MotionJpegVideoEncoderWpf(width, height, _codec.Quality));
        var encoder = new SingleThreadedVideoEncoderWrapper(encoderFactory);
        _videoStream = _writer.AddEncodingVideoStream(encoder, true, width, height);
    }
    else
    {
        // MPEG-4 via VfW. Tested MPEG-4 VfW codecs ignore quality parameters passed
        // through the VfW API (they only honor their own configuration dialogs),
        // and most expect single-threaded use, hence the final 'true' which routes
        // all encoder calls onto one thread while writing stays asynchronous.
        _videoStream = _writer.AddMpeg4VideoStream(
            width, height,
            (double)_writer.FramesPerSecond,
            0,
            _codec.Quality,
            _codec.FourCC,
            true);
    }

    _videoStream.Name = "Video";
}
/// <summary>
/// Captures screenshots in a tight loop and writes each as a key frame until the
/// scenario finishes or a stop is forced.
/// </summary>
/// <param name="videoFileName">Output AVI path.</param>
/// <param name="scenario">Scenario whose completion ends the recording.</param>
/// <param name="fps">Frames per second for the writer.</param>
private void StartRecordingThread(string videoFileName, ISeleniumTest scenario, int fps)
{
    fileName = videoFileName;

    using (writer = new AviWriter(videoFileName) { FramesPerSecond = fps, EmitIndex1 = true })
    {
        stream = writer.AddVideoStream();
        this.ConfigureStream(stream);

        // BUG FIX: the loop condition was "(!scenario.IsFinished || forceStop)",
        // which never terminates once forceStop is set. Stop when the scenario
        // finishes OR a stop is requested.
        while (!scenario.IsFinished && !forceStop)
        {
            GetScreenshot(buffer);

            // Serialize writes — WriteFrame is shared with other threads.
            lock (locker)
            {
                stream.WriteFrame(true, buffer, 0, buffer.Length);
            }
        }
    }
}
/// <summary>
/// Initializes the AVI file for recording.
/// </summary>
/// <param name="filePath">Path of the AVI file to create.</param>
public void InitRecord(String filePath)
{
    try
    {
        if (_writer == null)
        {
            // Create the AVI file at the given path. Emitting the AVI v1 index in
            // addition to the OpenDML index (AVI v2) improves compatibility with
            // some software, including standard Windows programs like Media Player
            // and File Explorer.
            _writer = new AviWriter(filePath)
            {
                FramesPerSecond = 30,
                EmitIndex1 = true
            };

            // Fixed 1080p X264 stream; VfW codecs want single-threaded access.
            _stream = _writer.AddMpeg4VideoStream(1920, 1080, 30,
                quality: 70,
                codec: KnownFourCCs.Codecs.X264,
                forceSingleThreadedAccess: true);
        }

        // AVI file creation finished; reset the creation flag.
        IsCreateRecord = false;
    }
    catch
    {
        // Best-effort: on any failure, tear down the recorder instead of crashing.
        Stoping();
    }
}
/// <summary>Creates a Motion-JPEG video stream at a fixed quality of 70.</summary>
private void CreateVideoStream(int width, int height)
{
    const int quality = 70;
    _videoStream = _writer.AddMotionJpegVideoStream(width, height, quality);
    _videoStream.Name = "Epidemic Simulator";
}
/// <summary>
/// Opens the video writer and tries encoders from best to most compatible
/// (Uncompressed, then X264, then MJPEG), then starts a timer that writes the
/// latest frame at the target FPS.
/// </summary>
public void Init()
{
    _writer = new AviWriter(VideoPath) { FramesPerSecond = 30, EmitIndex1 = true };

    try
    {
        _stream = _writer.AddUncompressedVideoStream(DmdWidth, DmdHeight);
        Logger.Info("Uncompressed encoder found.");
    }
    catch (InvalidOperationException e)
    {
        Logger.Warn("Error creating Uncompressed encoded stream: {0}.", e.Message);
    }

    try
    {
        if (_stream == null)
        {
            _stream = _writer.AddMpeg4VideoStream(
                DmdWidth, DmdHeight, Fps,
                quality: 100,
                codec: KnownFourCCs.Codecs.X264,
                forceSingleThreadedAccess: true);
            Logger.Info("X264 encoder found.");
        }
    }
    catch (InvalidOperationException e)
    {
        Logger.Warn("Error creating X264 encoded stream: {0}.", e.Message);
    }

    try
    {
        if (_stream == null)
        {
            _stream = _writer.AddMotionJpegVideoStream(DmdWidth, DmdHeight, quality: 100);
            // BUG FIX: this log line previously sat outside the null check and
            // claimed an MJPEG encoder was found even when an earlier encoder
            // was already in use.
            Logger.Info("MJPEG encoder found.");
        }
    }
    catch (InvalidOperationException e)
    {
        Logger.Warn("Error creating MJPEG encoded stream: {0}.", e.Message);
    }

    if (_stream == null)
    {
        Logger.Error("No encoder available, aborting.");
        return;
    }

    // Write the most recent frame at the target FPS; skipped while no frame exists.
    _animation = Observable
        .Interval(TimeSpan.FromTicks(1000 * TimeSpan.TicksPerMillisecond / Fps))
        .Subscribe(_ =>
        {
            if (_frame != null)
            {
                _stream?.WriteFrame(true, _frame, 0, _frame.Length);
            }
        });

    Logger.Info("Writing video to {0}.", VideoPath);
}
/// <summary>Finalizes the AVI file and resets the recorder state flags.</summary>
public void Stoping()
{
    // Close() completes the file (including its index).
    writer.Close();
    writer = null;
    stream = null;

    IsCreateRecord = false;
    IsRecording = false;
}
/// <summary>
/// Render a video based on JPEG-images
/// </summary>
/// <param name="fps">Requested frames-per-second</param>
/// <param name="width">Width of the images</param>
/// <param name="height">Height of the images</param>
/// <param name="quality">Requested quality</param>
/// <param name="path">Path to the folder containing frame-images</param>
/// <param name="renderGuid">Unique GUID for this frame-batch</param>
/// <returns>Path to the video</returns>
/// <exception cref="ArgumentException">Quality is outside 1..100.</exception>
public static async Task<string> RenderVideoAsync(int fps, int width, int height, int quality, string path, string renderGuid)
{
    // BUG FIX: the original test used && ("quality < 1 && quality > 100"), which
    // can never be true, so invalid qualities were silently accepted.
    if (quality < 1 || quality > 100)
    {
        throw new ArgumentException("Quality can only be between 1 and 100.");
    }

    return await Task.Run(() =>
    {
        // Compose output path
        string outputPath = string.Format("{0}/{1}.avi", path, renderGuid);

        // Create a new writer with the requested FPS
        AviWriter writer = new AviWriter(outputPath) { FramesPerSecond = fps };
        try
        {
            // Create a new stream to process it
            IAviVideoStream stream = writer.AddEncodingVideoStream(new MotionJpegVideoEncoderWpf(width, height, quality));
            stream.Width = width;
            stream.Height = height;

            // Frame buffer: 4 bytes per pixel.
            byte[] frameData = new byte[stream.Width * stream.Height * 4];

            // Retrieve all images for this batch
            string[] images = Directory.GetFiles(path, string.Format("{0}*.jpg", renderGuid));

            foreach (string file in images)
            {
                // Decode the bitmap and take its first frame.
                JpegBitmapDecoder decoder = new JpegBitmapDecoder(new Uri(file), BitmapCreateOptions.None, BitmapCacheOption.Default);
                BitmapSource source = decoder.Frames[0];

                // BUG FIX: the stride was hard-coded to 1920 * 4, corrupting output
                // for any other width; use the actual frame width instead.
                source.CopyPixels(frameData, stream.Width * 4, 0);

                stream.WriteFrame(true, frameData, 0, frameData.Length);
            }
        }
        finally
        {
            // BUG FIX: the writer leaked if an exception occurred mid-render;
            // always close it.
            writer.Close();
        }

        return outputPath;
    });
}
/// <summary>Resets recorder state and releases any previously held streaming task.</summary>
public VideoRecorder()
{
    forceStop = false;
    writer = null;
    stream = null;

    if (streamingTask != null)
    {
        streamingTask.Dispose();
        streamingTask = null;
    }
}
/// <summary>
/// Frees all resources used by this object. Safe to call more than once.
/// </summary>
public void Dispose()
{
    lock (_syncLock)
    {
        // BUG FIX: a second Dispose call used to throw NullReferenceException
        // because _writer was already null; Dispose must be idempotent.
        _writer?.Close();
        _writer = null;
        _videoStream = null;
        _audioStream = null;
    }
    _videoBuffer = null;
}
/// <summary>
/// Stops AVI recording.
/// </summary>
public void Stoping()
{
    if (_writer != null)
    {
        // Closing the writer finalizes the AVI file, including its timeline index.
        _writer.Close();
        _writer = null;
        _stream = null;
    }

    // Recording finished; reset the state flags.
    IsCreateRecord = false;
    IsRecording = false;
    IsStopRecord = false;
}
/// <summary>Builds the writer and video stream from the params and starts capturing.</summary>
public Recorder(RecorderParams Params)
{
    this.Params = Params;

    writer = Params.CreateAviWriter();
    videoStream = Params.CreateVideoStream(writer);

    // Capture runs on a dedicated background thread.
    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };
    screenThread.Start();
}
/// <summary>
/// Records the panel region to an uncompressed AVI at the configured FPS, then
/// optionally transcodes it to WebM via FFMPEG.
/// </summary>
void Record()
{
    int frameRate = (int)FpsUpDown.Value;
    Size size = RecordPanel.Size;
    int rnd = new Random().Next();
    string outFile = Path.GetFullPath(string.Format("rec_{0}.avi", rnd));
    string outFileCompressed = Path.GetFullPath(string.Format("rec_{0}.webm", rnd));

    AviWriter writer = new AviWriter(outFile)
    {
        FramesPerSecond = frameRate,
        EmitIndex1 = true
    };
    IAviVideoStream stream = writer.AddVideoStream(size.Width, size.Height, BitsPerPixel.Bpp24);
    stream.Codec = KnownFourCCs.Codecs.Uncompressed;
    stream.BitsPerPixel = BitsPerPixel.Bpp24;

    try
    {
        // BUG FIX: the Bitmap and Graphics were never disposed (and leaked on any
        // exception); wrap them in using blocks.
        using (Bitmap bmp = new Bitmap(size.Width, size.Height))
        using (Graphics g = Graphics.FromImage(bmp))
        {
            Stopwatch watch = Stopwatch.StartNew();
            while (!AbortRecording)
            {
                g.CopyFromScreen(DesktopLocation.Add(RecorderOffset), Point.Empty, size);
                //g.DrawString("Text Embedding", SystemFonts.CaptionFont, Brushes.Red, new PointF(0, 0));

                // AVI rows are stored bottom-up, so flip vertically before writing.
                bmp.RotateFlip(RotateFlipType.RotateNoneFlipY);
                byte[] data = bmp.ToByteArray();
                stream.WriteFrame(true, data, 0, data.Length);

                // BUG FIX: the original busy-waited ("while (...) ;") at 100% CPU
                // until the next frame was due; sleep the remaining time instead.
                int remainingMs = (int)(1000.0 / frameRate - watch.ElapsedMilliseconds);
                if (remainingMs > 0)
                {
                    Thread.Sleep(remainingMs);
                }
                watch.Restart();
            }
        }
    }
    finally
    {
        // Always finalize the AVI, even if capture throws.
        writer.Close();
    }

    if (WebmCheckBox.Checked)
    {
        Program.FFMPEG("-i \"{0}\" -c:v libvpx -b:v 1M -c:a libvorbis \"{1}\"", outFile, outFileCompressed);
        File.Delete(outFile);
    }
}
/// <summary>
/// Captures the screen into a temporary MJPEG AVI until WaitHandler is signalled,
/// pacing frames to the writer's FPS and overlapping capture with async writes.
/// </summary>
public void Record()
{
    _tempName = "Temp-" + DateTime.Now.Ticks + ".avi";
    var writer = new AviWriter(_tempName)
    {
        FramesPerSecond = 30,
    };
    IAviVideoStream stream = writer.AddMotionJpegVideoStream(ScreenWidth, ScreenHeight);
    var frameData = new byte[stream.Width * stream.Height * 4];

    var stopwatch = new Stopwatch();
    var buffer = new byte[ScreenWidth * ScreenHeight * 4];
    var shotsTaken = 0;
    var timeTillNextFrame = TimeSpan.Zero;
    Task videoWriteTask = null;
    var isFirstFrame = true;

    stopwatch.Start();
    // WaitOne doubles as the frame pacer and the stop signal.
    while (!WaitHandler.WaitOne(timeTillNextFrame))
    {
        GetScreenshot(buffer);

        // Wait for the previous frame's write to finish before reusing the stream.
        if (!isFirstFrame)
        {
            videoWriteTask.Wait();
        }
        videoWriteTask = stream.WriteFrameAsync(true, buffer, 0, buffer.Length);

        // BUG FIX: shotsTaken was never incremented, so the pacing expression below
        // always evaluated <= 0 and capture ran unthrottled at maximum speed.
        shotsTaken++;

        timeTillNextFrame = TimeSpan.FromSeconds(
            shotsTaken / (double)writer.FramesPerSecond - stopwatch.Elapsed.TotalSeconds);
        if (timeTillNextFrame < TimeSpan.Zero)
        {
            timeTillNextFrame = TimeSpan.Zero;
        }

        isFirstFrame = false;
    }
    stopwatch.Stop();

    // Flush the final in-flight write before closing.
    if (!isFirstFrame)
    {
        videoWriteTask.Wait();
    }
    writer.Close();
}
/// <summary>Builds the writer and video stream from the params and starts the capture thread.</summary>
public Recorder(RecorderParams Params)
{
    this._params = Params;

    this._writer = Params.CreateAviWriter();
    this._videoStream = Params.CreateVideoStream(this._writer);
    this._videoStream.Name = "DesktopCapture";

    this._screenThread = new Thread(this.RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };
    this._screenThread.Start();
}
/// <summary>
/// Prepares the writer, video stream, and 44.1 kHz stereo audio capture; output
/// goes to &lt;solution dir&gt;\SaveFolder\screenRecord.avi.
/// </summary>
public void InitializeRecorder()
{
    // BUG FIX: a DPI transform ("toDevice") used to be computed here via a
    // throwaway HwndSource and then never read; the dead computation is removed.
    mScreenWidth = Screen.PrimaryScreen.Bounds.Width;
    mScreenHeight = Screen.PrimaryScreen.Bounds.Height;

    // Output path: two directories above the current one, under \SaveFolder.
    string currentDirectory = Environment.CurrentDirectory;
    string newDir = Path.GetDirectoryName(Path.GetDirectoryName(currentDirectory));
    mFullPath = newDir + @"\SaveFolder\screenRecord.avi";

    mWriter = new AviWriter(mFullPath)
    {
        FramesPerSecond = 10,
        EmitIndex1 = true,
    };

    mVideoStream = CreateVideoStream(mCodecInfo.Codec, mQuality);
    mVideoStream.Name = "Screencast";

    // Audio: 44.1 kHz stereo 16-bit, not encoded (encode = false, bitrate 160).
    suppForm = SupportedWaveFormat.WAVE_FORMAT_44S16;
    mWaveFormat = ToWaveFormat(suppForm);
    mAudioStream = CreateAudioStream(mWaveFormat, false, 160);
    mAudioStream.Name = "Voice";

    mAudioSource = new WaveInEvent
    {
        DeviceNumber = mAudioDevice.Value,
        WaveFormat = mWaveFormat,
        // Buffer sized to hold roughly one video frame's duration of audio.
        BufferMilliseconds = (int)Math.Ceiling(1000 / mWriter.FramesPerSecond),
        NumberOfBuffers = 3,
    };
    mAudioSource.DataAvailable += audioSource_DataAvailable;
}
/// <summary>
/// Re-encodes this movie as an AVI with an MJPEG video stream (quality 70) and a
/// 16-bit audio stream, playing the source frame by frame.
/// </summary>
public void SaveAsAvi(string fileName)
{
    var writer = new AviWriter(fileName)
    {
        // Frame delay is in microseconds; round to the nearest whole FPS.
        FramesPerSecond = (1000000 + this.Header.FrameDelay / 2) / this.Header.FrameDelay,
        EmitIndex1 = true
    };
    try
    {
        IAviVideoStream videoStream = writer.AddMotionJpegVideoStream(this.Header.Width, this.Header.Height, 70);
        IAviAudioStream audioStream = writer.AddAudioStream(this.AudioHeader.NumChannels, this.AudioHeader.Frequency, 16);

        this.BeginPlay();
        try
        {
            byte[] audio;
            byte[] video;
            while (this.RetrieveNextFrame(out audio, out video))
            {
                if (video != null)
                {
                    // Source frames are 16 bpp; expand to 32 bpp for the encoder.
                    byte[] buffer = SnmFile.Convert16BppTo32Bpp(video);
                    videoStream.WriteFrame(true, buffer, 0, buffer.Length);
                }
                if (audio != null)
                {
                    audioStream.WriteBlock(audio, 0, audio.Length);
                }
            }
        }
        finally
        {
            this.EndPlay();
        }
    }
    finally
    {
        writer.Close();
    }
}
/// <summary>Creates the writer and video stream from the params and starts the capture thread.</summary>
public void RecordScreen(RecorderParams recorderParams)
{
    Params = recorderParams;

    writer = Params.CreateAviWriter();

    // Only the name is set here; other stream properties are set during creation,
    // either explicitly by arguments or implicitly by the encoder used.
    videoStream = Params.CreateVideoStream(writer);
    videoStream.Name = "Captura";

    screenThread = new Thread(_recordScreen)
    {
        Name = typeof(ScreenRecorderHelper).Name + ".RecordScreen",
        IsBackground = true
    };
    screenThread.Start();
}
/// <summary>
/// Writes the given images as frames of an MJPEG AVI; stream and buffer are sized
/// from the first image.
/// </summary>
private static void Process(IAviSettings aviSettings, IReadOnlyCollection<string> imageFiles)
{
    if (imageFiles.Count == 0)
    {
        return;
    }

    using (var writer = new AviWriter(aviSettings.OutputAvi) { FramesPerSecond = aviSettings.FPS, EmitIndex1 = true })
    {
        IAviVideoStream stream = null;
        byte[] buffer = null;
        var first = true;
        var rectangle = new Rectangle();

        foreach (var file in imageFiles)
        {
            using (var bitmap = (Bitmap)Image.FromFile(file))
            {
                // Lazily create the stream and pixel buffer from the first image.
                if (first)
                {
                    first = false;
                    stream = writer.AddMotionJpegVideoStream(bitmap.Width, bitmap.Height, quality: 90);
                    buffer = new byte[bitmap.Width * bitmap.Height * 4 /* four bytes per pixel */];
                    rectangle = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
                }

                var raw = bitmap.LockBits(rectangle, ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
                Marshal.Copy(raw.Scan0, buffer, 0, buffer.Length);
                bitmap.UnlockBits(raw);

                stream.WriteFrame(true, buffer, 0, buffer.Length);
            }
        }
    }
}
/// <summary>
/// Sets up video capture and, when an audio source is configured, best-effort
/// microphone capture, then starts the capture thread.
/// </summary>
public Recorder(RecorderParams Params)
{
    this.Params = Params;

    // Create AVI writer (FPS configured by the params) and the video stream; other
    // stream properties are set during creation by arguments or by the encoder.
    writer = Params.CreateAviWriter();
    videoStream = Params.CreateVideoStream(writer);
    videoStream.Name = "Captura";

    if (Params.AudioSourceId != -1)
    {
        try
        {
            var waveFormat = Params.WaveFormat;

            audioStream = Params.CreateAudioStream(writer);
            audioStream.Name = "Voice";

            audioSource = new WaveInEvent
            {
                DeviceNumber = Params.AudioSourceId,
                WaveFormat = waveFormat,
                // Buffer sized to hold roughly one video frame's duration of audio.
                BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
                NumberOfBuffers = 3,
            };
            audioSource.DataAvailable += AudioDataAvailable;
        }
        catch
        {
            // Best-effort: audio is optional — recording continues video-only when
            // the audio device/stream cannot be set up.
        }
    }

    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };

    if (audioSource != null)
    {
        // Let the first video frame through, then start audio capture.
        videoFrameWritten.Set();
        audioBlockWritten.Reset();
        audioSource.StartRecording();
    }
    screenThread.Start();
}
/// <summary>Builds the writer and a date-named video stream, then starts capturing.</summary>
public Recorder(RecorderParams Params)
{
    this.Params = Params;

    // Writer (FPS configured inside CreateAviWriter) and the video stream; other
    // stream properties are set during creation by arguments or by the encoder.
    writer = Params.CreateAviWriter();
    videoStream = Params.CreateVideoStream(writer);
    videoStream.Name = "Cap - " + DateTime.Now.ToLongDateString();

    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };
    screenThread.Start();
}
/// <summary>
/// Prepares a timestamp-named 10 FPS AVI and a video stream for the given screen
/// region. The record thread is created here but started elsewhere.
/// </summary>
public RecordingService(ScreenMetadata targetScreen, RegionBlock recordBlock, FourCC codec, int quality)
{
    Debug.Assert(targetScreen != null);
    Debug.Assert(recordBlock != null);

    this.targetScreen = targetScreen;
    this.recordBlock = recordBlock;

    // Timestamped output file; the v1 index improves player compatibility.
    videoWriter = new AviWriter($"{DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss")}.avi", 10)
    {
        EmitIndex1 = true
    };

    videoStream = CreateVideoStream(codec, quality);
    videoStream.Name = "Screencast";

    recordThread = new Thread(RecordScreen)
    {
        Name = typeof(RecordingService).Name + ".RecordScreen",
        IsBackground = true
    };
}
/// <summary>
/// Starts capturing: snapshots are taken in a loop and written asynchronously,
/// overlapping capture of the next frame with the write of the previous one.
/// </summary>
public void Start()
{
    _fileWriter = new AviWriter(_outputFilePath)
    {
        FramesPerSecond = _configurator.FramePerSecond,
        EmitIndex1 = true
    };
    _videoStream = _fileWriter.AddEncodingVideoStream(_selectedEncoder, true, _configurator.Width, _configurator.Height);

    // 4 bytes per pixel.
    _frameData = new byte[_videoStream.Width * _videoStream.Height * 4];

    _cts = new CancellationTokenSource();
    _workTask = Task.Run(async () =>
    {
        Task writeTask = Task.FromResult(true);
        while (!_cts.IsCancellationRequested)
        {
            GetSnapshot(_frameData);
            await writeTask;
            writeTask = _videoStream.WriteFrameAsync(true, _frameData, 0, _frameData.Length);
        }
        // Flush the final in-flight write.
        await writeTask;
    });
}
/// <summary>Creates the AVI writer and a fixed 1080p X264 stream at the given path.</summary>
public void InitRecord(String filePath)
{
    try
    {
        // Emitting the AVI v1 index in addition to the OpenDML index (AVI v2)
        // improves compatibility with some software, including standard Windows
        // programs like Media Player and File Explorer.
        writer = new AviWriter(filePath)
        {
            FramesPerSecond = 30,
            EmitIndex1 = true
        };

        stream = writer.AddMpeg4VideoStream(1920, 1080, 30,
            quality: 70,
            codec: KnownFourCCs.Codecs.X264,
            forceSingleThreadedAccess: true);

        IsCreateRecord = false;
    }
    catch (Exception ex)
    {
        // Surface the failure to the user and log it.
        System.Windows.MessageBox.Show(ex.Message);
        WriteEduAppLog(ex.Message, ex.StackTrace);
    }
}
/// <summary>
/// Prepares a test AVI writer ("test.avi", 25 FPS) with an Xvid-encoded stream
/// sized from the numeric inputs; failures are written to the console.
/// </summary>
private void LoadSettings()
{
    try
    {
        // Emitting the AVI v1 index in addition to the OpenDML index (AVI v2)
        // improves compatibility with standard Windows programs like Media Player
        // and File Explorer.
        writer = new AviWriter("test.avi")
        {
            FramesPerSecond = 25,
            EmitIndex1 = true
        };

        var encoder = new Mpeg4VideoEncoderVcm(
            (int)numericWidth.Value, (int)numericHeight.Value,
            25, 0, 100, KnownFourCCs.Codecs.Xvid);

        stream = writer.AddEncodingVideoStream(encoder, true, (int)numericWidth.Value, (int)numericHeight.Value);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Opens the AVI writer and adds a video stream chosen by the codec combo box
/// (0 = raw, 1 = MJPEG, otherwise a VfW MPEG-4 codec) plus an optional audio
/// stream. Returns false and logs when setup fails.
/// </summary>
private bool StartRecording()
{
    try
    {
        int captureWidth = (int)(m_Capture.CaptureBounds.Width * m_Capture.Scale);
        int captureHeight = (int)(m_Capture.CaptureBounds.Height * m_Capture.Scale);
        int codecSelectedIndex = comboBox_videoCodec.SelectedIndex;

        // BUG FIX: this parse of user-entered text used to run before the try
        // block, so a non-numeric quality crashed instead of returning false.
        int codecQuality = Convert.ToInt32(textBox_recordQuality.Text);

        m_AviWriter = new AviWriter(m_AviFilePath)
        {
            FramesPerSecond = m_Capture.FramesPerSecond,
            EmitIndex1 = true,
        };

        if (codecSelectedIndex == 0)
        {
            m_AviVideoStream = m_AviWriter.AddVideoStream(captureWidth, captureHeight);
        }
        else if (codecSelectedIndex == 1)
        {
            m_AviVideoStream = m_AviWriter.AddMotionJpegVideoStream(captureWidth, captureHeight, codecQuality);
        }
        else
        {
            // Indexes >= 2 map onto the installed VfW MPEG-4 codecs.
            var codecs = Mpeg4VideoEncoderVcm.GetAvailableCodecs();
            var encoder = new Mpeg4VideoEncoderVcm(captureWidth, captureHeight,
                m_Capture.FramesPerSecond, 0, codecQuality, codecs[codecSelectedIndex - 2].Codec);
            m_AviVideoStream = m_AviWriter.AddEncodingVideoStream(encoder);
        }

        if (checkBox_recordAudio.Checked)
        {
            m_AviAudioStream = m_AviWriter.AddAudioStream(
                m_Recorder.WaveFormat.Channels,
                m_Recorder.WaveFormat.SampleRate,
                m_Recorder.WaveFormat.BitsPerSample);
        }
    }
    catch
    {
        Debug.Log("Failed to Start Recording.");
        return false;
    }
    return true;
}
/// <summary>
/// Stops screen capture (and audio recording): notifies connected WebSocket
/// clients, stops the capture/recorder, and finalizes the AVI file if one was open.
/// </summary>
private void StopCapture()
{
    // Tell every connected client that capture has stopped.
    foreach (var pair in m_WebSocketClients)
    {
        var data = new MessageData { type = MessageData.Type.StopCapture };
        var json = JsonConvert.SerializeObject(data);
        pair.Value.Send(json);
    }

    m_Capture.StopAsync();

    if (checkBox_sendAudio.Checked || checkBox_recordAudio.Checked)
    {
        m_Recorder.StopRecording();
    }

    if (m_AviWriter != null)
    {
        Debug.Log("" + m_AviVideoStream.FramesWritten);
        m_AviWriter.Close();
        m_AviWriter = null;
        m_AviVideoStream = null;
        m_AviAudioStream = null;
    }
}
// NOTE(review): statement order in this constructor matters — the writer and
// video stream must exist before audio streams are attached, and the loopback
// fallback is reached via the exception path below. Comments only; logic unchanged.
public Recorder(RecorderParams Params)
{
    this.Params = Params;

    if (Params.CaptureVideo)
    {
        // Create AVI writer and specify FPS
        writer = Params.CreateAviWriter();

        // Create video stream
        videoStream = Params.CreateVideoStream(writer);

        // Set only name. Other properties were when creating stream,
        // either explicitly by arguments or implicitly by the encoder used
        videoStream.Name = "Captura";
    }

    try
    {
        // int.Parse throws when AudioSourceId is not numeric; that (or any failure
        // below, e.g. writer being null when CaptureVideo is false) jumps to the
        // catch block, which switches to loopback capture.
        int AudioSourceId = int.Parse(Params.AudioSourceId);

        if (AudioSourceId != -1)
        {
            if (Params.CaptureVideo)
            {
                audioStream = Params.CreateAudioStream(writer);
                audioStream.Name = "Voice";
            }

            audioSource = new WaveInEvent
            {
                DeviceNumber = AudioSourceId,
                WaveFormat = Params.WaveFormat,
                // Buffer size to store duration of 1 frame
                BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
                NumberOfBuffers = 3,
            };
        }
    }
    catch
    {
        // Fallback: capture the loopback (system sound) device. A silence stream
        // is played on it — presumably so the loopback source keeps producing
        // data even when nothing else is playing; TODO confirm.
        var dev = Params.LoopbackDevice;

        SilencePlayer = new WasapiOut(dev, AudioClientShareMode.Shared, false, 100);
        SilencePlayer.Init(new SilenceProvider(Params.WaveFormat));
        SilencePlayer.Play();

        if (Params.CaptureVideo)
        {
            audioStream = Params.CreateAudioStream(writer);
            audioStream.Name = "Loopback";
        }

        audioSource = new WasapiLoopbackCapture(dev) { ShareMode = AudioClientShareMode.Shared };
    }

    if (Params.CaptureVideo)
    {
        screenThread = new Thread(RecordScreen)
        {
            Name = typeof(Recorder).Name + ".RecordScreen",
            IsBackground = true
        };
    }
    else WaveWriter = Params.CreateWaveWriter();

    if (Params.CaptureVideo) screenThread.Start();

    if (audioSource != null)
    {
        audioSource.DataAvailable += AudioDataAvailable;

        if (Params.CaptureVideo)
        {
            // Gate audio blocks behind the first written video frame.
            videoFrameWritten.Set();
            audioBlockWritten.Reset();
        }
        audioSource.StartRecording();
    }
}
/// <summary>
/// Records this instance.
/// </summary>
/// <exception cref="RedCell.Research.ResearchException">
/// Filename must be set before recording.
/// or
/// A recording has already started.
/// </exception>
/// <exception cref="System.InvalidOperationException">Filename must be set before recording.</exception>
public void StartRecord()
{
    // Fall back to a timestamped filename when none was supplied.
    if (string.IsNullOrWhiteSpace(Filename))
    {
        Filename = "Video_" + DateTime.Now.ToString("yyyyMMddHHmmss");
    }

    if (_writer != null)
    {
        throw new ResearchException("A recording has already started.");
    }

    // Pull video details (size / framerate) from the camera when not preset.
    if (StreamSetting == null)
    {
        StreamSetting = Camera.StreamSetting;
    }

    var encoder = new Mpeg4VideoEncoderVcm(
        StreamSetting.Width, StreamSetting.Height, StreamSetting.Framerate,
        0, 50, KnownFourCCs.Codecs.X264);

    _writer = new AviWriter(Filename);
    _video = _writer.AddEncodingVideoStream(encoder, true, StreamSetting.Width, StreamSetting.Height);

    // Frames are written as the camera delivers them.
    Camera.FrameAvailable += Camera_FrameAvailable;
}
/// <summary>
/// Wraps an AVI video stream and pre-allocates a BGRA32 frame cache matching its dimensions.
/// </summary>
internal MJpegAviFrameWriter(IAviVideoStream writer)
{
    _Writer = writer;
    _Cache = new MemoryBitmap<Pixel.BGRA32>(writer.Width, writer.Height, Pixel.BGRA32.Format);
}
public Recorder(string filePath, int quality, int x, int y, int width, int height, bool captureCursor, int inputSourceIndex, bool separateAudio) { this.x = x; this.y = y; this.width = width; this.height = height; this.captureCursor = captureCursor; writer = new AviWriter(filePath) { FramesPerSecond = 15, EmitIndex1 = true, }; if (quality == 0) { videoStream = writer.AddUncompressedVideoStream(width, height); videoStream.Name = "Quick Screen Recorder - Motion JPEG video stream"; } else { videoStream = writer.AddMotionJpegVideoStream(width, height, quality); videoStream.Name = "Quick Screen Recorder - Motion JPEG video stream"; } if (inputSourceIndex >= 0) { var waveFormat = new WaveFormat(44100, 16, 1); audioStream = writer.AddAudioStream( channelCount: waveFormat.Channels, samplesPerSecond: waveFormat.SampleRate, bitsPerSample: waveFormat.BitsPerSample ); audioStream.Name = "Quick Screen Recorder - Input audio stream"; audioSource = new WaveInEvent() { DeviceNumber = inputSourceIndex, WaveFormat = waveFormat, BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond), NumberOfBuffers = 3, }; audioSource.DataAvailable += audioSource_DataAvailable; if (separateAudio) { waveFile = new WaveFileWriter(filePath + " (Input audio).wav", audioSource.WaveFormat); } } else if (inputSourceIndex == -1) { audioSource = new WasapiLoopbackCapture(); audioStream = writer.AddAudioStream( channelCount: 1, samplesPerSecond: audioSource.WaveFormat.SampleRate, bitsPerSample: audioSource.WaveFormat.BitsPerSample ); audioStream.Name = "Quick Screen Recorder - System sounds audio stream"; audioSource.DataAvailable += audioSource_DataAvailable; if (separateAudio) { waveFile = new WaveFileWriter(filePath + " (System sounds).wav", audioSource.WaveFormat); } } screenThread = new Thread(RecordScreen) { Name = typeof(Recorder).Name + ".RecordScreen", IsBackground = true }; if (audioSource != null) { videoFrameWritten.Set(); audioBlockWritten.Reset(); audioSource.StartRecording(); } 
tf.compat.v1.disable_eager_execution(); //var tfSession = tf.Session(); detectorGraph = tf.Graph().as_default(); detectorGraph.Import(Path.Combine(modelDir, pbDetectorFile)); emotionGraph = tf.Graph().as_default(); emotionGraph.Import(Path.Combine(modelDir, pbEmotionFile)); Console.WriteLine(Environment.OSVersion, Color.Yellow); Console.WriteLine($"64Bit Operating System: {Environment.Is64BitOperatingSystem}", Color.Yellow); Console.WriteLine($"TensorFlow.NET v{Assembly.GetAssembly(typeof(TF_DataType)).GetName().Version}", Color.Yellow); Console.WriteLine($"TensorFlow Binary v{tf.VERSION}", Color.Yellow); Console.WriteLine($".NET CLR: {Environment.Version}", Color.Yellow); Console.WriteLine(Environment.CurrentDirectory, Color.Yellow); //writerCSV = new StreamWriter("output.csv"); //csv = new CsvWriter(writerCSV, CultureInfo.InvariantCulture); using (var writer = new StreamWriter("output.csv")) using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture)) { csv.WriteHeader <CSVRecord>(); csv.NextRecord(); } screenThread.Start(); }