static void TestFFMPEG()
{
    var videoWriter = new VideoFileWriter();

    int width = 800;
    int height = 600;
    int framerate = 24;
    string path = Path.GetFullPath("output.webm");
    int videoBitRate = 1200 * 1000;
    int audioFrameSize = 44100;
    int audioBitRate = 128000;
    int audioSampleRate = 44100;
    AudioLayout audioChannels = AudioLayout.Mono;

    videoWriter.Width = width;
    videoWriter.Height = height;
    videoWriter.FrameRate = framerate;
    videoWriter.VideoCodec = VideoCodec.Vp8;
    videoWriter.BitRate = videoBitRate;
    videoWriter.PixelFormat = AVPixelFormat.FormatYuv420P;
    videoWriter.Open(path); //, audioFrameSize, audioChannels, audioSampleRate, AudioCodec.Vorbis, audioBitRate);

    // Enumerate the available audio capture devices (not used further here):
    var a = new Accord.DirectSound.AudioDeviceCollection(DirectSound.AudioDeviceCategory.Capture);

    // Create a sine generator for the audio signal:
    SineGenerator gen = new SineGenerator()
    {
        SamplingRate = audioSampleRate,
        Channels = 1,
        Format = SampleFormat.Format16Bit,
        Frequency = 10,
        Amplitude = 1000.9f,
    };

    // Generate 255 seconds of audio:
    Signal s = gen.Generate(TimeSpan.FromSeconds(255));
    //s.Save("test.wav");

    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame (note: every frame is written with the same 1-second timestamp):
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(1));

        //// Generate 1 second of audio
        //s = gen.Generate(TimeSpan.FromSeconds(1));
        //videoWriter.WriteAudioFrame(s);
    }

    videoWriter.Close();
}
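The AudioDeviceCollection created above is never used. Purely as an illustration, here is a small sketch of enumerating those capture devices, assuming the Accord.DirectSound collection is enumerable over AudioDeviceInfo entries as in the original AForge API; ListCaptureDevices is a hypothetical helper name, not part of the snippet above.

using System;
using Accord.DirectSound;

static void ListCaptureDevices()
{
    // Collect the audio capture devices known to DirectSound:
    var devices = new AudioDeviceCollection(AudioDeviceCategory.Capture);

    // Print each device's GUID and human-readable description:
    foreach (AudioDeviceInfo device in devices)
        Console.WriteLine("{0}: {1}", device.Guid, device.Description);
}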
static void TestFFMPEG2()
{
    string outputPath = Path.GetFullPath("output.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.Mpeg4,
        //PixelFormat = Accord.Video.FFMPEG.PixelFormat.FormatYUV420P
    };

    // We can now open it for writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding the media streams created by FFMPEG. We can also
    // check those properties using the class itself, especially for properties
    // that we didn't set beforehand but that have been filled in by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;

    // We haven't set these properties, but FFMPEG has filled them in for us:
    AudioCodec audioCodec = videoWriter.AudioCodec;
    int audioSampleRate = videoWriter.SampleRate;
    AudioLayout audioChannels = videoWriter.AudioLayout;
    int numberOfChannels = videoWriter.NumberOfChannels;

    // Now, let's say we would like to save dummy images of changing color:
    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        // Create a bitmap from a matrix of intensity values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify the
        // timestamp at which this frame should appear in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
    }

    // Close the stream so the file is flushed to disk:
    videoWriter.Close();
}
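The two examples above only write video. To sanity-check the output, the file can be read back; this is a minimal sketch assuming the VideoFileReader class from the same Accord.Video.FFMPEG namespace (Open, ReadVideoFrame, Close), and ReadBackExample is a hypothetical helper name, not part of the original snippets.

using System;
using System.Drawing;
using Accord.Video.FFMPEG;

static void ReadBackExample(string path)
{
    using (var videoReader = new VideoFileReader())
    {
        videoReader.Open(path);

        // These properties are filled from the container/stream headers:
        int width = videoReader.Width;
        int height = videoReader.Height;
        long frameCount = videoReader.FrameCount;

        // Read frames until the stream ends (ReadVideoFrame returns null at end-of-stream):
        Bitmap frame;
        while ((frame = videoReader.ReadVideoFrame()) != null)
        {
            // ... inspect or process the frame here ...
            frame.Dispose();
        }

        videoReader.Close();
    }
}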
public void write_video_new_api()
{
    string basePath = TestContext.CurrentContext.TestDirectory;

    #region doc_new_api
    // Let's say we would like to save a file using an H.265 codec for the
    // video stream and AAC for the audio stream, into the file:
    string outputPath = Path.Combine(basePath, "output_audio.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.H265,
        AudioCodec = AudioCodec.Aac,
        AudioBitRate = 44100,
        AudioLayout = AudioLayout.Stereo,
        FrameSize = 44100,
        PixelFormat = AVPixelFormat.FormatYuv420P
    };

    // We can now open it for writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding the media streams created by FFMPEG. We can also
    // check those properties using the class itself, especially for properties
    // that we didn't set beforehand but that have been filled in by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;
    AudioCodec audioCodec = videoWriter.AudioCodec;
    AudioLayout audioLayout = videoWriter.AudioLayout;
    int audioChannels = videoWriter.NumberOfChannels;

    // We haven't set these properties, but FFMPEG has filled them in for us:
    int audioSampleRate = videoWriter.SampleRate;
    int audioSampleSize = videoWriter.FrameSize;

    // Now, let's say we would like to save dummy images of
    // changing color, with a sine wave as the audio stream:
    var g = new SineGenerator()
    {
        Channels = 1, // we will generate only one channel; the file writer converts on-the-fly
        Format = SampleFormat.Format32BitIeeeFloat,
        Frequency = 10f,
        Amplitude = 0.9f,
        SamplingRate = 44100
    };

    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        // Create a bitmap from a matrix of intensity values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify the
        // timestamp at which this frame should appear in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));

        // We can also write the audio samples if we need to:
        Signal signal = g.Generate(TimeSpan.FromSeconds(1)); // generate 1 second of audio
        videoWriter.WriteAudioFrame(signal); // save it to the stream
    }

    // We can query how long the written video is:
    TimeSpan duration = videoWriter.Duration;

    // Close the stream
    videoWriter.Close();
    #endregion

    Assert.AreEqual(2540000000, duration.Ticks);
    Assert.AreEqual(800, width);
    Assert.AreEqual(600, height);
    Assert.AreEqual(24, frameRate);
    Assert.AreEqual(1200000, bitRate);
    Assert.AreEqual(VideoCodec.H265, videoCodec);
    Assert.AreEqual(AudioCodec.Aac, audioCodec);
    Assert.AreEqual(44100, audioSampleRate);
    Assert.AreEqual(AudioLayout.Stereo, audioLayout);
    Assert.AreEqual(2, audioChannels);
}
public void write_video_new_api()
{
    string basePath = TestContext.CurrentContext.TestDirectory;

    #region doc_new_api
    // Let's say we would like to save a file using an .avi media
    // container and an MPEG-4 (DivX/Xvid) codec, saving it into:
    string outputPath = Path.Combine(basePath, "output_video.avi");

    // First, we create a new VideoFileWriter:
    var videoWriter = new VideoFileWriter()
    {
        // Our video will have the following characteristics:
        Width = 800,
        Height = 600,
        FrameRate = 24,
        BitRate = 1200 * 1000,
        VideoCodec = VideoCodec.Mpeg4,
    };

    // We can now open it for writing:
    videoWriter.Open(outputPath);

    // At this point, we can check the console of our application for useful
    // information regarding the media streams created by FFMPEG. We can also
    // check those properties using the class itself, especially for properties
    // that we didn't set beforehand but that have been filled in by FFMPEG:
    int width = videoWriter.Width;
    int height = videoWriter.Height;
    int frameRate = videoWriter.FrameRate.Numerator;
    int bitRate = videoWriter.BitRate;
    VideoCodec videoCodec = videoWriter.VideoCodec;

    // We haven't set these properties, but FFMPEG has filled them in for us:
    AudioCodec audioCodec = videoWriter.AudioCodec;
    int audioSampleRate = videoWriter.SampleRate;
    AudioLayout audioLayout = videoWriter.AudioLayout;
    int audioChannels = videoWriter.NumberOfChannels;

    // Now, let's say we would like to save dummy images of changing color:
    var m2i = new MatrixToImage();
    Bitmap frame;

    for (byte i = 0; i < 255; i++)
    {
        // Create a bitmap from a matrix of intensity values:
        byte[,] matrix = Matrix.Create(height, width, i);
        m2i.Convert(matrix, out frame);

        // Write the frame to the stream. We can optionally specify the
        // timestamp at which this frame should appear in the stream:
        videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
    }

    // We can get how long our written video is:
    TimeSpan duration = videoWriter.Duration;

    // Close the stream
    videoWriter.Close();
    videoWriter.Dispose();
    #endregion

    Assert.AreEqual(2540000000, duration.Ticks);
    Assert.AreEqual(800, width);
    Assert.AreEqual(600, height);
    Assert.AreEqual(24, frameRate);
    Assert.AreEqual(1200000, bitRate);
    Assert.AreEqual(VideoCodec.Mpeg4, videoCodec);
    Assert.IsTrue(AudioCodec.Default == audioCodec || AudioCodec.Mp3 == audioCodec);
    Assert.AreEqual(44100, audioSampleRate);
    Assert.AreEqual(AudioLayout.Stereo, audioLayout);
    Assert.AreEqual(2, audioChannels);
}
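The tests above call Close() and Dispose() explicitly, which confirms VideoFileWriter implements IDisposable. A using block gives the same cleanup even if an exception interrupts the write loop; here is a minimal sketch of that pattern using the same writer API as above, where WriteSolidFrames is a hypothetical helper name introduced only for this illustration.

using System;
using System.Drawing;
using Accord.Imaging.Converters;
using Accord.Math;
using Accord.Video.FFMPEG;

static void WriteSolidFrames(string outputPath)
{
    // The writer is wrapped in a using block so the file is closed
    // and flushed even if writing a frame throws:
    using (var videoWriter = new VideoFileWriter()
    {
        Width = 800,
        Height = 600,
        FrameRate = 24,
        VideoCodec = VideoCodec.Mpeg4
    })
    {
        videoWriter.Open(outputPath);

        var m2i = new MatrixToImage();
        Bitmap frame;

        for (byte i = 0; i < 255; i++)
        {
            // One solid-color frame per second of video:
            byte[,] matrix = Matrix.Create(videoWriter.Height, videoWriter.Width, i);
            m2i.Convert(matrix, out frame);
            videoWriter.WriteVideoFrame(frame, TimeSpan.FromSeconds(i));
            frame.Dispose();
        }
    } // Dispose() closes the output file here
}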
protected override void OnDisappearing()
{
    AudioLayout.PauseAllPlayers();
    base.OnDisappearing();
}
protected override void OnSleep()
{
    // Stop long polling without awaiting the result (fire-and-forget):
    _ = longPollingManager.Stop().ConfigureAwait(false);

    // Pause any active audio players while the app is in the background:
    AudioLayout.PauseAllPlayers();

    base.OnSleep();
}
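The AudioLayout used in the two lifecycle overrides above is an app-specific helper that exposes a static PauseAllPlayers method; it is not the Accord AudioLayout enum from the earlier snippets. Purely as an illustration, here is a hypothetical sketch of what such a helper could look like; the IAudioPlayer interface, the Register method, and the internal list are invented names, not part of any library or of the original code.

using System.Collections.Generic;

// Hypothetical audio player contract assumed by the sketch below:
public interface IAudioPlayer
{
    void Pause();
}

// Hypothetical static helper: players register themselves, and the
// lifecycle overrides pause them all when the page or app goes away.
public static class AudioLayout
{
    static readonly List<IAudioPlayer> players = new List<IAudioPlayer>();

    public static void Register(IAudioPlayer player) => players.Add(player);

    public static void PauseAllPlayers()
    {
        foreach (var player in players)
            player.Pause();
    }
}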