private void toolStripButtonRecord_Click(object sender, EventArgs e)
{
    if (is_recording == false)
    {
        saveFileDialog1.Filter = "avi file|*.avi";
        saveFileDialog1.Title = "Save Video File";
        saveFileDialog1.InitialDirectory = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
        saveFileDialog1.FileName = "video_grab_" + System.DateTime.Today.Month.ToString() + "-" +
                                   System.DateTime.Today.Day.ToString() + "-" +
                                   System.DateTime.Today.Year.ToString();
        saveFileDialog1.ShowDialog();

        if (saveFileDialog1.FileName != "")
        {
            // frame_rate = (float) m_imageProvider.get_floatParam("ResultingFrameRateAbs");
            frame_rate = (float)1 / ((float)my_trigger.Period / (float)1000.0);
            Console.WriteLine(frame_rate);
            uint temp = (uint)(frame_rate * 333333);

            aw.Open(saveFileDialog1.FileName, temp,
                    (int)m_imageProvider.get_integerParam("Width"),
                    (int)m_imageProvider.get_integerParam("Height"));

            is_recording = true;
            toolStripButtonRecord.BackColor = System.Drawing.SystemColors.GradientActiveCaption;
            // cmdRecord.BackColor = System.Drawing.SystemColors.GradientActiveCaption;
            // this.cmdRecord.UseVisualStyleBackColor = false;
        }
    }
    else
    {
        is_recording = false;
        aw.Close();
        toolStripButtonRecord.BackColor = System.Drawing.SystemColors.Control;
        // cmdRecord.BackColor = System.Drawing.SystemColors.Control;
        // this.cmdRecord.UseVisualStyleBackColor = true;
    }
}
public void StopRecord()
{
    stopThread.Set();
    screenThread.Join();
    writer.Close();
    stopThread.Close();
}
public void Stoping()
{
    writer.Close();
    writer = null;
    stream = null;
    IsCreateRecord = false;
    IsRecording = false;
}
public void Dispose()
{
    stopThread.Set();
    screenThread.Join();

    // Close writer: the remaining data is written to a file and file is closed
    writer.Close();

    stopThread.Dispose();
}
private void RecordScreen()
{
    while (!stop)
    {
        var buffer = GetScreenshot();
        // Write the captured image into the video stream
        videoStream.WriteFrameAsync(true, buffer, 0, buffer.Length).Wait();
    }
    writer.Close();
}
/// <summary>
/// Render a video based on JPEG-images
/// </summary>
/// <param name="fps">Requested frames-per-second</param>
/// <param name="width">Width of the images</param>
/// <param name="height">Height of the images</param>
/// <param name="quality">Requested quality</param>
/// <param name="path">Path to the folder containing frame-images</param>
/// <param name="renderGuid">Unique GUID for this frame-batch</param>
/// <returns>Path to the video</returns>
public static async Task<string> RenderVideoAsync(int fps, int width, int height, int quality, string path, string renderGuid)
{
    if (quality < 1 || quality > 100)
    {
        throw new ArgumentException("Quality can only be between 1 and 100.");
    }

    Task<string> renderT = Task.Run(() =>
    {
        // Compose output path
        string outputPath = string.Format("{0}/{1}.avi", path, renderGuid);

        // Create a new writer with the requested FPS
        AviWriter writer = new AviWriter(outputPath)
        {
            FramesPerSecond = fps
        };

        // Create a new stream to process it
        IAviVideoStream stream = writer.AddEncodingVideoStream(new MotionJpegVideoEncoderWpf(width, height, quality));
        stream.Width = width;
        stream.Height = height;

        // Create an output frame buffer
        byte[] frameData = new byte[stream.Width * stream.Height * 4];

        // Retrieve all images for this batch
        string[] images = Directory.GetFiles(path, string.Format("{0}*.jpg", renderGuid));

        // Process image per image
        foreach (string file in images)
        {
            // Decode the bitmap
            JpegBitmapDecoder decoder = new JpegBitmapDecoder(new Uri(file), BitmapCreateOptions.None, BitmapCacheOption.Default);

            // Get bitmap source
            BitmapSource source = decoder.Frames[0];

            // Copy pixels (stride is the image width in bytes)
            source.CopyPixels(frameData, width * 4, 0);

            // Write it to the stream
            stream.WriteFrame(true, frameData, 0, frameData.Length);
        }

        // Close writer
        writer.Close();

        return outputPath;
    });

    await renderT;
    return renderT.Result;
}
public static void CreateVideo(List<Frame> frames, string outputFile)
{
    var writer = new AviWriter(outputFile)
    {
        FramesPerSecond = 30,
        // Emitting AVI v1 index in addition to OpenDML index (AVI v2)
        // improves compatibility with some software, including
        // standard Windows programs like Media Player and File Explorer
        EmitIndex1 = true
    };

    // returns IAviVideoStream
    var stream = writer.AddVideoStream();

    // set standard VGA resolution
    stream.Width = 640;
    stream.Height = 480;

    // class SharpAvi.KnownFourCCs.Codecs contains FOURCCs for several well-known codecs
    // Uncompressed is the default value, just set it for clarity
    stream.Codec = KnownFourCCs.Codecs.Uncompressed;

    // Uncompressed format requires to also specify bits per pixel
    stream.BitsPerPixel = BitsPerPixel.Bpp32;

    var frameData = new byte[stream.Width * stream.Height * 4];
    foreach (var item in frames)
    {
        // Say, you have a System.Drawing.Bitmap
        Bitmap bitmap = (Bitmap)item.Image;

        // and buffer of appropriate size for storing its bits
        var buffer = new byte[stream.Width * stream.Height * 4];
        var pixelFormat = PixelFormat.Format32bppRgb;

        // Now copy bits from bitmap to buffer
        var bits = bitmap.LockBits(new Rectangle(0, 0, stream.Width, stream.Height), ImageLockMode.ReadOnly, pixelFormat);
        Marshal.Copy(bits.Scan0, buffer, 0, buffer.Length);
        bitmap.UnlockBits(bits);

        // and flush buffer to encoding stream
        stream.WriteFrame(true, buffer, 0, buffer.Length);
    }
    writer.Close();
}
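The CreateVideo example above only reaches writer.Close() if every frame is written successfully. Below is a minimal sketch, not taken from any of the examples in this collection, of the same SharpAvi setup with the close guaranteed by try/finally; the class name, method name, and frames parameter are placeholders.

using System.Collections.Generic;
using SharpAvi;
using SharpAvi.Output;

public static class SafeAviExample
{
    // Hypothetical variant of the CreateVideo pattern above: each buffer is assumed
    // to already contain Width * Height * 4 bytes of bottom-up 32-bit pixel data.
    public static void WriteFramesSafely(IEnumerable<byte[]> frames, string outputFile)
    {
        var writer = new AviWriter(outputFile)
        {
            FramesPerSecond = 30,
            EmitIndex1 = true
        };

        try
        {
            var stream = writer.AddVideoStream();
            stream.Width = 640;
            stream.Height = 480;
            stream.Codec = KnownFourCCs.Codecs.Uncompressed;
            stream.BitsPerPixel = BitsPerPixel.Bpp32;

            foreach (var buffer in frames)
            {
                stream.WriteFrame(true, buffer, 0, buffer.Length);
            }
        }
        finally
        {
            // Close runs even if a frame write throws, so the AVI headers and index are still finalized
            writer.Close();
        }
    }
}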
public void Dispose()
{
    if (IsPaused)
    {
        Resume();
    }

    stopThread.Set();
    screenThread.Join();
    writer.Close();
    stopThread.Dispose();
}
private static void Main(string[] args)
{
    var firstFrame = CaptureScreen(null);

    var aviWriter = new AviWriter();
    var bitmap = aviWriter.Open("test.avi", 10, firstFrame.Width, firstFrame.Height);

    for (var i = 0; i < 25 * 5; i++)
    {
        CaptureScreen(bitmap);
        aviWriter.AddFrame();
    }

    aviWriter.Close();
}
public void Dispose()
{
    if (IsPaused)
        Resume();

    stopThread.Set();
    screenThread.Join();

    if (audioSource != null)
    {
        audioSource.StopRecording();
        audioSource.DataAvailable -= AudioDataAvailable;
    }

    // Close writer: the remaining data is written to a file and file is closed
    writer.Close();

    stopThread.Close();
}
public void Stop()
{
    _cts.Cancel();
    try
    {
        _workTask.Wait();
    }
    catch (OperationCanceledException)
    {
    }
    finally
    {
        _fileWriter.Close();
    }
}
/// <summary>
/// Stop AVI recording
/// </summary>
public void Stoping()
{
    if (_writer != null)
    {
        // Close the writer to produce a complete AVI file with a proper index/timeline
        _writer.Close();
        _writer = null;
        _stream = null;
    }

    // Recording finished: reset the status flags
    IsCreateRecord = false;
    IsRecording = false;
    IsStopRecord = false;
}
private void Dispose()
{
    stopThread.Set();
    mScreenThread.Join();

    if (mAudioSource != null)
    {
        mAudioSource.StopRecording();
        mAudioSource.DataAvailable -= audioSource_DataAvailable;
    }

    // Close writer: the remaining data is written to a file and file is closed
    mWriter.Close();

    stopThread.Close();
}
void Record()
{
    int FrameRate = (int)FpsUpDown.Value;
    Size RSize = RecordPanel.Size;

    int RndNum = new Random().Next();
    string OutFile = Path.GetFullPath(string.Format("rec_{0}.avi", RndNum));
    string OutFileCompressed = Path.GetFullPath(string.Format("rec_{0}.webm", RndNum));

    AviWriter Writer = new AviWriter(OutFile);
    Writer.FramesPerSecond = FrameRate;
    Writer.EmitIndex1 = true;

    IAviVideoStream VStream = Writer.AddVideoStream(RSize.Width, RSize.Height, BitsPerPixel.Bpp24);
    VStream.Codec = KnownFourCCs.Codecs.Uncompressed;
    VStream.BitsPerPixel = BitsPerPixel.Bpp24;

    Bitmap Bmp = new Bitmap(RSize.Width, RSize.Height);
    Graphics G = Graphics.FromImage(Bmp);

    Stopwatch SWatch = new Stopwatch();
    SWatch.Start();

    while (!AbortRecording)
    {
        G.CopyFromScreen(DesktopLocation.Add(RecorderOffset), Point.Empty, RSize);
        //G.DrawString("Text Embedding", SystemFonts.CaptionFont, Brushes.Red, new PointF(0, 0));

        Bmp.RotateFlip(RotateFlipType.RotateNoneFlipY);
        byte[] Data = Bmp.ToByteArray();
        VStream.WriteFrame(true, Data, 0, Data.Length);

        // Busy-wait until it is time for the next frame
        while ((float)SWatch.ElapsedMilliseconds / 1000 < 1.0f / FrameRate)
        {
            ;
        }
        SWatch.Restart();
    }

    G.Dispose();
    Writer.Close();

    if (WebmCheckBox.Checked)
    {
        Program.FFMPEG("-i \"{0}\" -c:v libvpx -b:v 1M -c:a libvorbis \"{1}\"", OutFile, OutFileCompressed);
        File.Delete(OutFile);
    }
}
public void StopRecord(bool isBreak)
{
    stopThread.Set();
    if (!isBreak)
    {
        screenThread.Join();
    }
    else
    {
        stopThread.Close();
    }

    writer.Close();
    stopThread.Close();
}
// create the video locally
public async Task<string> CreateVideoAsync(List<byte[]> images, int frameRate, int width, int height)
{
    guid = Guid.NewGuid();
    string fileName = guid.ToString() + DateTime.Now.Day.ToString() + DateTime.Now.Millisecond + ".avi";
    string compressedVideoName = null;
    string path = Path.Combine(webHostEnvironment.WebRootPath, "videos", fileName);

    try
    {
        var writer = new AviWriter(path)
        {
            FramesPerSecond = frameRate,
            EmitIndex1 = true
        };

        var stream = writer.AddVideoStream();
        stream.Width = width;
        stream.Height = height;
        stream.Codec = KnownFourCCs.Codecs.Uncompressed;
        stream.BitsPerPixel = BitsPerPixel.Bpp32;

        foreach (var image in images)
        {
            // convert the byte array of the frame to a bitmap and flip it upside down
            var bm = aviVideoServices.ToBitmap(image);

            // reduce the frame size to match the video size
            var rbm = aviVideoServices.ReduceBitmap(bm, width, height);

            // convert the bitmap to a byte array
            byte[] fr = aviVideoServices.BitmapToByteArray(rbm);

            // write the frame to the video
            await stream.WriteFrameAsync(true, fr, 0, fr.Length);
        }

        writer.Close();

        // compress the video
        compressedVideoName = compressService.CompressAndConvertVideo(fileName);
        return compressedVideoName;
    }
    finally
    {
        File.Delete(path);
    }
}
public void Record()
{
    _tempName = "Temp-" + DateTime.Now.Ticks + ".avi";

    var writer = new AviWriter(_tempName)
    {
        FramesPerSecond = 30,
    };

    IAviVideoStream stream = writer.AddMotionJpegVideoStream(ScreenWidth, ScreenHeight);

    var stopwatch = new Stopwatch();
    var buffer = new byte[ScreenWidth * ScreenHeight * 4];
    var shotsTaken = 0;
    var timeTillNextFrame = TimeSpan.Zero;
    Task videoWriteTask = null;
    var isFirstFrame = true;

    stopwatch.Start();
    while (!WaitHandler.WaitOne(timeTillNextFrame))
    {
        GetScreenshot(buffer);

        // Wait for the previous frame to finish writing before reusing the stream
        if (!isFirstFrame)
        {
            videoWriteTask.Wait();
        }

        videoWriteTask = stream.WriteFrameAsync(true, buffer, 0, buffer.Length);
        shotsTaken++; // count the written frame so the pacing below stays on schedule

        timeTillNextFrame = TimeSpan.FromSeconds(shotsTaken / (double)writer.FramesPerSecond - stopwatch.Elapsed.TotalSeconds);
        if (timeTillNextFrame < TimeSpan.Zero)
        {
            timeTillNextFrame = TimeSpan.Zero;
        }

        isFirstFrame = false;
    }
    stopwatch.Stop();

    if (!isFirstFrame)
    {
        videoWriteTask.Wait();
    }

    writer.Close();
}
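The GetScreenshot(buffer) helper used in Record() above is not shown. A minimal sketch of one way such a helper could fill the 32-bit pixel buffer, using the CopyFromScreen and LockBits approach that other examples in this collection use; the body below is an assumption, not the author's implementation, and ScreenWidth/ScreenHeight are taken to be the same fields used in Record().

using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

// Hypothetical helper: captures the primary screen and copies its raw 32-bit pixels
// into the caller's buffer (expected to be ScreenWidth * ScreenHeight * 4 bytes).
private void GetScreenshot(byte[] buffer)
{
    using (var bitmap = new Bitmap(ScreenWidth, ScreenHeight, PixelFormat.Format32bppRgb))
    using (var graphics = Graphics.FromImage(bitmap))
    {
        // Grab the screen into the in-memory bitmap
        graphics.CopyFromScreen(0, 0, 0, 0, new Size(ScreenWidth, ScreenHeight));

        // Copy the raw pixel data into the caller's buffer
        var bits = bitmap.LockBits(new Rectangle(0, 0, ScreenWidth, ScreenHeight),
                                   ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
        Marshal.Copy(bits.Scan0, buffer, 0, buffer.Length);
        bitmap.UnlockBits(bits);
    }
}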
public void SaveAsAvi(string fileName)
{
    var writer = new AviWriter(fileName)
    {
        FramesPerSecond = (1000000 + this.Header.FrameDelay / 2) / this.Header.FrameDelay,
        EmitIndex1 = true
    };

    try
    {
        IAviVideoStream videoStream = writer.AddMotionJpegVideoStream(this.Header.Width, this.Header.Height, 70);
        IAviAudioStream audioStream = writer.AddAudioStream(this.AudioHeader.NumChannels, this.AudioHeader.Frequency, 16);

        this.BeginPlay();
        try
        {
            byte[] audio;
            byte[] video;
            while (this.RetrieveNextFrame(out audio, out video))
            {
                if (video != null)
                {
                    byte[] buffer = SnmFile.Convert16BppTo32Bpp(video);
                    videoStream.WriteFrame(true, buffer, 0, buffer.Length);
                }

                if (audio != null)
                {
                    audioStream.WriteBlock(audio, 0, audio.Length);
                }
            }
        }
        finally
        {
            this.EndPlay();
        }
    }
    finally
    {
        writer.Close();
    }
}
public void Dispose()
{
    stopThread.Set();
    screenThread.Join();

    if (waveFile != null)
    {
        waveFile.Dispose();
        waveFile = null;
    }

    if (audioSource != null)
    {
        audioSource.StopRecording();
        audioSource.DataAvailable -= audioSource_DataAvailable;
    }

    writer.Close();
    stopThread.Dispose();
}
public static void Main()
{
    try
    {
        Color[] palette = new Color[50];
        for (int i = 0; i < 50; i++)
        {
            palette[i] = Color.FromArgb(i * 4, 255 - i * 4, 50 + i * 2);
        }

        int w = 600;
        int h = 600;

        AviWriter aw = new AviWriter();
        Bitmap bmp = aw.Open("test.avi", 25, w, h);

        double f = 1.2;
        double centerX = -0.7454333;
        double centerY = -0.1130211;
        double pctAreaNewImage = 0.9;
        double endWidth_times_2 = 0.0001;

        while (f > endWidth_times_2)
        {
            MandelBrot.CalcMandelBrot(bmp, centerX - f, centerY - f, centerX + f, centerY + f, palette);
            f = Math.Sqrt(pctAreaNewImage * f * f);
            aw.AddFrame();
            Console.Write(".");
        }

        aw.Close();
    }
    catch (AviWriter.AviException e)
    {
        Console.WriteLine("AVI Exception in: " + e.ToString());
    }
}
public void Dispose()
{
    _animation.Dispose();
    _writer.Close();
}
public void Dispose()
{
    _writer.Close();
}
public static void Decode(string filename, Stream fs, bool swapped, long framePosition)
{
    char progress = ' ';

    var aviWriter = new AviWriter(filename + ".avi")
    {
        EmitIndex1 = true,
        FramesPerSecond = 18
    };

    IAviVideoStream videoStream = aviWriter.AddVideoStream(144, 80, BitsPerPixel.Bpp24);
    videoStream.Codec = KnownFourCCs.Codecs.Uncompressed;

    IAviAudioStream audioStream = aviWriter.AddAudioStream(2, 17784, 8);

    fs.Position = framePosition;
    byte[] frameBuffer = new byte[19760];
    fs.Read(frameBuffer, 0, frameBuffer.Length);

    int audioStart = swapped ? 9 : 8;
    byte[] frameMarkerToUse = swapped ? SwappedFrameMarker : FrameMarker;
    byte[] frameMaskToUse = swapped ? SwappedFrameMask : FrameMask;

    if (swapped)
    {
        frameBuffer = Swapping.SwapBuffer(frameBuffer);
    }

    var outFs = new MemoryStream();

    for (int i = 9; i <= frameBuffer.Length; i += 10)
    {
        switch ((i / 10) % 4)
        {
            case 0: progress = '-'; break;
            case 1: progress = '\\'; break;
            case 2: progress = '|'; break;
            case 3: progress = '/'; break;
        }

        Console.Write($"\r{Localization.ExtractingAudio}", progress);
        outFs.WriteByte(frameBuffer[i]);
    }

    byte[] videoFrame = Color.DecodeFrame(frameBuffer);
    videoStream.WriteFrame(true, videoFrame, 0, videoFrame.Length);
    audioStream.WriteBlock(outFs.ToArray(), 0, (int)outFs.Length);

    int totalFrames = 1;
    framePosition += 19760;
    byte[] buffer = new byte[frameMarkerToUse.Length];

    while (framePosition + 19760 < fs.Length)
    {
        switch (totalFrames % 4)
        {
            case 0: progress = '-'; break;
            case 1: progress = '\\'; break;
            case 2: progress = '|'; break;
            case 3: progress = '/'; break;
        }

        Console.Write($"\r{Localization.LookingForMoreFrames}", progress);

        for (int i = 0; i < buffer.Length; i++)
        {
            buffer[i] &= frameMaskToUse[i];
        }

        if (!buffer.SequenceEqual(frameMarkerToUse))
        {
            Console.Write("\r \r");
            Console.WriteLine(Localization.FrameAndNextAreNotAligned, totalFrames);

            long expectedFramePosition = framePosition;

            while (framePosition < fs.Length)
            {
                fs.Position = framePosition;
                fs.Read(buffer, 0, buffer.Length);

                for (int i = 0; i < buffer.Length; i++)
                {
                    buffer[i] &= frameMaskToUse[i];
                }

                if (buffer.SequenceEqual(frameMarkerToUse))
                {
                    Console.Write("\r \r");

                    fs.Position = framePosition;
                    frameBuffer = new byte[19760];
                    fs.Read(frameBuffer, 0, frameBuffer.Length);

                    if (swapped)
                    {
                        frameBuffer = Swapping.SwapBuffer(frameBuffer);
                    }

                    outFs = new MemoryStream();

                    for (int i = 9; i <= frameBuffer.Length; i += 10)
                    {
                        switch ((i / 10) % 4)
                        {
                            case 0: progress = '-'; break;
                            case 1: progress = '\\'; break;
                            case 2: progress = '|'; break;
                            case 3: progress = '/'; break;
                        }

                        Console.Write($"\r{Localization.ExtractingAudio}", progress);
                        outFs.WriteByte(frameBuffer[i]);
                    }

                    videoFrame = Color.DecodeFrame(frameBuffer);
                    videoStream.WriteFrame(true, videoFrame, 0, videoFrame.Length);
                    audioStream.WriteBlock(outFs.ToArray(), 0, (int)outFs.Length);

                    totalFrames++;

                    Console.Write("\r \r");
                    Console.WriteLine(Localization.FrameFoundAtPosition, framePosition, totalFrames,
                                      framePosition - expectedFramePosition);
                    Console.WriteLine(framePosition % 2352 == 0
                                          ? Localization.FrameIsAtSectorBoundary
                                          : Localization.FrameIsNotAtSectorBoundary,
                                      totalFrames);

                    framePosition += 19760;
                    break;
                }

                framePosition++;
            }

            continue;
        }

        if (framePosition % 2352 == 0)
        {
            Console.Write("\r \r");
            Console.WriteLine(Localization.FrameIsAtSectorBoundary, totalFrames);
        }

        Console.Write("\r \r");

        fs.Position = framePosition;
        frameBuffer = new byte[19760];
        fs.Read(frameBuffer, 0, frameBuffer.Length);

        if (swapped)
        {
            frameBuffer = Swapping.SwapBuffer(frameBuffer);
        }

        outFs = new MemoryStream();

        for (int i = 9; i <= frameBuffer.Length; i += 10)
        {
            switch ((i / 10) % 4)
            {
                case 0: progress = '-'; break;
                case 1: progress = '\\'; break;
                case 2: progress = '|'; break;
                case 3: progress = '/'; break;
            }

            Console.Write($"\r{Localization.ExtractingAudio}", progress);
            outFs.WriteByte(frameBuffer[i]);
        }

        videoFrame = Color.DecodeFrame(frameBuffer);
        videoStream.WriteFrame(true, videoFrame, 0, videoFrame.Length);
        audioStream.WriteBlock(outFs.ToArray(), 0, (int)outFs.Length);

        totalFrames++;

        fs.Position = framePosition;
        fs.Read(buffer, 0, buffer.Length);
        framePosition += 19760;
    }

    Console.Write("\r \r");
    Console.WriteLine(Localization.FramesFound, totalFrames);

    outFs.Close();
    aviWriter.Close();
}
public void FileWriterLoop()
{
    try
    {
        writer = new AviWriter(fileName)
        {
            FramesPerSecond = frameRate,
            // Emitting AVI v1 index in addition to OpenDML index (AVI v2)
            // improves compatibility with some software, including
            // standard Windows programs like Media Player and File Explorer
            EmitIndex1 = true
        };

        var stream = writer.AddVideoStream();

        //var codecs = Mpeg4VideoEncoderVcm.GetAvailableCodecs();
        //UniLog.Log(codecs.ToString());
        //FourCC selectedCodec = KnownFourCCs.Codecs.MotionJpeg;
        //var encoder = new Mpeg4VideoEncoderVcm(width, height,
        //    frameRate, // frame rate
        //    0,         // number of frames, if known beforehand, or zero
        //    70         // quality, though usually ignored :(
        //    );
        //var encoder = new SingleThreadedVideoEncoderWrapper(() => new Mpeg4VideoEncoderVcm(width, height,
        //    frameRate,     // frame rate
        //    0,             // number of frames, if known beforehand, or zero
        //    70,            // quality, though usually ignored :(
        //    selectedCodec  // codecs preference
        //    ));
        //var stream = writer.AddEncodingVideoStream(encoder, width: width, height: height);
        //var stream = writer.AddMpeg4VideoStream(width, height, frameRate, 0, 70, selectedCodec, false);
        //var stream = writer.AddMotionJpegVideoStream(width, height, 70);

        stream.Height = height;
        stream.Width = width;
        stream.Codec = KnownFourCCs.Codecs.Uncompressed;
        stream.BitsPerPixel = BitsPerPixel.Bpp32;

        byte[] frame;
        while (true)
        {
            //UniLog.Log("writerloop");
            if (should_finish)
            {
                writer.Close();
                break;
            }

            if (framesQueue.TryDequeue(out frame))
            {
                //UniLog.Log("Writing");
                //Bitmap bmp = ToBitmap(frame);
                //Bitmap bmpReduced = ReduceBitmap(bmp, width, height);
                //System.Drawing.Bitmap bitmap = new System.Drawing.Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                //var bits = bitmap.LockBits(new Rectangle(0, 0, width, height), System.Drawing.Imaging.ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                //Marshal.Copy(frame, 0, bits.Scan0, frame.Length);
                //bitmap.UnlockBits(bits);
                //System.Drawing.Bitmap bmp = new System.Drawing.Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                //using (var gr = Graphics.FromImage(bmp))
                //    gr.DrawImage(bitmap, new Rectangle(0, 0, width, height));
                //var buffer = new byte[width * height * 4];
                //var bits2 = bmp.LockBits(new Rectangle(0, 0, stream.Width, stream.Height), System.Drawing.Imaging.ImageLockMode.ReadOnly, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                //Marshal.Copy(bits2.Scan0, buffer, 0, buffer.Length);
                //bitmap.UnlockBits(bits2);

                stream.WriteFrame(true, frame, 0, frame.Length);
            }
        }

        //loop_finished = true;
        writtingThreadFinnishEvent.Set();
    }
    catch (Exception e)
    {
        UniLog.Log("OwO: " + e.Message);
        UniLog.Log(e.StackTrace);
    }
    finally
    {
        writtingThreadFinnishEvent.Set();
    }
}
public void CaptureFinished()
{
    aviWriter.Close();
    aviWriter = null;
}
public void Close()
{
    aviWriter.Close();
}
private static void CreateMapFromPoints(List<Position> points, Settings settings)
{
    points = points.Select(LocationUtils.ToMercator).ToList();
    var boundingBox = LocationUtils.GetBoundingBox(points);
    var mapper = Tiler.RenderMap(boundingBox, settings.VideoConfig.Width, settings.VideoConfig.Height);

    points = mapper.GetPixels(points).ToList();
    points = points.SkipTooClose(8).ToList();
    points = points.SmoothLineChaikin(settings.SofteningSettings);

    mapper.Save(Path.Combine(settings.OutputDirectory, "empty-map.png"));

    var writer = new AviWriter(Path.Combine(settings.OutputDirectory, "map.avi"))
    {
        FramesPerSecond = settings.VideoConfig.Framerate,
        EmitIndex1 = true
    };

    IAviVideoStream stream = new NullVideoStream(settings.VideoConfig.Width, settings.VideoConfig.Height);
    if (settings.VideoConfig.ProduceVideo)
    {
        var encoder = new MotionJpegVideoEncoderWpf(settings.VideoConfig.Width, settings.VideoConfig.Height, 70);
        stream = writer.AddEncodingVideoStream(encoder, true, settings.VideoConfig.Width, settings.VideoConfig.Height);
        stream.Width = settings.VideoConfig.Width;
        stream.Height = settings.VideoConfig.Height;
    }

    double lengthSeconds = settings.VideoConfig.VideoDuration.TotalSeconds;
    double totalDistanceMeters = 0;
    double yieldFrame = Math.Max(1, points.Count / (lengthSeconds * settings.VideoConfig.Framerate));
    double nextFrame = 1;
    int wroteFrames = 0;

    for (int i = 1; i < points.Count; i++)
    {
        var previousPoint = mapper.FromPixelsToMercator(points[i - 1]);
        var currentPoint = mapper.FromPixelsToMercator(points[i]);
        totalDistanceMeters += previousPoint.DistanceMeters(currentPoint);

        if (mapper.IsStashed)
        {
            mapper.StashPop();
        }

        mapper.DrawLine(points[i - 1], points[i]);

        if (settings.DisplayDistance || settings.DisplayDateTime)
        {
            mapper.Stash();
        }

        if (settings.DisplayDistance)
        {
            mapper.WriteText(string.Format("{0:0}km", totalDistanceMeters / 1000));
        }

        if (settings.DisplayDateTime)
        {
            // mapper.WriteText(points[i].Time.ToString(), settings.VideoConfig.Height - 200);
            Position positionWgs84 = currentPoint.GetWgs84();
            var ianaTz = TimeZoneLookup.GetTimeZone(positionWgs84.Latitude, positionWgs84.Longitude).Result;
            TimeSpan offset = TimeZoneConverter.TZConvert.GetTimeZoneInfo(ianaTz).GetUtcOffset(points[i].Time);
            mapper.WriteText(points[i].Time.ToUniversalTime().Add(offset).ToString("MM/dd hh tt"),
                             settings.VideoConfig.Height - 100);
        }

        if (i >= nextFrame)
        {
            byte[] frameData = mapper.GetBitmap();
            stream.WriteFrame(true, frameData, 0, frameData.Length);
            wroteFrames++;
            nextFrame += yieldFrame;
        }
    }

    if (mapper.IsStashed)
    {
        mapper.StashPop();
    }

    byte[] lastFrameData = mapper.GetBitmap();
    stream.WriteFrame(true, lastFrameData, 0, lastFrameData.Length);

    writer.Close();

    // DrawBoundingBox(boundingBox, mapper);
    string path = Path.Combine(settings.OutputDirectory, "complete-map.png");
    mapper.Save(path);

    Console.WriteLine("Wrote frames: {0}, points.Count={1}, yieldFrame={2}, path={3}",
                      wroteFrames, points.Count, yieldFrame, path);
}
/// <summary>
/// Stops the record.
/// </summary>
public void StopRecord()
{
    Camera.FrameAvailable -= Camera_FrameAvailable;
    _writer.Close();
}
public Recorder(string fileName, FourCC codec, int quality,
                int audioSourceIndex, SupportedWaveFormat audioWaveFormat, bool encodeAudio, int audioBitRate)
{
    System.Windows.Media.Matrix toDevice;
    using (var source = new HwndSource(new HwndSourceParameters()))
    {
        toDevice = source.CompositionTarget.TransformToDevice;
    }

    screenWidth = (int)Math.Round(SystemParameters.PrimaryScreenWidth * toDevice.M11);
    screenHeight = (int)Math.Round(SystemParameters.PrimaryScreenHeight * toDevice.M22);

    try
    {
        // Create AVI writer and specify FPS
        writer = new AviWriter(fileName)
        {
            FramesPerSecond = 10,
            EmitIndex1 = true,
        };
    }
    catch (Exception)
    {
        try
        {
            writer.Close();
        }
        catch (Exception)
        {
        }
        throw;
    }

    // Create video stream
    videoStream = CreateVideoStream(codec, quality);
    // Set only the name. Other properties were set when creating the stream,
    // either explicitly by arguments or implicitly by the encoder used
    videoStream.Name = "Screencast";

    if (audioSourceIndex >= 0)
    {
        var waveFormat = ToWaveFormat(audioWaveFormat);

        audioStream = CreateAudioStream(waveFormat, encodeAudio, audioBitRate);
        // Set only the name. Other properties were set when creating the stream,
        // either explicitly by arguments or implicitly by the encoder used
        audioStream.Name = "Voice";

        audioSource = new WaveInEvent
        {
            DeviceNumber = audioSourceIndex,
            WaveFormat = waveFormat,
            // Buffer size to store duration of 1 frame
            BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
            NumberOfBuffers = 3,
        };
        audioSource.DataAvailable += audioSource_DataAvailable;
    }

    screenThread = new Thread(RecordScreen)
    {
        Name = typeof(Recorder).Name + ".RecordScreen",
        IsBackground = true
    };

    if (audioSource != null)
    {
        videoFrameWritten.Set();
        audioBlockWritten.Reset();
        audioSource.StartRecording();
    }

    screenThread.Start();
}
private void RenderVideo()
{
    try
    {
        if (!TrackResults)
        {
            return;
        }

        Log.Debug($"Generated all images, now rendering movie.");
        //var files = Directory.EnumerateFiles(@"D:\Temp\Light\", "*.bmp");
        //files = files.OrderBy(s => s);
        //int fps = (int) (RenderingTasks.Count()/10f); // Movie should last 5 seconds
        int fps = 10;

        var writer = new AviWriter(@"D:\Temp\Light\test.avi")
        {
            FramesPerSecond = fps,
            // Emitting AVI v1 index in addition to OpenDML index (AVI v2)
            // improves compatibility with some software, including
            // standard Windows programs like Media Player and File Explorer
            EmitIndex1 = true
        };

        var stream = writer.AddVideoStream();
        stream.Width = GetWidth();
        stream.Height = GetHeight();
        stream.Codec = KnownFourCCs.Codecs.Uncompressed;
        stream.BitsPerPixel = BitsPerPixel.Bpp32;

        Log.Debug($"Waiting for image rendering of {RenderingTasks.Count} images to complete");
        foreach (var renderingTask in RenderingTasks)
        {
            renderingTask.RunSynchronously();
            Bitmap image = renderingTask.Result;
            //}
            //foreach (var file in files)
            //{
            lock (_imageSync)
            {
                //Bitmap image = (Bitmap) Image.FromFile(file);
                //image = new Bitmap(image, stream.Width, stream.Height);
                byte[] imageData = (byte[])ToByteArray(image, ImageFormat.Bmp);
                if (imageData == null)
                {
                    Log.Warn($"No image data for file.");
                    continue;
                }

                if (imageData.Length != stream.Height * stream.Width * 4)
                {
                    imageData = imageData.Skip(imageData.Length - (stream.Height * stream.Width * 4)).ToArray();
                }

                // fill frameData with image
                // write data to a frame
                stream.WriteFrame(true,            // is key frame? (many codecs use concept of key frames, for others - all frames are keys)
                                  imageData,       // array with frame data
                                  0,               // starting index in the array
                                  imageData.Length // length of the data
                                  );
            }
        }

        writer.Close();
    }
    catch (Exception e)
    {
        Log.Error("Rendering movie", e);
    }
}
public void Dispose()
{
    _animation.Dispose();
    _writer.Close();
    _stream = null;
}