Code Example #1
        private void StartRecordingThread(string videoFileName, ISeleniumTest scenario, int fps)
        {
            fileName = videoFileName;

            using (writer = new AviWriter(videoFileName)
            {
                FramesPerSecond = fps,
                EmitIndex1 = true
            })
            {
                stream = writer.AddVideoStream();
                this.ConfigureStream(stream);

                while (!scenario.IsFinished && !forceStop)
                {
                    GetScreenshot(buffer);

                    // Serialize access: the stream may be written from more than one thread
                    lock (locker)
                    {
                        stream.WriteFrame(true, buffer, 0, buffer.Length);
                    }

                }
            }

        }
Code Example #2
 public void Recording(byte[] frameData)
 {
     stream.WriteFrame(true,            // is key frame? (many codecs use concept of key frames, for others - all frames are keys)
                       frameData,       // array with frame data
                       0,               // starting index in the array
                       frameData.Length // length of the data
                       );
 }
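For context, here is a minimal end-to-end sketch of how a call like the one above typically fits into a SharpAvi recording session. It is assembled only from API members that appear in the examples on this page (AviWriter, FramesPerSecond, EmitIndex1, AddVideoStream, KnownFourCCs.Codecs.Uncompressed, WriteFrame, Close); the frame size, the GrabFrame delegate, and the method name are illustrative assumptions, and the snippet presumes the usual using SharpAvi; and using SharpAvi.Output; directives.

 // Minimal sketch, not taken from any project above. Assumes 640x480 32bpp frames
 // delivered by a caller-supplied GrabFrame delegate (hypothetical).
 public static void WriteSampleVideo(string path, Func<byte[]> GrabFrame, int frameCount)
 {
     var writer = new AviWriter(path)
     {
         FramesPerSecond = 30,
         EmitIndex1      = true
     };

     // Uncompressed 32bpp stream; see the other examples for MJPEG / MPEG-4 variants
     var stream = writer.AddVideoStream(640, 480, BitsPerPixel.Bpp32);
     stream.Codec = KnownFourCCs.Codecs.Uncompressed;

     for (var i = 0; i < frameCount; i++)
     {
         var frame = GrabFrame();                         // expected length: 640 * 480 * 4 bytes
         stream.WriteFrame(true, frame, 0, frame.Length); // uncompressed video: every frame is a key frame
     }

     writer.Close(); // finalizes the AVI index and releases the file
 }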
Code Example #3
        public void Init()
        {
            _writer = new AviWriter(VideoPath)
            {
                FramesPerSecond = 30,
                EmitIndex1      = true
            };

            try {
                _stream = _writer.AddUncompressedVideoStream(DmdWidth, DmdHeight);
                Logger.Info("Uncompressed encoder found.");
            } catch (InvalidOperationException e) {
                Logger.Warn("Error creating Uncompressed encoded stream: {0}.", e.Message);
            }

            try {
                if (_stream == null)
                {
                    _stream = _writer.AddMpeg4VideoStream(
                        DmdWidth, DmdHeight, Fps,
                        quality: 100,
                        codec: KnownFourCCs.Codecs.X264,
                        forceSingleThreadedAccess: true
                        );
                    Logger.Info("X264 encoder found.");
                }
            } catch (InvalidOperationException e) {
                Logger.Warn("Error creating X264 encoded stream: {0}.", e.Message);
            }

            try {
                if (_stream == null)
                {
                    _stream = _writer.AddMotionJpegVideoStream(DmdWidth, DmdHeight,
                                                               quality: 100
                                                               );
                    Logger.Info("MJPEG encoder found.");
                }
            } catch (InvalidOperationException e) {
                Logger.Warn("Error creating MJPEG encoded stream: {0}.", e.Message);
            }

            if (_stream == null)
            {
                Logger.Error("No encoder available, aborting.");
                return;
            }
            _animation = Observable
                         .Interval(TimeSpan.FromTicks(1000 * TimeSpan.TicksPerMillisecond / Fps))
                         .Subscribe(_ => {
                if (_frame != null)
                {
                    _stream?.WriteFrame(true, _frame, 0, _frame.Length);
                }
            });
            Logger.Info("Writing video to {0}.", VideoPath);
        }
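Init only sets the writer up; the matching teardown is not part of this example. Below is a hedged sketch of what disposal could look like for the same fields, assuming _animation is the IDisposable returned by Subscribe, that the fields are writable, and that Close finalizes the file as in the other examples on this page; the method name Stop is hypothetical.

        // Hypothetical counterpart to Init(); names and ordering are assumptions, not the project's actual code.
        public void Stop()
        {
            // Dispose the Rx timer first so no further WriteFrame calls race the writer shutdown
            _animation?.Dispose();
            _animation = null;

            // Closing the writer finalizes the AVI index and releases the output file
            _writer?.Close();
            _writer = null;
            _stream = null;

            Logger.Info("Finished writing video to {0}.", VideoPath);
        }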
Code Example #4
        public void WriteFrame(SpanBitmap bmp)
        {
            _Cache.AsTypeless().SetPixels(0, 0, bmp);

            if (_Cache.TryGetBuffer(out var segment))
            {
                _Writer.WriteFrame(true, segment.Array, segment.Offset, segment.Count);
            }
        }
Code Example #5
File: AviWriter.cs Project: alantan/Captura
        /// <summary>
        /// Writes an Image frame.
        /// </summary>
        public void WriteFrame(IBitmapFrame Frame)
        {
            if (!(Frame is RepeatFrame))
            {
                using (Frame)
                {
                    Frame.CopyTo(_videoBuffer);
                }
            }

            lock (_syncLock)
            {
                if (IsTransparentOrTruncatedFrame(_videoBuffer))
                {
                    // To avoid dropped frames, just repeat the previous one

                    if (_hasOneGoodFrame)
                    {
                        // Use previous frame instead

                        _videoStream.WriteFrame(true, _prevVideoBuffer, 0, _prevVideoBuffer.Length);
                    }
                    else
                    {
                        // Just need to make do with what we have

                        _videoStream.WriteFrame(true, _videoBuffer, 0, _videoBuffer.Length);
                    }

                    return;
                }

                if (!_hasOneGoodFrame)
                {
                    _hasOneGoodFrame = true;
                }

                _videoStream.WriteFrame(true, _videoBuffer, 0, _videoBuffer.Length);

                // Save frame in case we need it as stand-in for next one

                Buffer.BlockCopy(_videoBuffer, 0, _prevVideoBuffer, 0, _videoBuffer.Length);
            }
        }
Code Example #6
        /// <summary>
        /// Render a video based on JPEG-images
        /// </summary>
        /// <param name="fps">Requested frames-per-second</param>
        /// <param name="width">Width of the images</param>
        /// <param name="height">Height of the images</param>
        /// <param name="quality">Requested quality</param>
        /// <param name="path">Path to the folder containing frame-images</param>
        /// <param name="renderGuid">Unique GUID for this frame-batch</param>
        /// <returns>Path to the video</returns>
        public static async Task <string> RenderVideoAsync(int fps, int width, int height, int quality, string path, string renderGuid)
        {
            if (quality < 1 || quality > 100)
            {
                throw new ArgumentException("Quality can only be between 1 and 100.");
            }

            Task <string> renderT = Task.Run(() =>
            {
                // Compose output path
                string outputPath = string.Format("{0}/{1}.avi", path, renderGuid);

                // Create a new writer with the requested FPS
                AviWriter writer = new AviWriter(outputPath)
                {
                    FramesPerSecond = fps
                };

                // Create a new stream to process it
                IAviVideoStream stream = writer.AddEncodingVideoStream(new MotionJpegVideoEncoderWpf(width, height, quality));
                stream.Width           = width;
                stream.Height          = height;

                // Create an output stream
                byte[] frameData = new byte[stream.Width * stream.Height * 4];

                // Retrieve all images for this batch
                string[] images = Directory.GetFiles(path, string.Format("{0}*.jpg", renderGuid));

                // Process image per image
                foreach (string file in images)
                {
                    // Decode the bitmap
                    JpegBitmapDecoder decoder = new JpegBitmapDecoder(new Uri(file), BitmapCreateOptions.None, BitmapCacheOption.Default);

                    // Get bitmap source
                    BitmapSource source = decoder.Frames[0];

                    // Copy pixels (stride = width * 4 bytes for 32bpp output)
                    source.CopyPixels(frameData, width * 4, 0);

                    // Write it to the stream
                    stream.WriteFrame(true, frameData, 0, frameData.Length);
                }

                // Close writer
                writer.Close();

                return(outputPath);
            });

            return await renderT;
        }
Code Example #7
        /// <summary>
        /// Writes an Image frame.
        /// </summary>
        /// <param name="Image">The Image frame to write.</param>
        public void WriteFrame(Bitmap Image)
        {
            var bits = Image.LockBits(new Rectangle(Point.Empty, Image.Size), ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);

            Marshal.Copy(bits.Scan0, _videoBuffer, 0, _videoBuffer.Length);
            Image.UnlockBits(bits);

            Image.Dispose();

            _videoStream.WriteFrame(true, _videoBuffer, 0, _videoBuffer.Length);
        }
Code Example #8
File: AviWriter.cs Project: zaifworks/Captura
        /// <summary>
        /// Writes an Image frame.
        /// </summary>
        public void WriteFrame(IBitmapFrame Frame)
        {
            if (!(Frame is RepeatFrame))
            {
                using (Frame)
                {
                    Frame.CopyTo(_videoBuffer, _videoBuffer.Length);
                }
            }

            lock (_writer)
                _videoStream.WriteFrame(true, _videoBuffer, 0, _videoBuffer.Length);
        }
Code Example #9
        /// <summary>
        /// Writes an Image frame.
        /// </summary>
        public void WriteFrame(IBitmapFrame frame)
        {
            if (!(frame is RepeatFrame))
            {
                using (frame)
                {
                    frame.CopyTo(_videoBuffer, _videoBuffer.Length);
                }
            }

            lock (_syncLock)
                _videoStream.WriteFrame(true, _videoBuffer, 0, _videoBuffer.Length);
        }
Code Example #10
        public void OnRenderUpdated(object _, Bitmap image)
        {
            if (_videoStream == null)
            {
                CreateVideoStream(image.Width, image.Height);
            }

            var buffer = new byte[image.Width * image.Height * 4];

            CopyToBuffer(image, buffer);

            var isKeyFrame = _videoStream.FramesWritten % 24 == 0;

            _videoStream.WriteFrame(isKeyFrame, buffer, 0, buffer.Length);
        }
Code Example #11
File: PeeperForm.cs Project: mcjt/Peeper
        void Record()
        {
            int  FrameRate = (int)FpsUpDown.Value;
            Size RSize     = RecordPanel.Size;

            int       RndNum            = new Random().Next();
            string    OutFile           = Path.GetFullPath(string.Format("rec_{0}.avi", RndNum));
            string    OutFileCompressed = Path.GetFullPath(string.Format("rec_{0}.webm", RndNum));
            AviWriter Writer            = new AviWriter(OutFile);

            Writer.FramesPerSecond = FrameRate;
            Writer.EmitIndex1      = true;

            IAviVideoStream VStream = Writer.AddVideoStream(RSize.Width, RSize.Height, BitsPerPixel.Bpp24);

            VStream.Codec        = KnownFourCCs.Codecs.Uncompressed;
            VStream.BitsPerPixel = BitsPerPixel.Bpp24;

            Bitmap   Bmp = new Bitmap(RSize.Width, RSize.Height);
            Graphics G   = Graphics.FromImage(Bmp);

            Stopwatch SWatch = new Stopwatch();

            SWatch.Start();

            while (!AbortRecording)
            {
                G.CopyFromScreen(DesktopLocation.Add(RecorderOffset), Point.Empty, RSize);
                //G.DrawString("Text Embedding", SystemFonts.CaptionFont, Brushes.Red, new PointF(0, 0));
                Bmp.RotateFlip(RotateFlipType.RotateNoneFlipY);
                byte[] Data = Bmp.ToByteArray();
                VStream.WriteFrame(true, Data, 0, Data.Length);
                // Busy-wait until the frame interval has elapsed before capturing the next frame
                while ((float)SWatch.ElapsedMilliseconds / 1000 < 1.0f / FrameRate)
                {
                    ;
                }
                SWatch.Restart();
            }

            G.Dispose();
            Writer.Close();

            if (WebmCheckBox.Checked)
            {
                Program.FFMPEG("-i \"{0}\" -c:v libvpx -b:v 1M -c:a libvorbis \"{1}\"", OutFile, OutFileCompressed);
                File.Delete(OutFile);
            }
        }
Code Example #12
        /// <summary>
        /// Writes an Image frame.
        /// </summary>
        public void WriteFrame(IBitmapFrame Frame)
        {
            if (!(Frame is RepeatFrame))
            {
                using (Frame)
                {
                    var image = Frame.Bitmap;

                    var bits = image.LockBits(new Rectangle(Point.Empty, image.Size), ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
                    Marshal.Copy(bits.Scan0, _videoBuffer, 0, _videoBuffer.Length);
                    image.UnlockBits(bits);
                }
            }

            lock (_writer)
                _videoStream.WriteFrame(true, _videoBuffer, 0, _videoBuffer.Length);
        }
Code Example #13
        public void SaveAsAvi(string fileName)
        {
            var writer = new AviWriter(fileName)
            {
                FramesPerSecond = (1000000 + this.Header.FrameDelay / 2) / this.Header.FrameDelay,
                EmitIndex1      = true
            };

            try
            {
                IAviVideoStream videoStream = writer.AddMotionJpegVideoStream(this.Header.Width, this.Header.Height, 70);
                IAviAudioStream audioStream = writer.AddAudioStream(this.AudioHeader.NumChannels, this.AudioHeader.Frequency, 16);

                this.BeginPlay();

                try
                {
                    byte[] audio;
                    byte[] video;

                    while (this.RetrieveNextFrame(out audio, out video))
                    {
                        if (video != null)
                        {
                            byte[] buffer = SnmFile.Convert16BppTo32Bpp(video);
                            videoStream.WriteFrame(true, buffer, 0, buffer.Length);
                        }

                        if (audio != null)
                        {
                            audioStream.WriteBlock(audio, 0, audio.Length);
                        }
                    }
                }
                finally
                {
                    this.EndPlay();
                }
            }
            finally
            {
                writer.Close();
            }
        }
Code Example #14
File: AviWriter.cs Project: moondust-git/captura
        /// <summary>
        /// Writes an Image frame.
        /// </summary>
        /// <param name="Image">The Image frame to write.</param>
        public void WriteFrame(Bitmap Image)
        {
            var hash = Image.GetHashCode();

            if (lastFrameHash != hash)
            {
                using (Image)
                {
                    var bits = Image.LockBits(new Rectangle(Point.Empty, Image.Size), ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
                    Marshal.Copy(bits.Scan0, _videoBuffer, 0, _videoBuffer.Length);
                    Image.UnlockBits(bits);
                }

                lastFrameHash = hash;
            }

            lock (_writer)
                _videoStream.WriteFrame(true, _videoBuffer, 0, _videoBuffer.Length);
        }
Code Example #15
        private static void Process(IAviSettings aviSettings, IReadOnlyCollection <string> imageFiles)
        {
            if (imageFiles.Count == 0)
            {
                return;
            }

            using (var writer = new AviWriter(aviSettings.OutputAvi)
            {
                FramesPerSecond = aviSettings.FPS, EmitIndex1 = true
            })
            {
                IAviVideoStream stream    = null;
                byte[]          buffer    = null;
                bool            first     = true;
                var             rectangle = new Rectangle();

                foreach (var file in imageFiles)
                {
                    using (var bitmap = (Bitmap)Image.FromFile(file))
                    {
                        if (first)
                        {
                            first = false;

                            //stream = writer.AddUncompressedVideoStream(image.Width, image.Height);
                            stream = writer.AddMotionJpegVideoStream(bitmap.Width, bitmap.Height, quality: 90);
                            //stream = writer.AddMpeg4VideoStream(image.Width, image.Height, fps, quality: 70, codec: KnownFourCCs.Codecs.MicrosoftMpeg4V2, forceSingleThreadedAccess: true);

                            buffer    = new byte[bitmap.Width * bitmap.Height * 4 /* four bytes per pixel */];
                            rectangle = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
                        }


                        var raw = bitmap.LockBits(rectangle, ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
                        Marshal.Copy(raw.Scan0, buffer, 0, buffer.Length);
                        bitmap.UnlockBits(raw);

                        stream.WriteFrame(true, buffer, 0, buffer.Length);
                    }
                }
            }
        }
Code Example #16
File: Xp.cs Project: aaru-dps/Aaru.VideoNow
        public static void Decode(string filename, Stream fs, bool swapped, long framePosition)
        {
            char progress = ' ';

            var aviWriter = new AviWriter(filename + ".avi")
            {
                EmitIndex1 = true, FramesPerSecond = 18
            };

            IAviVideoStream videoStream = aviWriter.AddVideoStream(144, 80, BitsPerPixel.Bpp24);

            videoStream.Codec = KnownFourCCs.Codecs.Uncompressed;
            IAviAudioStream audioStream = aviWriter.AddAudioStream(2, 17784, 8);

            fs.Position = framePosition;
            byte[] frameBuffer = new byte[19760];
            fs.Read(frameBuffer, 0, frameBuffer.Length);

            int audioStart = swapped ? 9 : 8;

            byte[] frameMarkerToUse = swapped ? SwappedFrameMarker : FrameMarker;
            byte[] frameMaskToUse   = swapped ? SwappedFrameMask : FrameMask;

            if (swapped)
            {
                frameBuffer = Swapping.SwapBuffer(frameBuffer);
            }

            var outFs = new MemoryStream();

            for (int i = 9; i <= frameBuffer.Length; i += 10)
            {
                switch ((i / 10) % 4)
                {
                case 0:
                    progress = '-';

                    break;

                case 1:
                    progress = '\\';

                    break;

                case 2:
                    progress = '|';

                    break;

                case 3:
                    progress = '/';

                    break;
                }

                Console.Write($"\r{Localization.ExtractingAudio}", progress);
                outFs.WriteByte(frameBuffer[i]);
            }

            byte[] videoFrame = Color.DecodeFrame(frameBuffer);
            videoStream.WriteFrame(true, videoFrame, 0, videoFrame.Length);
            audioStream.WriteBlock(outFs.ToArray(), 0, (int)outFs.Length);

            int totalFrames = 1;

            framePosition += 19760;
            byte[] buffer = new byte[frameMarkerToUse.Length];

            while (framePosition + 19760 < fs.Length)
            {
                switch (totalFrames % 4)
                {
                case 0:
                    progress = '-';

                    break;

                case 1:
                    progress = '\\';

                    break;

                case 2:
                    progress = '|';

                    break;

                case 3:
                    progress = '/';

                    break;
                }

                Console.Write($"\r{Localization.LookingForMoreFrames}", progress);

                for (int i = 0; i < buffer.Length; i++)
                {
                    buffer[i] &= frameMaskToUse[i];
                }

                if (!buffer.SequenceEqual(frameMarkerToUse))
                {
                    Console.Write("\r                                      \r");
                    Console.WriteLine(Localization.FrameAndNextAreNotAligned, totalFrames);
                    long expectedFramePosition = framePosition;

                    while (framePosition < fs.Length)
                    {
                        fs.Position = framePosition;
                        fs.Read(buffer, 0, buffer.Length);

                        for (int i = 0; i < buffer.Length; i++)
                        {
                            buffer[i] &= frameMaskToUse[i];
                        }

                        if (buffer.SequenceEqual(frameMarkerToUse))
                        {
                            Console.Write("\r                                      \r");

                            fs.Position = framePosition;
                            frameBuffer = new byte[19760];
                            fs.Read(frameBuffer, 0, frameBuffer.Length);

                            if (swapped)
                            {
                                frameBuffer = Swapping.SwapBuffer(frameBuffer);
                            }

                            outFs = new MemoryStream();

                            for (int i = 9; i <= frameBuffer.Length; i += 10)
                            {
                                switch ((i / 10) % 4)
                                {
                                case 0:
                                    progress = '-';

                                    break;

                                case 1:
                                    progress = '\\';

                                    break;

                                case 2:
                                    progress = '|';

                                    break;

                                case 3:
                                    progress = '/';

                                    break;
                                }

                                Console.Write($"\r{Localization.ExtractingAudio}", progress);
                                outFs.WriteByte(frameBuffer[i]);
                            }

                            videoFrame = Color.DecodeFrame(frameBuffer);
                            videoStream.WriteFrame(true, videoFrame, 0, videoFrame.Length);
                            audioStream.WriteBlock(outFs.ToArray(), 0, (int)outFs.Length);

                            totalFrames++;
                            Console.Write("\r                                      \r");

                            Console.WriteLine(Localization.FrameFoundAtPosition, framePosition, totalFrames,
                                              framePosition - expectedFramePosition);

                            Console.WriteLine(framePosition % 2352 == 0 ? Localization.FrameIsAtSectorBoundary
                                                                        : Localization.FrameIsNotAtSectorBoundary,
                                              totalFrames);

                            framePosition += 19760;

                            break;
                        }

                        framePosition++;
                    }

                    continue;
                }

                if (framePosition % 2352 == 0)
                {
                    Console.Write("\r                                      \r");
                    Console.WriteLine(Localization.FrameIsAtSectorBoundary, totalFrames);
                }

                Console.Write("\r                                      \r");
                fs.Position = framePosition;
                frameBuffer = new byte[19760];
                fs.Read(frameBuffer, 0, frameBuffer.Length);

                if (swapped)
                {
                    frameBuffer = Swapping.SwapBuffer(frameBuffer);
                }

                outFs = new MemoryStream();

                for (int i = 9; i <= frameBuffer.Length; i += 10)
                {
                    switch ((i / 10) % 4)
                    {
                    case 0:
                        progress = '-';

                        break;

                    case 1:
                        progress = '\\';

                        break;

                    case 2:
                        progress = '|';

                        break;

                    case 3:
                        progress = '/';

                        break;
                    }

                    Console.Write($"\r{Localization.ExtractingAudio}", progress);
                    outFs.WriteByte(frameBuffer[i]);
                }

                videoFrame = Color.DecodeFrame(frameBuffer);
                videoStream.WriteFrame(true, videoFrame, 0, videoFrame.Length);
                audioStream.WriteBlock(outFs.ToArray(), 0, (int)outFs.Length);

                totalFrames++;
                fs.Position = framePosition;
                fs.Read(buffer, 0, buffer.Length);
                framePosition += 19760;
            }

            Console.Write("\r                                      \r");
            Console.WriteLine(Localization.FramesFound, totalFrames);

            outFs.Close();
            aviWriter.Close();
        }
Code Example #17
        /// <summary>
        /// Handles the FrameAvailable event of the Camera control.
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The <see cref="CameraFrameEventArgs"/> instance containing the event data.</param>
        private void Camera_FrameAvailable(object sender, CameraFrameEventArgs e)
        {
            var frame = Camera.GetRawFrame(e.Value.SourceFrame);

            _video.WriteFrame(true, frame, 0, frame.Length);
        }