Example No. 1
        static void Main(string[] args)
        {
            var file1 = @"C:\Users\masaaoki\Videos\Conversation - 180.mp4";
            var file2 = @"C:\Users\masaaoki\Videos\Captures\Provisioning Flow - Top - Google Chrome 2020-05-31 18-06-04.mp4";

            args = new[] { file2, DefaultOutPath, "200" };

            string fullFileName = null;
            string outPath      = null;
            int    interval     = -1;

            try
            {
                switch (args.Length)
                {
                case 3:
                    fullFileName = args[0];
                    outPath      = args[1];
                    interval     = int.Parse(args[2]);
                    break;

                case 2:
                    fullFileName = args[0];
                    outPath      = args[1];
                    break;

                case 1:
                    fullFileName = args[0];
                    break;

                default:
                    PrintHelpMessage();
                    return;
                }

                if (!File.Exists(fullFileName))
                {
                    Debug.WriteLine("file not exist.");
                    return;
                }

                if (!Directory.Exists(outPath))
                {
                    Debug.WriteLine("folder not exist.");
                    return;
                }

                var lib = new VideoLib();
                lib.ExtractImage(fullFileName, outPath, interval);
            }
            catch (Exception)
            {
                // Rethrow unchanged; kept only as a hook for future logging.
                throw;
            }
        }
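
The snippet references two members it does not define, DefaultOutPath and PrintHelpMessage(). A minimal sketch of what they might look like, purely as an illustration; the real project may define them differently:

        // Hypothetical stand-ins for the members referenced above.
        const string DefaultOutPath = @"C:\temp";

        static void PrintHelpMessage()
        {
            Console.WriteLine("usage: <videoFile> [outPath] [interval]");
        }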
Example No. 2
        void adjustAudioLength(VideoLib.AudioFrame frame)
        {

            //videoDebug.AudioFrameLengthAdjust = 0;

            if (syncMode == SyncMode.AUDIO_SYNCS_TO_VIDEO)
            {

                int n = videoDecoder.NrChannels * videoDecoder.BytesPerSample;

                double diff = audioPlayer.getAudioClock() - getVideoClock();

                if (Math.Abs(diff) < AV_NOSYNC_THRESHOLD)
                {

                    // accumulate the diffs
                    audioDiffCum = diff + audioDiffAvgCoef * audioDiffCum;

                    if (audioDiffAvgCount < AUDIO_DIFF_AVG_NB)
                    {

                        audioDiffAvgCount++;

                    }
                    else
                    {

                        double avgDiff = audioDiffCum * (1.0 - audioDiffAvgCoef);

                        // Shrinking/expanding buffer code....
                        if (Math.Abs(avgDiff) >= audioDiffThreshold)
                        {

                            int wantedSize = (int)(frame.Length + diff * videoDecoder.SamplesPerSecond * n);

                            // get a correction percent from 10 to 60 based on the avgDiff
                            // in order to converge a little faster
                            double correctionPercent = Misc.clamp(10 + (Math.Abs(avgDiff) - audioDiffThreshold) * 15, 10, 60);

                            //Util.DebugOut(correctionPercent);

                            //AUDIO_SAMPLE_CORRECTION_PERCENT_MAX

                            int minSize = (int)(frame.Length * ((100 - correctionPercent) / 100));
                            int maxSize = (int)(frame.Length * ((100 + correctionPercent) / 100));

                            if (wantedSize < minSize)
                            {

                                wantedSize = minSize;

                            }
                            else if (wantedSize > maxSize)
                            {

                                wantedSize = maxSize;
                            }

                            // make sure the samples stay aligned after resizing the buffer
                            wantedSize = (wantedSize / n) * n;

                            if (wantedSize < frame.Length)
                            {

                                // remove samples 
                                //videoDebug.AudioFrameLengthAdjust = wantedSize - frame.Length;
                                frame.Length = wantedSize;

                            }
                            else if (wantedSize > frame.Length)
                            {

                                // add samples by copying final samples
                                int nrExtraSamples = wantedSize - frame.Length;
                                //videoDebug.AudioFrameLengthAdjust = nrExtraSamples;

                                byte[] lastSample = new byte[n];

                                for (int i = 0; i < n; i++)
                                {

                                    lastSample[i] = frame.Data[frame.Length - n + i];
                                }

                                frame.Stream.Position = frame.Length;

                                while (nrExtraSamples > 0)
                                {

                                    frame.Stream.Write(lastSample, 0, n);
                                    nrExtraSamples -= n;
                                }

                                frame.Stream.Position = 0;
                                frame.Length = wantedSize;
                            }

                        }

                    }

                }
                else
                {

                    // difference is TOO big; reset diff stuff 
                    audioDiffAvgCount = 0;
                    audioDiffCum = 0;
                }
            }

        }
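
The correction-percent computation relies on a Misc.clamp helper that is not shown in the example. A minimal sketch, assuming the usual clamp-to-range semantics:

        // Hypothetical stand-in for Misc.clamp as used above: bound a value to
        // the inclusive range [min, max].
        public static double clamp(double value, double min, double max)
        {
            return value < min ? min : (value > max ? max : value);
        }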
Example No. 3
        void adjustAudioSamplesPerSecond(VideoLib.AudioFrame frame)
        {

            //videoDebug.AudioFrameLengthAdjust = 0;

            if (syncMode == SyncMode.AUDIO_SYNCS_TO_VIDEO)
            {

                int n = videoDecoder.NrChannels * videoDecoder.BytesPerSample;

                double diff = audioPlayer.getAudioClock() - getVideoClock();

                if (Math.Abs(diff) < AV_NOSYNC_THRESHOLD)
                {

                    // accumulate the diffs
                    audioDiffCum = diff + audioDiffAvgCoef * audioDiffCum;

                    if (audioDiffAvgCount < AUDIO_DIFF_AVG_NB)
                    {

                        audioDiffAvgCount++;

                    }
                    else
                    {

                        double avgDiff = audioDiffCum * (1.0 - audioDiffAvgCoef);

                        // Shrinking/expanding buffer code....
                        if (Math.Abs(avgDiff) >= audioDiffThreshold)
                        {

                            int wantedSize = (int)(frame.Length + diff * videoDecoder.SamplesPerSecond * n);

                            // get a correction percent from 10 to 60 based on the avgDiff
                            // in order to converge a little faster
                            double correctionPercent = Misc.clamp(10 + (Math.Abs(avgDiff) - audioDiffThreshold) * 15, 10, 60);

                            //Util.DebugOut(correctionPercent);

                            //AUDIO_SAMPLE_CORRECTION_PERCENT_MAX

                            int minSize = (int)(frame.Length * ((100 - correctionPercent) / 100));
                            int maxSize = (int)(frame.Length * ((100 + correctionPercent) / 100));

                            if (wantedSize < minSize)
                            {

                                wantedSize = minSize;

                            }
                            else if (wantedSize > maxSize)
                            {

                                wantedSize = maxSize;
                            }

                            // adjust samples per second to speed up or slow down the
                            // audio; 64-bit intermediates keep length * sps from
                            // overflowing Int32
                            long length = frame.Length;
                            long sps = videoDecoder.SamplesPerSecond;
                            int samplesPerSecond = (int)((length * sps) / wantedSize);
                            //videoDebug.AudioFrameLengthAdjust = samplesPerSecond;
                            audioPlayer.SamplesPerSecond = samplesPerSecond;

                        }
                        else
                        {

                            audioPlayer.SamplesPerSecond = videoDecoder.SamplesPerSecond;
                        }

                    }

                }
                else
                {

                    // difference is TOO big; reset diff stuff 
                    audioDiffAvgCount = 0;
                    audioDiffCum = 0;
                }
            }

        }
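
Unlike Example No. 2, which resizes the frame buffer, this variant leaves the audio data intact and retunes the playback rate: when audio runs behind video, diff is negative, wantedSize comes out smaller than frame.Length, and the computed rate rises above the decoder rate so the frame plays out faster. A quick sanity check of the formula with made-up numbers:

        // Made-up numbers, only to illustrate the rate formula above: an
        // 8192-byte frame at 44100 Hz that the sync logic wants to shrink to
        // 8000 bytes plays back at a higher rate, letting audio catch up.
        long length = 8192;
        long sps = 44100;
        int wantedSize = 8000;
        int samplesPerSecond = (int)((length * sps) / wantedSize); // 45158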
Example No. 4
        public VideoPlayerViewModel(DisplayVideoFrameDelegate displayVideoFrameCallback, 
            VideoLib.VideoPlayer.DecodedVideoFormat decodedVideoFormat)
        {
            this.displayVideoFrameCallback = displayVideoFrameCallback;
            this.decodedVideoFormat = decodedVideoFormat;

            videoDecoder = new VideoLib.VideoPlayer();
            videoDecoder.setLogCallback(videoDecoderLogCallback, true, true);

            audioPlayer = new AudioPlayer();

            videoRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
            videoRefreshTimer.Tick += new EventHandler(videoRefreshTimer_Tick);
            //videoRefreshTimer.SynchronizingObject = this;
            videoRefreshTimer.AutoReset = false;

            audioRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
            audioRefreshTimer.Tick += new EventHandler(audioRefreshTimer_Tick);
            audioRefreshTimer.AutoReset = false;
            //audioRefreshTimer.SynchronizingObject = null;

            // exponential moving-average coefficient: a clock diff observed
            // AUDIO_DIFF_AVG_NB frames ago has decayed to a 1% weight
            audioDiffAvgCoef = Math.Exp(Math.Log(0.01) / AUDIO_DIFF_AVG_NB);

            syncMode = SyncMode.AUDIO_SYNCS_TO_VIDEO;
            VideoState = VideoState.CLOSED;

            playCommand = new Command(new Action(() => startPlay()));
            pauseCommand = new Command(new Action(() => pausePlay()));
            closeCommand = new Command(new Action(() => close()));

            DurationSeconds = 0;
            PositionSeconds = 0;           
        }
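
The audioDiffAvgCoef formula picks the coefficient so that coef raised to the power AUDIO_DIFF_AVG_NB equals 0.01, i.e. a clock diff from AUDIO_DIFF_AVG_NB frames back contributes only 1% to the running average used by the sync code in Examples No. 2 and 3. The constant's value is not shown here; assuming AUDIO_DIFF_AVG_NB = 20 (the value ffplay uses):

        // coef^20 == 0.01 by construction, so each diff's weight fades to 1%
        // over 20 frames; AUDIO_DIFF_AVG_NB = 20 is an assumption.
        double coef = Math.Exp(Math.Log(0.01) / 20); // ~0.7943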
Example No. 5
        void write(VideoLib.AudioFrame frame)
        {

            if (audioBuffer == null || frame.Length == 0) return;

            // store pts for this frame and the byte offset at which this frame is
            // written
            pts = frame.Pts;
            ptsPos = offsetBytes;

            int playPos, writePos;
            audioBuffer.GetCurrentPosition(out playPos, out writePos);

            if (playPos <= offsetBytes && offsetBytes < writePos)
            {

                log.Warn("playpos:" + playPos.ToString() + " offset:" + offsetBytes.ToString() + " writePos:" + writePos.ToString() + " dataSize:" + frame.Length.ToString());
                offsetBytes = writePos;
            }

            audioBuffer.Write(frame.Data, 0, frame.Length, offsetBytes, LockFlags.None);

            offsetBytes = (offsetBytes + frame.Length) % bufferSizeBytes;

            if (audioState == AudioState.START_PLAY_AFTER_NEXT_WRITE)
            {

                audioBuffer.Play(0, PlayFlags.Looping);
                audioState = AudioState.PLAYING;
            }

        }
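
pts and ptsPos are stored so that getAudioClock(), used by the sync code in the earlier examples, can be derived from the buffer's play cursor. The method itself is not shown; a hypothetical sketch of how such a clock could be computed (bytesPerSecond, meaning channels * bytesPerSample * sample rate, is an assumed field, not one from the example):

        // Hypothetical sketch, not the project's actual implementation: advance
        // the last written frame's pts by how far the play cursor has moved past
        // the offset at which that frame was written (modulo the circular buffer).
        double getAudioClockSketch()
        {
            int playPos, writePos;
            audioBuffer.GetCurrentPosition(out playPos, out writePos);
            int bytesPlayed = (playPos - ptsPos + bufferSizeBytes) % bufferSizeBytes;
            return pts + (double)bytesPlayed / bytesPerSecond; // bytesPerSecond assumed
        }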
Example No. 6
        public void play(VideoLib.AudioFrame frame)
        {

            if (audioBuffer == null || frame.Length == 0) return;
       
            // store pts for this frame and the byte offset at which this frame is
            // written
            pts = frame.Pts;
            ptsPos = offsetBytes;

            int playPos, writePos;
            audioBuffer.GetCurrentPosition(out playPos, out writePos);

            if (playPos <= offsetBytes && offsetBytes < writePos)
            {

                //log.Warn("playpos:" + playPos.ToString() + " offset:" + offsetBytes.ToString() + " writePos:" + writePos.ToString() + " dataSize:" + frame.Length.ToString());
                offsetBytes = writePos;
            }

            audioBuffer.Write(frame.Data, 0, frame.Length, offsetBytes, LockFlags.None);

            offsetBytes = (offsetBytes + frame.Length) % bufferSizeBytes;

            if (Status == BufferStatus.None)
            {
                // start playing
                audioBuffer.Play(0, PlayFlags.Looping);               
            }

            //System.Diagnostics.Debug.Print("AudioClock:" + getAudioClock().ToString());
        }
Example No. 7
        void seekFunc(double positionSeconds, VideoLib.VideoPlayer.SeekKeyframeMode mode)
        {
            if (VideoState == VideoPlayerControl.VideoState.CLOSED)
            {
                return;
            }

            // Wait for video and audio decoding to block, to make sure no packets
            // are in limbo before flushing any ffmpeg internal or external queues.
            videoDecoder.FrameQueue.setState(FrameQueue.FrameQueueState.BLOCK, FrameQueue.FrameQueueState.BLOCK,
                FrameQueue.FrameQueueState.BLOCK);

            if (videoDecoder.seek(positionSeconds, mode))
            {
                // flush the frame queue and the audio player buffer
                videoDecoder.FrameQueue.flush();
                audioPlayer.flush();

                audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
            }

            if (VideoState == VideoPlayerControl.VideoState.PLAYING)
            {
                videoDecoder.FrameQueue.setState(FrameQueue.FrameQueueState.PLAY,
                    FrameQueue.FrameQueueState.PLAY, FrameQueue.FrameQueueState.PLAY);
            }
            else if (VideoState == VideoPlayerControl.VideoState.PAUSED)
            {
                // display the first new frame in paused mode
                videoDecoder.FrameQueue.startSingleFrame();
            }
        }
Example No. 8
        public async Task seek(double positionSeconds, VideoLib.VideoPlayer.SeekKeyframeMode mode = VideoLib.VideoPlayer.SeekKeyframeMode.SEEK_BACKWARDS)
        {
            if (videoDecoder.FrameQueue.IsBuffering) return;

            if (SeekTask != null)
            {
                try
                {
                    await SeekTask;
                }
                catch (TaskCanceledException)
                {
                    // a cancelled previous seek is expected here; ignore it
                }
            }

            SeekTask = Task.Factory.StartNew(() => seekFunc(positionSeconds, mode), CancelTokenSource.Token);
        }
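
Awaiting the previous SeekTask before queuing a new one serializes seeks without blocking the caller's thread. A hypothetical call site, assuming player is the view-model instance:

            // Hypothetical usage: jump to the two-minute mark, landing on the
            // nearest keyframe at or before that position (the default mode).
            await player.seek(120.0);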
Example No. 9
        public VideoPlayerViewModel(Control owner,
            VideoLib.VideoPlayer.OutputPixelFormat decodedVideoFormat = VideoLib.VideoPlayer.OutputPixelFormat.YUV420P)
        {

            this.owner = owner;
            DecodedVideoFormat = decodedVideoFormat;

            videoDecoder = new VideoLib.VideoPlayer();

            videoDecoder.FrameQueue.Finished += new EventHandler((s, e) =>
            {
                owner.BeginInvoke(new Func<Task>(async () => await close()));
            });

            videoDecoder.FrameQueue.IsBufferingChanged += new EventHandler((s, e) =>
                {
                    owner.BeginInvoke(new Action(() =>
                    {
                        if (IsBufferingChanged != null)
                        {
                            IsBufferingChanged(this, videoDecoder.FrameQueue.IsBuffering);
                        }
                    }));
                });

            audioPlayer = new AudioPlayer(owner);
            videoRender = new VideoRender(owner);

            audioDiffAvgCoef = Math.Exp(Math.Log(0.01) / AUDIO_DIFF_AVG_NB);

            //syncMode = SyncMode.AUDIO_SYNCS_TO_VIDEO;
            syncMode = SyncMode.VIDEO_SYNCS_TO_AUDIO;

            videoRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
            videoRefreshTimer.Tick += new EventHandler(videoRefreshTimer_Tick);
            videoRefreshTimer.AutoReset = false;

            audioRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
            audioRefreshTimer.Tick += new EventHandler(audioRefreshTimer_Tick);
            audioRefreshTimer.AutoReset = false;

            DurationSeconds = 0;
            PositionSeconds = 0;

            videoPts = 0;
            audioPts = 0;

            owner.HandleDestroyed += new EventHandler(async (s, e) => await close());

            VideoState = VideoState.CLOSED;
            VideoLocation = "";

            Subtitles = new Subtitles(Log);
            //interruptIOTokenSource = new CancellationTokenSource();       
        }
Example No. 10
        void displayVideoFrame(VideoLib.VideoFrame videoFrame)
        {
            // copy the decoded frame's pixel data into the YUV texture for rendering
            videoFrame.copyFrameDataTexture(yuvTexture);
        }
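
This method matches the DisplayVideoFrameDelegate parameter taken by the constructor in Example No. 4. A hypothetical wiring, assuming decodedVideoFormat holds a VideoLib.VideoPlayer.DecodedVideoFormat value and yuvTexture already exists:

        // Hypothetical wiring of the callback above into the Example No. 4
        // constructor; decodedVideoFormat is an assumed local variable.
        var viewModel = new VideoPlayerViewModel(displayVideoFrame, decodedVideoFormat);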