// Starts (or resumes) playback. No-op when already playing or when no
// video is open; on the first start out of OPEN it also kicks off
// demuxing and the per-stream refresh timers.
public void play()
{
    // Nothing to do when already playing; cannot play a closed video.
    if (VideoState == VideoState.PLAYING || VideoState == VideoState.CLOSED)
    {
        return;
    }

    // Reset both reference clocks to "now" before frames start flowing.
    double now = HRTimer.getTimestamp();
    audioFrameTimer = now;
    videoFrameTimer = now;

    videoDecoder.FrameQueue.setState(
        FrameQueue.FrameQueueState.PLAY,
        FrameQueue.FrameQueueState.PLAY,
        FrameQueue.FrameQueueState.PLAY);

    // First transition out of OPEN: start demuxing and the render timers.
    if (VideoState == VideoState.OPEN)
    {
        startDemuxing();

        if (videoDecoder.HasVideo)
        {
            videoRefreshTimer.start();
        }

        if (videoDecoder.HasAudio)
        {
            audioRefreshTimer.start();
        }
    }

    VideoState = VideoState.PLAYING;
}
// Measures packet serialize/deserialize throughput over ITERATIONS_COUNT
// iterations executed in parallel.
internal static void TestProtoPacketsPerf()
{
    // Warm-up: one serialize/deserialize round trip outside the timed region.
    PUserLogin warmup = new PUserLogin { UserName = "******", Password = "******" };
    warmup.Setup();
    byte[] warmupBytes = warmup.Serialize();
    short warmupSize;
    BasePacket.Deserialize(warmupBytes, warmupBytes.Length, 0, out warmupSize);

    HRTimer timer = HRTimer.CreateAndStart();
    Parallel.For(0, ITERATIONS_COUNT, i => // for (int i = 0; i < ITERATIONS_COUNT; i++)
    {
        // BUGFIX: the packet instance, byte buffer and size variable were
        // captured from the enclosing scope and shared by all Parallel.For
        // workers (a leftover from the serial loop), so concurrent
        // Setup/Serialize calls raced on the same object. Use
        // per-iteration state instead.
        PUserLogin p = new PUserLogin { UserName = "******", Password = "******" };
        p.Setup();
        byte[] ps = p.Serialize();
        short psize;
        BasePacket.Deserialize(ps, ps.Length, 0, out psize);
    }); // }
    System.Console.WriteLine(timer.StopWatch());
}
// Seeks the video to positionSeconds using the given keyframe mode.
// Blocks both decoder queues first so no packets are in flight while the
// ffmpeg queues are flushed, then restores the previous play/pause state.
void seekFunc(double positionSeconds, VideoLib.VideoPlayer.SeekKeyframeMode mode)
{
    if (VideoState == VideoPlayerControl.VideoState.CLOSED)
    {
        return;
    }

    // wait for video and audio decoding to block
    // To make sure no packets are in limbo
    // before flushing any ffmpeg internal or external queues.
    videoDecoder.FrameQueue.setState(FrameQueue.FrameQueueState.BLOCK,
        FrameQueue.FrameQueueState.BLOCK, FrameQueue.FrameQueueState.BLOCK);

    if (videoDecoder.seek(positionSeconds, mode) == true)
    {
        // flush the framequeue and audioplayer buffer
        videoDecoder.FrameQueue.flush();
        audioPlayer.flush();

        // restart both presentation clocks from "now" at the new position
        audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
    }

    if (VideoState == VideoPlayerControl.VideoState.PLAYING)
    {
        videoDecoder.FrameQueue.setState(FrameQueue.FrameQueueState.PLAY,
            FrameQueue.FrameQueueState.PLAY, FrameQueue.FrameQueueState.PLAY);
    }
    else if (VideoState == VideoPlayerControl.VideoState.PAUSED)
    {
        // display the first new frame in paused mode
        videoDecoder.FrameQueue.startSingleFrame();
    }
}
// Times three List<Player> operations in sequence: locked Add, full
// enumeration with a per-element mutation, and predicate-based RemoveAll.
internal static void TestCollectionsPerf()
{
    var players = new List<Player>();

    // Phase 1: Add under a lock (includes lock + list-growth cost).
    HRTimer timer = HRTimer.CreateAndStart();
    for (int i = 0; i < ITERATIONS_COUNT; i++)
    {
        lock (players)
        {
            players.Add(new Player { Id = i });
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    // Phase 2: enumerate and mutate every element.
    timer = HRTimer.CreateAndStart();
    foreach (Player current in players)
    {
        if (current.Id != -1)
        {
            current.Id++;
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    // Phase 3: bulk removal via predicate.
    timer = HRTimer.CreateAndStart();
    players.RemoveAll(item => item.Id > 0);
    System.Console.WriteLine(timer.StopWatch());
}
// Wires up the view model: decoder with logging, audio output, the two
// one-shot high-resolution refresh timers (video + audio) and the
// play/pause/close commands.
// displayVideoFrameCallback: invoked with each decoded frame to render it.
// decodedVideoFormat: pixel format the decoder should output.
public VideoPlayerViewModel(DisplayVideoFrameDelegate displayVideoFrameCallback,
    VideoLib.VideoPlayer.DecodedVideoFormat decodedVideoFormat)
{
    this.displayVideoFrameCallback = displayVideoFrameCallback;
    this.decodedVideoFormat = decodedVideoFormat;

    videoDecoder = new VideoLib.VideoPlayer();
    videoDecoder.setLogCallback(videoDecoderLogCallback, true, true);

    audioPlayer = new AudioPlayer();

    // One-shot timer, re-armed after every frame with the computed delay.
    videoRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    videoRefreshTimer.Tick += new EventHandler(videoRefreshTimer_Tick);
    //videoRefreshTimer.SynchronizingObject = this;
    videoRefreshTimer.AutoReset = false;

    audioRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    audioRefreshTimer.Tick += new EventHandler(audioRefreshTimer_Tick);
    audioRefreshTimer.AutoReset = false;
    //audioRefreshTimer.SynchronizingObject = null;

    // coefficient for the running average of the audio clock difference
    // (same formula as ffplay: exp(log(0.01) / N))
    audioDiffAvgCoef = Math.Exp(Math.Log(0.01) / AUDIO_DIFF_AVG_NB);

    syncMode = SyncMode.AUDIO_SYNCS_TO_VIDEO;
    VideoState = VideoState.CLOSED;

    playCommand = new Command(new Action(() => startPlay()));
    pauseCommand = new Command(new Action(() => pausePlay()));
    closeCommand = new Command(new Action(() => close()));

    DurationSeconds = 0;
    PositionSeconds = 0;
}
// Computer-controlled player for the given piece.
// nDepth and bPerfectFinish are stored for later use by the engine
// (presumably search depth / perfect endgame play — confirm in the
// search code).
public ComputerPlayer(enPiece piece, int nDepth, bool bPerfectFinish)
    : base(piece)
{
    m_nDepth = nDepth;
    m_bPerfectFinish = bPerfectFinish;
    m_timer = new HRTimer();
}
// Presents the next decoded video frame (or the pause overlay), computes
// the synchronized delay and re-arms the refresh timer. Frames are skipped
// (decoded but not rendered) when the delay gets too small.
void processVideoFrame()
{
    bool skipVideoFrame = false;

restartvideo:

    // fallback inter-frame delay (~25 fps) used when not playing
    double actualDelay = 0.04;

    // fit the video into the render canvas, centered
    Rectangle scaledVideoRec = ImageUtils.stretchRectangle(
        new Rectangle(0, 0, videoDecoder.Width, videoDecoder.Height),
        videoRender.Canvas);
    Rectangle canvas = ImageUtils.centerRectangle(videoRender.Canvas, scaledVideoRec);

    // grab a decoded frame, returns false if the queue is stopped
    VideoFrame videoFrame = videoDecoder.FrameQueue.getDecodedVideoFrame();

    if (VideoState == VideoState.CLOSED && videoFrame == null)
    {
        return;
    }
    else if (VideoState == VideoState.PLAYING)
    {
        // NOTE(review): videoFrame is dereferenced here without a null
        // check; this assumes the queue only returns null once the state
        // is CLOSED — confirm, otherwise this can throw a
        // NullReferenceException.
        videoPts = videoFrame.Pts;
        videoPtsDrift = videoFrame.Pts + HRTimer.getTimestamp();

        if (skipVideoFrame == false)
        {
            videoRender.display(videoFrame, canvas, Color.Black, VideoRender.RenderMode.NORMAL);
            videoDebug.VideoFrames = videoDebug.VideoFrames + 1;
        }

        actualDelay = synchronizeVideo(videoPts);
    }
    else if (VideoState == VideoState.PAUSED)
    {
        videoRender.display(null, canvas, Color.Black, VideoRender.RenderMode.PAUSED);
    }

    // do not update ui elements on main thread inside videoStateLock
    // or we can get a deadlock
    videoDebug.update();
    updateUI();

    if (actualDelay < 0.010)
    {
        // delay is too small skip next frame
        skipVideoFrame = true;
        videoDebug.NrVideoFramesDropped = videoDebug.NrVideoFramesDropped + 1;

        goto restartvideo;
    }

    // start timer with delay for next frame
    videoRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    videoRefreshTimer.start();
}
// Wires the view model to its owner control: decoder, audio/video output,
// one-shot refresh timers and frame-queue event forwarding. All queue
// events are marshalled back to the owner's UI thread via BeginInvoke.
// decodedVideoFormat: decoder output pixel format, defaults to YUV420P.
public VideoPlayerViewModel(Control owner,
    VideoLib.VideoPlayer.OutputPixelFormat decodedVideoFormat = VideoLib.VideoPlayer.OutputPixelFormat.YUV420P)
{
    this.owner = owner;
    DecodedVideoFormat = decodedVideoFormat;

    videoDecoder = new VideoLib.VideoPlayer();

    // end of stream: close the player on the UI thread
    videoDecoder.FrameQueue.Finished += new EventHandler((s, e) =>
    {
        owner.BeginInvoke(new Func<Task>(async () => await close()));
    });

    // forward buffering-state changes to subscribers on the UI thread
    videoDecoder.FrameQueue.IsBufferingChanged += new EventHandler((s, e) =>
    {
        owner.BeginInvoke(new Action(() =>
        {
            if (IsBufferingChanged != null)
            {
                IsBufferingChanged(this, videoDecoder.FrameQueue.IsBuffering);
            }
        }));
    });

    audioPlayer = new AudioPlayer(owner);
    videoRender = new VideoRender(owner);

    // coefficient for the running average of the audio clock difference
    // (same formula as ffplay: exp(log(0.01) / N))
    audioDiffAvgCoef = Math.Exp(Math.Log(0.01) / AUDIO_DIFF_AVG_NB);

    //syncMode = SyncMode.AUDIO_SYNCS_TO_VIDEO;
    syncMode = SyncMode.VIDEO_SYNCS_TO_AUDIO;

    // one-shot timers, re-armed after each frame with the computed delay
    videoRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    videoRefreshTimer.Tick += new EventHandler(videoRefreshTimer_Tick);
    videoRefreshTimer.AutoReset = false;

    audioRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    audioRefreshTimer.Tick += new EventHandler(audioRefreshTimer_Tick);
    audioRefreshTimer.AutoReset = false;

    DurationSeconds = 0;
    PositionSeconds = 0;

    videoPts = 0;
    audioPts = 0;

    // make sure playback shuts down when the owning control goes away
    owner.HandleDestroyed += new EventHandler(async (s, e) => await close());

    VideoState = VideoState.CLOSED;
    VideoLocation = "";

    Subtitles = new Subtitles(Log);
    //interruptIOTokenSource = new CancellationTokenSource();
}
// Current video clock in seconds: frozen at the last frame pts while
// paused, otherwise derived from the stored drift minus the current
// timestamp.
double getVideoClock()
{
    return VideoState == VideoState.PAUSED
        ? videoPts
        : videoPtsDrift - HRTimer.getTimestamp();
}
// Current video clock in seconds. While the packet queue is not open or
// the queue is buffering, the clock is frozen at the last frame pts;
// otherwise it is derived from the stored drift minus the current
// timestamp.
double getVideoClock()
{
    bool clockFrozen =
        videoDecoder.FrameQueue.VideoPacketQueueState != PacketQueue.PacketQueueState.OPEN ||
        videoDecoder.FrameQueue.IsBuffering;

    return clockFrozen
        ? videoPts
        : videoPtsDrift - HRTimer.getTimestamp();
}
// Returns how long (seconds) to wait before playing the next audio frame
// of frameLength bytes; a negative value means playback is already behind
// schedule.
double synchronizeAudio(int frameLength)
{
    // bytes consumed per second by the audio output
    int bytesPerSecond =
        audioPlayer.SamplesPerSecond * videoDecoder.BytesPerSample * videoDecoder.NrChannels;

    // nominal playback duration of this frame
    double frameDuration = frameLength / (double)bytesPerSecond;

    // advance the ideal schedule and compare it against the wall clock
    audioFrameTimer += frameDuration;
    return audioFrameTimer - HRTimer.getTimestamp();
}
// Background loop: drains queued responses to their clients once per frame
// budget, then idles away the remainder of the budget. Runs until
// _terminating is set.
private void SendResponsesProc()
{
    // NOTE(review): if WORLD_DELAY_BETWEEN_FRAMES_MS is an integer type,
    // 1000 / WORLD_DELAY_BETWEEN_FRAMES_MS is integer division before the
    // subtraction — confirm the intended frame budget.
    const float timeToCall = 1000 / WORLD_DELAY_BETWEEN_FRAMES_MS - 1;

    HRTimer timer = new HRTimer();
    DateTime opTime = DateTime.UtcNow;
    while (!_terminating)
    {
        if (_unsentResponsesAvailable)
        {
            timer.StartWatch();

            // Snapshot and clear the shared response list under the pool
            // lock so producers can keep queueing while we send.
            Pair<int, BaseResponse>[] responses;
            lock (_responsesPool)
            {
                responses = _responses.ToArray();
                _responses.Clear();
                _unsentResponsesAvailable = false;
            }

            int cnt = responses.Length;
            for (int i = 0; i < cnt; i++) // Parallel.For(0, cnt, i => //!!!
            {
                int clientId;
                Pair<int, BaseResponse> response = responses[i];
                // Key maps to a client id; skip entries with no connected client.
                if (_playerClients.TryGetValue(response.Key, out clientId))
                {
                    _netServer.Send(clientId, response.Value);
                }
                // return the pair object to the pool for reuse
                _responsesPool.Push(response);
            } // });

            WriteLog(LogEventType.TCPResponsesSend, timer.StopWatch().ToString("F"));
        }

        // Sleep away the rest of the frame budget in halves, re-checking
        // the clock each time to reduce overshoot.
        DateTime curTime = DateTime.UtcNow;
        TimeSpan elapsed = curTime - opTime;
        int timeToIdle = (int)(timeToCall - elapsed.TotalMilliseconds);
        while (timeToIdle > 0)
        {
            Thread.Sleep(timeToIdle / 2);
            elapsed = (curTime = DateTime.UtcNow) - opTime;
            timeToIdle = (int)(timeToCall - elapsed.TotalMilliseconds);
        }
        elapsed = curTime - opTime;
        opTime = curTime;
        WriteLog(LogEventType.TCPResponsesProc, elapsed.TotalMilliseconds.ToString("F"));
    }
}
// Control constructor: builds the render/audio pipeline, the decoder with
// logging, the one-shot refresh timers, the debug form, the track-bar
// tooltip, and restores mute/volume from persisted settings.
public VideoPanelViewModel()
{
    //
    //TODO: Add the constructor code here
    //
    //videoRender = null;
    //mediaPlayer.Dock = DockStyle.Fill;
    //mediaPlayer.stretchToFit = true;

    videoRender = new VideoRender(VideoPanel);
    audioPlayer = new StreamingAudioBuffer(this);
    //videoRender.initialize(0,0);

    videoDecoder = new VideoPlayer();
    videoDecoder.FrameQueue.Closed += new EventHandler(frameQueue_Closed);
    videoDecoder.setLogCallback(new VideoPlayer.LogCallbackDelegate(videoDecoderLogCallback), true, true);

    // one-shot timers, re-armed after every frame with the computed delay
    videoRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    videoRefreshTimer.Tick += new EventHandler(videoRefreshTimer_Tick);
    //videoRefreshTimer.SynchronizingObject = this;
    videoRefreshTimer.AutoReset = false;

    audioRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    audioRefreshTimer.Tick += new EventHandler(audioRefreshTimer_Tick);
    audioRefreshTimer.AutoReset = false;
    audioRefreshTimer.SynchronizingObject = null;

    videoDebug = new VideoDebugForm();

    // coefficient for the running average of the audio clock difference
    // (same formula as ffplay: exp(log(0.01) / N))
    audioDiffAvgCoef = Math.Exp(Math.Log(0.01) / AUDIO_DIFF_AVG_NB);

    //syncMode = SyncMode.VIDEO_SYNCS_TO_AUDIO;
    syncMode = SyncMode.AUDIO_SYNCS_TO_VIDEO;

    VideoState = VideoState.CLOSED;

    updateTimeTrackBar = true;

    // floating tooltip shown while dragging the seek track bar
    timeTrackBarToolTip = new CustomToolTip();
    timeTrackBarToolTip.BackColor = SystemColors.Info;
    this.Controls.Add(timeTrackBarToolTip);
    timeTrackBarToolTip.Show();
    timeTrackBarToolTip.BringToFront();
    timeTrackBarToolTip.Visible = false;

    // restore persisted audio settings
    muteCheckBox.Checked = bool.Parse(Settings.getVar(Settings.VarName.VIDEO_MUTED));
    volumeTrackBar.Value = Util.lerp<int>(Double.Parse(Settings.getVar(Settings.VarName.VIDEO_VOLUME)),
        volumeTrackBar.Minimum, volumeTrackBar.Maximum);
}
// Computes the delay (seconds) before presenting the next video frame,
// following the ffplay A/V sync approach: estimate the frame interval
// from the last two pts values, then shorten/stretch it to chase the
// audio clock when video syncs to audio. Also publishes sync state to
// the debug overlay.
double synchronizeVideo(double videoPts)
{
    // assume delay to next frame equals delay between previous frames
    double delay = videoPts - previousVideoPts;

    if (delay <= 0 || delay >= 1.0)
    {
        // if incorrect delay, use previous one
        delay = previousVideoDelay;
    }

    previousVideoPts = videoPts;
    previousVideoDelay = delay;

    if (videoDecoder.HasAudio && syncMode == SyncMode.VIDEO_SYNCS_TO_AUDIO)
    {
        // synchronize video to audio
        double diff = getVideoClock() - audioPlayer.getAudioClock();

        // Skip or repeat the frame. Take delay into account
        // FFPlay still doesn't "know if this is the best guess."
        double sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD;

        // only correct while the drift is small enough to be recoverable
        if (Math.Abs(diff) < AV_NOSYNC_THRESHOLD)
        {
            if (diff <= -sync_threshold)
            {
                // video is behind audio: show the next frame immediately
                delay = 0;
            }
            else if (diff >= sync_threshold)
            {
                // video is ahead of audio: hold this frame twice as long
                delay = 2 * delay;
            }
        }
    }

    // adjust delay based on the actual current time
    videoFrameTimer += delay;
    double actualDelay = videoFrameTimer - HRTimer.getTimestamp();

    // expose sync state for the debug overlay
    videoDebug.VideoDelay = delay;
    videoDebug.ActualVideoDelay = actualDelay;
    videoDebug.VideoSync = getVideoClock();
    videoDebug.AudioSync = audioPlayer.getAudioClock();
    videoDebug.VideoQueueSize = videoDecoder.FrameQueue.VideoPacketsInQueue;
    videoDebug.AudioQueueSize = videoDecoder.FrameQueue.AudioPacketsInQueue;

    return (actualDelay);
}
// NetMQ subscriber client. Forces AsyncIO first, then configures a short
// linger so queued messages can still flush when sockets are disposed,
// before creating the socket, timer and handler table.
public Client()
{
    // AsyncIO must be forced before any NetMQ socket work.
    AsyncIO.ForceDotNet.Force();

    // Allow 250 ms to send pending messages before a socket is disposed
    // (NetMQ's default linger is 0).
    NetMQConfig.Linger = System.TimeSpan.FromMilliseconds(250);

    subscriber = new SubscriberSocket();
    messageHandlers = new SortedDictionary<System.UInt64, Action<NetMQMessage>>();
    timer = new HRTimer();

    config();
}
// Releases the managed resources (render, audio, decoder, token sources
// and timers), nulling each field after disposal. When safe is false
// nothing is touched.
protected virtual void Dispose(bool safe)
{
    if (!safe)
    {
        return;
    }

    if (videoRender != null)
    {
        videoRender.Dispose();
        videoRender = null;
    }
    if (audioPlayer != null)
    {
        audioPlayer.Dispose();
        audioPlayer = null;
    }
    if (videoDecoder != null)
    {
        videoDecoder.Dispose();
        videoDecoder = null;
    }
    if (CancelTokenSource != null)
    {
        CancelTokenSource.Dispose();
        CancelTokenSource = null;
    }
    if (demuxPacketsCancellationTokenSource != null)
    {
        demuxPacketsCancellationTokenSource.Dispose();
        demuxPacketsCancellationTokenSource = null;
    }
    if (videoRefreshTimer != null)
    {
        videoRefreshTimer.Dispose();
        videoRefreshTimer = null;
    }
    if (audioRefreshTimer != null)
    {
        audioRefreshTimer.Dispose();
        audioRefreshTimer = null;
    }
}
// Benchmarks tile storage: raw allocation, linear writes over every tile,
// and writes through the real ServerMap indexer.
internal void TestGameMapPerf()
{
    const ushort mapWidth = 1000, mapHeight = 1000;
    int count = mapWidth * mapHeight;

    // Allocation timing.
    HRTimer timer = HRTimer.CreateAndStart();
#if UNSAFE_ARRAY
    int sizeOfTile = Marshal.SizeOf(typeof(Tile));
    Tile *pTiles = (Tile *)Memory.HeapAlloc(count * sizeOfTile);
#else
    // BUGFIX: this branch referenced MapWidth/MapHeight, which do not match
    // the local constants' casing (mapWidth/mapHeight) and would not compile
    // when UNSAFE_ARRAY is undefined.
    Tile[,] aTiles = new Tile[mapWidth, mapHeight];
#endif
    System.Console.WriteLine(timer.StopWatch());

    // Linear write timing over all tiles.
    timer = HRTimer.CreateAndStart();
    for (int i = 0; i < count; i++)
    {
#if !UNSAFE_ARRAY
        // pin the managed 2D array so it can be written through a raw pointer
        fixed (Tile *pTiles = aTiles)
#endif
        {
            pTiles[i].Type = TileType.Wall;
            pTiles[i].TypeIndex = 100;
        }
    }
    System.Console.WriteLine(timer.StopWatch());
#if UNSAFE_ARRAY
    Memory.HeapFree(pTiles);
#endif

    // Indexer write timing through the real ServerMap.
    using (ServerMap map = new ServerMap(mapWidth, mapHeight, 0))
    {
        timer = HRTimer.CreateAndStart();
        for (ushort y = 0; y < mapHeight; y++)
        {
            for (ushort x = 0; x < mapWidth; x++)
            {
                Tile *tile = map[x, y];
                (*tile).Type = TileType.Nothing;
                (*tile).TypeIndex = 1;
            }
        }
        System.Console.WriteLine(timer.StopWatch());
    }
}
// Video refresh tick: hands the next decoded frame to the display
// callback, computes the synchronized delay and re-arms the one-shot
// timer. Frames are skipped (decoded but not displayed) when the delay
// gets too small.
void videoRefreshTimer_Tick(Object sender, EventArgs e)
{
    bool skipVideoFrame = false;

restartvideo:

    // fallback delay (~25 fps) used when not playing
    double actualDelay = 0.04;

    // grab a decoded frame, returns false if the queue is stopped
    VideoLib.VideoFrame videoFrame = videoDecoder.FrameQueue.getDecodedVideoFrame();
    if (VideoState == VideoState.CLOSED && videoFrame == null)
    {
        return;
    }
    else if (VideoState == VideoState.PLAYING)
    {
        // NOTE(review): videoFrame is dereferenced here without a null
        // check; this assumes the queue only returns null once the state
        // is CLOSED — confirm, otherwise a NullReferenceException is
        // possible.
        videoPts = videoFrame.Pts;
        videoPtsDrift = videoFrame.Pts + HRTimer.getTimestamp();

        if (skipVideoFrame == false && displayVideoFrameCallback != null)
        {
            displayVideoFrameCallback(videoFrame);
        }

        actualDelay = synchronizeVideo(videoPts);
    }
    else if (VideoState == VideoState.PAUSED)
    {
        //videoRender.display(null, canvas, Color.Black, VideoRender.RenderMode.PAUSED);
    }

    updateObservableVariables();

    if (actualDelay < 0.010)
    {
        // delay is too small skip next frame
        skipVideoFrame = true;
        //videoDebug.NrVideoFramesDropped = videoDebug.NrVideoFramesDropped + 1;

        goto restartvideo;
    }

    // start timer with delay for next frame
    videoRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    videoRefreshTimer.start();
}
// Times 100 GetWindow calls of a 150x150 region at the map origin on a
// map loaded from disk.
internal static void TestMapWindowGetPerf()
{
    using (ServerMap serverMap = ServerMap.LoadFromFile("RK.save"))
    {
        ServerMap map = serverMap;
        HRTimer timer = HRTimer.CreateAndStart();
        for (int pass = 0; pass < 100; pass++)
        {
            const int originX = 0, originY = 0;
            const int windowWidth = 150, windowHeight = 150;
            map.GetWindow(originX, originY, windowWidth, windowHeight);
        }
        System.Console.WriteLine(timer.StopWatch());
    }
}
// Times LightLock acquire/release under Parallel.For contention; the
// counter is printed afterwards so the increments are observable work.
internal static void TestHybridLockPerf()
{
    HRTimer timer = HRTimer.CreateAndStart();
    LightLock gate = new LightLock();
    int counter = 0;
    Parallel.For(0, ITERATIONS_COUNT, i =>
    {
        gate.WaitOne();
        counter++;
        gate.Set();
    });
    System.Console.WriteLine(counter.ToString());
    System.Console.WriteLine(timer.StopWatch());
}
// Round-trip test with one server and one client. Per iteration: the
// client connects and sends a packet, the server echoes it back, drops
// the client and signals tcpSync so the next iteration can start.
internal static void TestNetPerf1x1()
{
    PTestXkb tcpPacketToSend = new PTestXkb();
    ManualResetEventSlim tcpSync = new ManualResetEventSlim(false);

    // server: echo the first received packet, then drop the client
    TCPServer tcpServer = new TCPServer(new TCPServerSettings
    (
        100, 100, 10, 1024, 10001
    ));
    tcpServer.ClientDataReceived += (server, clientId, packets) =>
    {
        server.Send(clientId, packets[0]);
    };
    tcpServer.ClientDataSent += (server, clientId) =>
    {
        server.DropClient(clientId);
        tcpSync.Set();
    };
    tcpServer.Start();

    // client: send the test packet as soon as the connection is up
    TCPClient tcpClient = new TCPClient(new TCPClientSettings
    (
        1024, "127.0.0.1", 10001, false
    ));
    tcpClient.Connected += client =>
    {
        client.Send(tcpPacketToSend);
    };

    // timed loop: one full connect/echo/drop cycle per iteration
    HRTimer timer = HRTimer.CreateAndStart();
    for (int i = 0; i < ITERATIONS_COUNT; i++)
    {
        tcpClient.Connect();
        tcpSync.Wait();
        tcpSync.Reset();
    }
    System.Console.WriteLine(timer.StopWatch());
}
// Spawns THREADS_COUNT pool workers that operate on a shared dictionary
// and measures the wall time until every worker signals its event.
internal static void TestMultithreadDictionaryPerf()
{
    var sharedDict = new Dictionary<long, int>();

    _tEvents = new WaitHandle[THREADS_COUNT];
    for (int t = 0; t < THREADS_COUNT; t++)
    {
        _tEvents[t] = new ManualResetEvent(false);
    }

    HRTimer timer = HRTimer.CreateAndStart();
    for (int t = 0; t < THREADS_COUNT; t++)
    {
        // worker args: index, shared dictionary, completion event
        object[] workerArgs = { t, sharedDict, _tEvents[t] };
        ThreadPool.QueueUserWorkItem(DoTestMultithreadDictionary, workerArgs);
    }
    WaitHandle.WaitAll(_tEvents);
    System.Console.WriteLine(timer.StopWatch());
}
// Spawns THREADS_COUNT pool workers that operate on a shared List<Player>
// guarded by a ReaderWriterLockSlim, and measures the wall time until
// every worker signals its event.
internal static void TestMultithreadListPerf()
{
    var sharedList = new List<Player>();
    var listLock = new ReaderWriterLockSlim();

    _tEvents = new WaitHandle[THREADS_COUNT];
    for (int t = 0; t < THREADS_COUNT; t++)
    {
        _tEvents[t] = new ManualResetEvent(false);
    }

    HRTimer timer = HRTimer.CreateAndStart();
    for (int t = 0; t < THREADS_COUNT; t++)
    {
        // worker args: index, shared list, completion event, lock
        object[] workerArgs = { t, sharedList, _tEvents[t], listLock };
        ThreadPool.QueueUserWorkItem(DoTestMultithreadList, workerArgs);
    }
    WaitHandle.WaitAll(_tEvents);
    System.Console.WriteLine(timer.StopWatch());
}
// Builds a list of (id, response) pairs, copies it with ToList, then
// times a foreach pass that writes to every element.
internal static void TestToArrayToListPerf()
{
    var source = new List<Pair<int, BaseResponse>>();
    for (int i = 0; i < ITERATIONS_COUNT; i++)
    {
        source.Add(new Pair<int, BaseResponse>(i, new RUserLogin()));
    }

    var converted = source.ToList();

    // timed: full enumeration touching each element once
    HRTimer timer = HRTimer.CreateAndStart();
    foreach (Pair<int, BaseResponse> entry in converted)
    {
        entry.Value.Id = 0;
    }
    System.Console.WriteLine(timer.StopWatch());
}
// Demux loop for a worker task: pulls packets from the decoder one at a
// time and services seek requests, until demuxPacket reports failure or
// the token is cancelled.
void demuxPackets(CancellationToken token)
{
    // start both presentation clocks at "now"
    audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();

    bool success = true;

    // decode frames one by one, or handle seek requests
    do
    {
        if (seekRequest == true)
        {
            // wait for video and audio decoding to pause/block
            // To make sure no packets are in limbo
            // before flushing any ffmpeg internal or external queues.
            videoDecoder.FrameQueue.pause();

            if (videoDecoder.seek(seekPosition) == true)
            {
                // flush the framequeue and audioplayer buffer
                videoDecoder.FrameQueue.flush();
                audioPlayer.flush();

                // refill/buffer the framequeue from the new position
                fillFrameQueue();

                // restart the presentation clocks at the new position
                audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
            }
            seekRequest = false;

            // allow video and audio decoding to continue
            videoDecoder.FrameQueue.start();
        }
        else
        {
            success = videoDecoder.demuxPacket();
        }
    } while (success == true && !token.IsCancellationRequested);
}
// Control constructor: builds the render/audio pipeline, the decoder with
// logging, the one-shot refresh timers, the debug form, the track-bar
// tooltip, and restores mute/volume from persisted settings.
public VideoPanelViewModel()
{
    //
    //TODO: Add the constructor code here
    //
    //videoRender = null;
    //mediaPlayer.Dock = DockStyle.Fill;
    //mediaPlayer.stretchToFit = true;

    videoRender = new VideoRender(VideoPanel);
    audioPlayer = new StreamingAudioBuffer(this);
    //videoRender.initialize(0,0);

    videoDecoder = new VideoPlayer();
    videoDecoder.FrameQueue.Closed += new EventHandler(frameQueue_Closed);
    videoDecoder.setLogCallback(new VideoPlayer.LogCallbackDelegate(videoDecoderLogCallback), true, true);

    // one-shot timers, re-armed after every frame with the computed delay
    videoRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    videoRefreshTimer.Tick += new EventHandler(videoRefreshTimer_Tick);
    //videoRefreshTimer.SynchronizingObject = this;
    videoRefreshTimer.AutoReset = false;

    audioRefreshTimer = HRTimerFactory.create(HRTimerFactory.TimerType.TIMER_QUEUE);
    audioRefreshTimer.Tick += new EventHandler(audioRefreshTimer_Tick);
    audioRefreshTimer.AutoReset = false;
    audioRefreshTimer.SynchronizingObject = null;

    videoDebug = new VideoDebugForm();

    // coefficient for the running average of the audio clock difference
    // (same formula as ffplay: exp(log(0.01) / N))
    audioDiffAvgCoef = Math.Exp(Math.Log(0.01) / AUDIO_DIFF_AVG_NB);

    //syncMode = SyncMode.VIDEO_SYNCS_TO_AUDIO;
    syncMode = SyncMode.AUDIO_SYNCS_TO_VIDEO;

    VideoState = VideoState.CLOSED;

    updateTimeTrackBar = true;

    // floating tooltip shown while dragging the seek track bar
    timeTrackBarToolTip = new CustomToolTip();
    timeTrackBarToolTip.BackColor = SystemColors.Info;
    this.Controls.Add(timeTrackBarToolTip);
    timeTrackBarToolTip.Show();
    timeTrackBarToolTip.BringToFront();
    timeTrackBarToolTip.Visible = false;

    // restore persisted audio settings
    muteCheckBox.Checked = bool.Parse(Settings.getVar(Settings.VarName.VIDEO_MUTED));
    volumeTrackBar.Value = Util.lerp<int>(Double.Parse(Settings.getVar(Settings.VarName.VIDEO_VOLUME)),
        volumeTrackBar.Minimum, volumeTrackBar.Maximum);
}
// Video refresh tick: presents the next decoded frame (or the pause/clear
// screen), computes the A/V-synchronized delay and re-arms the one-shot
// timer. Frames are skipped (not displayed) when the delay gets too small.
void videoRefreshTimer_Tick(Object sender, EventArgs e)
{
    bool skipVideoFrame = false;

restartvideo:

    // fallback delay (~25 fps) used when paused/stopped
    double actualDelay = 0.04;

    // grab a decoded frame, returns null if the queue is paused or closed
    VideoFrame videoFrame = videoDecoder.FrameQueue.getDecodedVideoFrame();
    if (videoFrame == null)
    {
        if (VideoState == VideoState.CLOSED)
        {
            // final tick: blank the screen and free render resources
            videoRender.display(null, Color.Black, RenderMode.CLEAR_SCREEN);
            videoRender.releaseResources();
            return;
        }

        videoRender.display(null, Color.Black, RenderMode.PAUSED);
    }
    else
    {
        if (videoRender.RenderMode == RenderMode.PAUSED)
        {
            // reset videoFrameTimer before (re)starting rendering
            audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
        }

        videoPts = videoFrame.Pts;
        videoDts = videoFrame.Dts;

        // drift lets getVideoClock() derive the position from wall time
        videoPtsDrift = videoFrame.Pts + HRTimer.getTimestamp();

        if (skipVideoFrame == false)
        {
            videoRender.display(videoFrame, Color.Black, RenderMode.NORMAL);
        }
        actualDelay = synchronizeVideo(videoPts);
        NrFramesRendered++;
        framePts = videoFrame.FramePts;
        //frameDts = videoFrame.FrameDts;
        isKeyFrame = videoFrame.IsKey;
    }

    updateObservableVariables();

    if (actualDelay < 0.010)
    {
        // delay is too small skip next frame
        skipVideoFrame = true;
        NrFramesDropped++;

        goto restartvideo;
    }

    // start timer with delay for next frame
    videoRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    videoRefreshTimer.start();
}
// Benchmarks several ways of copying ITERATIONS_COUNT bytes from one large
// source array into many 100-byte buffers: Array.Copy, Buffer.BlockCopy,
// Marshal.Copy, Memory.Copy/Memory.Move (pinning per iteration vs pinning
// the source once) and the hand-rolled CustomCopy, printing the elapsed
// time of each variant.
internal static void TestDataCopyPerf()
{
    const int buffersSize = 100;
    const int buffersCount = ITERATIONS_COUNT / buffersSize;

    // source: one big array filled with a repeating byte pattern
    byte[] srcData = new byte[ITERATIONS_COUNT];
    for (int i = 0; i < srcData.Length; i++)
    {
        srcData[i] = (byte)i;
    }

    // destination: buffersCount small buffers of buffersSize bytes each
    byte[][] smallBuffers = new byte[buffersCount][];
    for (int i = 0; i < buffersCount; i++)
    {
        smallBuffers[i] = new byte[buffersSize];
    }

    System.Console.WriteLine("Array.Copy:");
    HRTimer timer = HRTimer.CreateAndStart();
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        Array.Copy(srcData, srcIdx, smallBuffers[i], 0, buffersSize);
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("Buffer.BlockCopy:");
    timer = HRTimer.CreateAndStart();
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        Buffer.BlockCopy(srcData, srcIdx, smallBuffers[i], 0, buffersSize);
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("Marshal.Copy:");
    timer = HRTimer.CreateAndStart();
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        fixed (byte *bDestData = smallBuffers[i])
        {
            Marshal.Copy(srcData, srcIdx, new IntPtr(bDestData), buffersSize);
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("memcpy:");
    timer = HRTimer.CreateAndStart();
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        // pins both arrays on every iteration
        fixed (byte *bDestData = smallBuffers[i])
        fixed (byte *bSrcData = srcData)
        {
            Memory.Copy(bDestData, &bSrcData[srcIdx], buffersSize);
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("memcpy2:");
    timer = HRTimer.CreateAndStart();
    // variant: pin the source array once for the whole loop
    fixed (byte *bSrcData = srcData)
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        fixed (byte *bDestData = smallBuffers[i])
        {
            Memory.Copy(bDestData, &bSrcData[srcIdx], buffersSize);
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("memmove:");
    timer = HRTimer.CreateAndStart();
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        fixed (byte *bDestData = smallBuffers[i])
        fixed (byte *bSrcData = srcData)
        {
            Memory.Move(bDestData, &bSrcData[srcIdx], buffersSize);
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("memmove2:");
    timer = HRTimer.CreateAndStart();
    // variant: pin the source array once for the whole loop
    fixed (byte *bSrcData = srcData)
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        fixed (byte *bDestData = smallBuffers[i])
        {
            Memory.Move(bDestData, &bSrcData[srcIdx], buffersSize);
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("CustomCopy:");
    timer = HRTimer.CreateAndStart();
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        fixed (byte *bDestData = smallBuffers[i])
        fixed (byte *bSrcData = srcData)
        {
            CustomCopy(bDestData, &bSrcData[srcIdx], buffersSize);
        }
    }
    System.Console.WriteLine(timer.StopWatch());

    System.Console.WriteLine("CustomCopy2:");
    timer = HRTimer.CreateAndStart();
    // variant: pin the source array once for the whole loop
    fixed (byte *bSrcData = srcData)
    for (int i = 0; i < buffersCount; i++)
    {
        int srcIdx = i * buffersSize;
        fixed (byte *bDestData = smallBuffers[i])
        {
            CustomCopy(bDestData, &bSrcData[srcIdx], buffersSize);
        }
    }
    System.Console.WriteLine(timer.StopWatch());
}
// Audio refresh tick: feeds the next decoded audio frame to the player,
// drops frames that lag too far behind the video clock (when audio syncs
// to video), then re-arms the one-shot timer with the synchronized delay.
void audioRefreshTimer_Tick(Object sender, EventArgs e)
{
restartaudio:

    // fallback delay used while paused (no frame available)
    double actualDelay = 0.04;

    // audio-only files: UI state is otherwise updated by the video tick
    if (!videoDecoder.HasVideo)
    {
        updateObservableVariables();
    }

    // returns null when framequeue is paused or closed
    VideoLib.AudioFrame audioFrame = videoDecoder.FrameQueue.getDecodedAudioFrame();
    if (audioFrame == null)
    {
        // stop audio if playing
        audioPlayer.stop();

        if (VideoState == VideoState.CLOSED)
        {
            audioPlayer.flush();
            return;
        }

        // when paused spin idle
    }
    else
    {
        if (audioPlayer.Status == SharpDX.DirectSound.BufferStatus.None)
        {
            // reset audio frame timer before (re)starting playing
            audioFrameTimer = videoFrameTimer = HRTimer.getTimestamp();
        }

        audioPts = audioFrame.Pts;
        audioDts = audioFrame.Dts;

        // if the audio is lagging behind too much, skip the buffer completely
        double diff = getVideoClock() - audioFrame.Pts;
        if (diff > 0.2 && diff < 3 && syncMode == SyncMode.AUDIO_SYNCS_TO_VIDEO)
        {
            //log.Warn("dropping audio buffer, lagging behind: " + (getVideoClock() - audioFrame.Pts).ToString() + " seconds");
            goto restartaudio;
        }

        //adjustAudioSamplesPerSecond(audioFrame);
        adjustAudioLength(audioFrame);

        audioPlayer.play(audioFrame);

        int frameLength = audioFrame.Length;
        actualDelay = synchronizeAudio(frameLength);
        if (actualDelay < 0)
        {
            // delay too small, play next frame as quickly as possible
            goto restartaudio;
        }
    }

    // start timer with delay for next frame
    audioRefreshTimer.Interval = (int)(actualDelay * 1000 + 0.5);
    audioRefreshTimer.start();
}