/// <summary>
/// write an audio frame to the stream
/// </summary>
/// <param name="samples">raw audio samples (16-bit PCM, interleaved by channel); if length 0, write EOR</param>
/// <exception cref="InvalidOperationException">if audio has already ended, or the video queue indicates A/V desync</exception>
public void WriteAudioFrame(short[] samples)
{
	if (audiodone)
	{
		throw new InvalidOperationException("Can't write audio after end of relevance!");
	}

	// if the opposing queue has piled up, the caller is feeding streams unevenly
	if (videoqueue.Count > 5)
	{
		throw new InvalidOperationException("A\\V Desync?");
	}

	int datalen = samples.Length * sizeof(short);
	byte[] data = _bufferpool.GetBufferAtLeast(datalen);
	Buffer.BlockCopy(samples, 0, data, 0, datalen);

	// a zero-length frame marks end of relevance for the audio stream
	if (datalen == 0)
	{
		audiodone = true;
	}

	// stream index 1 = audio; pts timebase is 1/samplerate
	var f = new NutFrame(data, datalen, audiopts, 1, (ulong)avparams.samplerate, 1, _bufferpool);
	_bufferpool.ReleaseBuffer(data);

	// advance pts by the number of sample frames (samples are interleaved across channels)
	audiopts += (ulong)samples.Length / (ulong)avparams.channels;
	audioqueue.Enqueue(f);

	// flush any queued video frames whose timestamps are now at or before this audio frame
	while (videoqueue.Count > 0 && f >= videoqueue.Peek())
	{
		videoqueue.Dequeue().WriteData(output);
	}
}
/// <summary>
/// write a video frame to the stream
/// </summary>
/// <param name="video">raw video data; if length 0, write EOR</param>
/// <exception cref="InvalidOperationException">if video has already ended, or the audio queue indicates A/V desync</exception>
public void WriteVideoFrame(int[] video)
{
	if (videodone)
	{
		throw new InvalidOperationException("Can't write data after end of relevance!");
	}

	// if the opposing queue has piled up, the caller is feeding streams unevenly
	if (audioqueue.Count > 5)
	{
		throw new InvalidOperationException("A\\V Desync?");
	}

	int datalen = video.Length * sizeof(int);
	byte[] data = _bufferpool.GetBufferAtLeast(datalen);
	Buffer.BlockCopy(video, 0, data, 0, datalen);

	// a zero-length frame marks end of relevance for the video stream
	if (datalen == 0)
	{
		videodone = true;
	}

	// stream index 0 = video; pts timebase is fpsden/fpsnum
	var f = new NutFrame(data, datalen, videopts, (ulong)avparams.fpsden, (ulong)avparams.fpsnum, 0, _bufferpool);
	_bufferpool.ReleaseBuffer(data);

	// video pts advances one frame at a time
	videopts++;
	videoqueue.Enqueue(f);

	// flush any queued audio frames whose timestamps are now at or before this video frame
	while (audioqueue.Count > 0 && f >= audioqueue.Peek())
	{
		audioqueue.Dequeue().WriteData(output);
	}
}