// Reads up to BUFFER_SIZE bytes from the HTTP request body into a pooled buffer.
// Returns the filled MediaBuffer plus a flag that is true when end-of-stream was
// reached before the buffer was completely filled.
private async Task <(MediaBuffer, bool)> ReadBufferAsync(HttpRequest request)
{
    IMemoryOwner <byte> memoryBuffer = null;
    try
    {
        bool endOfStream = false;
        int bufferLength = 0;
        memoryBuffer = _pool.Rent(BUFFER_SIZE);
        var memory = memoryBuffer.Memory;
        // Keep reading until the rented buffer is full or the stream ends;
        // ReadAsync may return fewer bytes than requested per call.
        while (memory.Length > 0)
        {
            var bytesRead = await request.Body.ReadAsync(memory);
            if (bytesRead == 0)
            {
                endOfStream = true;
                break;
            }
            bufferLength += bytesRead;
            memory = memory.Slice(bytesRead);
        }
        var mediaBuffer = new MediaBuffer(memoryBuffer, bufferLength);
        // Ownership of the pooled memory transfers to MediaBuffer here;
        // nulling the local prevents the finally block from releasing it.
        memoryBuffer = null; // avoid it being released.
        return (mediaBuffer, endOfStream);
    }
    finally
    {
        // Only runs the dispose when an exception occurred before the
        // ownership transfer above.
        memoryBuffer?.Dispose();
    }
}
// Draws a timestamp overlay onto the bitmap, then copies the bitmap's pixel
// data into the Media Foundation buffer.
// Fixes: Graphics and Font are now disposed deterministically (Font was never
// disposed at all), lock/unlock pairs are exception-safe, and the copy is
// clamped to the destination buffer's capacity to avoid a memory overrun.
public void UpdateSample(Bitmap bmp, MediaBuffer mb)
{
    using (var g = Graphics.FromImage(bmp))
    using (var font = new System.Drawing.Font(FontFamily.GenericMonospace, 120))
    {
        var text = DateTime.Now.ToString("HH:mm:ss.fff");
        var textSize = g.MeasureString(text, font);
        g.FillRectangle(Brushes.Black, 0f, 0f, textSize.Width, textSize.Height);
        g.DrawString(text, font, Brushes.Yellow, 0f, 0f);
    }

    var data = bmp.LockBits(
        new Rectangle(0, 0, bmp.Width, bmp.Height),
        System.Drawing.Imaging.ImageLockMode.ReadOnly,
        bmp.PixelFormat);
    try
    {
        var size = data.Stride * data.Height;
        var pBuffer = mb.Lock(out int cbMaxLen, out int cbCurLen);
        try
        {
            // Never copy more bytes than the MF buffer can hold.
            int copySize = Math.Min(size, cbMaxLen);
            Kernel32.CopyMemory(pBuffer, data.Scan0, (uint)copySize);
            //Marshal.Copy(testArgb, 0, pBuffer, testArgb.Length);
            mb.CurrentLength = copySize;
        }
        finally
        {
            mb.Unlock();
        }
    }
    finally
    {
        bmp.UnlockBits(data);
    }
    //bmp.Dispose();
}
// Presents the next queued video frame on the renderer once the playback
// clock has reached (or passed) the frame's start time. At most one frame
// is drawn per call.
void RefreshRenders()
{
    lock (_csAVQueue)
    {
        if (null != _videoRender)
        {
            if (_videoQueue.Count > 0)
            {
                MediaSample sample = _videoQueue.First.Value;
                // Only draw once the clock has caught up with this sample's timestamp.
                if (GetClock() >= sample.StartTime)
                {
                    if (_unmanaged)
                    {
                        _videoRender.SetFrame(sample.UnmanagedBuffer.DataPtr, _videoStreamInfo.FrameWidth, _videoStreamInfo.FrameHeight);
                        _videoRender.Draw();
                        // Unmanaged buffers must be released explicitly after use.
                        sample.UnmanagedBuffer.Release();
                        _videoQueue.RemoveFirst();
                    }
                    else
                    {
                        MediaBuffer buffer = sample.Buffer;
                        _videoRender.SetFrame(buffer.Start, _videoStreamInfo.FrameWidth, _videoStreamInfo.FrameHeight);
                        _videoRender.Draw();
                        _videoQueue.RemoveFirst();
                    }
                }
            }
        }
    }
}
// Reads raw uncompressed frames from opt.InputFile and pushes them into the
// transcoder one frame at a time; a short/empty read signals end of input and
// triggers a flush. Returns true when at least one frame was pushed and no
// push/flush failure occurred.
static bool EncodeH264Stream(Options opt, Transcoder transcoder)
{
    bool success = false;
    try
    {
        using (var file = System.IO.File.OpenRead(opt.InputFile))
        {
            // Size in bytes of one uncompressed frame for the given color format.
            int videoBufferSize = MediaSample.VideoBufferSizeInBytes(opt.Width, opt.Height, opt.Color.Id);
            if (videoBufferSize <= 0)
            {
                return (false);
            }
            MediaSample mediaSample = new MediaSample();
            MediaBuffer mediaBuffer = new MediaBuffer(videoBufferSize);
            mediaSample.Buffer = mediaBuffer;
            int readBytes;
            while (true)
            {
                // Reset the buffer window to full capacity before each read.
                mediaBuffer.SetData(0, videoBufferSize);
                readBytes = file.Read(mediaBuffer.Start, 0, mediaBuffer.DataSize);
                if (readBytes == videoBufferSize)
                {
                    mediaBuffer.SetData(0, readBytes);
                    if (!transcoder.Push(0, mediaSample))
                    {
                        PrintStatus("Transcoder push", transcoder.Error);
                        success = false;
                        break;
                    }
                    success = true;
                }
                else
                {
                    // Partial frame or EOF: drain the transcoder and stop.
                    if (!transcoder.Flush())
                    {
                        success = false;
                    }
                    PrintStatus("Transcoder flush", transcoder.Error);
                    break;
                }
            }
        }
    }
    catch (System.IO.DirectoryNotFoundException dnfe)
    {
        Console.WriteLine(dnfe);
        success = false;
    }
    return (success);
}
/// <summary>
/// WaveStream to resample using the DMO Resampler
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <param name="outputFormat">Desired Output Format</param>
public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
{
    this.inputProvider = inputProvider;
    // inputStream is null when the provider is not a WaveStream; only
    // position tracking depends on it.
    this.inputStream = inputProvider as WaveStream;
    this.outputFormat = outputFormat;
    this.resampler = new Resampler();
    // Use inputProvider.WaveFormat (not inputStream.WaveFormat): the latter
    // throws NullReferenceException when the provider is not a WaveStream.
    if (!resampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
    {
        throw new ArgumentException("Unsupported Input Stream format", "inputStream");
    }
    resampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
    if (!resampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
    {
        throw new ArgumentException("Unsupported Output Stream format", "outputStream");
    }
    resampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
    if (inputStream != null)
    {
        position = InputToOutputPosition(inputStream.Position);
    }
    // One second of audio per staging buffer.
    inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
    outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
}
// Shuts the writer down and releases every media resource it owns.
// Null-checks make the method safe to call more than once.
public void Close()
{
    logger.Debug("MfWriter::Close()");
    closed = true;

    sinkWriter?.Dispose();
    sinkWriter = null;

    bufTexture?.Dispose();
    bufTexture = null;

    videoSample?.Dispose();
    videoSample = null;

    mediaBuffer?.Dispose();
    mediaBuffer = null;
}
// Appends the access unit payload to "<outputDir>/au_NNNN.h264".
static void WriteAuFile(string outputDir, int au_index, MediaBuffer buffer)
{
    string filePath = string.Format("{0}/au_{1:0000}.h264", outputDir, au_index);
    using (var writer = new BinaryWriter(File.Open(filePath, FileMode.Append)))
    {
        writer.BaseStream.Write(buffer.Start, buffer.DataOffset, buffer.DataSize);
    }
}
// Configures a 44.1kHz -> 48kHz float-stereo resampler DMO and feeds it a
// single input buffer to verify ProcessInput succeeds.
public void ResamplerCanCallProcessInput()
{
    var resampler = new DmoResampler();
    var floatIn = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
    var floatOut = WaveFormat.CreateIeeeFloatWaveFormat(48000, 2);
    resampler.MediaObject.SetInputWaveFormat(0, floatIn);
    resampler.MediaObject.SetOutputWaveFormat(0, floatOut);
    using (var inputBuffer = new MediaBuffer(44100 * 2 * 4))
    {
        inputBuffer.Length = 8000;
        resampler.MediaObject.ProcessInput(0, inputBuffer, DmoInputDataBufferFlags.None, 0, 0);
    }
}
// Transcodes opt.InputFile into opt.OutputFile by pulling encoded samples
// from the transcoder and appending them to the output file. Returns true
// only when the pull loop terminates with a clean codec end-of-stream.
static bool Encode(Options opt)
{
    DeleteFile(opt.OutputFile);

    MediaSocket inSocket = new MediaSocket();
    inSocket.File = opt.InputFile;
    MediaSocket outSocket = CreateOutputSocket(opt);

    bool success = false;

    // create Transcoder
    using (Transcoder transcoder = new Transcoder())
    {
        transcoder.AllowDemoMode = true;
        transcoder.Inputs.Add(inSocket);
        transcoder.Outputs.Add(outSocket);

        if (!transcoder.Open())
        {
            PrintError("Transcoder open", transcoder.Error);
            return (false);
        }

        using (FileStream outputFile = File.OpenWrite(opt.OutputFile))
        {
            MediaSample outputSample = new MediaSample();
            int outputIndex = -1;

            // Pull until the transcoder reports an error or end-of-stream.
            while (transcoder.Pull(out outputIndex, outputSample))
            {
                MediaBuffer buffer = outputSample.Buffer;
                outputFile.Write(buffer.Start, buffer.DataOffset, buffer.DataSize);
            }

            ErrorInfo error = transcoder.Error;
            PrintError("Transcoder pull", error);

            // A codec-facility EOS is the normal, successful termination.
            if ((error.Code == (int)CodecError.EOS) && (error.Facility == ErrorFacility.Codec))
            {
                // ok
                success = true;
            }
        }

        transcoder.Close();
    }
    return (success);
}
// Releases the input media buffer, the DMO output buffer and the decoder.
public void Dispose()
{
    this.inputMediaBuffer?.Dispose();
    this.inputMediaBuffer = null;
    this.outputBuffer.Dispose();
    this.mp3Decoder?.Dispose();
    this.mp3Decoder = null;
}
// Releases the DMO buffers and drops the resampler reference.
// NOTE(review): dmoResampler is only nulled, never Dispose()d — presumably
// intentional (it may be owned elsewhere), but confirm this does not leak
// the underlying DMO.
protected override void Dispose(bool disposing)
{
    if (this.inputMediaBuffer != null)
    {
        this.inputMediaBuffer.Dispose();
        this.inputMediaBuffer = null;
    }
    this.outputBuffer.Dispose();
    if (this.dmoResampler != null)
    {
        // Reference dropped without disposal — see note above.
        this.dmoResampler = null;
    }
    base.Dispose(disposing);
}
/// <summary>
/// Dispose of this object and clean up resources
/// </summary>
public void Dispose()
{
    inputMediaBuffer?.Dispose();
    inputMediaBuffer = null;
    outputBuffer.Dispose();
    mp3Decoder?.Dispose();
    mp3Decoder = null;
}
// Creates the staging Texture2D and the Media Foundation sample (backed by a
// DXGI surface buffer) that frames are copied into before encoding.
private void SetupSampleBuffer(MfVideoArgs args)
{
    logger.Debug("SetupSampleBuffer(...)");

    int width = args.Width;
    int height = args.Height;

    //if (width % 2 != 0)
    //{// must be even...
    //    width++;
    //}

    //if (height % 2 != 0)
    //{
    //    height++;
    //}

    Format format = MfTool.GetDXGIFormatFromVideoFormatGuid(args.Format);

    if (format == Format.Unknown)
    {
        throw new NotSupportedException("Format not suppored " + args.Format);
    }

    var _descr = new Texture2DDescription
    {
        Format = format,
        Width = width,
        Height = height,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1 },
    };

    bufTexture = new Texture2D(device, _descr);

    MediaBuffer mediaBuffer = null;
    try
    {
        // Wrap the texture in a DXGI surface buffer and attach it to a fresh sample.
        MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);
        bufSample = MediaFactory.CreateSample();
        // Presumably AddBuffer takes its own COM reference (same pattern as the
        // Encode/Decode methods in this file), so the local is released below.
        bufSample.AddBuffer(mediaBuffer);
    }
    finally
    {
        mediaBuffer?.Dispose();
    }
}
/// <summary>
/// Dispose
/// </summary>
/// <param name="disposing">True if disposing (not from finalizer)</param>
protected override void Dispose(bool disposing)
{
    if (inputMediaBuffer != null)
    {
        inputMediaBuffer.Dispose();
        inputMediaBuffer = null;
    }
    outputBuffer.Dispose();
    if (resampler != null)
    {
        // resampler.Dispose() deliberately left out (original comment read
        // "//resampler.Dispose(); s" — trailing "s" appears to be a typo).
        // NOTE(review): confirm the resampler does not need disposing here.
        resampler = null;
    }
    base.Dispose(disposing);
}
/// <summary>
/// GenerateNext is called by the Generator base class when the next sample should be read.
/// </summary>
/// <param name="previous">Time of previous sample.</param>
/// <returns>Time for current sample.</returns>
protected override DateTime GenerateNext(DateTime previous)
{
    DateTime originatingTime = default(DateTime);
    int streamIndex = 0;
    SourceReaderFlags flags = SourceReaderFlags.None;
    long timestamp = 0;
    Sample sample = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);
    if (sample != null)
    {
        originatingTime = this.start + TimeSpan.FromTicks(timestamp);
        MediaBuffer buffer = sample.ConvertToContiguousBuffer();
        try
        {
            int currentByteCount = 0;
            int maxByteCount = 0;
            IntPtr data = buffer.Lock(out maxByteCount, out currentByteCount);
            try
            {
                if (streamIndex == this.imageStreamIndex)
                {
                    using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                    {
                        sharedImage.Resource.CopyFrom(data);
                        this.Image.Post(sharedImage, originatingTime);
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                    Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                    this.Audio.Post(audioBuffer, originatingTime);
                }
            }
            finally
            {
                // Unlock even when posting throws so the buffer is never left locked.
                buffer.Unlock();
            }
        }
        finally
        {
            buffer.Dispose();
            sample.Dispose();
        }
    }

    // Endofstream is one bit of a flags mask: test the bit rather than
    // comparing the whole value (ReadSample can combine several flags,
    // which made the previous `flags == Endofstream` check unreliable).
    if ((flags & SourceReaderFlags.Endofstream) != 0)
    {
        return (DateTime.MaxValue); // Used to indicated there is no more data
    }
    if (originatingTime <= previous)
    {
        return (previous + TimeSpan.FromTicks(1)); // To enforce strictly increasing times for the generator
    }
    return (originatingTime);
}
// Builds the Windows Media MP3 decoder DMO and negotiates input/output formats.
public DmoMp3FrameDecompressor(WaveFormat sourceFormat)
{
    this.mp3Decoder = new WindowsMediaMp3Decoder();
    var mediaObject = this.mp3Decoder.MediaObject;

    if (!mediaObject.SupportsInputWaveFormat(0, sourceFormat))
    {
        throw new ArgumentException("Unsupported input format");
    }
    mediaObject.SetInputWaveFormat(0, sourceFormat);

    // PCM output at the source sample rate and channel count.
    this.pcmFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.Channels);
    if (!mediaObject.SupportsOutputWaveFormat(0, this.pcmFormat))
    {
        throw new ArgumentException(string.Format("Unsupported output format {0}", this.pcmFormat));
    }
    mediaObject.SetOutputWaveFormat(0, this.pcmFormat);

    // One second of audio is plenty for a single MP3 frame at a time.
    this.inputMediaBuffer = new MediaBuffer(sourceFormat.AverageBytesPerSecond);
    this.outputBuffer = new DmoOutputDataBuffer(this.pcmFormat.AverageBytesPerSecond);
}
// Copies the locked pixel data of a contiguous MF buffer into a WPF Bgr32
// bitmap; the buffer is always unlocked, even when bitmap creation fails.
private static BitmapSource CreateBitmapSource(MediaBuffer contiguousBuffer, int frameWidth, int frameHeight)
{
    try
    {
        IntPtr pixelData = contiguousBuffer.Lock(out _, out var pixelDataSize);
        int stride = frameWidth * 4; // Bgr32: four bytes per pixel
        return BitmapSource.Create(
            frameWidth,
            frameHeight,
            96,
            96,
            PixelFormats.Bgr32,
            null,
            pixelData,
            pixelDataSize,
            stride);
    }
    finally
    {
        contiguousBuffer.Unlock();
    }
}
// Scans an H.264 Access Unit for 3-byte (00 00 01) and 4-byte (00 00 00 01)
// start codes and prints the header byte of each NAL unit it finds.
// NOTE(review): when a start code sits at the very end of the buffer, the
// Start[dataOffset + 3]/[+ 4] read is one past the data window — confirm the
// input always carries a complete NALU after each start code.
static void PrintNalus(MediaBuffer buffer)
{
    // This parsing code assumes that MediaBuffer contains
    // a single Access Unit of one or more complete NAL Units
    while (buffer.DataSize > 1)
    {
        int dataOffset = buffer.DataOffset;
        int dataSize = buffer.DataSize;

        // is this a NALU with a 3 byte start code prefix
        if (dataSize >= 3 &&
            0x00 == buffer.Start[dataOffset + 0] &&
            0x00 == buffer.Start[dataOffset + 1] &&
            0x01 == buffer.Start[dataOffset + 2])
        {
            PrintNaluHeader(buffer.Start[dataOffset + 3]);
            // advance in the buffer
            buffer.SetData(dataOffset + 3, dataSize - 3);
        }
        // OR is this a NALU with a 4 byte start code prefix
        else if (dataSize >= 4 &&
                 0x00 == buffer.Start[dataOffset + 0] &&
                 0x00 == buffer.Start[dataOffset + 1] &&
                 0x00 == buffer.Start[dataOffset + 2] &&
                 0x01 == buffer.Start[dataOffset + 3])
        {
            PrintNaluHeader(buffer.Start[dataOffset + 4]);
            // advance in the buffer
            buffer.SetData(dataOffset + 4, dataSize - 4);
        }
        else
        {
            // advance in the buffer one byte, re-aligning on the next start code
            buffer.SetData(dataOffset + 1, dataSize - 1);
        }

        // NOTE: Some NALUs may have a trailing zero byte. The `while`
        // condition `buffer->dataSize() > 1` will effectively
        // skip the trailing zero byte.
    }
}
/// <summary>
/// Initializes a new instance of the DMO MP3 Frame decompressor
/// </summary>
/// <param name="sourceFormat">MP3 format of the frames to decode</param>
public DmoMp3FrameDecompressor(WaveFormat sourceFormat)
{
    this.mp3Decoder = new WindowsMediaMp3Decoder();
    var decoderObject = mp3Decoder.MediaObject;

    if (!decoderObject.SupportsInputWaveFormat(0, sourceFormat))
    {
        throw new ArgumentException("Unsupported input format");
    }
    decoderObject.SetInputWaveFormat(0, sourceFormat);

    // 16-bit PCM, same sample rate and channel count as the source
    pcmFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.Channels);
    if (!decoderObject.SupportsOutputWaveFormat(0, pcmFormat))
    {
        throw new ArgumentException(String.Format("Unsupported output format {0}", pcmFormat));
    }
    decoderObject.SetOutputWaveFormat(0, pcmFormat);

    // A second of audio is more than enough for decompressing one frame at a time
    inputMediaBuffer = new MediaBuffer(sourceFormat.AverageBytesPerSecond);
    outputBuffer = new DmoOutputDataBuffer(pcmFormat.AverageBytesPerSecond);
}
// Wraps a single NAL unit in a Media Foundation sample (copying it into a new
// memory buffer), stamps the optional timestamp, and hands it to the decoder.
// Fix: mb.Unlock() now runs in a finally block so the buffer is not left
// locked when the copy throws — matching the ProcessData method's pattern.
private void Decode(byte[] nal, double time)
{
    try
    {
        var encodedSample = MediaFactory.CreateSample();
        try
        {
            using (MediaBuffer mb = MediaFactory.CreateMemoryBuffer(nal.Length))
            {
                try
                {
                    var dest = mb.Lock(out int cbMaxLength, out int cbCurrentLength);
                    //logger.Debug(sampleCount + " Marshal.Copy(...) " + nal.Length);
                    Marshal.Copy(nal, 0, dest, nal.Length);
                    mb.CurrentLength = nal.Length;
                }
                finally
                {
                    mb.Unlock();
                }
                encodedSample.AddBuffer(mb);

                if (!double.IsNaN(time))
                {
                    var sampleTime = MfTool.SecToMfTicks(time); //(long)(time * 10_000_000);
                    encodedSample.SampleTime = sampleTime;
                }
            }

            var res = decoder.ProcessSample(encodedSample, OnSampleDecoded);
            if (!res)
            {
                //...
            }
        }
        finally
        {
            encodedSample?.Dispose();
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
    }
}
// Locks the sample's contiguous buffer — preferring the 2D buffer interface
// when available — and exposes the data pointer, length and scan-line pitch.
// Fix: the scan0 pointer returned by Lock2D is pointer-sized; reading it with
// BitConverter.ToInt32 truncated the address in 64-bit processes.
public BufferHelper(Sample sample, int sampleHeight)
{
    buffer2D = null;
    buffer = sample.ConvertToContiguousBuffer();
    buffer2D = buffer.QueryInterfaceOrNull<SharpDX.MediaFoundation.Buffer2D>();
    int length = 0, pitch = 0;
    if (buffer2D != null)
    {
        byte[] arr = new byte[IntPtr.Size];
        buffer2D.Lock2D(arr, out pitch);
        length = buffer2D.ContiguousLength;
        // Reconstruct the full pointer: 8 bytes on x64, 4 bytes on x86.
        Data = IntPtr.Size == 8
            ? new IntPtr(BitConverter.ToInt64(arr, 0))
            : new IntPtr(BitConverter.ToInt32(arr, 0));
    }
    else
    {
        int curlen;
        Data = buffer.Lock(out length, out curlen);
        // No 2D layout available; derive the pitch from the total size.
        pitch = length / sampleHeight;
    }
    Length = length;
    Pitch = pitch;
}
// Exercises the resampler DMO end to end: feeds 100 ms of 44.1kHz float-stereo
// input, then drains the converted output twice, logging lengths and flags.
public void ResamplerCanCallProcessOutput()
{
    DmoResampler dmoResampler = new DmoResampler();
    WaveFormat inputFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
    WaveFormat outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(48000, 2);
    dmoResampler.MediaObject.SetInputWaveFormat(0, inputFormat);
    dmoResampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
    dmoResampler.MediaObject.AllocateStreamingResources();
    using (MediaBuffer inputBuffer = new MediaBuffer(inputFormat.AverageBytesPerSecond))
    {
        // 100 ms of input audio.
        inputBuffer.Length = inputFormat.AverageBytesPerSecond / 10;
        Debug.WriteLine(String.Format("Input Length {0}", inputBuffer.Length));
        dmoResampler.MediaObject.ProcessInput(0, inputBuffer, DmoInputDataBufferFlags.None, 0, 0);
        Debug.WriteLine(String.Format("Input Length {0}", inputBuffer.Length));
        Debug.WriteLine(String.Format("Input Lookahead {0}", dmoResampler.MediaObject.GetInputSizeInfo(0).MaxLookahead));
        //Debug.WriteLine(String.Format("Input Max Latency {0}", resampler.MediaObject.GetInputMaxLatency(0)));
        using (DmoOutputDataBuffer outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond))
        {
            // one buffer for each output stream
            dmoResampler.MediaObject.ProcessOutput(DmoProcessOutputFlags.None, 1, new DmoOutputDataBuffer[] { outputBuffer });
            Debug.WriteLine(String.Format("Converted length: {0}", outputBuffer.Length));
            Debug.WriteLine(String.Format("Converted flags: {0}", outputBuffer.StatusFlags));
            //Assert.AreEqual((int)(inputBuffer.Length * 48000.0 / inputFormat.SampleRate), outputBuffer.Length, "Converted buffer length");
        }
        // Second drain, with a differently-sized output buffer.
        using (DmoOutputDataBuffer outputBuffer = new DmoOutputDataBuffer(48000 * 2 * 4))
        {
            // one buffer for each output stream
            dmoResampler.MediaObject.ProcessOutput(DmoProcessOutputFlags.None, 1, new DmoOutputDataBuffer[] { outputBuffer });
            Debug.WriteLine(String.Format("Converted length: {0}", outputBuffer.Length));
            Debug.WriteLine(String.Format("Converted flags: {0}", outputBuffer.StatusFlags));
            //Assert.AreEqual((int)(inputBuffer.Length * 48000.0 / inputFormat.SampleRate), outputBuffer.Length, "Converted buffer length");
        }
    }
    dmoResampler.MediaObject.FreeStreamingResources();
}
// Creates the WM resampler COM object, negotiates the input/output media
// types, and allocates half-second staging buffers.
protected void InitCom(WaveFormat inputformat, WaveFormat outputformat)
{
    lock (_lockObj)
    {
        var source = BaseStream;
        _resampler = new WMResampler();

        MediaObject resamplerObject = _resampler.MediaObject;
        if (!resamplerObject.SupportsInputFormat(0, inputformat))
        {
            throw new NotSupportedException("Inputformat not supported.");
        }
        resamplerObject.SetInputType(0, inputformat);

        if (!resamplerObject.SupportsOutputFormat(0, outputformat))
        {
            throw new NotSupportedException("Outputformat not supported.");
        }
        resamplerObject.SetOutputType(0, outputformat);

        // Half a second of audio per staging buffer.
        _inputBuffer = new MediaBuffer(inputformat.BytesPerSecond / 2);
        _outputBuffer = new DmoOutputDataBuffer(outputformat.BytesPerSecond / 2);
    }
}
// Computes the output/input byte-rate ratio, creates the WM resampler and
// negotiates both media types under the lock.
internal void Initialize(WaveFormat inputformat, WaveFormat outputformat)
{
    Ratio = (double)outputformat.BytesPerSecond / inputformat.BytesPerSecond;
    lock (LockObj)
    {
        Resampler = new WMResampler();

        MediaObject resamplerObject = Resampler.MediaObject;
        if (!resamplerObject.SupportsInputFormat(0, inputformat))
        {
            throw new NotSupportedException("Inputformat not supported.");
        }
        resamplerObject.SetInputType(0, inputformat);

        if (!resamplerObject.SupportsOutputFormat(0, outputformat))
        {
            throw new NotSupportedException("Outputformat not supported.");
        }
        resamplerObject.SetOutputType(0, outputformat);

        // Half a second of audio per staging buffer.
        InputBuffer = new MediaBuffer(inputformat.BytesPerSecond / 2);
        OutputBuffer = new DmoOutputDataBuffer(outputformat.BytesPerSecond / 2);
    }
}
// Copies the shared input texture into the staging texture, wraps the staging
// texture in a Media Foundation sample, and runs it through the optional
// processor before handing it to the encoder.
public void Encode(Texture2D texture)
{
    // var device = encoder?.device;
    // NOTE(review): `device` appears to come from outside this method — confirm
    // where it is assigned relative to the commented-out line above.
    if (device != null)
    {
        // Open the shared handle on our device and copy the frame into bufTexture.
        using (var sharedRes = texture.QueryInterface <SharpDX.DXGI.Resource>())
        {
            using (var sharedTexture = device.OpenSharedResource <Texture2D>(sharedRes.SharedHandle))
            {
                device.ImmediateContext.CopyResource(sharedTexture, bufTexture);
            }
        }
    }
    Sample inputSample = null;
    try
    {
        MediaBuffer mediaBuffer = null;
        try
        {
            MediaFactory.CreateDXGISurfaceBuffer(IID.D3D11Texture2D, bufTexture, 0, false, out mediaBuffer);
            inputSample = MediaFactory.CreateSample();
            // Presumably AddBuffer holds its own COM reference, so the local
            // buffer is released in the finally below (same pattern as Decode).
            inputSample.AddBuffer(mediaBuffer);
            inputSample.SampleTime = 0;
            inputSample.SampleDuration = 0;
        }
        finally
        {
            mediaBuffer?.Dispose();
        }
        if (processor != null)
        {
            // Run the frame through the processor first, then encode the result.
            Sample processedSample = null;
            try
            {
                bool result = processor.ProcessSample(inputSample, out processedSample);
                if (result)
                {
                    encoder.ProcessSample(processedSample);
                    //EncodeSample(processedSample);
                }
            }
            finally
            {
                processedSample?.Dispose();
            }
        }
        else
        {
            encoder.ProcessSample(inputSample);
            //EncodeSample(inputSample);
        }
    }
    finally
    {
        inputSample?.Dispose();
    }
}
/// <summary>
/// Reads a resampled sequence of bytes from the <see cref="DmoResampler" /> and advances the position within the
/// stream by the
/// number of bytes read.
/// </summary>
/// <param name="buffer">
/// An array of bytes. When this method returns, the <paramref name="buffer" /> contains the specified
/// byte array with the values between <paramref name="offset" /> and (<paramref name="offset" /> +
/// <paramref name="count" /> - 1) replaced by the bytes read from the current source.
/// </param>
/// <param name="offset">
/// The zero-based byte offset in the <paramref name="buffer" /> at which to begin storing the data
/// read from the current stream.
/// </param>
/// <param name="count">The maximum number of bytes to read from the current source.</param>
/// <returns>The total number of bytes read into the buffer.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
    lock (LockObj)
    {
        int read = 0;
        while (read < count)
        {
            MediaObject mediaObject = Resampler.MediaObject;
            if (mediaObject.IsReadyForInput(0))
            {
                // Estimate how many source bytes produce the remaining output bytes.
                var bytesToRead = (int)OutputToInput(count - read);
                _readBuffer = _readBuffer.CheckBuffer(bytesToRead);
                int bytesRead = base.Read(_readBuffer, 0, bytesToRead);
                if (bytesRead <= 0)
                {
                    break;
                }
                if (_disposed)
                {
                    break;
                }
                // Grow the DMO input buffer on demand.
                if (InputBuffer.MaxLength < bytesRead)
                {
                    InputBuffer.Dispose();
                    InputBuffer = new MediaBuffer(bytesRead);
                }
                InputBuffer.Write(_readBuffer, 0, bytesRead);
                mediaObject.ProcessInput(0, InputBuffer);
                OutputBuffer.Reset();
                do
                {
                    // Grow the DMO output buffer on demand as well.
                    var outputBuffer = (MediaBuffer)OutputBuffer.Buffer;
                    if (outputBuffer.MaxLength < count)
                    {
                        outputBuffer.Dispose();
                        OutputBuffer.Buffer = new MediaBuffer(count);
                    }
                    OutputBuffer.Buffer.SetLength(0);
                    mediaObject.ProcessOutput(ProcessOutputFlags.None, new[] { OutputBuffer }, 1);
                    if (OutputBuffer.Length <= 0)
                    {
                        Debug.WriteLine("DmoResampler::Read: No data in output buffer.");
                        break;
                    }
                    OutputBuffer.Read(buffer, offset + read);
                    read += OutputBuffer.Length;
                } while (/*_outputBuffer.DataAvailable*/ false); //todo: Implement DataAvailable
            }
            else
            {
                // NOTE(review): this branch neither breaks nor consumes input —
                // if the DMO stays not-ready the while loop will spin. Confirm.
                Debug.WriteLine("Case of not ready for input is not implemented yet.");
                //todo: .
            }
        }
        return (read);
    }
}
// Fills `buffer` with as much queued audio as is available, consuming samples
// from the front of the queue. Partially consumed buffers keep their remaining
// data by advancing the data offset. `length` receives the bytes written;
// returns false only while cancellation is pending.
public bool NextAudioBuffer(byte[] buffer, ref int length)
{
    if (_cancellationPending)
    {
        length = 0;
        return (false);
    }

    lock (_csAVQueue)
    {
        int bytesWritten = 0;
        while ((_audioQueue.Count > 0) && (bytesWritten < buffer.Length))
        {
            MediaSample mediaSample = _audioQueue.First.Value;
            if (_unmanaged)
            {
                UnmanagedMediaBuffer mediaBuffer = mediaSample.UnmanagedBuffer;
                int chunk = Math.Min(mediaBuffer.DataSize, buffer.Length - bytesWritten);
                Marshal.Copy(mediaBuffer.DataPtr, buffer, bytesWritten, chunk);
                bytesWritten += chunk;
                mediaBuffer.Remove(chunk);
                // Fully consumed: release the native buffer and drop the sample.
                if (mediaBuffer.DataSize == 0)
                {
                    mediaBuffer.Release();
                    _audioQueue.RemoveFirst();
                }
            }
            else
            {
                MediaBuffer mediaBuffer = mediaSample.Buffer;
                int chunk = Math.Min(mediaBuffer.DataSize, buffer.Length - bytesWritten);
                Array.Copy(mediaBuffer.Start, mediaBuffer.DataOffset, buffer, bytesWritten, chunk);
                bytesWritten += chunk;
                {
                    // Advance the data window past the consumed chunk; the
                    // offset resets to 0 once the buffer is empty.
                    int newDataOffset = mediaBuffer.DataOffset + chunk;
                    int newDataSize = mediaBuffer.DataSize - chunk;
                    if (0 == newDataSize)
                    {
                        newDataOffset = 0;
                    }
                    mediaBuffer.SetData(newDataOffset, newDataSize);
                }
                if (mediaBuffer.DataSize == 0)
                {
                    _audioQueue.RemoveFirst();
                }
            }
        }
        length = bytesWritten;
    }
    return (true);
}
// Wraps an encoded payload in a Media Foundation sample, stamps it with a
// presentation time (adjusting against the presentation clock on the first
// sample) and hands it to the decoder.
public void ProcessData(byte[] data, double time)
{
    try
    {
        var encodedSample = MediaFactory.CreateSample();
        try
        {
            using (MediaBuffer mb = MediaFactory.CreateMemoryBuffer(data.Length))
            {
                try
                {
                    var dest = mb.Lock(out int cbMaxLength, out int cbCurrentLength);
                    Marshal.Copy(data, 0, dest, data.Length);
                    mb.CurrentLength = data.Length;
                }
                finally
                {
                    mb.Unlock();
                }
                encodedSample.AddBuffer(mb);
            }

            if (!double.IsNaN(time) && time > 0)
            {
                // The renderer may glitch if the timestamps are wrong!!
                // TODO: validate timestamps before and after the decoder...
                var sampleTime = MfTool.SecToMfTicks(time);
                // Capture the clock-to-timestamp offset from the first sample;
                // presentationAdjust starts out as long.MaxValue.
                if (presentationAdjust == long.MaxValue)
                {
                    var presentaionTime = presentationClock.Time;
                    presentationAdjust = presentaionTime - sampleTime;
                }
                encodedSample.SampleTime = sampleTime; // + presentationAdjust;
                encodedSample.SampleDuration = 0; // MfTool.SecToMfTicks(0.033);
                //logger.Debug(">>>>>>>>>>> " + sampleTime);
            }
            else
            {
                encodedSample.SampleTime = 0;
                encodedSample.SampleDuration = 0;
            }

            //logger.Debug("ProcessData " + time);
            var res = decoder.ProcessSample(encodedSample, OnSampleDecoded, OnMediaTypeChanged);
            if (!res)
            {
                logger.Debug("decoder.ProcessSample() " + res);
                //...
            }
        }
        finally
        {
            encodedSample?.Dispose();
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
    }
}
// Decodes a single JPEG file into outputFile: probes the frame size, pushes
// the whole JPEG as one media sample, then flushes and closes the transcoder.
static bool DecodeJpeg(string inputFile, string outputFile)
{
    int frameWidth, frameHeight;
    if (!GetFrameSize(inputFile, out frameWidth, out frameHeight))
    {
        return (false);
    }
    Console.WriteLine("Input frame size: {0}x{1}", frameWidth, frameHeight);

    // read input bytes
    byte[] inputData;
    try
    {
        inputData = System.IO.File.ReadAllBytes(inputFile);
    }
    catch (System.Exception e)
    {
        Console.WriteLine(e.ToString());
        return (false);
    }

    DeleteFile(outputFile);

    MediaSocket inSocket = createInputSocket(frameWidth, frameHeight);
    MediaSocket outSocket = createOutputSocket(outputFile, frameWidth, frameHeight);

    // create Transcoder
    using (Transcoder transcoder = new Transcoder())
    {
        transcoder.AllowDemoMode = true;
        transcoder.Inputs.Add(inSocket);
        transcoder.Outputs.Add(outSocket);

        bool res = transcoder.Open();
        PrintError("Open Transcoder", transcoder.Error);
        if (!res)
        {
            return (false);
        }

        // Presumably Attach wraps the managed array in place (no copy) —
        // confirm against the MediaBuffer.Attach documentation.
        MediaBuffer buffer = new MediaBuffer();
        buffer.Attach(inputData, true);

        MediaSample sample = new MediaSample();
        sample.Buffer = buffer;

        res = transcoder.Push(0, sample);
        PrintError("Push Transcoder", transcoder.Error);
        if (!res)
        {
            return (false);
        }

        transcoder.Flush();
        transcoder.Close();
    }
    return (true);
}
// Builds a copy of this action. Media payloads are re-wrapped in fresh
// MemoryBlobs and audio samples are copied into a new list; unknown action
// types yield null.
public CoreAction Clone()
{
    switch (Type)
    {
        case Types.CoreVersion:
            return CoreAction.CoreVersion(Ticks, Version);
        case Types.KeyPress:
            return CoreAction.KeyPress(Ticks, KeyCode, KeyDown);
        case Types.LoadDisc:
        {
            MemoryBlob blob = (MediaBuffer != null) ? new MemoryBlob(MediaBuffer.GetBytes()) : null;
            return CoreAction.LoadDisc(Ticks, Drive, blob);
        }
        case Types.LoadTape:
        {
            MemoryBlob blob = (MediaBuffer != null) ? new MemoryBlob(MediaBuffer.GetBytes()) : null;
            return CoreAction.LoadTape(Ticks, blob);
        }
        case Types.Reset:
            return CoreAction.Reset(Ticks);
        case Types.RunUntil:
        {
            List<UInt16> samples = (AudioSamples != null) ? new List<UInt16>(AudioSamples) : null;
            return CoreAction.RunUntil(Ticks, StopTicks, samples);
        }
        case Types.LoadCore:
            return CoreAction.LoadCore(Ticks, CoreState);
        case Types.CreateSnapshot:
            return CoreAction.CreateSnapshot(Ticks, SnapshotId);
        case Types.RevertToSnapshot:
            return CoreAction.RevertToSnapshot(Ticks, SnapshotId);
        default:
            return null;
    }
}
// Unlocks and releases the buffer and sample left over from a previous read,
// leaving both fields null.
private void CleanupAndDispose()
{
    if (currentBuffer != null)
    {
        currentBuffer.Unlock();
        currentBuffer.Dispose();
        currentBuffer = null;
    }

    currentSample?.Dispose();
    currentSample = null;
}
/// <summary>
/// Gets the decoded PCM samples. See remarks.
/// </summary>
/// <param name="startingPositionInSeconds">The starting position in seconds.</param>
/// <returns>An enumerator of pointer to PCM decoded data with the same format as returned by <see cref="WaveFormat"/>.</returns>
/// <remarks>
/// This method is only working as a single enumerator at a time.
/// The <see cref="SetSourceStream(System.IO.Stream)"/> must be set before calling <see cref="GetSamples()"/>
/// </remarks>
public IEnumerable<DataPointer> GetSamples(TimeSpan startingPositionInSeconds)
{
    // A new reader is setup, so initialize it.
    lock (sourceReaderLock)
    {
        // If the reader was changed
        if (nextSourceReader != null)
        {
            if (sourceReader != null)
                sourceReader.Dispose();

            sourceReader = nextSourceReader;
            nextSourceReader = null;
        }
    }

    // Make sure that any prior call
    CleanupAndDispose();

    CheckIfDisposed();

    // Set the position (seconds -> 100ns units).
    sourceReader.SetCurrentPosition((long)(startingPositionInSeconds.TotalSeconds * 1e7));

    while (true)
    {
        int streamIndex;
        SourceReaderFlags flags;
        long time;

        CheckIfDisposed();

        using (currentSample = sourceReader.ReadSample(SourceReaderIndex.FirstAudioStream, SourceReaderControlFlags.None, out streamIndex, out flags, out time))
        {
            if ((flags & SourceReaderFlags.Endofstream) != 0)
                break;

            CheckIfDisposed();

            using (currentBuffer = currentSample.ConvertToContiguousBuffer())
            {
                int bufferMaxLength;
                int bufferCurrentLength;

                CheckIfDisposed();

                var ptr = currentBuffer.Lock(out bufferMaxLength, out bufferCurrentLength);

                // Hand the locked region to the caller; the buffer stays locked
                // until control returns to this iterator.
                yield return new DataPointer(ptr, bufferCurrentLength);

                // Warning, because the yield could never return here, currentBuffer and currentSample should be disposed when disposing this object or when
                // calling it again on the GetSamples method.

                // In case a Dispose occured while decoding
                if (currentBuffer == null)
                    break;

                currentBuffer.Unlock();
            }
        }
    }

    // They have been disposed, so we can just clear them
    currentBuffer = null;
    currentSample = null;
}