Example #1
        void RefreshRenders()
        {
            lock (_csAVQueue)
            {
                if (null != _videoRender)
                {
                    if (_videoQueue.Count > 0)
                    {
                        MediaSample sample = _videoQueue.First.Value;

                        if (GetClock() >= sample.StartTime)
                        {
                            if (_unmanaged)
                            {
                                _videoRender.SetFrame(sample.UnmanagedBuffer.DataPtr, _videoStreamInfo.FrameWidth, _videoStreamInfo.FrameHeight);
                                _videoRender.Draw();
                                sample.UnmanagedBuffer.Release();
                                _videoQueue.RemoveFirst();
                            }
                            else
                            {
                                MediaBuffer buffer = sample.Buffer;
                                _videoRender.SetFrame(buffer.Start, _videoStreamInfo.FrameWidth, _videoStreamInfo.FrameHeight);
                                _videoRender.Draw();
                                _videoQueue.RemoveFirst();
                            }
                        }
                    }
                }
            }
        }
Example #2
        public MediaSample PullSample(AudioFormat format, TimeSpan tsDuration)
        {
            if (AudioCodec == null)
            {
                return(null);
            }

            int nSamples     = AudioCodec.AudioFormat.CalculateNumberOfSamplesForDuration(tsDuration);
            int nBytesNeeded = nSamples * AudioCodec.AudioFormat.BytesPerSample;

            // If the queue holds more than we need, drop the oldest data and keep roughly two requests' worth
            if (ReceiveAudioQueue.Size > nBytesNeeded * AudioCodec.AudioFormat.BytesPerSample)
            {
                ReceiveAudioQueue.GetNSamples(ReceiveAudioQueue.Size - nBytesNeeded * 2);
            }

            if (ReceiveAudioQueue.Size >= nBytesNeeded)
            {
                byte [] bAudioData = ReceiveAudioQueue.GetNSamples(nBytesNeeded);

                // Incoming RTP audio is in the codec's native format; we may need to resample it for the host (our Windows muxer always expects 16x16, so u-law must be resampled)
                MediaSample currentsample = new MediaSample(bAudioData, AudioCodec.AudioFormat);

                MediaSample newsample = RecvResampler.Resample(currentsample, format);

                return(newsample);
            }

            return(null);
        }
Example #3
        protected override void PushNextPacket()
        {
            if (AudioCodec == null)
            {
                return;
            }

            RTPPacket packet = IncomingRTPPacketBuffer.GetPacket();

            if (packet == null)
            {
                return;
            }

            byte[] bNewAudioData = AudioCodec.DecodeToBytes(packet);

            if (bNewAudioData != null)
            {
                ReceiveAudioQueue.AppendData(bNewAudioData);
                if (ReceiveAudioQueue.Size > m_nPacketBytes * MaxAudioPacketsQueue)                // someone isn't taking our packets (either directly or through IAudioSource), so let's not let the queue grow too big
                {
                    ReceiveAudioQueue.GetNSamples(ReceiveAudioQueue.Size - m_nPacketBytes * MaxAudioPacketsQueue);
                }

                if (RenderSink != null)
                {
                    MediaSample samp = new MediaSample(bNewAudioData, AudioCodec.AudioFormat);
                    RenderSink.PushSample(samp, this);
                }
            }
        }
Example #4
        static bool EncodeH264Stream(Options opt, Transcoder transcoder)
        {
            bool success = false;

            try
            {
                using (var file = System.IO.File.OpenRead(opt.InputFile))
                {
                    int videoBufferSize = MediaSample.VideoBufferSizeInBytes(opt.Width, opt.Height, opt.Color.Id);

                    if (videoBufferSize <= 0)
                    {
                        return(false);
                    }

                    MediaSample mediaSample = new MediaSample();
                    MediaBuffer mediaBuffer = new MediaBuffer(videoBufferSize);
                    mediaSample.Buffer = mediaBuffer;

                    int readBytes;

                    while (true)
                    {
                        mediaBuffer.SetData(0, videoBufferSize);
                        readBytes = file.Read(mediaBuffer.Start, 0, mediaBuffer.DataSize);
                        if (readBytes == videoBufferSize)
                        {
                            mediaBuffer.SetData(0, readBytes);

                            if (!transcoder.Push(0, mediaSample))
                            {
                                PrintStatus("Transcoder push", transcoder.Error);
                                success = false;
                                break;
                            }

                            success = true;
                        }
                        else
                        {
                            if (!transcoder.Flush())
                            {
                                success = false;
                            }

                            PrintStatus("Transcoder flush", transcoder.Error);

                            break;
                        }
                    }
                }
            }
            catch (System.IO.DirectoryNotFoundException dnfe)
            {
                Console.WriteLine(dnfe);
                success = false;
            }

            return(success);
        }
Example #5
        void timer_Tick(object sender, EventArgs e)
        {
            //var dbgtime = DateTime.Now;

            var frame = CaptureFrame();

            //var dbgtime2 = DateTime.Now;
            //Debug.WriteLine("Capture Time: {0}", (dbgtime2 - dbgtime).TotalMilliseconds);

            // Save to file
            //string filename = string.Format("Cube_{0}.png", encodedFrames);
            //SaveFrame(filename, frame);

            var sample = new MediaSample();

            if (encodedFrames > 0)
            {
                runningTime      = DateTime.Now - firstSampleTime;
                sample.StartTime = runningTime.TotalSeconds;
                sample.EndTime   = -1;
            }
            else
            {
                sample.StartTime = 0;
                firstSampleTime  = DateTime.Now;
            }

            int stride = frame.PixelWidth * 4;

            sample.Buffer = new MediaBuffer(new byte[stride * frame.PixelHeight]);
            frame.CopyPixels(sample.Buffer.Start, stride, 0);

            cRunningTime.Text = String.Format("{0:d2}:{1:d2}.{2:d3}", runningTime.Minutes, runningTime.Seconds, runningTime.Milliseconds);

            int currentQueueCount = 0;

            lock (sync)
            {
                currentQueueCount = samples.Count;

                if (currentQueueCount < 10)
                {
                    samples.Enqueue(sample);
                    ++encodedFrames;
                }
                else
                {
                    ++droppedFrames;
                }
            }

            cEncodedFrames.Text = encodedFrames.ToString();
            cDroppedFrames.Text = droppedFrames.ToString();
            if (runningTime.TotalMilliseconds > 0)
            {
                cEffectiveFramerate.Text = string.Format("{0:f2}", (double)encodedFrames / runningTime.TotalSeconds);
            }
            cQueue.Text = (currentQueueCount < 10 ? currentQueueCount.ToString() : "MAX");
        }
Example #6
 internal static org_webRtc_MediaSample_t org_webRtc_MediaSample_ToC(MediaSample value)
 {
     if (null == value)
     {
         return(System.IntPtr.Zero);
     }
     return(Wrapper.Org_WebRtc.OverrideApi.org_webRtc_MediaSample_wrapperClone(value.native_));
 }
Example #7
        static bool Transcode(Options opt)
        {
            using (Transcoder transcoder = new Transcoder())
            {
                transcoder.AllowDemoMode = true;

                MediaSocket inSocket  = CreateInputSocket(opt);
                MediaSocket outSocket = CreateOutputSocket(opt);

                transcoder.Inputs.Add(inSocket);
                transcoder.Outputs.Add(outSocket);

                DeleteFile(opt.OutputFile);

                if (!transcoder.Open())
                {
                    PrintStatus("Transcoder open", transcoder.Error);
                    return(false);
                }

                for (int fileCount = 0; ; fileCount++)
                {
                    string pattern  = "au_{0:0000}.h264";
                    string fileName = string.Format(pattern, fileCount);
                    string filePath = Path.Combine(opt.InputDir, fileName);

                    if (!File.Exists(filePath))
                    {
                        fileName = string.Format(pattern, fileCount - 1);
                        Console.WriteLine("Decoded " + fileCount + " files." + "(last decoded file: " + fileName + ")");
                        break;
                    }

                    var sample = new MediaSample();
                    sample.Buffer = new MediaBuffer(File.ReadAllBytes(filePath));

                    if (!transcoder.Push(0, sample))
                    {
                        PrintStatus("Transcoder push", transcoder.Error);
                        return(false);
                    }
                }

                if (!transcoder.Flush())
                {
                    PrintStatus("Transcoder flush", transcoder.Error);
                    return(false);
                }

                Console.WriteLine("Output file: " + opt.OutputFile);

                transcoder.Close();

                return(true);
            }
        }
Example #8
        static bool Encode(Options opt)
        {
            DeleteFile(opt.OutputFile);

            MediaSocket inSocket = new MediaSocket();

            inSocket.File = opt.InputFile;

            MediaSocket outSocket = CreateOutputSocket(opt);

            bool success = false;

            // create Transcoder
            using (Transcoder transcoder = new Transcoder())
            {
                transcoder.AllowDemoMode = true;
                transcoder.Inputs.Add(inSocket);
                transcoder.Outputs.Add(outSocket);

                if (!transcoder.Open())
                {
                    PrintError("Transcoder open", transcoder.Error);
                    return(false);
                }

                using (FileStream outputFile = File.OpenWrite(opt.OutputFile))
                {
                    MediaSample outputSample = new MediaSample();
                    int         outputIndex  = -1;

                    while (transcoder.Pull(out outputIndex, outputSample))
                    {
                        MediaBuffer buffer = outputSample.Buffer;
                        outputFile.Write(buffer.Start, buffer.DataOffset, buffer.DataSize);
                    }

                    ErrorInfo error = transcoder.Error;
                    PrintError("Transcoder pull", error);

                    if ((error.Code == (int)CodecError.EOS) &&
                        (error.Facility == ErrorFacility.Codec))
                    {
                        // ok
                        success = true;
                    }
                }

                transcoder.Close();
            }

            return(success);
        }
Example #9
        static bool transcodeAUs(Options opt)
        {
            using (var transcoder = new Transcoder())
            {
                transcoder.AllowDemoMode = true;

                string imgFile = BuildImgPath(opt, 0);

                if (!setTranscode(transcoder, imgFile, opt))
                {
                    return(false);
                }

                for (int i = 0; ; i++)
                {
                    if (i > 0)
                    {
                        imgFile = BuildImgPath(opt, i);
                    }

                    if (!File.Exists(imgFile))
                    {
                        break;
                    }

                    var sample = new MediaSample();
                    sample.Buffer = new MediaBuffer(File.ReadAllBytes(imgFile));

                    if (!transcoder.Push(0, sample))
                    {
                        PrintError("Transcoder push", transcoder.Error);
                        return(false);
                    }
                }

                if (!transcoder.Flush())
                {
                    PrintError("Transcoder flush", transcoder.Error);
                    return(false);
                }

                Console.WriteLine("Output file: " + opt.OutputFile);

                transcoder.Close();
            }

            return(true);
        }
Example #10
        public static bool IsSampleEmpty(MediaSample sample)
        {
            if (sample != null)
            {
                if (sample.Buffer != null && sample.Buffer.DataSize > 0)
                {
                    return(false);
                }

                if (sample.UnmanagedBuffer != null && sample.UnmanagedBuffer.DataSize > 0)
                {
                    return(false);
                }
            }
            return(true);
        }
Example #11
        /// <summary>
        /// Push a sample to this filter's outgoing queue.
        /// </summary>
        /// <param name="sample">Sample to resample into the codec's format and append to the send queue.</param>
        /// <param name="objSource">The source object pushing the sample.</param>
        public void PushSample(MediaSample sample, object objSource)
        {
            if (AudioCodec == null)
            {
                return;
            }

            MediaSample newsample = SendResampler.Resample(sample, AudioCodec.AudioFormat);

            SendAudioQueue.AppendData(newsample.Data);

            if (SendAudioQueue.Size > MaxSendBufferSize)
            {
                SendAudioQueue.GetNSamples(SendAudioQueue.Size - MaxSendBufferSize);
            }
        }
Example #12
        static bool parse_h264_stream(Options opt)
        {
            DeleteDirectory(opt.OutputDir);

            MediaSocket inSocket = new MediaSocket();

            inSocket.File       = opt.InputFile;
            inSocket.StreamType = StreamType.H264;

            MediaSocket outSocket = CreateOutputSocket(opt);

            using (Transcoder transcoder = new Transcoder())
            {
                transcoder.Inputs.Add(inSocket);
                transcoder.Outputs.Add(outSocket);

                var res = transcoder.Open();
                PrintError("transcoder open", transcoder.Error);

                if (!res)
                {
                    return(false);
                }

                int         inputIndex = 0;
                int         au_index   = 0;
                MediaSample accessUnit = new MediaSample();

                if (!MakeDir(opt.OutputDir))
                {
                    Console.WriteLine("cannot create output directory: " + opt.OutputDir);
                    return(false);
                }

                while (transcoder.Pull(out inputIndex, accessUnit))
                {
                    // Each call to Transcoder::pull returns one Access Unit.
                    // The Access Unit may contain one or more NAL units.
                    var au_buffer = accessUnit.Buffer;
                    Console.WriteLine("AU #" + au_index + ", " + au_buffer.DataSize + " bytes");
                    WriteAuFile(opt.OutputDir, au_index, au_buffer);
                    PrintNalus(au_buffer);
                    ++au_index;
                }
            }

            return(true);
        }
Example #13
        bool ProcessSample(IntPtr pBuffer, int dataLen, double sampleTime)
        {
            // not initialized
            if (null == transcoder)
            {
                return(true);
            }

            if (sampleTime < 0)
            {
                sampleTime = 0;
            }

            if ((null == sampleBuffer) || (sampleBuffer.Length != dataLen))
            {
                sampleBuffer = new byte[dataLen];
            }

            Marshal.Copy(pBuffer, sampleBuffer, 0, dataLen);

            MediaSample inputSample = new MediaSample();

            inputSample.Buffer    = new MediaBuffer(sampleBuffer);
            inputSample.StartTime = sampleTime;

            System.Diagnostics.Debug.WriteLine(string.Format("transcoder.Push(stream: {0}, sampleTime: {1}, sampleData: {2})", transcoderInputIndex, inputSample.StartTime, inputSample.Buffer.DataSize));

            // transcoder.Push() is not thread-safe.
            // lock (transcoder) { } ensures that only one thread calls transcoder.Push() at a time
            lock (transcoder)
            {
                if (!transcoder.Push(transcoderInputIndex, inputSample))
                {
                    transcoderError = transcoder.Error.Clone() as ErrorInfo;

                    System.Threading.ThreadPool.QueueUserWorkItem(delegate
                    {
                        // call mediaControl.Stop() from a new thread otherwise it will deadlock
                        int hr = mediaControl.Stop();
                    }, null);

                    return(false);
                }
            }

            return(true);
        }
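Example #14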
        public override bool Push(int inputIndex, MediaSample inputSample)
        {
            if (inputSample == null)
            {
                return(EndOfStream(inputIndex));
            }

            if (inputIndex == _audioInputIndex)
            {
                return(EncodeAndMux(_audioEncoder, inputSample, _audioMuxIndex));
            }
            else if (inputIndex == _videoInputIndex)
            {
                return(EncodeAndMux(_videoEncoder, inputSample, _videoMuxIndex));
            }

            return(false);
        }
Example #15
        void DecoderThread()
        {
            while (!_decoderEOS && !_cancellationPending)
            {
                if (IsAVQueueFull())
                {
                    System.Threading.Thread.Sleep(1);
                    continue;
                }

                int index = -1;

                MediaSample mediaSample = new MediaSample();
                bool        res;

                if (_unmanaged)
                {
                    res = _transcoder.PullUnmanaged(out index, mediaSample);
                }
                else
                {
                    res = _transcoder.Pull(out index, mediaSample);
                }

                if (res)
                {
                    lock (_csAVQueue)
                    {
                        if (index == _videoStreamIndex)
                        {
                            _videoQueue.AddLast(mediaSample);
                        }
                        else if (index == _audioStreamIndex)
                        {
                            _audioQueue.AddLast(mediaSample);
                        }
                    }
                }
                else
                {
                    _decoderEOS = true;
                }
            }
        }
Example #16
        private void TranscodeWorker()
        {
            while (true)
            {
                MediaSample sample = null;

                lock (sync)
                {
                    if (samples.Count > 0)
                    {
                        sample = samples.Dequeue();
                    }
                }

                if (sample != null)
                {
                    if (!transcoder.Push(0, sample))
                    {
                        var error = transcoder.Error.Clone() as ErrorInfo;
                        LogError("Transcoder.Push", error);
                        transcoder.Close();
                        transcoder.Dispose();
                        Dispatcher.BeginInvoke(new Action <ErrorInfo>(SingalEncodeError), error);
                        return;
                    }
                }
                else
                {
                    if (stopEncodeThread)
                    {
                        transcoder.Flush();
                        transcoder.Close();
                        transcoder.Dispose();
                        return;
                    }
                    else
                    {
                        Thread.Sleep(1);
                    }
                }
            }
        }
Example #17
 public bool Push(int inputIndex, MediaSample inputSample)
 {
     return(t.Push(inputIndex, inputSample));
 }
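Example #18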
 public override bool PushUnmanaged(int inputIndex, MediaSample inputSample)
 {
     return(Push(inputIndex, inputSample));
 }
Example #19
        static bool DecodeJpeg(string inputFile, string outputFile)
        {
            int frameWidth, frameHeight;

            if (!GetFrameSize(inputFile, out frameWidth, out frameHeight))
            {
                return(false);
            }

            Console.WriteLine("Input frame size: {0}x{1}", frameWidth, frameHeight);

            // read input bytes
            byte[] inputData;
            try
            {
                inputData = System.IO.File.ReadAllBytes(inputFile);
            }
            catch (System.Exception e)
            {
                Console.WriteLine(e.ToString());
                return(false);
            }

            DeleteFile(outputFile);

            MediaSocket inSocket  = createInputSocket(frameWidth, frameHeight);
            MediaSocket outSocket = createOutputSocket(outputFile, frameWidth, frameHeight);

            // create Transcoder
            using (Transcoder transcoder = new Transcoder())
            {
                transcoder.AllowDemoMode = true;
                transcoder.Inputs.Add(inSocket);
                transcoder.Outputs.Add(outSocket);

                bool res = transcoder.Open();
                PrintError("Open Transcoder", transcoder.Error);
                if (!res)
                {
                    return(false);
                }

                MediaBuffer buffer = new MediaBuffer();
                buffer.Attach(inputData, true);

                MediaSample sample = new MediaSample();
                sample.Buffer = buffer;

                res = transcoder.Push(0, sample);

                PrintError("Push Transcoder", transcoder.Error);
                if (!res)
                {
                    return(false);
                }

                transcoder.Flush();
                transcoder.Close();
            }

            return(true);
        }
Example #20
        /// <summary>
        /// Transfers all buffered data from the intermediate store into the DirectShow graph.
        /// </summary>
        private void Flush()
        {
            // Nothing to do
            if (m_Index < 1)
                return;

            // Be safe
            try
            {
                // Allocate buffer -  may fail during shutdown so be cautious
                using (var sample = new MediaSample( m_Allocator.GetBuffer( IntPtr.Zero, IntPtr.Zero, MemBufferFlags.None ) ))
                {
                    // Set flags
                    if (m_SyncWaiting)
                        sample.IsSyncPoint = true;

                    // May want to stamp the time
                    if (m_SyncTime.HasValue)
                    {
                        // Get the time
                        long tStart = m_SyncTime.Value;

                        // Set it
                        sample.SetTime( tStart, tStart + 1 );
                    }

                    // Debug only
                    //System.Diagnostics.Debug.WriteLine( string.Format( "{0} {1} {2}", this, m_SyncTime.HasValue ? new TimeSpan( m_SyncTime.Value ) : TimeSpan.Zero, m_Index ) );

                    // Get the address of the data
                    IntPtr data = sample.BaseAddress;

                    // Fill it
                    Marshal.Copy( m_Buffer, 0, data, m_Index );

                    // Set real size
                    sample.ActualDataLength = m_Index;

                    // Set the media type
                    if (m_SendType)
                    {
                        // Once only
                        m_SendType = false;

                        // Add to sample
                        sample.MediaType = m_Type;
                    }

                    // Send to process - will COM release
                    m_Sink( sample.ComInterface );
                }
            }
            catch
            {
                // Ignore any error
            }
            finally
            {
                // Reset all - even in case of error
                m_SyncWaiting = false;
                m_SyncTime = null;
                m_Index = 0;
            }
        }
Example #21
        static bool SplitFile(string inputFile)
        {
            string       outputFileExt     = ".mpg";
            string       encodingPreset    = Preset.Video.DVD.NTSC_4x3_PCM;
            const double splitPartDuration = 10;     // seconds

            int audioStreamIndex = -1;
            int videoStreamIndex = -1;

            int audioFrameSize  = 0;
            int audioSampleRate = 0;

            using (var transcoder1 = new Transcoder())
            {
                // In order to use the OEM release for testing (without a valid license) the transcoder demo mode must be enabled.
                transcoder1.AllowDemoMode = true;

                using (var inputInfo = new MediaInfo())
                {
                    inputInfo.Inputs[0].File = inputFile;
                    if (!inputInfo.Open())
                    {
                        PrintError("Open MediaInfo", inputInfo.Error);
                        return(false);
                    }

                    // Configure transcoder1 input and output
                    var inputSocket = MediaSocket.FromMediaInfo(inputInfo);
                    transcoder1.Inputs.Add(inputSocket);

                    for (int i = 0; i < inputSocket.Pins.Count; i++)
                    {
                        StreamInfo inputStreamInfo = inputSocket.Pins[i].StreamInfo;

                        if ((inputStreamInfo.MediaType == MediaType.Video) && videoStreamIndex < 0)
                        {
                            var streamInfo = new VideoStreamInfo();

                            VideoStreamInfo inputVideoStreamInfo = inputStreamInfo as VideoStreamInfo;

                            streamInfo.ColorFormat = ColorFormat.YUV420;
                            streamInfo.StreamType  = StreamType.UncompressedVideo;
                            streamInfo.ScanType    = inputVideoStreamInfo.ScanType;

                            streamInfo.FrameWidth         = inputVideoStreamInfo.FrameWidth;
                            streamInfo.FrameHeight        = inputVideoStreamInfo.FrameHeight;
                            streamInfo.DisplayRatioWidth  = inputVideoStreamInfo.DisplayRatioWidth;
                            streamInfo.DisplayRatioHeight = inputVideoStreamInfo.DisplayRatioHeight;

                            var outputPin = new MediaPin();
                            outputPin.StreamInfo = streamInfo;

                            var outputSocket = new MediaSocket();
                            outputSocket.Pins.Add(outputPin);
                            outputSocket.StreamType = streamInfo.StreamType;

                            videoStreamIndex = transcoder1.Outputs.Count;
                            transcoder1.Outputs.Add(outputSocket);
                        }

                        if ((inputStreamInfo.MediaType == MediaType.Audio) && audioStreamIndex < 0)
                        {
                            var streamInfo = new AudioStreamInfo();

                            AudioStreamInfo inputAudioStreamInfo = inputStreamInfo as AudioStreamInfo;

                            streamInfo.StreamType = StreamType.LPCM;

                            streamInfo.PcmFlags      = inputAudioStreamInfo.PcmFlags;
                            streamInfo.Channels      = inputAudioStreamInfo.Channels;
                            streamInfo.SampleRate    = inputAudioStreamInfo.SampleRate;
                            streamInfo.BitsPerSample = inputAudioStreamInfo.BitsPerSample;

                            var outputPin = new MediaPin();
                            outputPin.StreamInfo = streamInfo;

                            var outputSocket = new MediaSocket();
                            outputSocket.Pins.Add(outputPin);
                            outputSocket.StreamType = streamInfo.StreamType;

                            audioStreamIndex = transcoder1.Outputs.Count;
                            transcoder1.Outputs.Add(outputSocket);

                            audioFrameSize  = inputAudioStreamInfo.Channels * inputAudioStreamInfo.BitsPerSample / 8;
                            audioSampleRate = inputAudioStreamInfo.SampleRate;
                        }
                    }
                }

                bool res = transcoder1.Open();
                PrintError("Open Transcoder1", transcoder1.Error);
                if (!res)
                {
                    return(false);
                }

                var sample = new MediaSample();
                int outputIndex;

                int        splitPartNum  = 0;
                double     splitTime     = splitPartDuration;
                double     partStartTime = 0;
                Transcoder transcoder2   = null;

                List <SplitRecord> splitStats = new List <SplitRecord>();

                List <MediaSample> audioSamplesQueue = new List <MediaSample>();

                try
                {
                    for (; ;)
                    {
                        if ((audioSamplesQueue.Count > 0) && (audioSamplesQueue[0].StartTime < splitTime))
                        {
                            outputIndex = audioStreamIndex;
                            sample      = audioSamplesQueue[0];
                            audioSamplesQueue.RemoveAt(0);
                        }
                        else
                        {
                            if (!transcoder1.Pull(out outputIndex, sample))
                            {
                                break;
                            }

                            if ((outputIndex != audioStreamIndex) &&
                                (outputIndex != videoStreamIndex))
                            {
                                continue;
                            }
                        }

                        if (outputIndex == audioStreamIndex)
                        {
                            double sampleDuration = (double)(sample.Buffer.DataSize) / (double)(audioFrameSize * audioSampleRate);
                            if (sample.StartTime >= splitTime)
                            {
                                audioSamplesQueue.Add(sample);
                                sample = new MediaSample();
                                continue;
                            }
                            else if ((sample.StartTime + sampleDuration) > splitTime)
                            {
                                double sample1Duration   = splitTime - sample.StartTime;
                                int    sample1BufferSize = (int)(sample1Duration * audioSampleRate) * audioFrameSize;

                                if (sample1BufferSize < sample.Buffer.DataSize)
                                {
                                    int buffer2Size = sample.Buffer.DataSize - sample1BufferSize;
                                    var buffer2     = new MediaBuffer(new byte[buffer2Size]);
                                    buffer2.SetData(0, buffer2Size);

                                    Array.Copy(sample.Buffer.Start, sample1BufferSize, buffer2.Start, 0, buffer2Size);

                                    var sample2 = new MediaSample();
                                    sample2.StartTime = sample.StartTime + sample1Duration;
                                    sample2.Buffer    = buffer2;

                                    if (sample1BufferSize > 0)
                                    {
                                        sample.Buffer.SetData(sample.Buffer.DataOffset, sample1BufferSize);
                                    }
                                    else
                                    {
                                        sample.Buffer.SetData(0, 0);
                                    }

                                    audioSamplesQueue.Add(sample2);
                                }
                            }
                        }


                        if ((transcoder2 == null) ||
                            ((sample.StartTime + 0.0001 >= splitTime) && (outputIndex == videoStreamIndex)))
                        {
                            if (transcoder2 != null)
                            {
                                transcoder2.Flush();
                                transcoder2.Close();
                                transcoder2.Dispose();
                            }

                            SplitRecord splitStat = new SplitRecord();
                            splitStat.StartTime       = splitTime;
                            splitStat.StartTimeActual = sample.StartTime;

                            splitPartNum += 1;
                            splitTime     = splitPartNum * splitPartDuration;
                            partStartTime = sample.StartTime;

                            transcoder2 = new Transcoder();
                            transcoder2.AllowDemoMode = true;

                            // Configure transcoder2 input and output
                            {
                                for (int i = 0; i < transcoder1.Outputs.Count; i++)
                                {
                                    var streamInfo = transcoder1.Outputs[i].Pins[0].StreamInfo.Clone() as StreamInfo;
                                    var pin        = new MediaPin();
                                    pin.StreamInfo = streamInfo;

                                    var socket = new MediaSocket();
                                    socket.Pins.Add(pin);
                                    socket.StreamType = streamInfo.StreamType;

                                    transcoder2.Inputs.Add(socket);
                                }

                                var outputSocket = MediaSocket.FromPreset(encodingPreset);

                                string fileName = GenerateOutputFileName(inputFile, splitPartNum) + outputFileExt;
                                string filePath = Path.Combine(GetExeDir(), fileName);

                                try
                                {
                                    File.Delete(filePath);
                                }
                                catch { }

                                outputSocket.File = filePath;
                                transcoder2.Outputs.Add(outputSocket);

                                splitStat.FileName = fileName;
                            }

                            if (splitStats.Count > 0)
                            {
                                SplitRecord lastRecord = splitStats[splitStats.Count - 1];
                                lastRecord.EndTime       = splitStat.StartTime;
                                lastRecord.EndTimeActual = splitStat.StartTimeActual;
                            }

                            splitStats.Add(splitStat);

                            res = transcoder2.Open();
                            PrintError("Open Transcoder2", transcoder2.Error);
                            if (!res)
                            {
                                return(false);
                            }
                        }

                        if ((splitStats.Count > 0))
                        {
                            SplitRecord lastRecord = splitStats[splitStats.Count - 1];
                            lastRecord.EndTime       = sample.StartTime;
                            lastRecord.EndTimeActual = lastRecord.EndTime;
                        }

                        if (sample.StartTime >= 0)
                        {
                            sample.StartTime = sample.StartTime - partStartTime;
                        }

                        res = transcoder2.Push(outputIndex, sample);
                        if (!res)
                        {
                            PrintError("Push Transcoder2", transcoder2.Error);
                            return(false);
                        }
                    }
                }
                finally
                {
                    if (transcoder2 != null)
                    {
                        transcoder2.Flush();
                        transcoder2.Close();
                        transcoder2.Dispose();
                        transcoder2 = null;
                    }
                }

                if ((transcoder1.Error.Facility != ErrorFacility.Codec) ||
                    (transcoder1.Error.Code != (int)CodecError.EOS))
                {
                    PrintError("Pull Transcoder1", transcoder1.Error);
                    return(false);
                }

                transcoder1.Close();

                // print split stats
                Console.WriteLine();
                foreach (var record in splitStats)
                {
                    Console.WriteLine("{0} start: {1} end: {2} act. start: {3} act. end: {4}", record.FileName,
                                      FormatTime(record.StartTime), FormatTime(record.EndTime), FormatTime(record.StartTimeActual), FormatTime(record.EndTimeActual));
                }
                Console.WriteLine();
            }

            return(true);
        }
Example #22
        static bool Encode(Options opt)
        {
            string       outFilename    = "cube." + opt.FileExtension;
            const int    imageCount     = 250;
            const double inputFrameRate = 25.0;

            using (var transcoder = new Transcoder())
            {
                // In order to use the OEM release for testing (without a valid license),
                // the transcoder demo mode must be enabled.
                transcoder.AllowDemoMode = true;

                try
                {
                    bool result;

                    try
                    {
                        File.Delete(outFilename);
                    }
                    catch { }

                    // Configure Input
                    {
                        using (MediaInfo medInfo = new MediaInfo())
                        {
                            medInfo.Inputs[0].File = GetImagePath(0);

                            result = medInfo.Open();
                            PrintError("Open MediaInfo", medInfo.Error);
                            if (!result)
                            {
                                return(false);
                            }

                            VideoStreamInfo vidInfo = (VideoStreamInfo)medInfo.Outputs[0].Pins[0].StreamInfo.Clone();
                            vidInfo.FrameRate = inputFrameRate;

                            MediaPin pin = new MediaPin();
                            pin.StreamInfo = vidInfo;

                            MediaSocket socket = new MediaSocket();
                            socket.Pins.Add(pin);

                            transcoder.Inputs.Add(socket);
                        }
                    }

                    // Configure Output
                    {
                        MediaSocket socket = MediaSocket.FromPreset(opt.PresetID);
                        socket.File = outFilename;

                        transcoder.Outputs.Add(socket);
                    }

                    // Encode Images
                    result = transcoder.Open();
                    PrintError("Open Transcoder", transcoder.Error);
                    if (!result)
                    {
                        return(false);
                    }

                    for (int i = 0; i < imageCount; i++)
                    {
                        string imagePath = GetImagePath(i);

                        MediaBuffer mediaBuffer = new MediaBuffer(File.ReadAllBytes(imagePath));

                        MediaSample mediaSample = new MediaSample();
                        mediaSample.StartTime = i / inputFrameRate;
                        mediaSample.Buffer    = mediaBuffer;

                        if (!transcoder.Push(0, mediaSample))
                        {
                            PrintError("Push Transcoder", transcoder.Error);
                            return(false);
                        }
                    }

                    result = transcoder.Flush();
                    PrintError("Flush Transcoder", transcoder.Error);
                    if (!result)
                    {
                        return(false);
                    }

                    transcoder.Close();
                    Console.WriteLine("Output video: \"{0}\"", outFilename);
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.ToString());
                    return(false);
                }
            }

            return(true);
        }
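Example #23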
        public async override Task ProcessMediaStream(IAsyncStreamReader <MediaStreamMessage> requestStream, IServerStreamWriter <MediaStreamMessage> responseStream, ServerCallContext context)
        {
            //First message from the client is (must be) MediaStreamDescriptor
            var clientState = new StreamState()
            {
                Processor = new BatchImageProcessor(_logger)
            };

            _ = await requestStream.MoveNext();

            var requestMessage = requestStream.Current;

            _logger.LogInformation($"[Received MediaStreamDescriptor] SequenceNum: {requestMessage.SequenceNumber}");
            var response = ProcessMediaStreamDescriptor(requestMessage.MediaStreamDescriptor, clientState);

            var responseMessage = new MediaStreamMessage()
            {
                MediaStreamDescriptor = response
            };

            await responseStream.WriteAsync(responseMessage);

            // Process rest of the MediaStream message sequence
            var          height         = (int)requestMessage.MediaStreamDescriptor.MediaDescriptor.VideoFrameSampleFormat.Dimensions.Height;
            var          width          = (int)requestMessage.MediaStreamDescriptor.MediaDescriptor.VideoFrameSampleFormat.Dimensions.Width;
            ulong        responseSeqNum = 0;
            int          messageCount   = 1;
            List <Image> imageBatch     = new List <Image>();

            while (await requestStream.MoveNext())
            {
                // Extract message IDs
                requestMessage = requestStream.Current;
                var requestSeqNum = requestMessage.SequenceNumber;
                _logger.LogInformation($"[Received MediaSample] SequenceNum: {requestSeqNum}");

                // Retrieve the sample content
                ReadOnlyMemory<byte> content = default;
                var inputSample = requestMessage.MediaSample;

                switch (inputSample.ContentCase)
                {
                case MediaSample.ContentOneofCase.ContentReference:

                    content = clientState.MemoryMappedFile.Memory.Slice(
                        (int)inputSample.ContentReference.AddressOffset,
                        (int)inputSample.ContentReference.LengthBytes);

                    break;

                case MediaSample.ContentOneofCase.ContentBytes:
                    content = inputSample.ContentBytes.Bytes.ToByteArray();
                    break;
                }

                var mediaStreamMessageResponse = new MediaStreamMessage()
                {
                    SequenceNumber    = ++responseSeqNum,
                    AckSequenceNumber = requestSeqNum
                };

                imageBatch.Add(GetImageFromContent(content, width, height));

                // If batch size hasn't been reached
                if (messageCount < _batchSize)
                {
                    // Return acknowledge message
                    mediaStreamMessageResponse.MediaSample = new MediaSample();
                    await responseStream.WriteAsync(mediaStreamMessageResponse);

                    messageCount++;
                    continue;
                }

                foreach (var inference in inputSample.Inferences)
                {
                    NormalizeInference(inference);
                }

                // Process images
                var inferencesResponse  = clientState.Processor.ProcessImages(imageBatch);
                var mediaSampleResponse = new MediaSample()
                {
                    Inferences = { inferencesResponse }
                };

                mediaStreamMessageResponse.MediaSample = mediaSampleResponse;

                await responseStream.WriteAsync(mediaStreamMessageResponse);

                imageBatch.Clear();
                messageCount = 1;
            }

            clientState.Dispose();
        }
Example #24
        /// <summary>
        /// Pull from all our input pins, then combine, then subtract
        /// </summary>
        /// <param name="tsElapsed"></param>
        void DoPushPull(TimeSpan tsElapsed)
        {
            lock (PushPullLock)
            {
                PushPullObject[] members = null;
                lock (MemberLock)
                {
                    members = Members.ToArray();
                }

                if (members.Length <= 0)
                {
                    return;
                }

                Dictionary <IAudioSource, short[]> InputSamples = new Dictionary <IAudioSource, short[]>();

                // Convert our short data to int so we don't overflow during addition
                // int[] combinedint = Utils.MakeIntArrayFromShortArray(sInitialData);
                int[] combinedint = new int[AudioFormat.CalculateNumberOfSamplesForDuration(tsElapsed)];


                // Sum the input data from all our input sources, storing the data for each source so we can subtract it when sending
                foreach (PushPullObject nextobj in members)
                {
                    if (nextobj.AudioSource == null)
                    {
                        continue;
                    }

                    // Always pull data from a source even if it's not active, because some just queue their buffers
                    MediaSample sample = nextobj.AudioSource.PullSample(AudioFormat, tsElapsed);
                    if (sample == null)
                    {
                        continue;
                    }

                    if (nextobj.AudioSource.IsSourceActive == false)
                    {
                        continue;
                    }

                    short[] sData = sample.GetShortData();

                    // Amplify our data if told to
                    if (nextobj.AudioSource.SourceAmplitudeMultiplier != 1.0f)
                    {
                        for (int i = 0; i < sData.Length; i++)
                        {
                            sData[i] = (short)(nextobj.AudioSource.SourceAmplitudeMultiplier * sData[i]);
                        }
                    }

                    InputSamples.Add(nextobj.AudioSource, sData);

                    Utils.SumArrays(combinedint, sData);
                }

                // Push data to all our output filters, subtracting the data this member supplied
                foreach (PushPullObject nextobj in members)
                {
                    if (nextobj.AudioSink == null)
                    {
                        continue;
                    }

                    if (nextobj.AudioSink.IsSinkActive == false)
                    {
                        continue;
                    }

                    // Copy the summed data so we don't mangle it for the next client
                    int[] nCopy = new int[combinedint.Length];
                    Array.Copy(combinedint, nCopy, nCopy.Length);

                    foreach (IAudioSource excludesource in nextobj.SourceExcludeList)
                    {
                        if (InputSamples.ContainsKey(excludesource) == true)  // If we are in the dictionary, we are not muted, so no need to subtract
                        {
                            short[] sData = InputSamples[excludesource];
                            Utils.SubtractArray(nCopy, sData);
                        }
                    }


                    // Amplify our data if told to
                    if (nextobj.AudioSink.SinkAmplitudeMultiplier != 1.0f)
                    {
                        for (int i = 0; i < nCopy.Length; i++)
                        {
                            nCopy[i] = (int)(nextobj.AudioSink.SinkAmplitudeMultiplier * nCopy[i]);
                        }
                    }


                    //short[] sOutput = Utils.MakeShortArrayFromIntArray(nCopy);
                    short[] sOutput = Utils.AGCAndShortArray(nCopy, short.MaxValue);


                    MediaSample outputsample = new MediaSample(sOutput, AudioFormat);
                    nextobj.AudioSink.PushSample(outputsample, this);
                }
            }
        }
Example #25
        static int Main(string[] args)
        {
            Library.Initialize();

            // Set license information. The SetLicense call below is commented out so AVBlocks runs in demo mode.
            // Library.SetLicense("<license-string>");

            string    inputFile          = Path.Combine(ExeDir, @"..\assets\mov\big_buck_bunny_trailer_iphone.m4v");
            string    outputFile         = "overlay_yuv_jpegs.mp4";
            const int imageOverlayFrames = 250;
            string    imageOverlayFiles  = Path.Combine(ExeDir, @"..\assets\overlay\cube\cube{0:d4} (128x96).jpg");

            VideoStreamInfo uncompressedVideo = null;

            using (var info = new MediaInfo())
            {
                info.Inputs[0].File = inputFile;

                if (!info.Open())
                {
                    PrintError("load info", info.Error);
                    return((int)ExitCodes.Error);
                }

                foreach (var socket in info.Outputs)
                {
                    foreach (var pin in socket.Pins)
                    {
                        StreamInfo si = pin.StreamInfo;
                        if (si.MediaType == MediaType.Video)
                        {
                            uncompressedVideo = si.Clone() as VideoStreamInfo;
                            break;
                        }
                    }
                }
            }

            uncompressedVideo.StreamType  = StreamType.UncompressedVideo;
            uncompressedVideo.ColorFormat = ColorFormat.YUV420;

            var outputVideo = (VideoStreamInfo)uncompressedVideo.Clone();

            outputVideo.StreamType = StreamType.H264;

            try { System.IO.File.Delete(outputFile); }
            catch { }

            int decodedSamples   = 0;
            int overlayedSamples = 0;
            int encodedSamples   = 0;

            using (var overlay = new Overlay())
                using (var decoder = CreateDecoder(inputFile))
                    using (var encoder = CreateEncoder(uncompressedVideo, outputVideo, outputFile))
                    {
                        if (!decoder.Open())
                        {
                            PrintError("decoder open", decoder.Error);
                            return((int)ExitCodes.DecoderError);
                        }

                        if (!encoder.Open())
                        {
                            PrintError("encoder open", encoder.Error);
                            return((int)ExitCodes.EncoderError);
                        }

                        int outputIndex;
                        var decodedSample   = new MediaSample();
                        var overlayedSample = new MediaSample();

                        while (true)
                        {
                            if (!decoder.Pull(out outputIndex, decodedSample))
                            {
                                PrintError("decoder pull", decoder.Error);
                                break;
                            }
                            ++decodedSamples;

                            var imageOverlayFile = string.Format(imageOverlayFiles, overlayedSamples % imageOverlayFrames);

                            overlay.Close();
                            if (!overlay.Open(imageOverlayFile, StreamType.Jpeg, uncompressedVideo))
                            {
                                PrintError("overlay open", overlay.Error);
                                break;
                            }

                            if (!overlay.Push(0, decodedSample))
                            {
                                PrintError("overlay push", overlay.Error);
                                break;
                            }

                            if (!overlay.Pull(out outputIndex, overlayedSample))
                            {
                                PrintError("overlay pull", overlay.Error);
                                break;
                            }
                            ++overlayedSamples;


                            if (!encoder.Push(0, overlayedSample))
                            {
                                PrintError("encoder push", encoder.Error);
                                break;
                            }
                            ++encodedSamples;
                        }

                        decoder.Close();
                        overlay.Close();
                        encoder.Flush();
                        encoder.Close();
                    }

            Console.WriteLine("samples decoded/overlayed/encoded: {0}/{1}/{2}",
                              decodedSamples, overlayedSamples, encodedSamples);

            bool success = (decodedSamples > 0 && decodedSamples == encodedSamples);

            if (success)
            {
                Console.WriteLine("output file: {0}", Path.GetFullPath(outputFile));
            }

            Library.Shutdown();

            return(success ? (int)ExitCodes.Success : (int)ExitCodes.Error);
        }
Example #26
 public bool Pull(out int outputIndex, MediaSample outputSample)
 {
     return(t.Pull(out outputIndex, outputSample));
 }
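Example #27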
        private bool EncodeAndMux(Transcoder encoder, MediaSample inputSample, int muxIndex)
        {
            int         outputIndex;
            MediaSample outputSample = new MediaSample();
            bool        res;

            string enc = (encoder == _audioEncoder ? "audio" : "video");

            while (true)     // encode and mux until the inputSample is fully consumed
            {
                while (true) // pull as much as possible from encoder and mux it
                {
                    Trace.Write(string.Format("{0}, pull, ", enc));

                    res = encoder.Pull(out outputIndex, outputSample);

                    if (res)
                    {
                        Trace.WriteLine(string.Format("{0}", outputSample.Buffer.DataSize));

                        if (_audioLog != null && encoder == _audioEncoder)
                        {
                            AppendSampleToFile(_audioLog, outputSample);
                        }

                        lock (_mux) {
                            Trace.Write(string.Format("{0}, mux, ", enc));

                            res = _mux.Push(muxIndex, outputSample);

                            if (!res)
                            {
                                _error = _mux.Error;
                                Trace.WriteLine(string.Format("Error {0} {1}", _error.Message, _error.Hint));
                                return(false);
                            }

                            Trace.WriteLine("success");
                        }
                    }
                    else
                    {
                        if (IsErrorInputNeeded(encoder.Error))
                        {
                            System.Diagnostics.Trace.WriteLine("InputNeeded");

                            break;
                        }

                        _error = encoder.Error;
                        System.Diagnostics.Trace.WriteLine(
                            string.Format("Error {0} {1}", _error.Message, _error.Hint));
                        return(false);
                    }
                }

                if (Util.IsSampleEmpty(inputSample))
                {
                    break;
                }

                System.Diagnostics.Trace.Write(
                    string.Format("{0}, push {1}, ", enc, inputSample.Buffer.DataSize));

                res = inputSample.UnmanagedBuffer != null ?
                      encoder.PushUnmanaged(0, inputSample) :
                      encoder.Push(0, inputSample);

                if (res)
                {
                    // the input sample is fully or partially consumed
                    System.Diagnostics.Trace.WriteLine(
                        string.Format("success:{0}", inputSample.Buffer.DataSize));

                    if (inputSample.Buffer.DataSize == 0)
                    {
                        break;
                    }
                }
                else if (IsErrorInputFull(encoder.Error))
                {
                    // cannot take more input
                    Trace.WriteLine(string.Format("BufferFull:{0}", inputSample.Buffer.DataSize));
                }
                else
                {
                    _error = encoder.Error;
                    Trace.WriteLine(string.Format("Error {0} {1}", _error.Message, _error.Hint));
                    return(false);
                }
            }
            return(true);
        }
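        // A minimal sketch (not part of the original source) showing how EncodeAndMux might be
        // driven from the decode paths; the mux input indices (0 = audio, 1 = video) are
        // assumptions for illustration.
        private bool OnAudioSampleDecoded(MediaSample sample)
        {
            return EncodeAndMux(_audioEncoder, sample, 0); // assumed audio input pin of the muxer
        }

        private bool OnVideoSampleDecoded(Transcoder videoEncoder, MediaSample sample)
        {
            return EncodeAndMux(videoEncoder, sample, 1);  // assumed video input pin of the muxer
        }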
Example No. 28
0
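        // NextAudioBuffer copies as much queued audio data as fits into 'buffer', trims the
        // consumed bytes from the queued samples, and reports the number of bytes written
        // through 'length'; it returns false once cancellation is pending.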
        public bool NextAudioBuffer(byte[] buffer, ref int length)
        {
            if (_cancellationPending)
            {
                length = 0;
                return(false);
            }

            lock (_csAVQueue)
            {
                int bytesWritten = 0;

                while ((_audioQueue.Count > 0) && (bytesWritten < buffer.Length))
                {
                    MediaSample mediaSample = _audioQueue.First.Value;

                    if (_unmanaged)
                    {
                        UnmanagedMediaBuffer mediaBuffer = mediaSample.UnmanagedBuffer;

                        int chunk = Math.Min(mediaBuffer.DataSize, buffer.Length - bytesWritten);
                        Marshal.Copy(mediaBuffer.DataPtr, buffer, bytesWritten, chunk);

                        bytesWritten += chunk;
                        mediaBuffer.Remove(chunk);

                        if (mediaBuffer.DataSize == 0)
                        {
                            mediaBuffer.Release();
                            _audioQueue.RemoveFirst();
                        }
                    }
                    else
                    {
                        MediaBuffer mediaBuffer = mediaSample.Buffer;

                        int chunk = Math.Min(mediaBuffer.DataSize, buffer.Length - bytesWritten);
                        Array.Copy(mediaBuffer.Start, mediaBuffer.DataOffset, buffer, bytesWritten, chunk);

                        bytesWritten += chunk;

                        {
                            int newDataOffset = mediaBuffer.DataOffset + chunk;
                            int newDataSize   = mediaBuffer.DataSize - chunk;
                            if (0 == newDataSize)
                            {
                                newDataOffset = 0;
                            }

                            mediaBuffer.SetData(newDataOffset, newDataSize);
                        }

                        if (mediaBuffer.DataSize == 0)
                        {
                            _audioQueue.RemoveFirst();
                        }
                    }
                }

                length = bytesWritten;
            }

            return(true);
        }
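        // A minimal sketch (not from the original source) of how a render callback might
        // consume the queue above. 'output' stands in for the actual audio device sink and
        // is an assumption for illustration only.
        private void OnDeviceBufferNeeded(byte[] deviceBuffer, System.IO.Stream output)
        {
            int length = deviceBuffer.Length;

            if (NextAudioBuffer(deviceBuffer, ref length) && length > 0)
            {
                output.Write(deviceBuffer, 0, length); // hand the decoded PCM to the audio output
            }
            // length == 0 simply means the queue is empty right now; render silence or return.
        }

        // SampleCB below is the DirectShow sample-grabber callback: it records timing statistics,
        // optionally writes the raw frame to a dump file, pushes the frame into the AVBlocks
        // MPEG-2 encoder, and posts WM_STOP_CAPTURE before returning E_FAIL when the push fails.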
        public int SampleCB(double SampleTime, IMediaSample pSample)
        {
            if (!bProcess)
            {
                lastSampleTime = SampleTime;
                return(WinAPI.E_FAIL);
            }

            // internal stats
            ++sampleIndex;
            long tStart, tEnd;

            pSample.GetMediaTime(out tStart, out tEnd);
            Debug.Assert(tStart < tEnd);
            Debug.Assert(tStart > lastMediaTime);
            long dropped = tStart - lastMediaTime - 1; // frames missing between the previous sample and this one
            sampleProcessed += tEnd - tStart;
            sampleDropped   += dropped;
            lastMediaTime    = tEnd - 1;

            int    dataLen = pSample.GetActualDataLength();
            IntPtr bufPtr;
            int    hr = pSample.GetPointer(out bufPtr);

            Debug.Assert(0 == hr);

            // BEGIN TRACE

            int bufSize = pSample.GetSize();

            long timeStart, timeEnd;

            pSample.GetTime(out timeStart, out timeEnd);

            string msg = string.Format(
                "SampleCB ({0}) {1}, sampleTime:{2} datalen:{3} bufsize:{4} mediaTime:{5}-{6} time:{7}-{8}",
                name, sampleIndex, SampleTime, dataLen, bufSize, tStart, tEnd, timeStart, timeEnd);

            Trace.WriteLine(msg);

            if (dropped > 0)
            {
                msg = string.Format("!!! Frame drop: {0}", dropped);
                Trace.WriteLine(msg);
            }

            //END TRACE

            byte[] buf = new byte[dataLen];
            Marshal.Copy(bufPtr, buf, 0, dataLen);

            if (file != null)
            {
                file.Write(buf, 0, dataLen);
            }

            //DBG - simulate encoding error
            //if (sampleIndex > 100)
            //    goto STOP_CAPTURE;

            if (mediaState != null && mediaState.mpeg2Enc != null)
            {
                PrimoSoftware.AVBlocks.Transcoder enc = mediaState.mpeg2Enc;
                MediaSample inputSample = new MediaSample();
                inputSample.Buffer    = new MediaBuffer(buf);
                inputSample.StartTime = Math.Max(SampleTime, 0);
                //TODO: end time

                try
                {
                    bool pushed = false;

                    // transcoder.Push() is not thread safe.
                    // lock (enc){ } ensures that only one thread calls transcoder.Push() at a time
                    lock (enc)
                    {
                        pushed = enc.Push(StreamNumber, inputSample);
                    }

                    if (pushed)
                    {
                        return(0);
                    }
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Trace.WriteLine(ex.ToString());
                }

                Trace.WriteLine("PushSample FAILED");
            }

            //STOP_CAPTURE:

            Trace.WriteLine("SampleCB: Before Post STOP_CAPTURE");
            WinAPI.PostMessage(MainWindow, Util.WM_STOP_CAPTURE, new IntPtr(streamNumber), IntPtr.Zero);
            Trace.WriteLine("SampleCB: After Post STOP_CAPTURE");
            bProcess = false;
            return(WinAPI.E_FAIL);
        } // end of SampleCB
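        // A minimal sketch (assuming the DirectShowLib ISampleGrabber bindings) of how this
        // callback class might be attached to a sample grabber filter. The wiring below is not
        // part of the original fragment and 'sampleGrabber' / 'grabberCallback' are hypothetical.
        // int hr = sampleGrabber.SetCallback(grabberCallback, 0); // 0 selects SampleCB, 1 selects BufferCB
        // DsError.ThrowExceptionForHR(hr);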
Example No. 30
0
        static bool DecodeH264Stream(Options opt)
        {
            // Create an input socket from file
            MediaSocket inSocket = new MediaSocket();

            inSocket.File = opt.InputFile;

            // Create an output socket with one YUV 4:2:0 video pin
            VideoStreamInfo outStreamInfo = new VideoStreamInfo();

            outStreamInfo.StreamType  = StreamType.UncompressedVideo;
            outStreamInfo.ColorFormat = ColorFormat.YUV420;
            outStreamInfo.ScanType    = ScanType.Progressive;

            MediaPin outPin = new MediaPin();

            outPin.StreamInfo = outStreamInfo;

            MediaSocket outSocket = new MediaSocket();

            outSocket.StreamType = StreamType.UncompressedVideo;

            outSocket.Pins.Add(outPin);

            // Create Transcoder
            using (var transcoder = new Transcoder())
            {
                transcoder.AllowDemoMode = true;
                transcoder.Inputs.Add(inSocket);
                transcoder.Outputs.Add(outSocket);

                if (transcoder.Open())
                {
                    DeleteFile(opt.OutputFile);

                    int         inputIndex;
                    MediaSample yuvFrame = new MediaSample();

                    int frameCounter = 0;

                    using (System.IO.FileStream outfile = System.IO.File.OpenWrite(opt.OutputFile))
                    {
                        while (transcoder.Pull(out inputIndex, yuvFrame))
                        {
                            // Each call to Transcoder::pull returns a raw YUV 4:2:0 frame.
                            outfile.Write(yuvFrame.Buffer.Start, yuvFrame.Buffer.DataOffset, yuvFrame.Buffer.DataSize);
                            ++frameCounter;
                        }

                        PrintError("Transcoder pull", transcoder.Error);

                        Console.WriteLine("Frames decoded: {0}", frameCounter);
                        Console.WriteLine("Output file: {0}", opt.OutputFile);


                        outfile.Close();
                    }

                    transcoder.Close();
                    return(true);
                }

                PrintError("Transcoder open", transcoder.Error);

                return(false);
            }
        }
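        // A minimal sketch (not part of the original sample) of driving DecodeH264Stream.
        // The 'Options' type is assumed to expose settable InputFile/OutputFile properties,
        // and the file names below are placeholders.
        static int RunDecodeH264Sample()
        {
            Library.Initialize();

            var opt = new Options
            {
                InputFile  = "input.h264", // assumed: a raw H.264 elementary stream
                OutputFile = "output.yuv"  // assumed: raw YUV 4:2:0 output
            };

            bool ok = DecodeH264Stream(opt);

            Library.Shutdown();

            return ok ? 0 : 1;
        }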
        private void AppendSampleToFile(System.IO.FileStream fileStream, MediaSample sample)
        {
            fileStream.Write(sample.Buffer.Start,
                             sample.Buffer.DataOffset,
                             sample.Buffer.DataSize);
        }