Example #1
 /// <summary>
 /// Sets the decoding state to "needs to be continued".
 /// </summary>
 public void ContinueDecoding()
 {
     _decoderState = DecoderState.Continue;
 }
Example #2
		/// <summary>
		/// Sets the decoding state to "needs to be continued".
		/// </summary>
		public void ContinueDecoding() 
		{
			_decoderState = DecoderState.Continue;
		}
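Examples #1, #2, #15, and #24 through #27 all manipulate the same pair of private fields. A minimal sketch of the enum and fields these snippets assume (only the members actually referenced in this listing are shown; the real projects may define more):

    // Sketch only: states and fields inferred from the snippets in this listing.
    public enum DecoderState
    {
        Ok,       // decoding normally (StartDecoding)
        Buffer,   // waiting for a given amount of buffered data (SetBufferDecoding)
        Continue  // the current frame needs more input (ContinueDecoding)
    }

    private DecoderState _decoderState = DecoderState.Ok;
    private long _decoderBufferAmount;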
Example #3
 /// <summary>
 /// Fired when channel is active.
 /// </summary>
 /// <param name="context">The context.</param>
 public override void ChannelActive(IChannelHandlerContext context)
 {
     _remoteAddress = context.Channel.RemoteAddress.ToString();
     State = DecoderState.ReSync;
     base.ChannelActive(context);
 }
Example #4
        IEnumerator initDecoderAsync(string path)
        {
            print(LOG_TAG + " init Decoder.");
            decoderState = DecoderState.INITIALIZING;

            mediaPath = path;
            decoderID = -1;
            nativeCreateDecoderAsync(mediaPath, ref decoderID);

            int result = 0;

            do
            {
                yield return null;

                result = nativeGetDecoderState(decoderID);
            } while (!(result == 1 || result == -1));

            //  Init success.
            if (result == 1)
            {
                print(LOG_TAG + " Init success.");
                isVideoEnabled = nativeIsVideoEnabled(decoderID);
                if (isVideoEnabled)
                {
                    float duration = 0.0f;
                    nativeGetVideoFormat(decoderID, ref videoWidth, ref videoHeight, ref duration);
                    videoTotalTime = duration > 0 ? duration : -1.0f;
                    print(LOG_TAG + " Video format: (" + videoWidth + ", " + videoHeight + ")");
                    print(LOG_TAG + " Total time: " + videoTotalTime);

                    setTextures(null, null, null);
                    useDefault = true;
                }

                //	Initialize audio.
                isAudioEnabled = nativeIsAudioEnabled(decoderID);
                print(LOG_TAG + " isAudioEnabled = " + isAudioEnabled);
                if (isAudioEnabled)
                {
                    if (isAllAudioChEnabled)
                    {
                        nativeSetAudioAllChDataEnable(decoderID, isAllAudioChEnabled);
                        getAudioFormat();
                    }
                    else
                    {
                        getAudioFormat();
                        initAudioSource();
                    }
                }

                decoderState = DecoderState.INITIALIZED;

                if (onInitComplete != null)
                {
                    onInitComplete.Invoke();
                }
            }
            else
            {
                print(LOG_TAG + " Init fail.");
                decoderState = DecoderState.INIT_FAIL;
            }
        }
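Example #4 is typical Unity native-plugin glue: it kicks off asynchronous initialization in the plugin, then yields once per frame until nativeGetDecoderState reports success (1) or failure (-1). A hypothetical way to drive the coroutine from a MonoBehaviour (StartCoroutine is standard Unity; the file name is a placeholder):

    // Hypothetical caller; "video.mp4" is an illustrative path only.
    void Start()
    {
        string path = System.IO.Path.Combine(Application.streamingAssetsPath, "video.mp4");
        StartCoroutine(initDecoderAsync(path));
    }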
Example #5
        //  Video progress is driven from Update; the playback time is advanced via nativeSetVideoTime.
        void Update()
        {
            switch (decoderState)
            {
            case DecoderState.START:
                if (isVideoEnabled)
                {
                    //  Prevent an empty texture from producing a green screen (an all-zero YUV texture renders as green in RGB).
                    if (useDefault && nativeIsContentReady(decoderID))
                    {
                        getTextureFromNative();
                        setTextures(videoTexYch, videoTexUch, videoTexVch);
                        useDefault = false;
                    }

                    //	Update video frame by dspTime.
                    double setTime = AudioSettings.dspTime - globalStartTime;

                    //	Normal update frame.
                    if (setTime < videoTotalTime || videoTotalTime == -1.0f)
                    {
                        if (seekPreview && nativeIsContentReady(decoderID))
                        {
                            setPause();
                            seekPreview = false;
                            unmute();
                        }
                        else
                        {
                            nativeSetVideoTime(decoderID, (float)setTime);
                            GL.IssuePluginEvent(GetRenderEventFunc(), decoderID);
                        }
                    }
                    else
                    {
                        isVideoReadyToReplay = true;
                    }
                }

                if (nativeIsVideoBufferEmpty(decoderID) && !nativeIsEOF(decoderID))
                {
                    decoderState = DecoderState.BUFFERING;
                    hangTime     = AudioSettings.dspTime - globalStartTime;
                }

                break;

            case DecoderState.SEEK_FRAME:
                if (nativeIsSeekOver(decoderID))
                {
                    globalStartTime = AudioSettings.dspTime - hangTime;
                    decoderState    = DecoderState.START;
                    if (lastState == DecoderState.PAUSE)
                    {
                        seekPreview = true;
                        mute();
                    }
                }
                break;

            case DecoderState.BUFFERING:
                if (nativeIsVideoBufferFull(decoderID) || nativeIsEOF(decoderID))
                {
                    decoderState    = DecoderState.START;
                    globalStartTime = AudioSettings.dspTime - hangTime;
                }
                break;

            case DecoderState.PAUSE:
            case DecoderState.EOF:
            default:
                break;
            }

            if (isVideoEnabled || isAudioEnabled)
            {
                if ((!isVideoEnabled || isVideoReadyToReplay) && (!isAudioEnabled || isAudioReadyToReplay))
                {
                    decoderState         = DecoderState.EOF;
                    isVideoReadyToReplay = isAudioReadyToReplay = false;

                    if (onVideoEnd != null)
                    {
                        onVideoEnd.Invoke();
                    }
                }
            }
        }
Example #6
        private static void DecodeAdpcmImaWav(Decoder decoder, BinaryReader reader, BinaryWriter writer)
        {
            // reference implementations:
            // https://wiki.multimedia.cx/index.php/IMA_ADPCM
            // https://github.com/Nanook/TheGHOST/blob/master/ImaAdpcmPlugin/Ima.cs
            // https://github.com/rochars/imaadpcm/blob/master/index.js

            DecoderState state = decoder.State;

            AdpcmImaWavChannel[] channel = new AdpcmImaWavChannel[2];
            int  nibbles  = 0;
            bool isStereo = decoder.AudioFormat.Channels == 2;

            // https://www.microchip.com/forums/m698891.aspx
            // Each block starts with a header consisting of the following 4 bytes:
            //  16 bit audio sample (2 bytes, little endian)
            //   8 bit step table index
            //   dummy byte (set to zero)
            channel[0].Predictor = reader.ReadInt16();
            channel[0].StepIndex = reader.ReadByte();
            channel[0].StepIndex = Clamp(channel[0].StepIndex, 0, 88);
            reader.ReadByte();

            if (isStereo)
            {
                channel[1].Predictor = reader.ReadInt16();
                channel[1].StepIndex = reader.ReadByte();
                channel[1].StepIndex = Clamp(channel[1].StepIndex, 0, 88);
                reader.ReadByte();
            }

            // Note that we encode two samples per byte,
            // but there are an odd number of samples per block.
            // One of the samples is in the ADPCM block header.
            // So, a block looks like this:

            // Example: BlockAlign 2048, SamplesPerBlock 4089
            // 4 bytes, Block header including 1 sample
            // 2048-4 = 2044 bytes with 4089-1 = 4088 samples
            // Total of 4089 samples per block.

            // Example: BlockAlign 512, SamplesPerBlock 505
            // 4 bytes, Block header including 1 sample
            // 512-4 = 508 bytes with 505-1 = 504 samples
            // Total of 505 samples per block.

            if (isStereo)
            {
                int     offset = 0;
                short[] sample = new short[2 * (state.BlockAlign - 8)];
                for (nibbles = 2 * (state.BlockAlign - 8);
                     nibbles > 0;
                     nibbles -= 16)
                {
                    try
                    {
                        for (int i = 0; i < 4; i++)
                        {
                            byte buffer = reader.ReadByte();
                            sample[offset + i * 4 + 0] = AdpcmImaWavExpandNibble(ref channel[0], buffer & 0x0f);
                            sample[offset + i * 4 + 2] = AdpcmImaWavExpandNibble(ref channel[0], buffer >> 4);
                        }

                        for (int i = 0; i < 4; i++)
                        {
                            byte buffer = reader.ReadByte();
                            sample[offset + i * 4 + 1] = AdpcmImaWavExpandNibble(ref channel[1], buffer & 0x0f);
                            sample[offset + i * 4 + 3] = AdpcmImaWavExpandNibble(ref channel[1], buffer >> 4);
                        }
                    }
                    catch (System.IO.EndOfStreamException)
                    {
                        Log.Verbose("DecodeAdpcmImaWav: Reached end of stream - returning.");
                        break;
                    }

                    offset += 16;
                }

                for (int i = 0; i < sample.Length; i++)
                {
                    writer.Write(sample[i]);
                }
            }
            else
            {
                for (nibbles = 2 * (state.BlockAlign - 4);
                     nibbles > 0;
                     nibbles -= 2)
                {
                    try
                    {
                        byte buffer = reader.ReadByte();
                        writer.Write(AdpcmImaWavExpandNibble(ref channel[0], (buffer) & 0x0f));
                        writer.Write(AdpcmImaWavExpandNibble(ref channel[0], (buffer) >> 4));
                    }
                    catch (System.IO.EndOfStreamException)
                    {
                        Log.Verbose("DecodeAdpcmImaWav: Reached end of stream - returning.");
                        break;
                    }
                }
            }
        }
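Both branches above rely on AdpcmImaWavExpandNibble, which is not shown. The expansion step is standard IMA ADPCM (see the multimedia.cx reference cited at the top of the example); a sketch, assuming the channel struct carries a short Predictor and an int StepIndex, and that Clamp is the same helper used above:

    // Standard IMA ADPCM tables, from the IMA specification.
    private static readonly int[] ImaIndexTable = { -1, -1, -1, -1, 2, 4, 6, 8 };

    private static readonly int[] ImaStepTable =
    {
            7,     8,     9,    10,    11,    12,    13,    14,    16,    17,
           19,    21,    23,    25,    28,    31,    34,    37,    41,    45,
           50,    55,    60,    66,    73,    80,    88,    97,   107,   118,
          130,   143,   157,   173,   190,   209,   230,   253,   279,   307,
          337,   371,   408,   449,   494,   544,   598,   658,   724,   796,
          876,   963,  1060,  1166,  1282,  1411,  1552,  1707,  1878,  2066,
         2272,  2499,  2749,  3024,  3327,  3660,  4026,  4428,  4871,  5358,
         5894,  6484,  7132,  7845,  8630,  9493, 10442, 11487, 12635, 13899,
        15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767
    };

    // Sketch of the nibble expansion; the signature is inferred from the call sites above.
    private static short AdpcmImaWavExpandNibble(ref AdpcmImaWavChannel channel, int nibble)
    {
        int step = ImaStepTable[channel.StepIndex];

        // diff = (nibble + 0.5) * step / 4, computed with shifts as in the reference code.
        int diff = step >> 3;
        if ((nibble & 1) != 0) diff += step >> 2;
        if ((nibble & 2) != 0) diff += step >> 1;
        if ((nibble & 4) != 0) diff += step;
        if ((nibble & 8) != 0) diff = -diff;

        channel.Predictor = (short)Clamp(channel.Predictor + diff, short.MinValue, short.MaxValue);
        channel.StepIndex = Clamp(channel.StepIndex + ImaIndexTable[nibble & 7], 0, 88);

        return channel.Predictor;
    }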
Example #7
        /*
         * EA ADPCM
         */
        private static void DecodeAdpcmEA(Decoder decoder, BinaryReader reader, BinaryWriter writer)
        {
            int[] EATable =
            {
                // Values sign-extended to int (in the 16-bit reference table,
                // 0xFF30 = -208, 0xFF24 = -220, 0xFFFF = -1, 0xFFFD = -3, 0xFFFC = -4);
                // leaving them as positive hex literals would corrupt the predictor math.
                0, 240, 460, 392, 0, 0, -208, -220,
                0, 1, 3, 4, 7, 8, 10, 11,
                0, -1, -3, -4
            };

            DecoderState state = decoder.State;

            int[] c1 = new int[2];
            int[] c2 = new int[2];
            int[] d  = new int[2];

            int channels = decoder.AudioFormat.Channels;

            short[] prev = state.Prev;
            int[]   cur  = new int[prev.Length + channels];

            for (int c = 0; c < channels; c++)
            {
                byte input = reader.ReadByte();

                c1[c] = EATable[input >> 4];
                c2[c] = EATable[(input >> 4) + 4];
                d[c]  = (input & 0xf) + 8;
            }

            for (int i = 0; i < state.BlockAlign; i += channels)
            {
                AdpcmEASpl spl = new AdpcmEASpl();

                for (int c = 0; c < channels; c++)
                {
                    byte buffer = reader.ReadByte();
                    spl.u   = (uint)((buffer & 0xf0u) << 24);
                    spl.i >>= d[c];
                    spl.i   = (spl.i + cur[c] * c1[c] + prev[c] * c2[c] + 0x80) >> 8;

                    // Clamp result to 16-bit, -32768 - 32767
                    spl.i = Clamp(spl.i, short.MinValue, short.MaxValue);

                    prev[c] = (short)cur[c];
                    cur[c]  = spl.i;

                    writer.Write((short)spl.i);
                }

                for (int c = 0; c < channels; c++)
                {
                    byte buffer = reader.ReadByte();
                    spl.u   = (uint)(buffer & 0x0fu) << 28;
                    spl.i >>= d[c];
                    spl.i   = (spl.i + cur[c] * c1[c] + prev[c] * c2[c] + 0x80) >> 8;

                    // Clamp result to 16-bit, -32768 - 32767
                    spl.i = Clamp(spl.i, short.MinValue, short.MaxValue);

                    prev[c] = (short)cur[c];
                    cur[c]  = spl.i;

                    writer.Write((short)spl.i);
                }
            }
        }
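The EA decoder above reads each nibble into an AdpcmEASpl, writing the unsigned field and then shifting the signed field, which only works if the two fields share storage like a C union. A sketch of such a struct (the layout is inferred from usage, not taken from the original source):

    using System.Runtime.InteropServices;

    // Sketch: overlapping fields emulate a C union, so writing .u and then
    // reading .i reinterprets the same 32 bits as a signed integer,
    // making the subsequent >> an arithmetic (sign-extending) shift.
    [StructLayout(LayoutKind.Explicit)]
    internal struct AdpcmEASpl
    {
        [FieldOffset(0)] public uint u;
        [FieldOffset(0)] public int  i;
    }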
Example #8
        private void decodingLoop(CancellationToken cancellationToken)
        {
            var packet = ffmpeg.av_packet_alloc();

            const int max_pending_frames = 3;

            try
            {
                while (true)
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        return;
                    }

                    if (decodedFrames.Count < max_pending_frames)
                    {
                        int readFrameResult = ffmpeg.av_read_frame(formatContext, packet);

                        if (readFrameResult >= 0)
                        {
                            State = DecoderState.Running;

                            if (packet->stream_index == stream->index)
                            {
                                int sendPacketResult = ffmpeg.avcodec_send_packet(stream->codec, packet);

                                if (sendPacketResult == 0)
                                {
                                    AVFrame *frame    = ffmpeg.av_frame_alloc();
                                    AVFrame *outFrame = null;

                                    var result = ffmpeg.avcodec_receive_frame(stream->codec, frame);

                                    if (result == 0)
                                    {
                                        var frameTime = (frame->best_effort_timestamp - stream->start_time) * timeBaseInSeconds * 1000;

                                        if (!skipOutputUntilTime.HasValue || skipOutputUntilTime.Value < frameTime)
                                        {
                                            skipOutputUntilTime = null;

                                            if (convert)
                                            {
                                                outFrame         = ffmpeg.av_frame_alloc();
                                                outFrame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
                                                outFrame->width  = stream->codec->width;
                                                outFrame->height = stream->codec->height;

                                                var ret = ffmpeg.av_frame_get_buffer(outFrame, 32);
                                                if (ret < 0)
                                                {
                                                    throw new InvalidOperationException($"Error allocating video frame: {getErrorMessage(ret)}");
                                                }

                                                ffmpeg.sws_scale(convCtx, frame->data, frame->linesize, 0, stream->codec->height,
                                                                 outFrame->data, outFrame->linesize);
                                            }
                                            else
                                            {
                                                outFrame = frame;
                                            }

                                            if (!availableTextures.TryDequeue(out var tex))
                                            {
                                                tex = new Texture(new VideoTexture(codecParams.width, codecParams.height));
                                            }

                                            var upload = new VideoTextureUpload(outFrame, ffmpeg.av_frame_free);

                                            tex.SetData(upload);
                                            decodedFrames.Enqueue(new DecodedFrame {
                                                Time = frameTime, Texture = tex
                                            });
                                        }

                                        lastDecodedFrameTime = (float)frameTime;
                                    }

                                    // There are two cases: outFrame could be null in which case the above decode hasn't run, or the outFrame doesn't match the input frame,
                                    // in which case it won't be automatically freed by the texture upload. In both cases we need to free the input frame.
                                    if (outFrame != frame)
                                    {
                                        ffmpeg.av_frame_free(&frame);
                                    }
                                }
                                else
                                {
                                    Logger.Log($"Error {sendPacketResult} sending packet in VideoDecoder");
                                }
                            }

                            ffmpeg.av_packet_unref(packet);
                        }
                        else if (readFrameResult == AGffmpeg.AVERROR_EOF)
                        {
                            if (Looping)
                            {
                                Seek(0);
                            }
                            else
                            {
                                State = DecoderState.EndOfStream;
                            }
                        }
                        else
                        {
                            State = DecoderState.Ready;
                            Thread.Sleep(1);
                        }
                    }
                    else
                    {
                        // wait until existing buffers are consumed.
                        State = DecoderState.Ready;
                        Thread.Sleep(1);
                    }

                    while (!decoderCommands.IsEmpty)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            return;
                        }

                        if (decoderCommands.TryDequeue(out var cmd))
                        {
                            cmd();
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Logger.Log($"VideoDecoder faulted: {e}");
                State = DecoderState.Faulted;
            }
            finally
            {
                ffmpeg.av_packet_free(&packet);

                if (State != DecoderState.Faulted)
                {
                    State = DecoderState.Stopped;
                }
            }
        }
Example #9
        /// <summary>
        /// Cyclic decoder for a data buffer.
        /// </summary>
        /// <param name="buf">Data buffer.</param>
        /// <param name="bufSize">Data size (in bytes).</param>
        public override void Decode(byte[] buf, int bufSize)
        {
            _posCurByte = 0;
            if (_writeDecLog && (_decLogStream != null))
            {
                _decLogStream.Write(buf, 0, bufSize);
            }

            while (_posCurByte < bufSize)
            {
                _curByte = buf[_posCurByte];
                switch (_state)
                {
                    case DecoderState.s5E:
                        if (0x5E != _curByte)
                        {
                            if (_finishFrame)
                            {
                                _finishFrame = false;
                                OnErrorFrame(buf, _posCurByte, bufSize, Resource.Get("eMissing5E"));
                            }

                            Reset();
                        }
                        else
                        {
                            _state = DecoderState.s4D;
                        }

                        break;
                    case DecoderState.s4D:
                        if (0x4D != _curByte)
                        {
                            OnErrorFrame(buf, _posCurByte, bufSize, Resource.Get("eMissing4D"));
                            Reset();
                        }
                        else
                        {
                            _state = DecoderState.sADDR;
                        }

                        break;
                    case DecoderState.sADDR:
                        _package.Addr = _curByte;
                        _state = DecoderState.sNBH;
                        break;
                    case DecoderState.sNBH:
                        _lenMsg = (uint)(_curByte << 8);
                        _state = DecoderState.sNBL;
                        break;
                    case DecoderState.sNBL:
                        _lenMsg += _curByte;
                        _state = DecoderState.sMSG;
                        break;
                    case DecoderState.sMSG:
                        _package.Data[_posMsg++] = _curByte;
                        if (_posMsg == _lenMsg)
                        {
                            _package.DataLen = (int)_posMsg;
                            _state = DecoderState.sCRC;
                        }

                        break;
                    case DecoderState.sCRC:
                        if (_curCRC8 != _curByte)
                        {
                            OnErrorFrame(buf, _posCurByte, bufSize, Resource.Get("eBadCRC"));
                        }
                        else
                        {
                            OnProtocolMsg(_package);
                            _finishFrame = true;
                        }

                        Reset();
                        break;
                }

                if (DecoderState.s5E != _state)
                {
                    _curCRC8 = _crc8Table[_curCRC8 ^ _curByte];
                }

                _posCurByte++;
            }
        }
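This decoder and the variant in Example #11 both run a table-driven CRC-8 (_crc8Table) over every byte after the opening 0x5E. The generator polynomial is not visible in these snippets; a sketch of how such a 256-entry table is typically built (the 0x31 polynomial is an assumption for illustration, not taken from the original):

    // Sketch: table-driven CRC-8. The polynomial (0x31) is an assumption;
    // the real protocol may use a different generator.
    private static byte[] BuildCrc8Table(byte polynomial)
    {
        var table = new byte[256];
        for (int value = 0; value < 256; value++)
        {
            byte crc = (byte)value;
            for (int bit = 0; bit < 8; bit++)
            {
                crc = (crc & 0x80) != 0
                    ? (byte)((crc << 1) ^ polynomial)
                    : (byte)(crc << 1);
            }
            table[value] = crc;
        }
        return table;
    }

    private readonly byte[] _crc8Table = BuildCrc8Table(0x31);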
Example #10
 /// <summary>
 /// Default constructor. Initializes the internal state machine.
 /// </summary>
 public EventStreamDecoder()
 {
     _workingBuffer = new byte[EventStreamMessage.PreludeLen];
     _stateFns      = new ProcessRead[] { Start, ReadPrelude, ProcessPrelude, ReadMessage, Error };
     _state         = DecoderState.Start;
 }
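The constructor above wires the decoder as a table of state functions: _stateFns lines up with the DecoderState values, so each read step can dispatch without a switch. A sketch of that dispatch, under the assumption that the enum indexes the array directly (the ProcessRead signature here is a hypothetical simplification, not the SDK's actual delegate):

    // Sketch: state-function dispatch implied by the constructor above.
    // The delegate signature is invented for illustration.
    private delegate DecoderState ProcessRead(byte[] data, ref int offset);

    private void ProcessData(byte[] data)
    {
        int offset = 0;
        while (offset < data.Length)
        {
            // Each state handler consumes some input and returns the next state.
            _state = _stateFns[(int)_state](data, ref offset);
        }
    }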
Example #11
        /// <summary>
        /// Cyclic decoder for a data buffer.
        /// </summary>
        /// <param name="buf">Data buffer.</param>
        /// <param name="bufSize">Data size (in bytes).</param>
        public override void Decode(byte[] buf, int bufSize)
        {
            _posCurByte = 0;
            if (_writeDecLog && (_decLogStream != null))
            {
                _decLogStream.Write(buf, 0, bufSize);
            }

            while (_posCurByte < bufSize)
            {
                _curByte = buf[_posCurByte];
                switch (_state)
                {
                    case DecoderState.s5E:
                        if (0x5E != _curByte)
                        {
                            if (_finishFrame)
                            {
                                _finishFrame = false;
                                OnErrorFrame(buf, _posCurByte, bufSize, "No 0x5E found after the message");
                            }

                            Reset();
                        }
                        else
                        {
                            _state = DecoderState.s4D;
                        }

                        break;
                    case DecoderState.s4D:
                        if (0x4D != _curByte)
                        {
                            OnErrorFrame(buf, _posCurByte, bufSize, "0x4D missing after 0x5E");
                            Reset();
                        }
                        else
                        {
                            _state = DecoderState.sADDR;
                        }

                        break;
                    case DecoderState.sADDR:
                        _package.Addr = _curByte;
                        _state = DecoderState.sNBH;
                        break;
                    case DecoderState.sNBH:
                        _lenMsg = (uint)(_curByte << 8);
                        _state = DecoderState.sNBL;
                        break;
                    case DecoderState.sNBL:
                        _lenMsg += _curByte;
                        _state = DecoderState.sCRCH;
                        break;
                    case DecoderState.sCRCH:
                        _state = DecoderState.sMSG;
                        if (_curCRC8 != _curByte)
                        {
                            OnErrorFrame(buf, _posCurByte, bufSize, "Header CRC8 is incorrect");
                            Reset();
                        }

                        break;
                    case DecoderState.sMSG:
                        _package.Data[_posMsg++] = _curByte;
                        if (_posMsg == _lenMsg)
                        {
                            _package.DataLen = (int)_posMsg;
                            _state = DecoderState.sCRC;
                        }

                        break;
                    case DecoderState.sCRC:
                        if (_curCRC8 != _curByte)
                        {
                            OnErrorFrame(buf, _posCurByte, bufSize, "Frame CRC8 is incorrect");
                        }
                        else
                        {
                            OnProtocolMsg(_package);
                            _finishFrame = true;
                        }

                        Reset();
                        break;
                }

                if (DecoderState.s5E != _state)
                {
                    _curCRC8 = _crc8Table[_curCRC8 ^ _curByte];
                }

                _posCurByte++;
            }
        }
Example #12
        private void DecodeAudio(UInt32 session, DecoderState decoderState, byte[] compressedAudio,
                                 byte[] posData, long sequence, bool isLast)
        {
            // We tell the decoded buffer to re-evaluate whether it needs to store
            // a few packets if the previous packet was marked last, or if there
            // was an abrupt change in sequence number
            bool reevaluateInitialBuffer = decoderState.WasPrevPacketMarkedLast;

            // Account for missing packets, out-of-order packets, & abrupt sequence changes
            if (decoderState.NextSequenceToDecode != 0)
            {
                long seqDiff = sequence - decoderState.NextSequenceToDecode;

                // If new packet is VERY late, then the sequence number has probably reset
                if (seqDiff < -MaxMissingPackets)
                {
                    Debug.Log("Sequence has possibly reset diff = " + seqDiff);
                    decoderState.Decoder.ResetState();
                    reevaluateInitialBuffer = true;
                }
                // If the packet came before we were expecting it to, but after the last packet, the sampling has probably changed
                // unless the packet is a last packet (in which case the sequence may have only increased by 1)
                else if (sequence > decoderState.LastReceivedSequence && seqDiff < 0 && !isLast)
                {
                    Debug.Log("Mumble sample rate may have changed");
                }
                // If the sequence number changes abruptly (which happens with push to talk)
                else if (seqDiff > MaxMissingPackets)
                {
                    Debug.Log("Mumble packet sequence changed abruptly pkt: " + sequence + " last: " + decoderState.LastReceivedSequence);
                    reevaluateInitialBuffer = true;
                }
                // If the packet is a bit late, drop it
                else if (seqDiff < 0 && !isLast)
                {
                    Debug.LogWarning("Received old packet " + sequence + " expecting " + decoderState.NextSequenceToDecode);
                    return;
                }
                // If we missed a packet, add a null packet to tell the decoder what happened
                else if (seqDiff > 0)
                {
                    Debug.LogWarning("dropped packet, recv: " + sequence + ", expected " + decoderState.NextSequenceToDecode);
                    //NumPacketsLost += packet.Value.Sequence - _nextSequenceToDecode;
                    float[] emptyPcmBuffer     = GetBufferToDecodeInto();
                    int     emptySampleNumRead = decoderState.Decoder.Decode(null, emptyPcmBuffer);
                    decoderState.NextSequenceToDecode = sequence + emptySampleNumRead / ((_outputSampleRate / 100) * _outputChannelCount);
                    //Debug.Log("Null read returned: " + emptySampleNumRead + " samples");

                    // Send this decoded data to the corresponding buffer
                    _mumbleClient.ReceiveDecodedVoice(session, emptyPcmBuffer, emptySampleNumRead,
                                                      posData, reevaluateInitialBuffer);
                    reevaluateInitialBuffer = false;
                }
            }

            //Debug.Log("Recv: " + sequence + " expected: " + decoderState.NextSequenceToDecode);

            float[] pcmBuffer = GetBufferToDecodeInto();
            int     numRead   = 0;

            if (compressedAudio.Length != 0)
            {
                numRead = decoderState.Decoder.Decode(compressedAudio, pcmBuffer);
                // Send this decoded data to the corresponding buffer
                _mumbleClient.ReceiveDecodedVoice(session, pcmBuffer, numRead, posData,
                                                  reevaluateInitialBuffer);
            }
            //else
            //Debug.Log("empty packet data?");

            if (numRead < 0)
            {
                Debug.LogError("num read is < 0");
                return;
            }

            //Debug.Log("numRead = " + numRead);
            decoderState.WasPrevPacketMarkedLast = isLast;
            decoderState.LastReceivedSequence    = sequence;
            if (!isLast)
            {
                decoderState.NextSequenceToDecode = sequence + numRead / ((_outputSampleRate / 100) * _outputChannelCount);
            }
            else
            {
                Debug.Log("Resetting #" + session + " decoder");
                decoderState.NextSequenceToDecode = 0;
                // Re-evaluate whether we need to fill up a buffer of audio before playing
                //lock (_bufferLock)
                //{
                //HasFilledInitialBuffer = (_encodedBuffer.Count + 1 >= InitialSampleBuffer);
                //}
                decoderState.Decoder.ResetState();
            }

            //Debug.Log("Recv: " + sequence + " next: " + decoderState.NextSequenceToDecode);
        }
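The sequence arithmetic above follows Mumble's convention that the packet sequence number advances by one per 10 ms of audio, so (_outputSampleRate / 100) * _outputChannelCount is the number of decoded samples per sequence step. At 48 kHz stereo, for instance, that is (48000 / 100) * 2 = 960 samples, so a decode that returned 1920 samples advances NextSequenceToDecode by 2.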
Example #13
        /// <summary>
        /// Decodes the byte buffer and builds QuickBlockTransfer packets.
        /// </summary>
        /// <param name="context">The handler context.</param>
        /// <param name="input">The input byte buffer from the socket.</param>
        /// <param name="output">The output packets.</param>
        protected override void Decode(IChannelHandlerContext context, IByteBuffer input, List<object> output)
        {
            switch (State)
            {
                case DecoderState.ReSync:
                    if (!input.IsReadable(QuickBlockV1BodySize + FrameSyncBytes)) break;
                    PerformanceCounters.FrameSyncTotal.Increment();
                    if (!SynchronizeFrame(input)) break;
                    State = DecoderState.StartFrame;
                    goto case DecoderState.StartFrame;

                case DecoderState.StartFrame:
                    if (!SkipNullBytes(input)) break;
                    State = DecoderState.FrameType;
                    goto case DecoderState.FrameType;

                case DecoderState.FrameType:
                    if (!input.IsReadable(QuickBlockHeaderSize)) break;
                    if (IsDataBlockHeader(input))
                    {
                        State = DecoderState.BlockHeader;
                        goto case DecoderState.BlockHeader;
                    }
                    if (IsServerList(input))
                    {
                        PerformanceCounters.ServerListReceivedTotal.Increment();
                        State = DecoderState.ServerList;
                        goto case DecoderState.ServerList;
                    }
                    throw new InvalidOperationException("Unknown frame type");

                case DecoderState.ServerList:
                    var content = ReadString(input);
                    if (content.Length == 0) break;
                    context.FireUserEventTriggered(ParseServerList(content));
                    State = DecoderState.StartFrame;
                    goto case DecoderState.StartFrame;

                case DecoderState.BlockHeader:
                    Packet = ParsePacketHeader(input);
                    PerformanceCounters.BlocksReceivedTotal.Increment();
                    if (Packet.Version == 2)
                        PerformanceCounters.CompressedBlocksReceivedTotal.Increment();
                    State = DecoderState.BlockBody;
                    goto case DecoderState.BlockBody;

                case DecoderState.BlockBody:
                    if (!input.IsReadable(Packet.Length)) break;
                    Packet.Content = ReadPacketBody(input, Packet.Length, Packet.Version);
                    PerformanceCounters.BlocksProcessedPerSecond.Increment();
                    State = DecoderState.Validate;
                    goto case DecoderState.Validate;

                case DecoderState.Validate:
                    if (Packet.TotalBlocks <= 0 || Packet.BlockNumber <= 0)
                    {
                        PerformanceCounters.ChecksumErrorsTotal.Increment();
                        throw new InvalidDataException("Header block values out of range. " + Packet);
                    }

                    if (VerifyChecksum(Packet.Content, Packet.Checksum))
                    {
                        ByteBlasterEventSource.Log.PacketCreated(Packet.ToString());
                        context.FireUserEventTriggered(Packet);
                    }
                    else
                    {
                        PerformanceCounters.ChecksumErrorsTotal.Increment();
                        throw new InvalidDataException("Block Checksum failed. " + Packet);
                    }

                    State = DecoderState.StartFrame;
                    goto case DecoderState.StartFrame;

                default:
                    throw new InvalidOperationException("Unknown Decoder State: " + State);
            }
        }
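The fall-through goto case chain above only works because each state first checks input.IsReadable(...) and breaks out to wait for more bytes. A sketch of the state enum this handler implies (the member names are exactly those used in the switch):

    // Sketch: states inferred from the Decode method above.
    internal enum DecoderState
    {
        ReSync,      // hunting for frame synchronization bytes
        StartFrame,  // skipping null padding before a frame
        FrameType,   // deciding between a data block and a server list
        ServerList,  // reading a server-list payload
        BlockHeader, // parsing the QuickBlockTransfer header
        BlockBody,   // reading Packet.Length content bytes
        Validate     // range and checksum validation
    }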
Example #14
 /// <summary>
 /// Fired when an exception is caught.
 /// </summary>
 /// <param name="context">The context.</param>
 /// <param name="exception">The exception.</param>
 public override void ExceptionCaught(IChannelHandlerContext context, Exception exception)
 {
     State = DecoderState.ReSync;
     ByteBlasterEventSource.Log.Error(context.Name + " Channel Exception", exception);
     PerformanceCounters.DecoderExceptionsTotal.Increment();
     base.ExceptionCaught(context, exception);
 }
Example #15
 /// <summary>
 /// Starts decoding. Sets state to "ready" and clears buffer amount.
 /// </summary>
 public void StartDecoding()
 {
     _decoderState        = DecoderState.Ok;
     _decoderBufferAmount = 0;
 }
Example #16
 /// <summary>
 /// Resets the current decoder state.
 /// </summary>
 public override void Reset()
 {
     _state = DecoderState.s5E;
     _curCRC8 = 0;
     _lenMsg = 0;
     _posMsg = 0;
 }
Example #17
        private static void DecodeAdpcmMs(Decoder decoder, BinaryReader reader, BinaryWriter writer)
        {
            // https://wiki.multimedia.cx/index.php/Microsoft_ADPCM
            // see also https://github.com/DeltaEngine/DeltaEngine/blob/master/Multimedia/OpenAL/Helpers/MsAdpcmConverter.cs

            DecoderState state = decoder.State;

            adpcmMsChannel[] channel        = new adpcmMsChannel[2];
            byte             blockPredictor = 0;

            // determine total number of samples in this block
            // the initial 2 samples from the block preamble are sent directly to the output.
            // therefore, deduct 2 from the samples per block to calculate the remaining samples
            int totalSamples = (state.SamplesPerBlock - 2) * decoder.AudioFormat.Channels;

            if (totalSamples < 2)
            {
                return;
            }

            bool isStereo = decoder.AudioFormat.Channels == 2;

            //  read predictors and deltas
            blockPredictor    = reader.ReadByte();
            blockPredictor    = (byte)Clamp(blockPredictor, 0, 6);
            channel[0].Coeff1 = (short)MSAdaptationCoeff1[blockPredictor];
            channel[0].Coeff2 = (short)MSAdaptationCoeff2[blockPredictor];

            if (isStereo)
            {
                blockPredictor    = reader.ReadByte();
                blockPredictor    = (byte)Clamp(blockPredictor, 0, 6);
                channel[1].Coeff1 = (short)MSAdaptationCoeff1[blockPredictor];
                channel[1].Coeff2 = (short)MSAdaptationCoeff2[blockPredictor];
            }
            channel[0].Delta = reader.ReadInt16();
            if (isStereo)
            {
                channel[1].Delta = reader.ReadInt16();
            }

            //  read first samples and write them to result
            channel[0].Sample1 = reader.ReadInt16();
            if (isStereo)
            {
                channel[1].Sample1 = reader.ReadInt16();
            }

            channel[0].Sample2 = reader.ReadInt16();
            if (isStereo)
            {
                channel[1].Sample2 = reader.ReadInt16();
            }

            // output the samples
            if (isStereo)
            {
                writer.Write(channel[0].Sample2);
                writer.Write(channel[1].Sample2);
                writer.Write(channel[0].Sample1);
                writer.Write(channel[1].Sample1);
            }
            else
            {
                writer.Write(channel[0].Sample2);
                writer.Write(channel[0].Sample1);
            }

            // decode the rest of the samples
            for (int index = 0; index < totalSamples; index += 2)
            {
                try
                {
                    byte nibble = reader.ReadByte();
                    writer.Write(AdpcmMsExpandNibble(ref channel[0], (byte)(nibble >> 4)));
                    writer.Write(AdpcmMsExpandNibble(ref channel[isStereo ? 1 : 0], (byte)(nibble & 0x0f)));
                }
                catch (System.IO.EndOfStreamException)
                {
                    Log.Verbose("DecodeAdpcmMs: Reached end of stream - returning.");
                    break;
                }
            }
        }
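As with the IMA variant, the per-nibble helper is not shown. Microsoft ADPCM's expansion step is well documented (see the multimedia.cx page cited above); a sketch, with the standard adaptation table, a channel struct matching the fields used above, and the same Clamp helper:

    // Standard MS ADPCM adaptation table, per the multimedia.cx reference.
    private static readonly int[] MSAdaptationTable =
    {
        230, 230, 230, 230, 307, 409, 512, 614,
        768, 614, 512, 409, 307, 230, 230, 230
    };

    private struct adpcmMsChannel
    {
        public short Coeff1, Coeff2;
        public short Delta;
        public short Sample1, Sample2;
    }

    private static short AdpcmMsExpandNibble(ref adpcmMsChannel channel, byte nibble)
    {
        // Sign-extend the 4-bit nibble to the range -8..7.
        int signed = nibble >= 8 ? nibble - 16 : nibble;

        // Linear prediction from the two previous samples, plus the nibble delta.
        int predictor = (channel.Sample1 * channel.Coeff1 + channel.Sample2 * channel.Coeff2) / 256
                        + signed * channel.Delta;
        predictor = Clamp(predictor, short.MinValue, short.MaxValue);

        channel.Sample2 = channel.Sample1;
        channel.Sample1 = (short)predictor;

        // Adapt the quantization step, with the documented floor of 16.
        int delta = (MSAdaptationTable[nibble] * channel.Delta) / 256;
        channel.Delta = (short)System.Math.Max(delta, 16);

        return channel.Sample1;
    }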
Example #18
        private void decodingLoop(CancellationToken cancellationToken)
        {
            var packet       = ffmpeg.av_packet_alloc();
            var receiveFrame = ffmpeg.av_frame_alloc();

            const int max_pending_frames = 3;

            try
            {
                while (!cancellationToken.IsCancellationRequested)
                {
                    switch (State)
                    {
                    case DecoderState.Ready:
                    case DecoderState.Running:
                        if (decodedFrames.Count < max_pending_frames)
                        {
                            decodeNextFrame(packet, receiveFrame);
                        }
                        else
                        {
                            // wait until existing buffers are consumed.
                            State = DecoderState.Ready;
                            Thread.Sleep(1);
                        }

                        break;

                    case DecoderState.EndOfStream:
                        // While at the end of the stream, avoid attempting to read further as this comes with a non-negligible overhead.
                        // A Seek() operation will trigger a state change, allowing decoding to potentially start again.
                        Thread.Sleep(50);
                        break;

                    default:
                        Debug.Fail($"Video decoder should never be in a \"{State}\" state during decode.");
                        return;
                    }

                    while (!decoderCommands.IsEmpty)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            return;
                        }

                        if (decoderCommands.TryDequeue(out var cmd))
                        {
                            cmd();
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Logger.Error(e, "VideoDecoder faulted");
                State = DecoderState.Faulted;
            }
            finally
            {
                ffmpeg.av_packet_free(&packet);
                ffmpeg.av_frame_free(&receiveFrame);

                if (State != DecoderState.Faulted)
                {
                    State = DecoderState.Stopped;
                }
            }
        }
Example #19
        /*
         * Dk3
         */
        private static void DecodeAdpcmDk3(Decoder decoder, BinaryReader reader, BinaryWriter writer)
        {
            // https://wiki.multimedia.cx/index.php/Duck_DK3_IMA_ADPCM

            DecoderState       state = decoder.State;
            AdpcmImaWavChannel sum;
            AdpcmImaWavChannel diff;

            // A block of DK3 has a 16-byte preamble with the following information:
            // bytes 0-1     unknown
            // bytes 2-3     sample rate
            // bytes 4-9     unknown
            // bytes 10-11   initial sum channel predictor
            // bytes 12-13   initial diff channel predictor
            // byte 14       initial sum channel index
            // byte 15       initial diff channel index

            reader.ReadBytes(10); // skip

            sum.Predictor  = reader.ReadInt16();
            diff.Predictor = reader.ReadInt16();
            sum.StepIndex  = reader.ReadByte();
            diff.StepIndex = reader.ReadByte();

            int diffValue = diff.Predictor;

            // Each set of 3 nibbles decodes to 4 16-bit PCM samples using this process
            // (note that the diff value is initialized to the same value as the diff predictor)
            /* we process 6 nibbles at once */
            byte buff = 0;

            for (int i = 16; i < state.BlockAlign; i++)
            {
                // get next ADPCM nibble in stream
                buff = reader.ReadByte();

                /* first 3 nibbles */
                AdpcmImaWavExpandNibble(ref sum, (buff) & 0x0f);
                AdpcmImaWavExpandNibble(ref diff, (buff) >> 4);

                diffValue = (diffValue + diff.Predictor) / 2;

                writer.Write(sum.Predictor + diffValue);
                writer.Write(sum.Predictor - diffValue);

                buff = reader.ReadByte();

                AdpcmImaWavExpandNibble(ref sum, (buff) & 0x0f);

                writer.Write(sum.Predictor + diffValue);
                writer.Write(sum.Predictor - diffValue);

                /* now last 3 nibbles */
                AdpcmImaWavExpandNibble(ref sum, (buff) >> 4);

                buff = reader.ReadByte();
                if (i < state.BlockAlign)
                {
                    AdpcmImaWavExpandNibble(ref diff, (buff) & 0x0f);

                    diffValue = (diffValue + diff.Predictor) / 2;

                    writer.Write(sum.Predictor + diffValue);
                    writer.Write(sum.Predictor - diffValue);

                    AdpcmImaWavExpandNibble(ref sum, (buff) >> 4);

                    buff = reader.ReadByte();

                    writer.Write(sum.Predictor + diffValue);
                    writer.Write(sum.Predictor - diffValue);
                }
            }
        }
Example #20
 public EslFrameDecoder(DecoderState initialState,
                        bool treatUnknownHeadersAsBody) : base(initialState)
 {
     _treatUnknownHeadersAsBody = treatUnknownHeadersAsBody;
 }
Example #21
        /*****************************************************************************
        * OpenDecoder: probe the decoder and return score
        *****************************************************************************/
        public static bool OpenDecoder(ref Decoder decoder)
        {
            var format = decoder.AudioFormat;

            var state = new DecoderState();

            state.Prev            = null;
            state.SamplesPerBlock = 0;
            state.Codec           = AdpcmCodecType.ADPCM_MS;

            switch ((short)format.Encoding)
            {
            //case 0x00a4: // Apple QuickTime IMA ADPCM, FOURCCs: ima4
            case 0x0002:     // Microsoft ADPCM
            case 0x0011:     // IMA ADPCM
            case 0x0061:     // Duck DK4 IMA ADPCM
            case 0x0062:     // Duck DK3 IMA ADPCM
                             //case 0x0000: // EA ADPCM, XA ADPCM, FOURCCs: XAJ0
                break;

            default:
                return false;
            }

            if (format.SampleRate <= 0)
            {
                Console.Error.WriteLine("Bad samplerate {0}", format.SampleRate);
                return false;
            }

            int  channels    = format.Channels;
            byte maxChannels = 5;

            switch ((short)format.Encoding)
            {
            // case 0x00a4: // Apple QuickTime IMA ADPCM, FOURCCs: ima4
            // state.Codec = AdpcmCodecType.ADPCM_IMA_QT;
            // maxChannels = 2;
            // break;
            case 0x0002:     // Microsoft ADPCM
                state.Codec = AdpcmCodecType.ADPCM_MS;
                maxChannels = 2;
                break;

            case 0x0011:     // IMA ADPCM
                state.Codec = AdpcmCodecType.ADPCM_IMA_WAV;
                maxChannels = 2;
                break;

            case 0x0061:     // Duck DK4 IMA ADPCM
                state.Codec = AdpcmCodecType.ADPCM_DK4;
                maxChannels = 2;
                break;

            case 0x0062:     // Duck DK3 IMA ADPCM
                state.Codec = AdpcmCodecType.ADPCM_DK3;
                maxChannels = 2;
                break;
                // case 0x0000: // EA ADPCM, XA ADPCM, FOURCCs: XAJ0
                // state.Codec = AdpcmCodecType.ADPCM_EA;
                // break;
            }

            if (channels > maxChannels || channels == 0)
            {
                Console.Error.WriteLine("Invalid number of channels {0}", channels);
                return false;
            }

            if (format.BlockAlign <= 0)
            {
                state.BlockAlign = (state.Codec == AdpcmCodecType.ADPCM_IMA_QT) ? 34 * channels : 1024;
                Log.Verbose("Warning: block size undefined, using {0}", state.BlockAlign);
            }
            else
            {
                state.BlockAlign = format.BlockAlign;
            }

            // calculate samples per block
            switch (state.Codec)
            {
            case AdpcmCodecType.ADPCM_IMA_QT:
                state.SamplesPerBlock = 64;
                break;

            case AdpcmCodecType.ADPCM_IMA_WAV:
                if (state.BlockAlign >= 4 * channels)
                {
                    state.SamplesPerBlock =
                        2 * (state.BlockAlign - 4 * channels) / channels;
                }
                break;

            case AdpcmCodecType.ADPCM_MS:
                if (state.BlockAlign >= 7 * channels)
                {
                    state.SamplesPerBlock =
                        2 * (state.BlockAlign - 7 * channels) / channels + 2;
                }
                break;

            case AdpcmCodecType.ADPCM_DK4:
                if (state.BlockAlign >= 4 * channels)
                {
                    state.SamplesPerBlock =
                        2 * (state.BlockAlign - 4 * channels) / channels + 1;
                }
                break;

            case AdpcmCodecType.ADPCM_DK3:
                channels = 2;
                if (state.BlockAlign >= 16)
                {
                    state.SamplesPerBlock = (4 * (state.BlockAlign - 16) + 2) / 3;
                }
                break;

            case AdpcmCodecType.ADPCM_EA:
                if (state.BlockAlign >= channels)
                {
                    state.SamplesPerBlock =
                        2 * (state.BlockAlign - channels) / channels;
                }
                break;
            }

            if (state.SamplesPerBlock == 0)
            {
                Console.Error.WriteLine("Error computing number of samples per block");
                return false;
            }

            Log.Verbose("Adpcm OpenDecoder: samplerate: {0}Hz, channels: {1}, bits/sample: {2}, blockAlign: {3}, samplesPerBlock: {4}", format.SampleRate, format.Channels, format.BitsPerSample, state.BlockAlign, state.SamplesPerBlock);

            decoder.State = state;
            return true;
        }
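As a worked instance of the Microsoft ADPCM branch above: with BlockAlign 2048 and 2 channels, SamplesPerBlock = 2 * (2048 - 7 * 2) / 2 + 2 = 2036 samples per channel, consistent with the format's 7-byte-per-channel preamble that already carries the first two samples.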
Example #22
        private void decodingLoop(CancellationToken cancellationToken)
        {
            var packet = ffmpeg.av_packet_alloc();

            const int max_pending_frames = 3;

            try
            {
                while (true)
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        return;
                    }

                    if (decodedFrames.Count < max_pending_frames)
                    {
                        int readFrameResult = ffmpeg.av_read_frame(formatContext, packet);

                        if (readFrameResult >= 0)
                        {
                            state = DecoderState.Running;

                            if (packet->stream_index == stream->index)
                            {
                                if (ffmpeg.avcodec_send_packet(stream->codec, packet) < 0)
                                {
                                    throw new Exception("Error sending packet.");
                                }

                                var result = ffmpeg.avcodec_receive_frame(stream->codec, frame);

                                if (result == 0)
                                {
                                    var frameTime = (frame->best_effort_timestamp - stream->start_time) * timeBaseInSeconds * 1000;

                                    if (!skipOutputUntilTime.HasValue || skipOutputUntilTime.Value < frameTime)
                                    {
                                        skipOutputUntilTime = null;

                                        SwsContext *swsCtx = null;

                                        try
                                        {
                                            swsCtx = ffmpeg.sws_getContext(codecParams.width, codecParams.height, (AVPixelFormat)frame->format, codecParams.width, codecParams.height, AVPixelFormat.AV_PIX_FMT_RGBA, 0, null, null, null);
                                            ffmpeg.sws_scale(swsCtx, frame->data, frame->linesize, 0, frame->height, ffmpegFrame->data, ffmpegFrame->linesize);
                                        }
                                        finally
                                        {
                                            ffmpeg.sws_freeContext(swsCtx);
                                        }

                                        if (!availableTextures.TryDequeue(out var tex))
                                        {
                                            tex = new Texture(codecParams.width, codecParams.height, true);
                                        }

                                        var upload = new ArrayPoolTextureUpload(tex.Width, tex.Height);

                                        // todo: can likely make this more efficient
                                        new Span<Rgba32>(ffmpegFrame->data[0], uncompressedFrameSize / 4).CopyTo(upload.RawData);

                                        tex.SetData(upload);
                                        decodedFrames.Enqueue(new DecodedFrame {
                                            Time = frameTime, Texture = tex
                                        });
                                    }

                                    lastDecodedFrameTime = (float)frameTime;
                                }
                            }
                        }
                        else if (readFrameResult == ffmpeg.AVERROR_EOF)
                        {
                            if (Looping)
                            {
                                Seek(0);
                            }
                            else
                            {
                                state = DecoderState.EndOfStream;
                            }
                        }
                        else
                        {
                            state = DecoderState.Ready;
                            Thread.Sleep(1);
                        }
                    }
                    else
                    {
                        // wait until existing buffers are consumed.
                        state = DecoderState.Ready;
                        Thread.Sleep(1);
                    }

                    while (!decoderCommands.IsEmpty)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            return;
                        }

                        if (decoderCommands.TryDequeue(out var cmd))
                        {
                            cmd();
                        }
                    }
                }
            }
            catch (Exception)
            {
                state = DecoderState.Faulted;
            }
            finally
            {
                ffmpeg.av_packet_free(&packet);

                if (state != DecoderState.Faulted)
                {
                    state = DecoderState.Stopped;
                }
            }
        }
Example #23
            public void ReceivePeak(int sampleNum)
            {
                if (prevPeakSample < 0)
                {
                    prevPeakSample = sampleNum;
                    return;
                }
                int samplesFromPrev = sampleNum - prevPeakSample;

                if (samplesFromPrev > stopSamples || (State != DecoderState.NoCard && samplesFromPrev > 1.5 * clockSamples))
                {
                    decoder.symbolDecoder.Stop();
                    State          = DecoderState.NoCard;
                    clockSamples   = -1;
                    countedPeaks   = 0;
                    prevPeakSample = sampleNum;
                    return;
                }
                switch (State)
                {
                case DecoderState.NoCard:
                    if (clockSamples < 0)
                    {
                        clockSamples = samplesFromPrev;
                        return;
                    }
                    clockSamples   = (clockSamples * 3 + samplesFromPrev) / 4;
                    prevPeakSample = sampleNum;
                    ++countedPeaks;
                    if (countedPeaks == 8)
                    {
                        State           = DecoderState.Sync;
                        currentBitIsOne = false;
                    }
                    return;

                case DecoderState.Sync:
                    if (samplesFromPrev < 0.75 * clockSamples)
                    {
                        currentBitIsOne = true;
                        State           = DecoderState.Decoding;
                    }
                    else
                    {
                        currentBitIsOne = false;
                        clockSamples    = (clockSamples * 3 + samplesFromPrev) / 4;
                        prevPeakSample  = sampleNum;
                    }
                    return;

                case DecoderState.Decoding:
                    if (samplesFromPrev < 0.75 * clockSamples)
                    {
                        currentBitIsOne = true;
                        State           = DecoderState.Decoding;
                    }
                    else
                    {
                        decoder.symbolDecoder.ReceiveBit(currentBitIsOne ? 1 : 0);
                        currentBitIsOne = false;
                        clockSamples    = (clockSamples * 3 + samplesFromPrev) / 4;
                        prevPeakSample  = sampleNum;
                    }
                    return;
                }
            }
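For context on the thresholds above: in F2F (Aiken biphase) magnetic-stripe encoding, a 1 bit carries an extra mid-cell transition, so its peaks arrive at roughly half the clock period; the samplesFromPrev < 0.75 * clockSamples test splits the two cases. The (clockSamples * 3 + samplesFromPrev) / 4 update is an exponential moving average of the clock period, letting the decoder track changes in swipe speed.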
Example #24
 public void StartDecoding()
 {
     this._decoderState        = DecoderState.Ok;
     this._decoderBufferAmount = 0L;
 }
Example #25
		/// <summary>
		/// Specifies buffer decoding amount.
		/// </summary>
		/// <param name="amount">Amount of data to buffer.</param>
		public void SetBufferDecoding(long amount) 
		{
			_decoderState = DecoderState.Buffer;
			_decoderBufferAmount = amount;
		}
Example #26
 /// <summary>
 /// Specifies buffer decoding amount.
 /// </summary>
 /// <param name="amount">Amount of data to buffer.</param>
 public void SetBufferDecoding(long amount)
 {
     _decoderState        = DecoderState.Buffer;
     _decoderBufferAmount = amount;
 }
Example #27
		/// <summary>
		/// Starts decoding. Sets state to "ready" and clears buffer amount.
		/// </summary>
		public void StartDecoding() 
		{
			_decoderState = DecoderState.Ok;
			_decoderBufferAmount = 0;
		}
Example #28
        private void decodeNextFrame(AVPacket *packet, AVFrame *receiveFrame)
        {
            // read data from input into AVPacket.
            // only read if the packet is empty, otherwise we would overwrite what's already there which can lead to visual glitches.
            int readFrameResult = 0;

            if (packet->buf == null)
            {
                readFrameResult = ffmpeg.av_read_frame(formatContext, packet);
            }

            if (readFrameResult >= 0)
            {
                State = DecoderState.Running;

                bool unrefPacket = true;

                if (packet->stream_index == stream->index)
                {
                    // send the packet for decoding.
                    int sendPacketResult = ffmpeg.avcodec_send_packet(codecContext, packet);

                    // Note: EAGAIN can be returned if there's too many pending frames, which we have to read,
                    // otherwise we would get stuck in an infinite loop.
                    if (sendPacketResult == 0 || sendPacketResult == -AGffmpeg.EAGAIN)
                    {
                        readDecodedFrames(receiveFrame);

                        // keep the packet data for next frame if we didn't send it successfully.
                        if (sendPacketResult != 0)
                        {
                            unrefPacket = false;
                        }
                    }
                    else
                    {
                        Logger.Log($"Failed to send avcodec packet: {getErrorMessage(sendPacketResult)}");
                    }
                }

                if (unrefPacket)
                {
                    ffmpeg.av_packet_unref(packet);
                }
            }
            else if (readFrameResult == AGffmpeg.AVERROR_EOF)
            {
                if (Looping)
                {
                    Seek(0);
                }
                else
                {
                    // This marks the video stream as no longer relevant (until a future potential Seek operation).
                    State = DecoderState.EndOfStream;
                }
            }
            else if (readFrameResult == -AGffmpeg.EAGAIN)
            {
                State = DecoderState.Ready;
                Thread.Sleep(1);
            }
            else
            {
                Logger.Log($"Failed to read data into avcodec packet: {getErrorMessage(readFrameResult)}");
                Thread.Sleep(1);
            }
        }
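decodeNextFrame hands off to readDecodedFrames, which is not shown. With FFmpeg's send/receive API, the receive side is typically drained in a loop until the codec returns EAGAIN (needs another packet) or EOF (flushed). A sketch under that assumption, using the same FFmpeg.AutoGen-style bindings and helpers as the examples above (frame conversion and upload are elided):

    // Sketch: drain every frame the codec has ready. Error-code constants
    // follow the AGffmpeg naming used in the example above.
    private void readDecodedFrames(AVFrame* receiveFrame)
    {
        while (true)
        {
            int result = ffmpeg.avcodec_receive_frame(codecContext, receiveFrame);

            // EAGAIN: the codec needs more input before emitting another frame.
            // EOF: the codec has been fully flushed. Stop in both cases.
            if (result == -AGffmpeg.EAGAIN || result == AGffmpeg.AVERROR_EOF)
                return;

            if (result < 0)
            {
                Logger.Log($"Failed to receive frame: {getErrorMessage(result)}");
                return;
            }

            // ... convert/upload the frame here (elided in this sketch) ...
            ffmpeg.av_frame_unref(receiveFrame);
        }
    }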