Example #1
        /*
         * Dk4
         */
        private static void DecodeAdpcmDk4(Decoder decoder, BinaryReader reader, BinaryWriter writer)
        {
            // https://wiki.multimedia.cx/index.php/Duck_DK4_IMA_ADPCM

            DecoderState state = decoder.State;

            AdpcmImaWavChannel[] channel = new AdpcmImaWavChannel[2];

            bool isStereo = decoder.AudioFormat.Channels == 2;

            // If the DK4 data is stereo, a chunk begins with two preambles, one for the left audio channel and one for the right audio channel:
            // bytes 0-1:  initial predictor (in little-endian format) for left channel
            // byte 2:     initial index for left channel
            // byte 3:     unknown, usually 0 and is probably reserved
            // bytes 4-5:  initial predictor (in little-endian format) for right channel
            // byte 6:     initial index (for right channel)
            // byte 7:     unknown, usually 0 and is probably reserved
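            //
            // Worked example with hypothetical preamble bytes 00 01 05 00 F0 FF 03 00:
            // left predictor = 0x0100 = 256, left step index = 5,
            // right predictor = 0xFFF0 = -16, right step index = 3.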

            channel[0].Predictor = reader.ReadInt16();
            channel[0].StepIndex = reader.ReadByte();
            channel[0].StepIndex = Clamp(channel[0].StepIndex, 0, 88);
            reader.ReadByte();

            if (isStereo)
            {
                channel[1].Predictor = reader.ReadInt16();
                channel[1].StepIndex = reader.ReadByte();
                channel[1].StepIndex = Clamp(channel[1].StepIndex, 0, 88);
                reader.ReadByte();
            }

            // The initial predictor is also the first output sample of each channel.
            writer.Write((short)channel[0].Predictor);
            if (isStereo)
            {
                writer.Write((short)channel[1].Predictor);
            }

            // Each remaining byte in the block holds two nibbles; the top nibble is decoded
            // first. In stereo the top nibble belongs to the left channel and the bottom
            // nibble to the right channel.
            for (int bytes = 0;
                 bytes < state.BlockAlign - 4 * (isStereo ? 2 : 1);
                 bytes++)
            {
                byte buffer = reader.ReadByte();
                writer.Write(AdpcmImaWavExpandNibble(ref channel[0], buffer >> 4));
                writer.Write(AdpcmImaWavExpandNibble(ref channel[isStereo ? 1 : 0], buffer & 0x0f));
            }
        }
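
The examples on this page reference an AdpcmImaWavChannel struct and a Clamp helper that are not shown here. A minimal sketch of what the decoders appear to assume (the field types are inferred from how Example #4 does its arithmetic, so treat them as an assumption):

        // Per-channel decoder state assumed by the examples: an integer predictor
        // (clamped to the 16-bit sample range) and a step-table index (clamped to 0..88).
        private struct AdpcmImaWavChannel
        {
            public int Predictor;
            public int StepIndex;
        }

        // Generic clamp helper matching the Clamp(value, min, max) calls in the examples.
        private static int Clamp(int value, int min, int max)
        {
            if (value < min) return min;
            if (value > max) return max;
            return value;
        }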
Example #2
        /*
         * Ima4 in QT file
         */
        private static void DecodeAdpcmImaQT(Decoder decoder, BinaryReader reader, BinaryWriter writer)
        {
            // https://wiki.multimedia.cx/index.php/Apple_QuickTime_IMA_ADPCM
            // In any given IMA-encoded QuickTime file,
            // the size of an individual block of IMA nibbles is stored in the bytes/packet field present
            // in the extended audio information portion in an audio stsd atom.
            // However, this size always seems to be 34 bytes/block.
            // Sometimes, IMA-encoded Quicktime files are missing the extended wave information header.
            // In this case, assume that each IMA block is 34 bytes.

            AdpcmImaWavChannel[] channel = new AdpcmImaWavChannel[2];

            int channels = decoder.AudioFormat.Channels;

            for (int i = 0; i < channels; i++)
            {
                // The first 2 bytes of a block specify a preamble with the initial predictor and step index.
                // The 2 bytes are read from the stream as a big-endian 16-bit number which has the following bit structure:
                // pppppppp piiiiiii
                // Bits 15-7 of the preamble are the top 9 bits of the initial signed predictor;
                // Bits 6-0 of the initial predictor are always 0.
                // Bits 6-0 of the preamble specify the initial step index.
                // Note that this gives a range of 0..127 which should be clamped to 0..88 for good measure.
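                // Worked example with hypothetical bytes 0x23 0xC5: the big-endian word is 0x23C5,
                // so predictor = (short)(0x23C5 & 0xFF80) = 0x2380 = 9088 and step index = 0xC5 & 0x7F = 69.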
                byte buffer0 = reader.ReadByte();
                byte buffer1 = reader.ReadByte();
                channel[i].Predictor = (short)((((buffer0 << 1) | (buffer1 >> 7))) << 7);
                channel[i].StepIndex = buffer1 & 0x7f;
                channel[i].StepIndex = Clamp(channel[i].StepIndex, 0, 88);

                // The remaining bytes in the IMA block (of which there are usually 32) are the ADPCM nibbles.
                // In Quicktime IMA data, the bottom nibble of a byte is decoded first, then the top nibble:
                for (int nibbles = 0; nibbles < 64; nibbles += 2)
                {
                    byte buffer = reader.ReadByte();
                    writer.Write(AdpcmImaWavExpandNibble(ref channel[i], (buffer) & 0x0f));
                    writer.Write(AdpcmImaWavExpandNibble(ref channel[i], (buffer >> 4) & 0x0f));
                }
            }
        }
Example #3
        private static short AdpcmImaWavExpandNibbleOriginal(ref AdpcmImaWavChannel channel, int nibble)
        {
            // Compute the difference and the new predicted value.
            // Computes 'vpdiff = (delta+0.5)*step/4' via shifts and adds,
            // as in the reference IMA implementation (see the comment in its adpcm_coder routine).
            int diff = IMAStepTable[channel.StepIndex] >> 3;

            if ((nibble & 0x04) != 0)
            {
                diff += IMAStepTable[channel.StepIndex];
            }
            if ((nibble & 0x02) != 0)
            {
                diff += IMAStepTable[channel.StepIndex] >> 1;
            }
            if ((nibble & 0x01) != 0)
            {
                diff += IMAStepTable[channel.StepIndex] >> 2;
            }
            if ((nibble & 0x08) != 0)
            {
                channel.Predictor -= diff;
            }
            else
            {
                channel.Predictor += diff;
            }


            // Clamp result to 16-bit, -32768 - 32767
            channel.Predictor = Clamp(channel.Predictor, short.MinValue, short.MaxValue);

            // Find new index value (for later)
            channel.StepIndex += IMAIndexTable[nibble];
            channel.StepIndex  = Clamp(channel.StepIndex, 0, 88);

            return (short)channel.Predictor;
        }
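
Examples #3 and #4 rely on the IMAStepTable and IMAIndexTable arrays, which are not part of the snippets shown here. A sketch of the standard IMA ADPCM tables (as documented on the multimedia.cx wiki) that they are assumed to contain:

        // Standard IMA ADPCM step-size table, 89 entries (indices 0..88).
        private static readonly int[] IMAStepTable =
        {
                7,     8,     9,    10,    11,    12,    13,    14,    16,    17,
               19,    21,    23,    25,    28,    31,    34,    37,    41,    45,
               50,    55,    60,    66,    73,    80,    88,    97,   107,   118,
              130,   143,   157,   173,   190,   209,   230,   253,   279,   307,
              337,   371,   408,   449,   494,   544,   598,   658,   724,   796,
              876,   963,  1060,  1166,  1282,  1411,  1552,  1707,  1878,  2066,
             2272,  2499,  2749,  3024,  3327,  3660,  4026,  4428,  4871,  5358,
             5894,  6484,  7132,  7845,  8630,  9493, 10442, 11487, 12635, 13899,
            15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767
        };

        // Standard IMA index-adjustment table: only the low 3 bits of the nibble select
        // the adjustment, so the entries repeat for nibbles with the sign bit (bit 3) set.
        private static readonly int[] IMAIndexTable =
        {
            -1, -1, -1, -1, 2, 4, 6, 8,
            -1, -1, -1, -1, 2, 4, 6, 8
        };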
Example #4
        private static short AdpcmImaWavExpandNibble(ref AdpcmImaWavChannel channel, int nibble)
        {
            int step = IMAStepTable[channel.StepIndex];

            // perform direct multiplication instead of series of jumps proposed by
            // the reference ADPCM implementation since modern CPUs can do the mults
            // quickly enough
            int diff = ((((nibble & 7) << 1) + 1) * step) >> 3;
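            // Worked example (hypothetical values): nibble = 5 and step = 16 give
            // diff = (11 * 16) >> 3 = 22, matching the reference shift-and-add version
            // (2 + 16 + 4). Because the reference truncates each term separately, the two
            // variants can differ by a small amount for some step values.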

            if ((nibble & 8) != 0)
            {
                diff = -diff;
            }

            channel.Predictor = ((int)channel.Predictor) + diff;

            // Clamp result to 16-bit, -32768 - 32767
            channel.Predictor = Clamp(channel.Predictor, short.MinValue, short.MaxValue);

            channel.StepIndex = channel.StepIndex + IMAIndexTable[nibble];
            channel.StepIndex = Clamp(channel.StepIndex, 0, 88);

            return (short)channel.Predictor;
        }
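
Since the two expanders round slightly differently, a quick way to see where they diverge is to feed every nibble and step index through both. A minimal sketch, assuming the two methods above plus the AdpcmImaWavChannel struct and IMA tables are in scope (the method name here is made up):

        private static void CompareExpandNibbleVariants()
        {
            for (int stepIndex = 0; stepIndex <= 88; stepIndex++)
            {
                for (int nibble = 0; nibble < 16; nibble++)
                {
                    // Start both variants from the same state: predictor 0, identical step index.
                    var original = new AdpcmImaWavChannel { Predictor = 0, StepIndex = stepIndex };
                    var multiply = new AdpcmImaWavChannel { Predictor = 0, StepIndex = stepIndex };

                    short a = AdpcmImaWavExpandNibbleOriginal(ref original, nibble);
                    short b = AdpcmImaWavExpandNibble(ref multiply, nibble);

                    if (a != b)
                    {
                        System.Console.WriteLine("stepIndex={0} nibble={1}: original={2} multiply={3}",
                            stepIndex, nibble, a, b);
                    }
                }
            }
        }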
Example #5
        private static void DecodeAdpcmImaWav(Decoder decoder, BinaryReader reader, BinaryWriter writer)
        {
            // reference implementations:
            // https://wiki.multimedia.cx/index.php/IMA_ADPCM
            // https://github.com/Nanook/TheGHOST/blob/master/ImaAdpcmPlugin/Ima.cs
            // https://github.com/rochars/imaadpcm/blob/master/index.js

            DecoderState state = decoder.State;

            AdpcmImaWavChannel[] channel = new AdpcmImaWavChannel[2];
            int  nibbles  = 0;
            bool isStereo = decoder.AudioFormat.Channels == 2;

            // https://www.microchip.com/forums/m698891.aspx
            // Each block starts with a header consisting of the following 4 bytes:
            //  16 bit audio sample (2 bytes, little endian)
            //   8 bit step table index
            //   dummy byte (set to zero)
            channel[0].Predictor = reader.ReadInt16();
            channel[0].StepIndex = reader.ReadByte();
            channel[0].StepIndex = Clamp(channel[0].StepIndex, 0, 88);
            reader.ReadByte();

            if (isStereo)
            {
                channel[1].Predictor = reader.ReadInt16();
                channel[1].StepIndex = reader.ReadByte();
                channel[1].StepIndex = Clamp(channel[1].StepIndex, 0, 88);
                reader.ReadByte();
            }

            // The sample stored in the block header is the first output sample of each channel.
            writer.Write((short)channel[0].Predictor);
            if (isStereo)
            {
                writer.Write((short)channel[1].Predictor);
            }

            // Note that each data byte encodes two samples, but there is an odd number
            // of samples per block: one of the samples is in the ADPCM block header.
            // So, a block looks like this:

            // Example (mono): BlockAlign 2048, SamplesPerBlock 4089
            // 4 bytes of block header including 1 sample
            // 2048-4 = 2044 bytes holding 4089-1 = 4088 samples
            // Total of 4089 samples per block.

            // Example (stereo): BlockAlign 512, SamplesPerBlock 505
            // 8 bytes of block header including 1 sample per channel
            // 512-8 = 504 bytes holding 1008 nibbles = 505-1 = 504 samples per channel
            // Total of 505 samples per channel per block.

            if (isStereo)
            {
                int     offset = 0;
                short[] sample = new short[2 * (state.BlockAlign - 8)];
                for (nibbles = 2 * (state.BlockAlign - 8);
                     nibbles > 0;
                     nibbles -= 16)
                {
                    try
                    {
                        for (int i = 0; i < 4; i++)
                        {
                            byte buffer = reader.ReadByte();
                            sample[offset + i * 4 + 0] = AdpcmImaWavExpandNibble(ref channel[0], buffer & 0x0f);
                            sample[offset + i * 4 + 2] = AdpcmImaWavExpandNibble(ref channel[0], buffer >> 4);
                        }

                        for (int i = 0; i < 4; i++)
                        {
                            byte buffer = reader.ReadByte();
                            sample[offset + i * 4 + 1] = AdpcmImaWavExpandNibble(ref channel[1], buffer & 0x0f);
                            sample[offset + i * 4 + 3] = AdpcmImaWavExpandNibble(ref channel[1], buffer >> 4);
                        }
                    }
                    catch (System.IO.EndOfStreamException)
                    {
                        Log.Verbose("DecodeAdpcmImaWav: Reached end of stream - returning.");
                        break;
                    }

                    offset += 16;
                }

                for (int i = 0; i < sample.Length; i++)
                {
                    writer.Write(sample[i]);
                }
            }
            else
            {
                for (nibbles = 2 * (state.BlockAlign - 4);
                     nibbles > 0;
                     nibbles -= 2)
                {
                    try
                    {
                        byte buffer = reader.ReadByte();
                        writer.Write(AdpcmImaWavExpandNibble(ref channel[0], (buffer) & 0x0f));
                        writer.Write(AdpcmImaWavExpandNibble(ref channel[0], (buffer) >> 4));
                    }
                    catch (System.IO.EndOfStreamException)
                    {
                        Log.Verbose("DecodeAdpcmImaWav: Reached end of stream - returning.");
                        break;
                    }
                }
            }
        }
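
The block-layout comments in Example #5 relate BlockAlign and SamplesPerBlock. A small helper capturing that relationship (a sketch; the helper name is not part of the original code):

        // Samples per channel in one IMA ADPCM WAV block: each channel has a 4-byte
        // header containing one sample, and every remaining byte holds two nibbles
        // shared evenly between the channels.
        private static int SamplesPerBlock(int blockAlign, int channels)
        {
            return (blockAlign - 4 * channels) * 2 / channels + 1;
        }

For the values used in the comments: SamplesPerBlock(2048, 1) = 4089 and SamplesPerBlock(512, 2) = 505.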