Example 1
        void CollectAudioData()
        {
            var audio1     = new MemoryStream();         // left channel / mono
            var audio2     = new MemoryStream();         // right channel
            var adpcmIndex = 0;
            var compressed = false;

            for (var i = 0; i < frames; i++)
            {
                stream.Seek(offsets[i], SeekOrigin.Begin);
                var end = (i < frames - 1) ? offsets[i + 1] : stream.Length;

                while (stream.Position < end)
                {
                    var type = stream.ReadASCII(4);
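                    // An SN2J chunk holds a jump length; skip past it and read the tag of the actual sound chunk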
                    if (type == "SN2J")
                    {
                        var jmp = int2.Swap(stream.ReadUInt32());
                        stream.Seek(jmp, SeekOrigin.Current);
                        type = stream.ReadASCII(4);
                    }

                    var length = int2.Swap(stream.ReadUInt32());

                    switch (type)
                    {
                    case "SND0":
                    case "SND2":
                        if (audioChannels == 0)
                        {
                            throw new NotSupportedException();
                        }
                        else if (audioChannels == 1)
                        {
                            var rawAudio = stream.ReadBytes((int)length);
                            audio1.WriteArray(rawAudio);
                        }
                        else
                        {
                            var rawAudio = stream.ReadBytes((int)length / 2);
                            audio1.WriteArray(rawAudio);
                            rawAudio = stream.ReadBytes((int)length / 2);
                            audio2.WriteArray(rawAudio);
                            if (length % 2 != 0)
                            {
                                stream.ReadBytes(2);
                            }
                        }

                        compressed = type == "SND2";
                        break;

                    default:
                        if (length + stream.Position > stream.Length)
                        {
                            throw new NotSupportedException("Vqa uses unknown Subtype: {0}".F(type));
                        }
                        stream.ReadBytes((int)length);
                        break;
                    }

                    // Chunks are aligned on even bytes; advance by a byte if the next one is null
                    if (stream.Peek() == 0)
                    {
                        stream.ReadByte();
                    }
                }
            }

            if (audioChannels == 1)
            {
                audioData = compressed ? ImaAdpcmReader.LoadImaAdpcmSound(audio1.ToArray(), ref adpcmIndex) : audio1.ToArray();
            }
            else
            {
                byte[] leftData, rightData;
                if (!compressed)
                {
                    leftData  = audio1.ToArray();
                    rightData = audio2.ToArray();
                }
                else
                {
                    adpcmIndex = 0;
                    leftData   = ImaAdpcmReader.LoadImaAdpcmSound(audio1.ToArray(), ref adpcmIndex);
                    adpcmIndex = 0;
                    rightData  = ImaAdpcmReader.LoadImaAdpcmSound(audio2.ToArray(), ref adpcmIndex);
                }

                // Interleave the two decoded channels: two bytes of the left sample, then two bytes of the right sample per frame
                audioData = new byte[rightData.Length + leftData.Length];
                var rightIndex = 0;
                var leftIndex  = 0;
                for (var i = 0; i < audioData.Length;)
                {
                    audioData[i++] = leftData[leftIndex++];
                    audioData[i++] = leftData[leftIndex++];
                    audioData[i++] = rightData[rightIndex++];
                    audioData[i++] = rightData[rightIndex++];
                }
            }

            hasAudio = audioData.Length > 0;
        }
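
Note on the length reads above: every chunk size goes through int2.Swap before use, which suggests the sizes are stored big-endian in the VQA file. A minimal sketch of that kind of byte swap, assuming int2.Swap is a plain 32-bit endianness flip (SwapBigEndian is a hypothetical name, not OpenRA's helper):

        static uint SwapBigEndian(uint value)
        {
            // Reverse the byte order of a 32-bit value (big-endian <-> little-endian)
            return ((value & 0x000000FFu) << 24)
                 | ((value & 0x0000FF00u) << 8)
                 | ((value & 0x00FF0000u) >> 8)
                 | ((value & 0xFF000000u) >> 24);
        }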
Example 2
 public static byte[] LoadSound(byte[] raw, ref int index)
 {
     return ImaAdpcmReader.LoadImaAdpcmSound(raw, ref index);
 }
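
This wrapper simply forwards to ImaAdpcmReader.LoadImaAdpcmSound with the caller's running step index. A minimal usage sketch, assuming it is defined alongside the wrapper above (DecodeMonoChunk and snd2Payload are hypothetical names; the buffer is imagined to hold the body of one compressed SND2 chunk, as collected in Example 1):

 static byte[] DecodeMonoChunk(byte[] snd2Payload)
 {
     var adpcmIndex = 0; // fresh decoder step index for this buffer
     return LoadSound(snd2Payload, ref adpcmIndex);
 }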
Example 3
            protected override bool BufferData(Stream baseStream, Queue<byte> data)
            {
                // Decode each block of IMA ADPCM data
                // Each block starts with an initial state per channel
                for (var c = 0; c < channels; c++)
                {
                    predictor[c] = baseStream.ReadInt16();
                    index[c]     = baseStream.ReadUInt8();
                    baseStream.ReadUInt8(); // Unknown/Reserved

                    // Output first sample from input
                    data.Enqueue((byte)predictor[c]);
                    data.Enqueue((byte)(predictor[c] >> 8));
                    outOffset += 2;

                    if (outOffset >= outputSize)
                    {
                        return true;
                    }
                }

                // Decode and output remaining data in this block
                var blockOffset = 0;

                while (blockOffset < blockDataSize)
                {
                    for (var c = 0; c < channels; c++)
                    {
                        // Decode 4 bytes (to 16 bytes of output) per channel
                        var chunk   = baseStream.ReadBytes(4);
                        var decoded = ImaAdpcmReader.LoadImaAdpcmSound(chunk, ref index[c], ref predictor[c]);

                        // Interleave output, one sample per channel
                        var interleaveChannelOffset = 2 * c;
                        for (var i = 0; i < decoded.Length; i += 2)
                        {
                            var interleaveSampleOffset = interleaveChannelOffset + i;
                            interleaveBuffer[interleaveSampleOffset]     = decoded[i];
                            interleaveBuffer[interleaveSampleOffset + 1] = decoded[i + 1];
                            interleaveChannelOffset += 2 * (channels - 1);
                        }

                        blockOffset += 4;
                    }

                    var outputRemaining = outputSize - outOffset;
                    var toCopy          = Math.Min(outputRemaining, interleaveBuffer.Length);
                    for (var i = 0; i < toCopy; i++)
                    {
                        data.Enqueue(interleaveBuffer[i]);
                    }

                    outOffset += 16 * channels;

                    if (outOffset >= outputSize)
                    {
                        return true;
                    }
                }

                return ++currentBlock >= numBlocks;
            }
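
For reference, this is the kind of work LoadImaAdpcmSound is expected to do for each 4-bit code inside those 4-byte chunks: look up the current step size, rebuild the signed difference, and clamp both the predicted sample and the step index. The tables and clamping below are the standard IMA/DVI ADPCM ones, and the class and method names are made up for the sketch; this is an illustration, not OpenRA's ImaAdpcmReader implementation.

using System;

static class ImaAdpcmStepSketch
{
    // Standard IMA ADPCM adaptation tables
    static readonly int[] IndexAdjust = { -1, -1, -1, -1, 2, 4, 6, 8, -1, -1, -1, -1, 2, 4, 6, 8 };
    static readonly int[] StepTable =
    {
        7, 8, 9, 10, 11, 12, 13, 14, 16, 17,
        19, 21, 23, 25, 28, 31, 34, 37, 41, 45,
        50, 55, 60, 66, 73, 80, 88, 97, 107, 118,
        130, 143, 157, 173, 190, 209, 230, 253, 279, 307,
        337, 371, 408, 449, 494, 544, 598, 658, 724, 796,
        876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878, 2066,
        2272, 2499, 2749, 3024, 3327, 3660, 4026, 4428, 4871, 5358,
        5894, 6484, 7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899,
        15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767
    };

    // Decodes a single 4-bit code, updating the per-channel predictor and step index
    public static short DecodeNibble(int nibble, ref int predictor, ref int index)
    {
        var step = StepTable[index];

        // Reconstruct the difference encoded by the three magnitude bits
        var diff = step >> 3;
        if ((nibble & 1) != 0) diff += step >> 2;
        if ((nibble & 2) != 0) diff += step >> 1;
        if ((nibble & 4) != 0) diff += step;

        // Bit 3 is the sign; apply it and clamp the sample to the 16-bit range
        predictor += (nibble & 8) != 0 ? -diff : diff;
        predictor = Math.Max(short.MinValue, Math.Min(short.MaxValue, predictor));

        // Adapt the step index and keep it inside the table
        index = Math.Max(0, Math.Min(StepTable.Length - 1, index + IndexAdjust[nibble & 0x0F]));

        return (short)predictor;
    }
}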