Example No. 1
        private static void ValidateAt9File(RiffParser parser)
        {
            if (parser.RiffChunk.Type != "WAVE")
            {
                throw new InvalidDataException("Not a valid WAVE file");
            }

            WaveFmtChunk fmt = parser.GetSubChunk<WaveFmtChunk>("fmt ") ??
                               throw new InvalidDataException("File must have a valid fmt chunk");
            At9WaveExtensible ext = fmt.Ext as At9WaveExtensible ??
                                    throw new InvalidDataException("File must have a format chunk extension");

            if (parser.GetSubChunk<At9FactChunk>("fact") == null)
            {
                throw new InvalidDataException("File must have a valid fact chunk");
            }
            if (parser.GetSubChunk<At9DataChunk>("data") == null)
            {
                throw new InvalidDataException("File must have a valid data chunk");
            }

            if (fmt.ChannelCount == 0)
            {
                throw new InvalidDataException("Channel count must not be zero");
            }

            if (ext.SubFormat != MediaSubtypes.MediaSubtypeAtrac9)
            {
                throw new InvalidDataException($"Must contain ATRAC9 data. Has unsupported SubFormat {ext.SubFormat}");
            }
        }
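
For orientation, these checks amount to requiring the following layout in an ATRAC9 WAVE file (chunk IDs as used in the GetSubChunk calls above):

        RIFF chunk of type "WAVE"
            "fmt " - WaveFmtChunk carrying an At9WaveExtensible extension whose SubFormat is the ATRAC9 subtype, with a non-zero channel count
            "fact" - At9FactChunk
            "data" - At9DataChunk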
Example No. 2
        protected override WaveStructure ReadFile(Stream stream, bool readAudioData = true)
        {
            var structure = new WaveStructure();
            var parser    = new RiffParser {
                ReadDataChunk = readAudioData
            };

            parser.ParseRiff(stream);

            ValidateWaveFile(parser);

            WaveFmtChunk  fmt  = parser.GetSubChunk<WaveFmtChunk>("fmt ");
            WaveDataChunk data = parser.GetSubChunk<WaveDataChunk>("data");
            WaveSmplChunk smpl = parser.GetSubChunk<WaveSmplChunk>("smpl");

            // Round bits per sample up to whole bytes (e.g. 16 bits -> 2 bytes).
            int bytesPerSample = fmt.BitsPerSample.DivideByRoundUp(8);

            structure.RiffSubChunks = parser.GetAllSubChunks();
            // Samples per channel = data chunk bytes / bytes per sample / channel count.
            structure.SampleCount   = data.SubChunkSize / bytesPerSample / fmt.ChannelCount;
            structure.SampleRate    = fmt.SampleRate;
            structure.BitsPerSample = fmt.BitsPerSample;
            structure.ChannelCount  = fmt.ChannelCount;

            if (smpl?.Loops?.FirstOrDefault() != null)
            {
                structure.LoopStart = smpl.Loops[0].Start;
                structure.LoopEnd   = smpl.Loops[0].End;
                structure.Looping   = structure.LoopEnd > structure.LoopStart;
            }

            if (!readAudioData)
            {
                return structure;
            }

            switch (fmt.BitsPerSample)
            {
            case 16:
                structure.AudioData16 = data.Data.InterleavedByteToShort(fmt.ChannelCount);
                break;

            case 8:
                structure.AudioData8 = data.Data.DeInterleave(bytesPerSample, fmt.ChannelCount);
                break;
            }
            return structure;
        }
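
These readers rely on a few extension helpers that are not part of the examples (DivideByRoundUp, InterleavedByteToShort, DeInterleave). As a rough sketch of the behavior assumed above, the first two could look like this; the class name SampleHelpers and the method bodies are illustrative, not the library's actual implementation:

        public static class SampleHelpers
        {
            // Integer division rounding up: 16.DivideByRoundUp(8) == 2, and 12.DivideByRoundUp(8) == 2.
            public static int DivideByRoundUp(this int value, int divisor) =>
                (value + divisor - 1) / divisor;

            // Splits interleaved 16-bit little-endian PCM into one short[] per channel,
            // matching how AudioData16 is filled from the data chunk above.
            public static short[][] InterleavedByteToShort(this byte[] input, int channelCount)
            {
                int sampleCount = input.Length / 2 / channelCount;
                var output = new short[channelCount][];
                for (int c = 0; c < channelCount; c++)
                {
                    output[c] = new short[sampleCount];
                }

                for (int s = 0; s < sampleCount; s++)
                {
                    for (int c = 0; c < channelCount; c++)
                    {
                        int offset = (s * channelCount + c) * 2;
                        output[c][s] = (short)(input[offset] | (input[offset + 1] << 8));
                    }
                }
                return output;
            }
        }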
Example No. 3
        private static void ValidateWaveFile(RiffParser parser)
        {
            if (parser.RiffChunk.Type != "WAVE")
            {
                throw new InvalidDataException("Not a valid WAVE file");
            }

            WaveFmtChunk fmt = parser.GetSubChunk<WaveFmtChunk>("fmt ") ??
                               throw new InvalidDataException("File must have a valid fmt chunk");

            if (parser.GetSubChunk<WaveDataChunk>("data") == null)
            {
                throw new InvalidDataException("File must have a valid data chunk");
            }

            int bytesPerSample = fmt.BitsPerSample.DivideByRoundUp(8);

            if (fmt.FormatTag != WaveFormatTags.WaveFormatPcm && fmt.FormatTag != WaveFormatTags.WaveFormatExtensible)
            {
                throw new InvalidDataException($"Must contain PCM data. Has unsupported format {fmt.FormatTag}");
            }

            if (fmt.BitsPerSample != 16 && fmt.BitsPerSample != 8)
            {
                throw new InvalidDataException($"Must have 8 or 16 bits per sample, not {fmt.BitsPerSample} bits per sample");
            }

            if (fmt.ChannelCount == 0)
            {
                throw new InvalidDataException("Channel count must not be zero");
            }

            // BlockAlign should equal the size of one sample frame: bytes per sample * channel count.
            if (fmt.BlockAlign != bytesPerSample * fmt.ChannelCount)
            {
                throw new InvalidDataException("File has invalid block alignment");
            }

            if (fmt.Ext != null && fmt.Ext.SubFormat != MediaSubtypes.MediaSubtypePcm)
            {
                throw new InvalidDataException($"Must contain PCM data. Has unsupported SubFormat {fmt.Ext.SubFormat}");
            }
        }
Example No. 4
        public At9DataChunk(RiffParser parser, BinaryReader reader) : base(reader)
        {
            // Do not trust the BlockAlign field in the fmt chunk to equal the superframe size.
            // Some AT9 files have an invalid number in there.
            // Calculate the size using the ATRAC9 DataConfig instead.

            At9WaveExtensible ext = parser.GetSubChunk<WaveFmtChunk>("fmt ")?.Ext as At9WaveExtensible ??
                                    throw new InvalidDataException("fmt chunk must come before data chunk");

            At9FactChunk fact = parser.GetSubChunk<At9FactChunk>("fact") ??
                                throw new InvalidDataException("fact chunk must come before data chunk");

            var config = new Atrac9Config(ext.ConfigData);

            FrameCount = (fact.SampleCount + fact.EncoderDelaySamples).DivideByRoundUp(config.SuperframeSamples);
            int dataSize = FrameCount * config.SuperframeBytes;

            if (dataSize > reader.BaseStream.Length - reader.BaseStream.Position)
            {
                throw new InvalidDataException("Required AT9 length is greater than the number of bytes remaining in the file.");
            }

            AudioData = reader.BaseStream.DeInterleave(dataSize, config.SuperframeBytes, FrameCount);
        }
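
To make the superframe arithmetic concrete, here is a small worked example with made-up numbers; a real file's values come from its fact chunk and Atrac9Config:

            int sampleCount       = 441000; // fact.SampleCount (hypothetical)
            int encoderDelay      = 256;    // fact.EncoderDelaySamples (hypothetical)
            int superframeSamples = 1024;   // config.SuperframeSamples (hypothetical)
            int superframeBytes   = 744;    // config.SuperframeBytes (hypothetical)

            // DivideByRoundUp: ceil((441000 + 256) / 1024) = 431 superframes.
            int frameCount = (sampleCount + encoderDelay + superframeSamples - 1) / superframeSamples;

            // 431 superframes * 744 bytes each = 320,664 bytes expected in the data chunk.
            int dataSize = frameCount * superframeBytes;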
Example No. 5
        protected override At9Structure ReadFile(Stream stream, bool readAudioData = true)
        {
            var structure = new At9Structure();
            var parser    = new RiffParser {
                ReadDataChunk = readAudioData
            };

            parser.RegisterSubChunk("fact", At9FactChunk.ParseAt9);
            parser.RegisterSubChunk("data", At9DataChunk.ParseAt9);
            parser.FormatExtensibleParser = At9WaveExtensible.ParseAt9;
            parser.ParseRiff(stream);

            ValidateAt9File(parser);

            var fmt  = parser.GetSubChunk<WaveFmtChunk>("fmt ");
            var ext  = (At9WaveExtensible)fmt.Ext;
            var fact = parser.GetSubChunk<At9FactChunk>("fact");
            var data = parser.GetSubChunk<At9DataChunk>("data");
            var smpl = parser.GetSubChunk<WaveSmplChunk>("smpl");

            structure.Config          = new Atrac9Config(ext.ConfigData);
            structure.SampleCount     = fact.SampleCount;
            structure.EncoderDelay    = fact.EncoderDelaySamples;
            structure.Version         = ext.VersionInfo;
            structure.AudioData       = data.AudioData;
            structure.SuperframeCount = data.FrameCount;

            if (smpl?.Loops?.FirstOrDefault() != null)
            {
                // Loop points in the smpl chunk include the encoder delay,
                // so shift them back to positions in the decoded output.
                structure.LoopStart = smpl.Loops[0].Start - structure.EncoderDelay;
                structure.LoopEnd   = smpl.Loops[0].End - structure.EncoderDelay;
                structure.Looping   = structure.LoopEnd > structure.LoopStart;
            }

            return structure;
        }
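
The RegisterSubChunk calls above are the parser's extension point: a four-character chunk ID is mapped to a factory that receives the RiffParser and a BinaryReader positioned at that chunk. A hypothetical registration following the same shape might look like this; the chunk ID "loud", the LoudnessChunk type, and the RiffSubChunk base class with a (BinaryReader) constructor are assumptions made for illustration, mirroring At9FactChunk.ParseAt9 and the At9DataChunk constructor:

        public class LoudnessChunk : RiffSubChunk
        {
            public float IntegratedLoudness { get; }

            public LoudnessChunk(BinaryReader reader) : base(reader)
            {
                // Read the chunk body; the base constructor is assumed to have
                // already consumed the chunk ID and size, as in At9DataChunk above.
                IntegratedLoudness = reader.ReadSingle();
            }

            public static LoudnessChunk Parse(RiffParser parser, BinaryReader reader) =>
                new LoudnessChunk(reader);
        }

        // Registered before ParseRiff, the same way as the AT9 chunks:
        // parser.RegisterSubChunk("loud", LoudnessChunk.Parse);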