Exemplo n.º 1
0
        /// <summary>
        ///     Copies AAC-specific decoder configuration out of the supplied frame header.
        /// </summary>
        /// <param name="frameHeader">Must be an <see cref="AacFrameHeader" />; an invalid cast otherwise.</param>
        public void Configure(IAudioFrameHeader frameHeader)
        {
            var aacFrameHeader = (AacFrameHeader)frameHeader;

            CodecPrivateData = AacConfigurator.BuildCodecPrivateData(aacFrameHeader);
            Name = frameHeader.Name;
            Channels = (int)aacFrameHeader.ChannelConfig;
            SamplingFrequency = frameHeader.SamplingFrequency;

            SetConfigured();
        }
Exemplo n.º 2
0
        /// <summary>
        ///     Builds the codec-private-data string for an AAC stream, either via a
        ///     user-supplied factory or from the configured WaveFormatEx flavor.
        /// </summary>
        /// <param name="aacFrameHeader">Parsed AAC frame header providing channel/rate/ASC data.</param>
        /// <returns>The codec private data string.</returns>
        /// <exception cref="NotSupportedException">
        ///     Raw AAC requested without UseRawAac enabled, or an unknown format value.
        /// </exception>
        private static string BuildCodecPrivateData(AacFrameHeader aacFrameHeader)
        {
            // A caller-supplied factory takes precedence over the built-in formats.
            var factory = AacDecoderSettings.Parameters.CodecPrivateDataFactory;

            if (null != factory)
                return factory(aacFrameHeader);

            var format = AacDecoderSettings.Parameters.ConfigurationFormat;

            SM.Media.Mmreg.WaveFormatEx waveFormatEx;

            switch (format)
            {
            case AacDecoderParameters.WaveFormatEx.RawAac:
                if (!AacDecoderSettings.Parameters.UseRawAac)
                    throw new NotSupportedException("AacDecoderSettings.Parameters.UseRawAac must be enabled when using AacDecoderParameters.WaveFormatEx.RawAac");

                var seconds = aacFrameHeader.Duration.TotalSeconds;
                // Average byte rate = frame bytes / frame duration; 0 when the duration is not positive.
                var bytesPerSecond = seconds > 0.0 ? aacFrameHeader.FrameLength / seconds : 0.0;

                waveFormatEx = new RawAacWaveInfo
                {
                    nChannels = aacFrameHeader.ChannelConfig,
                    nSamplesPerSec = (uint)aacFrameHeader.SamplingFrequency,
                    nAvgBytesPerSec = (uint)bytesPerSecond,
                    pbAudioSpecificConfig = aacFrameHeader.AudioSpecificConfig
                };
                break;

            case AacDecoderParameters.WaveFormatEx.HeAac:
                waveFormatEx = new HeAacWaveInfo
                {
                    // Payload type: 0 when raw AAC frames are used, otherwise 1.
                    wPayloadType = AacDecoderSettings.Parameters.UseRawAac ? (ushort)0 : (ushort)1,
                    nChannels = aacFrameHeader.ChannelConfig,
                    nSamplesPerSec = (uint)aacFrameHeader.SamplingFrequency,
                    pbAudioSpecificConfig = aacFrameHeader.AudioSpecificConfig
                };
                break;

            default:
                throw new NotSupportedException("Unknown WaveFormatEx type: " + (object)format);
            }

            return WaveFormatExExtensions.ToCodecPrivateData(waveFormatEx);
        }
        /// <summary>
        ///     Produces the default two-byte AudioSpecificConfig for the given AAC frame header.
        /// </summary>
        /// <param name="aacFrameHeader">Header providing profile, frequency index, and channel config.</param>
        /// <returns>A two-byte AudioSpecificConfig.</returns>
        public static byte[] DefaultAudioSpecificConfigFactory(AacFrameHeader aacFrameHeader)
        {
            // Object type is the ADTS profile value plus one.
            var objectType = aacFrameHeader.Profile + 1;

            // Optionally remap object type 1 to a configured substitute.
            if (1 == objectType && AacAudioSpecificConfig.RemapObjectType1.HasValue)
            {
                objectType = AacAudioSpecificConfig.RemapObjectType1.Value;
                Debug.WriteLine("AacConfigurator.AudioSpecificConfig: Changing AAC object type from 1 to {0}.", (object)objectType);
            }

            // Pack: byte 0 = objectType (5 bits) | high 3 bits of frequency index;
            //       byte 1 = low bit of frequency index | channel config shifted into bits 6..3.
            return new byte[2]
            {
                (byte)((objectType << 3) | ((aacFrameHeader.FrequencyIndex >> 1) & 0x07)),
                (byte)((aacFrameHeader.FrequencyIndex << 7) | ((int)aacFrameHeader.ChannelConfig << 3))
            };
        }
Exemplo n.º 4
0
        /// <summary>
        ///     Builds the default two-byte AudioSpecificConfig from an AAC frame header,
        ///     applying the optional object-type-1 remapping.
        /// </summary>
        /// <param name="aacFrameHeader">Header providing profile, frequency index, and channel config.</param>
        /// <returns>A two-byte AudioSpecificConfig.</returns>
        public static byte[] DefaultAudioSpecificConfigFactory(AacFrameHeader aacFrameHeader)
        {
            // Object type is the ADTS profile value plus one.
            var audioObjectType = aacFrameHeader.Profile + 1;

            if (audioObjectType == 1 && RemapObjectType1.HasValue)
            {
                audioObjectType = RemapObjectType1.Value;
                Debug.WriteLine("AacConfigurator.AudioSpecificConfig: Changing AAC object type from 1 to {0}.", audioObjectType);
            }

            // byte 0: object type (5 bits) | top 3 bits of the frequency index
            var firstByte = (byte)((audioObjectType << 3) | ((aacFrameHeader.FrequencyIndex >> 1) & 0x07));
            // byte 1: low bit of the frequency index | channel config in bits 6..3
            var secondByte = (byte)((aacFrameHeader.FrequencyIndex << 7) | (aacFrameHeader.ChannelConfig << 3));

            return new[] { firstByte, secondByte };
        }
Exemplo n.º 5
0
        /// <summary>
        ///     Builds the codec-private-data string for an AAC stream: a user-supplied
        ///     factory wins; otherwise the configured WaveFormatEx flavor is serialized.
        /// </summary>
        /// <param name="aacFrameHeader">Parsed AAC frame header providing channel/rate/ASC data.</param>
        /// <returns>The codec private data string.</returns>
        /// <exception cref="NotSupportedException">
        ///     Raw AAC requested without UseRawAac enabled, or an unknown format value.
        /// </exception>
        static string BuildCodecPrivateData(AacFrameHeader aacFrameHeader)
        {
            var codecPrivateDataFactory = AacDecoderSettings.Parameters.CodecPrivateDataFactory;

            if (codecPrivateDataFactory != null)
                return codecPrivateDataFactory(aacFrameHeader);

            var configurationFormat = AacDecoderSettings.Parameters.ConfigurationFormat;

            WaveFormatEx waveFormat;

            switch (configurationFormat)
            {
                case AacDecoderParameters.WaveFormatEx.RawAac:
                {
                    if (!AacDecoderSettings.Parameters.UseRawAac)
                        throw new NotSupportedException("AacDecoderSettings.Parameters.UseRawAac must be enabled when using AacDecoderParameters.WaveFormatEx.RawAac");

                    var totalSeconds = aacFrameHeader.Duration.TotalSeconds;
                    // Average byte rate = frame bytes / frame duration; 0 for a non-positive duration.
                    var averageBytesPerSecond = totalSeconds <= 0 ? 0 : aacFrameHeader.FrameLength / totalSeconds;

                    waveFormat = new RawAacWaveInfo
                    {
                        nChannels = aacFrameHeader.ChannelConfig,
                        nSamplesPerSec = (uint)aacFrameHeader.SamplingFrequency,
                        nAvgBytesPerSec = (uint)averageBytesPerSecond,
                        pbAudioSpecificConfig = aacFrameHeader.AudioSpecificConfig
                    };

                    break;
                }
                case AacDecoderParameters.WaveFormatEx.HeAac:
                {
                    waveFormat = new HeAacWaveInfo
                    {
                        wPayloadType = (ushort)(AacDecoderSettings.Parameters.UseRawAac ? HeAacWaveInfo.PayloadType.Raw : HeAacWaveInfo.PayloadType.ADTS),
                        nChannels = aacFrameHeader.ChannelConfig,
                        nSamplesPerSec = (uint)aacFrameHeader.SamplingFrequency,
                        pbAudioSpecificConfig = aacFrameHeader.AudioSpecificConfig
                    };

                    break;
                }
                default:
                    throw new NotSupportedException("Unknown WaveFormatEx type: " + configurationFormat);
            }

            return waveFormat.ToCodecPrivateData();
        }