Code Example #1
        // Need to convert the audio data to 44100 Hz because mixing several inputs with different sampling rates does not work on iOS < 7.0
        private void AdaptAudioDataImpl()
        {
            if(UIDevice.CurrentDevice.CheckSystemVersion(7, 0)) // inputs of different sampling rates work properly on iOS >= 7.0
                return;

            if(WaveFormat.SampleRate >= AudioVoice.AudioUnitOutputSampleRate) // equal rates need no conversion, and down-sampling is not supported
                return;

            // allocate the new audio buffer 
            var sampleRateRatio = WaveFormat.SampleRate / (float)AudioVoice.AudioUnitOutputSampleRate;
            var newWaveDataSize = (int)Math.Floor(WaveDataSize / sampleRateRatio);
            var newWaveDataPtr = Utilities.AllocateMemory(newWaveDataSize);

            // up-sample the audio data
            if (Math.Abs(sampleRateRatio - 0.5f) < MathUtil.ZeroTolerance)
                UpSampleByTwo(WaveDataPtr, newWaveDataPtr, newWaveDataSize, WaveFormat.Channels, false);
            else
                UpSample(WaveDataPtr, newWaveDataPtr, newWaveDataSize, sampleRateRatio, WaveFormat.Channels, false);

            // update the wave data buffer
            Utilities.FreeMemory(nativeDataBuffer);
            nativeDataBuffer = newWaveDataPtr;
            WaveDataPtr = newWaveDataPtr;
            WaveDataSize = newWaveDataSize;
            WaveFormat = new WaveFormat(AudioVoice.AudioUnitOutputSampleRate, WaveFormat.Channels);
        }
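The buffer-size computation above scales linearly with the sample-rate ratio. A minimal arithmetic sketch with hypothetical values (a 22050 Hz source played through a 44100 Hz output; the constant names are illustrative, not from the original source):

 // Hypothetical values for illustration only.
 const int sourceRate = 22050;   // stands in for WaveFormat.SampleRate
 const int outputRate = 44100;   // stands in for AudioVoice.AudioUnitOutputSampleRate
 const int waveDataSize = 1024;  // size of the source buffer, in bytes

 var sampleRateRatio = sourceRate / (float)outputRate;                  // 0.5 -> takes the UpSampleByTwo fast path
 var newWaveDataSize = (int)Math.Floor(waveDataSize / sampleRateRatio); // 2048 bytes after up-sampling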
Code Example #2
        /// <summary>
        /// Create a dynamic sound effect instance with the given sound properties.
        /// </summary>
        /// <param name="engine">The engine in which the dynamicSoundEffectInstance is created</param>
        /// <param name="sampleRate">Sample rate, in Hertz (Hz), of audio content. Must between 8000 Hz and 48000 Hz</param>
        /// <param name="channels">Number of channels in the audio data.</param>
        /// <param name="encoding">Encoding of a sound data sample</param>
        /// <returns>A new DynamicSoundEffectInstance instance ready to be filled with data and then played</returns>
        /// <exception cref="ArgumentOutOfRangeException">This exception is thrown for one of the following reasons:
        /// <list type="bullet">
        /// <item>The value specified for sampleRate is less than 8000 Hz or greater than 48000 Hz. </item>
        /// <item>The value specified for channels is something other than mono or stereo. </item>
        /// <item>The value specified for data encoding is something other than 8 or 16 bits. </item>
        /// </list>
        ///  </exception>
        /// <exception cref="ArgumentNullException"><paramref name="engine"/> is null.</exception>
        public DynamicSoundEffectInstance(AudioEngine engine, int sampleRate, AudioChannels channels, AudioDataEncoding encoding)
        {
            AttachEngine(engine);

            if (sampleRate < 8000 || 48000 < sampleRate)
                throw new ArgumentOutOfRangeException("sampleRate");

            if(channels != AudioChannels.Mono && channels != AudioChannels.Stereo)
                throw new ArgumentOutOfRangeException("channels");

            if(encoding != AudioDataEncoding.PCM_8Bits && encoding != AudioDataEncoding.PCM_16Bits)
                throw new ArgumentOutOfRangeException("encoding");

            waveFormat = new WaveFormat(sampleRate, (int)encoding, (int)channels);

            Interlocked.Increment(ref totalNbOfInstances);
            var instanceCount = Interlocked.Increment(ref numberOfInstances);

            // first instance of a dynamic sound effect instance => create the worker thread and the associated event
            if (instanceCount == 1)
            {
                instancesNeedingBuffer = new ThreadSafeQueue<DynamicSoundEffectInstance>(); // to be sure that there are no remaining requests from previous sessions
                awakeWorkerThread = new AutoResetEvent(false);
                CreateWorkerThread();
            }
            
            Name = "Dynamic Sound Effect Instance - "+totalNbOfInstances;

            CreateVoice(WaveFormat);

            InitializeDynamicSound();

            AudioEngine.RegisterSound(this);

            ResetStateToDefault();
        }
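For reference, a minimal usage sketch of this constructor; the audioEngine variable is an assumption standing in for any valid AudioEngine instance:

 // Hypothetical usage, assuming 'audioEngine' is a valid AudioEngine.
 var dynamicSound = new DynamicSoundEffectInstance(
     audioEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);

Arguments outside the documented ranges, for example a 96000 Hz sample rate, would throw ArgumentOutOfRangeException, as the guard clauses above show.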
Code Example #3
 private void CreateVoice(WaveFormat waveFormat1)
 {
     throw new System.NotImplementedException();
 }
Code Example #4
File: WaveFormat.cs Project: h78hy78yhoi8j/xenko
 /// <summary>
 /// Helper function to marshal WaveFormat to an IntPtr
 /// </summary>
 /// <param name="format">WaveFormat</param>
 /// <returns>IntPtr to the WaveFormat structure (needs to be freed by the caller)</returns>
 public static IntPtr MarshalToPtr(WaveFormat format)
 {
     if (format == null) return IntPtr.Zero;
     return format.MarshalToPtr();
 }
Code Example #5
File: WaveFormat.cs Project: h78hy78yhoi8j/xenko
        /// <summary>
        /// Helper function to retrieve a WaveFormat structure from a pointer
        /// </summary>
        /// <param name="pointer">Pointer to the WaveFormat rawdata</param>
        /// <returns>WaveFormat structure</returns>
        public unsafe static WaveFormat MarshalFrom(IntPtr pointer)
        {
            if (pointer == IntPtr.Zero) return null;

            var pcmWaveFormat = *(__PcmNative*)pointer;
            var encoding = pcmWaveFormat.waveFormatTag;

            // Load a simple PcmWaveFormat if channels <= 2 and the encoding is Pcm or IeeeFloat
            // See http://msdn.microsoft.com/en-us/library/microsoft.directx_sdk.xaudio2.waveformatex%28v=vs.85%29.aspx
            if (pcmWaveFormat.channels <= 2 && (encoding == WaveFormatEncoding.Pcm || encoding == WaveFormatEncoding.IeeeFloat))
            {
                var waveFormat = new WaveFormat();
                waveFormat.__MarshalFrom(ref pcmWaveFormat);
                return waveFormat;
            }

            if (encoding == WaveFormatEncoding.Extensible)
            {
                var waveFormat = new WaveFormatExtensible();
                waveFormat.__MarshalFrom(ref *(WaveFormatExtensible.__Native*)pointer);
                return waveFormat;
            }

            if (encoding == WaveFormatEncoding.Adpcm)
            {
                var waveFormat = new WaveFormatAdpcm();
                waveFormat.__MarshalFrom(ref *(WaveFormatAdpcm.__Native*)pointer);
                return waveFormat;
            }

            throw new InvalidOperationException(string.Format("Unsupported WaveFormat [{0}]", encoding));
        }
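Since MarshalToPtr and MarshalFrom are symmetric, a round trip through unmanaged memory can be sketched as follows (freeing with Marshal.FreeHGlobal assumes the pointer was allocated from the HGlobal heap, which this listing does not show):

 // Hypothetical round trip between the managed and unmanaged representations.
 var source = new WaveFormat(44100, 16, 2);
 var pointer = WaveFormat.MarshalToPtr(source);
 try
 {
     var copy = WaveFormat.MarshalFrom(pointer); // back to a managed WaveFormat
 }
 finally
 {
     System.Runtime.InteropServices.Marshal.FreeHGlobal(pointer); // assumes HGlobal allocation
 }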
Code Example #6
File: WaveFormat.cs Project: h78hy78yhoi8j/xenko
 /// <summary>
 /// Creates a new 32-bit IEEE floating point wave format
 /// </summary>
 /// <param name="sampleRate">sample rate</param>
 /// <param name="channels">number of channels</param>
 public static WaveFormat CreateIeeeFloatWaveFormat(int sampleRate, int channels)
 {
     var wf = new WaveFormat
     {
         waveFormatTag = WaveFormatEncoding.IeeeFloat,
         channels = (short)channels,
         bitsPerSample = 32,
         sampleRate = sampleRate,
         blockAlign = (short)(4 * channels)
     };
     wf.averageBytesPerSecond = sampleRate * wf.blockAlign;
     wf.extraSize = 0;
     return wf;
 }
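The derived fields follow directly from the fixed 32-bit (4-byte) sample width: blockAlign is 4 * channels bytes per frame, and averageBytesPerSecond is sampleRate * blockAlign. For example:

 var stereoFloat = WaveFormat.CreateIeeeFloatWaveFormat(48000, 2);
 // blockAlign            = 4 * 2     = 8 bytes per frame
 // averageBytesPerSecond = 48000 * 8 = 384000 bytes per second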
Code Example #7
File: WaveFormat.cs Project: h78hy78yhoi8j/xenko
 /// <summary>
 /// Creates a WaveFormat with custom members
 /// </summary>
 /// <param name="tag">The encoding</param>
 /// <param name="sampleRate">Sample Rate</param>
 /// <param name="channels">Number of channels</param>
 /// <param name="averageBytesPerSecond">Average Bytes Per Second</param>
 /// <param name="blockAlign">Block Align</param>
 /// <param name="bitsPerSample">Bits Per Sample</param>
 /// <returns>A new WaveFormat initialized with the given values</returns>
 public static WaveFormat CreateCustomFormat(WaveFormatEncoding tag, int sampleRate, int channels, int averageBytesPerSecond, int blockAlign, int bitsPerSample)
 {
     var waveFormat = new WaveFormat
     {
         waveFormatTag = tag,
         channels = (short)channels,
         sampleRate = sampleRate,
         averageBytesPerSecond = averageBytesPerSecond,
         blockAlign = (short)blockAlign,
         bitsPerSample = (short)bitsPerSample,
         extraSize = 0
     };
     return waveFormat;
 }
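As a cross-check, the IEEE float factory from the previous example can be reproduced through this custom factory by spelling out the same field values:

 var custom = WaveFormat.CreateCustomFormat(
     WaveFormatEncoding.IeeeFloat,
     sampleRate: 48000,
     channels: 2,
     averageBytesPerSecond: 48000 * 8, // sampleRate * blockAlign
     blockAlign: 8,                    // 4 bytes per sample * 2 channels
     bitsPerSample: 32);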
Code Example #8
 internal virtual void CreateVoice(WaveFormat format)
 {
     // nothing to do here
 }
Code Example #9
File: AudioVoice.iOS.cs Project: cg123/xenko
        public AudioVoice(AudioEngine engine, SoundEffectInstance effectInstance, WaveFormat desiredFormat)
        {
            if (engine == null) throw new ArgumentNullException("engine");
            if (desiredFormat == null) throw new ArgumentNullException("desiredFormat");

            audioEngine = engine;
            soundEffectInstance = effectInstance;
            waveFormat = desiredFormat;
            BusIndexMixer = uint.MaxValue;

            if (desiredFormat.BitsPerSample != 16)
                throw new AudioSystemInternalException("Invalid Audio Format. " + desiredFormat.BitsPerSample + " bits per sample is not supported.");

            lock (StaticMembersLock)
            {
                if (nbOfInstances == 0)
                {
                    // Create the Audio Graph
                    audioGraph = new AUGraph();

                    // Open the graph (does not initialize it yet)
                    audioGraph.Open();
                    
                    // Create the AudioComponentDescriptions corresponding to the Remote I/O output and the MultiChannel and 3D mixers
                    var remoteInOutComponentDesc = AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote);
                    var mixerMultiChannelComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
                    var mixer3DComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.Spacial);

                    // Add the Audio Unit nodes to the AudioGraph
                    var outputUnitNodeId = audioGraph.AddNode(remoteInOutComponentDesc);
                    var idChannelMixerNode = audioGraph.AddNode(mixerMultiChannelComponentDesc);
                    var id3DMixerNode = audioGraph.AddNode(mixer3DComponentDesc);

                    // Connect the nodes together
                    CheckGraphError(audioGraph.ConnnectNodeInput(idChannelMixerNode, 0, outputUnitNodeId, 0), "Connection of the graph node failed.");
                    CheckGraphError(audioGraph.ConnnectNodeInput(id3DMixerNode, 0, idChannelMixerNode, MaxNumberOfTracks), "Connection of the graph node failed.");

                    // Get the MixerUnit objects
                    unitChannelMixer = audioGraph.GetNodeInfo(idChannelMixerNode);
                    unit3DMixer = audioGraph.GetNodeInfo(id3DMixerNode);
                    
                    // Set the mixers' output formats (the stream format is propagated along the linked input during the graph initialization)
                    var desiredSampleRate = (engine.AudioSampleRate != 0) ? engine.AudioSampleRate : AudioUnitOutputSampleRate;
                    unit3DMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);
                    unitChannelMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);

                    // set the element count to the max number of possible tracks before initializing the audio graph
                    CheckUnitStatus(unitChannelMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks+1), string.Format("Failed to set element count on ChannelMixer [{0}]", MaxNumberOfTracks+1)); // +1 for the 3DMixer output
                    CheckUnitStatus(unit3DMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks), string.Format("Failed to set element count on 3DMixer [{0}]", MaxNumberOfTracks));

                    // set a null render callback on the channel and 3D mixer input buses
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unit3DMixer.Handle, i), "Failed to set the render callback");
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unitChannelMixer.Handle, i), "Failed to set the render callback");
                    }
                    
                    // Initialize the graph (validation of the topology)
                    CheckGraphError(audioGraph.Initialize(), "The audio graph initialization failed.");

                    // Start audio rendering
                    CheckGraphError(audioGraph.Start(), "Audio Graph could not start.");

                    // disable all the input buses at the beginning
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus(unitChannelMixer.SetParameter(AudioUnitParameterType.MultiChannelMixerEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the ChannelMixerInput.");
                        CheckUnitStatus(unit3DMixer.SetParameter(AudioUnitParameterType.Mixer3DEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the 3DMixerInput.");
                    }

                    // At initialization, all the mixer bus indices are available.
                    availableMixerBusIndices = new Queue<uint>();
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                        availableMixerBusIndices.Enqueue(i);
                }
                ++nbOfInstances;

                // Create an AudioDataRendererInfo for the sounds.
                pAudioDataRendererInfo = (AudioDataRendererInfo*)Utilities.AllocateClearedMemory(sizeof(AudioDataRendererInfo));
                pAudioDataRendererInfo->HandleChannelMixer = unitChannelMixer.Handle;
                pAudioDataRendererInfo->Handle3DMixer = unit3DMixer.Handle;
            }
        }
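The constructor relies on CheckGraphError and CheckUnitStatus helpers that this listing does not include. A minimal sketch of what the graph helper could look like; the AUGraphError signature and the use of AudioSystemInternalException are assumptions, not the project's actual code:

 // Hypothetical helper, not from the original source.
 private static void CheckGraphError(AUGraphError error, string message)
 {
     if (error != AUGraphError.OK)
         throw new AudioSystemInternalException(message + " [Error=" + error + "]");
 }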
Code Example #10
 internal void CreateVoice(WaveFormat format)
 {
     throw new NotImplementedException();
 }
Code Example #11
 internal void CreateVoice(WaveFormat format)
 {
     SourceVoice = new SourceVoice(AudioEngine.XAudio2, format.ToSharpDX(), VoiceFlags.None, 2f, true); // '2f' -> allows pitch modification up to one octave, 'true' -> enables callbacks
     SourceVoice.StreamEnd += Stop;
 }
Code Example #12
 private void CreateVoice(WaveFormat waveFormat)
 {
     AudioVoice = new AudioVoice(AudioEngine, this, waveFormat);
 }
Code Example #13
 public static SharpDX.Multimedia.WaveFormat ToSharpDX(this Wave.WaveFormat format)
 {
     return new SharpDX.Multimedia.WaveFormat(format.SampleRate, format.BitsPerSample, format.Channels);
 }
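A one-line usage sketch of this extension, assuming the Wave.WaveFormat three-argument (sample rate, bits, channels) constructor seen in the earlier examples:

 // Hypothetical usage: convert an engine-side format to its SharpDX equivalent.
 var sharpDxFormat = new Wave.WaveFormat(44100, 16, 2).ToSharpDX();

Note that the three-argument SharpDX.Multimedia.WaveFormat constructor produces a PCM format, so non-PCM encoding details are not carried over by this conversion.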
Code Example #14
 internal override void CreateVoice(WaveFormat format)
 {
 }