Example #1
0
            /// <summary>
            /// MTAudioProcessingTap process callback. When the bandpass filter is
            /// enabled, renders <paramref name="numberFrames"/> frames through the
            /// Audio Unit; otherwise pulls source audio straight from the tap.
            /// Afterwards updates the channel volume levels via UpdateVolumes().
            /// </summary>
            /// <param name="tap">The processing tap invoking this callback.</param>
            /// <param name="numberFrames">Number of frames requested.</param>
            /// <param name="flags">Input processing flags (unused here).</param>
            /// <param name="bufferList">Audio buffers to fill and process.</param>
            /// <param name="numberFramesOut">Frames actually produced; 0 on failure.</param>
            /// <param name="flagsOut">Output processing flags.</param>
            unsafe void TapProcess(MTAudioProcessingTap tap, nint numberFrames, MTAudioProcessingTapFlags flags,
                                   AudioBuffers bufferList,
                                   out nint numberFramesOut,
                                   out MTAudioProcessingTapFlags flagsOut)
            {
                numberFramesOut = 0;
                flagsOut        = (MTAudioProcessingTapFlags)0;

                // Skip processing when format not supported.
                if (!context.SupportedTapProcessingFormat)
                {
                    Console.WriteLine("Unsupported tap processing format.");
                    return;
                }

                if (IsBandpassFilterEnabled)
                {
                    // Apply bandpass filter Audio Unit (only if prepare succeeded).
                    if (context.AudioUnit != null)
                    {
                        var audioTimeStamp = new AudioTimeStamp
                        {
                            SampleTime = context.SampleCount,
                            Flags      = AudioTimeStamp.AtsFlags.SampleTimeValid
                        };

                        var f      = (AudioUnitRenderActionFlags)0;
                        var status = context.AudioUnit.Render(ref f, audioTimeStamp, 0, (uint)numberFrames, bufferList);
                        if (status != AudioUnitStatus.NoError)
                        {
                            Console.WriteLine("AudioUnitRender(): {0}", status);
                            return;
                        }

                        // Increment sample count for audio unit.
                        context.SampleCount += numberFrames;

                        // Set number of frames out.
                        numberFramesOut = numberFrames;
                    }
                }
                else
                {
                    // Get actual audio buffers from MTAudioProcessingTap (AudioUnitRender() will fill bufferListInOut otherwise).
                    CMTimeRange tr;
                    var         status = tap.GetSourceAudio(numberFrames, bufferList, out flagsOut, out tr, out numberFramesOut);
                    if (status != MTAudioProcessingTapError.None)
                    {
                        Console.WriteLine("MTAudioProcessingTapGetSourceAudio: {0}", status);
                        return;
                    }
                }
                try
                {
                    UpdateVolumes(bufferList, numberFrames);
                }
                catch (Exception ex)
                {
                    // BUGFIX: the exception was previously swallowed silently, hiding
                    // volume-update failures. Log it instead; we avoid rethrowing
                    // because this callback runs on the audio processing path.
                    Console.WriteLine("UpdateVolumes() failed: {0}", ex);
                }
            }
Example #2
0
        /// <summary>
        /// Creates an MTAudioProcessingTap with process and initialize callbacks,
        /// then verifies that tap storage round-trips and that the tap handle has
        /// the expected retain count while the tap is alive.
        /// </summary>
        public unsafe void Initialization()
        {
            // Requires macOS 10.9+; does not throw on other platforms (test is skipped).
            TestRuntime.AssertSystemVersion(PlatformName.MacOSX, 10, 9, throwIfOtherPlatform: false);

            // The frame-count parameters are nint in the unified (XAMCORE_2_0)
            // API and long in the classic API; the bodies are identical.
            var cb = new MTAudioProcessingTapCallbacks(
#if XAMCORE_2_0
                delegate(MTAudioProcessingTap tap, nint numberFrames, MTAudioProcessingTapFlags flags, AudioBuffers bufferList, out nint numberFramesOut, out MTAudioProcessingTapFlags flagsOut) {
#else
                delegate(MTAudioProcessingTap tap, long numberFrames, MTAudioProcessingTapFlags flags, AudioBuffers bufferList, out long numberFramesOut, out MTAudioProcessingTapFlags flagsOut) {
#endif
                numberFramesOut = 2;
                flagsOut        = MTAudioProcessingTapFlags.StartOfStream;
            });

            // Store a sentinel value (44) in tap storage so GetStorage() can be checked below.
            cb.Initialize = delegate(MTAudioProcessingTap tap, out void *tapStorage) {
                tapStorage = (void *)44;
            };

            IntPtr handle;

            using (var res = new MTAudioProcessingTap(cb, MTAudioProcessingTapCreationFlags.PreEffects))
            {
                handle = res.Handle;
                Assert.AreEqual(44, (int)res.GetStorage());
                // Exactly one CF reference should be held while inside the using block.
                Assert.That(CFGetRetainCount(handle), Is.EqualTo((nint)1), "RC");
            }
        }
    }
Example #3
0
        /// <summary>
        /// Creates an MTAudioProcessingTap with process and initialize callbacks,
        /// then verifies that tap storage round-trips and that the tap handle has
        /// the expected retain count while the tap is alive.
        /// </summary>
        public unsafe void Initialization()
        {
            // MediaToolbox was introduced in iOS 6.0; mark inconclusive on older systems.
            if (!TestRuntime.CheckSystemAndSDKVersion(6, 0))
            {
                Assert.Inconclusive("MediaToolbox is new in 6.0");
            }

            // The frame-count parameters are nint in the unified (XAMCORE_2_0)
            // API and long in the classic API; the bodies are identical.
            var cb = new MTAudioProcessingTapCallbacks(
#if XAMCORE_2_0
                delegate(MTAudioProcessingTap tap, nint numberFrames, MTAudioProcessingTapFlags flags, AudioBuffers bufferList, out nint numberFramesOut, out MTAudioProcessingTapFlags flagsOut) {
#else
                delegate(MTAudioProcessingTap tap, long numberFrames, MTAudioProcessingTapFlags flags, AudioBuffers bufferList, out long numberFramesOut, out MTAudioProcessingTapFlags flagsOut) {
#endif
                numberFramesOut = 2;
                flagsOut        = MTAudioProcessingTapFlags.StartOfStream;
            });

            // Store a sentinel value (44) in tap storage so GetStorage() can be checked below.
            cb.Initialize = delegate(MTAudioProcessingTap tap, out void *tapStorage) {
                tapStorage = (void *)44;
            };

            IntPtr handle;

            using (var res = new MTAudioProcessingTap(cb, MTAudioProcessingTapCreationFlags.PreEffects))
            {
                handle = res.Handle;
                Assert.AreEqual(44, (int)res.GetStorage());
                // Exactly one CF reference should be held while inside the using block.
                Assert.That(CFGetRetainCount(handle), Is.EqualTo((nint)1), "RC");
            }
        }
    }
        /// <summary>
        /// Tap prepare callback: records processing-format details in the context
        /// and builds the bandpass filter Audio Unit. On any configuration failure
        /// the unit is disposed and context.AudioUnit ends up null.
        /// </summary>
        /// <param name="tap">The tap being prepared (unused).</param>
        /// <param name="maxFrames">Maximum frames per processing slice.</param>
        /// <param name="processingFormat">Stream format the tap will process.</param>
        unsafe void TapPrepare(MTAudioProcessingTap tap, nint maxFrames, ref AudioStreamBasicDescription processingFormat)
        {
            // Remember the sample rate so the CenterFrequency property can use it.
            context.SampleRate = processingFormat.SampleRate;

            // Needed for the RMS calculation, not for the Audio Unit itself.
            VerifyProcessingFormat(processingFormat);

            if (processingFormat.FormatFlags.HasFlag(AudioFormatFlags.IsNonInterleaved))
                context.IsNonInterleaved = true;

            /* Create bandpass filter Audio Unit */

            var description = AudioComponentDescription.CreateEffect(AudioTypeEffect.BandPassFilter);
            // TODO: https://trello.com/c/GZUGUyH0
            var component = AudioComponent.FindNextComponent(null, ref description);
            if (component == null)
                return;

            var status = AudioUnitStatus.NoError;
            AudioUnit.AudioUnit unit = component.CreateAudioUnit();

            // Input/output format must both match the processing format.
            try {
                unit.SetAudioFormat(processingFormat, AudioUnitScopeType.Input);
                unit.SetAudioFormat(processingFormat, AudioUnitScopeType.Output);
            } catch (AudioUnitException) {
                status = AudioUnitStatus.FormatNotSupported;
            }

            // Each configuration step runs only if the previous one succeeded.
            if (status == AudioUnitStatus.NoError)
                status = unit.SetRenderCallback(Render, AudioUnitScopeType.Input);

            if (status == AudioUnitStatus.NoError)
                status = unit.SetMaximumFramesPerSlice((uint)maxFrames, AudioUnitScopeType.Global);

            if (status == AudioUnitStatus.NoError)
                status = (AudioUnitStatus)unit.Initialize();

            if (status != AudioUnitStatus.NoError) {
                unit.Dispose();
                unit = null;
            }

            context.AudioUnit = unit;
        }
Example #5
0
            /// <summary>
            /// Tap unprepare callback: releases the bandpass filter Audio Unit
            /// created in TapPrepare, if any.
            /// </summary>
            /// <param name="tap">The tap being unprepared (unused).</param>
            unsafe void Unprepare(MTAudioProcessingTap tap)
            {
                /* Release bandpass filter Audio Unit */

                if (context.AudioUnit == null)
                {
                    return;
                }

                context.AudioUnit.Dispose();
                // BUGFIX: clear the reference so later callbacks that check
                // context.AudioUnit != null (e.g. TapProcess) do not use a
                // disposed Audio Unit, and a repeated Unprepare is a no-op.
                context.AudioUnit = null;
            }
        /// <summary>
        /// Tap initialize callback: resets the processor context to its defaults.
        /// Per-tap storage is unused because all state lives in the context field.
        /// </summary>
        /// <param name="tap">The tap being initialized (unused).</param>
        /// <param name="tapStorage">Receives a null pointer (no per-tap storage).</param>
        unsafe void TapInitialization(MTAudioProcessingTap tap, out void *tapStorage)
        {
            var freshContext = new AVAudioTapProcessorContext();
            freshContext.SupportedTapProcessingFormat = false;
            freshContext.IsNonInterleaved = false;
            freshContext.SampleRate = double.NaN;
            freshContext.SampleCount = 0;
            freshContext.LeftChannelVolume = 0;
            freshContext.RightChannelVolume = 0;
            context = freshContext;

            // We don't use tapStorage; we store all data within the context field.
            tapStorage = null;
        }
		/// <summary>
		/// Builds an AVAudioMix whose input parameters route the asset's audio
		/// track through this processor's MTAudioProcessingTap (pre-effects).
		/// </summary>
		/// <returns>The configured audio mix.</returns>
		unsafe AVAudioMix CreateAudioMix ()
		{
			var mix = AVMutableAudioMix.Create ();
			var inputParams = AVMutableAudioMixInputParameters.FromTrack (audioAssetTrack);

			// Wire up all tap lifecycle callbacks before creating the tap.
			var callbacks = new MTAudioProcessingTapCallbacks (TapProcess) {
				Initialize = TapInitialization,
				Finalize = Finalaze,
				Prepare = TapPrepare,
				Unprepare = Unprepare,
			};

			audioProcessingTap = new MTAudioProcessingTap (callbacks, MTAudioProcessingTapCreationFlags.PreEffects);
			inputParams.AudioTapProcessor = audioProcessingTap;

			mix.InputParameters = new AVAudioMixInputParameters[] { inputParams };

			return mix;
		}
        /// <summary>
        /// Builds an AVAudioMix whose input parameters route the asset's audio
        /// track through this processor's MTAudioProcessingTap (pre-effects).
        /// </summary>
        /// <returns>The configured audio mix.</returns>
        unsafe AVAudioMix CreateAudioMix()
        {
            var mix = AVMutableAudioMix.Create();
            var inputParams = AVMutableAudioMixInputParameters.FromTrack(audioAssetTrack);

            // Wire up all tap lifecycle callbacks before creating the tap.
            var callbacks = new MTAudioProcessingTapCallbacks(TapProcess)
            {
                Initialize = TapInitialization,
                Finalize   = Finalaze,
                Prepare    = TapPrepare,
                Unprepare  = Unprepare,
            };

            audioProcessingTap = new MTAudioProcessingTap(callbacks, MTAudioProcessingTapCreationFlags.PreEffects);
            inputParams.AudioTapProcessor = audioProcessingTap;

            mix.InputParameters = new AVAudioMixInputParameters[] { inputParams };

            return mix;
        }
Example #9
0
            /// <summary>
            /// Tap prepare callback: records processing-format details, builds an
            /// N-band EQ Audio Unit, then configures its band count and per-band
            /// frequency/bypass parameters from Parent.Bands.
            /// </summary>
            /// <param name="tap">The tap being prepared (unused).</param>
            /// <param name="maxFrames">Maximum frames per processing slice.</param>
            /// <param name="processingFormat">Stream format the tap will process.</param>
            unsafe void TapPrepare(MTAudioProcessingTap tap, nint maxFrames, ref AudioStreamBasicDescription processingFormat)
            {
                // Store sample rate for CenterFrequency property
                context.SampleRate = processingFormat.SampleRate;

                /* Verify processing format (this is not needed for Audio Unit, but for RMS calculation). */
                VerifyProcessingFormat(processingFormat);

                if (processingFormat.FormatFlags.HasFlag(AudioFormatFlags.IsNonInterleaved))
                {
                    context.IsNonInterleaved = true;
                }

                /* Create N-band EQ Audio Unit */

                var audioComponentDescription = AudioComponentDescription.CreateEffect(AudioTypeEffect.NBandEq);
                // TODO: https://trello.com/c/GZUGUyH0
                var audioComponent = AudioComponent.FindNextComponent(null, ref audioComponentDescription);

                if (audioComponent == null)
                {
                    return;
                }

                var error     = AudioUnitStatus.NoError;
                var audioUnit = audioComponent.CreateAudioUnit();

                // Input/output format must both match the processing format.
                try
                {
                    audioUnit.SetFormat(processingFormat, AudioUnitScopeType.Input);
                    audioUnit.SetFormat(processingFormat, AudioUnitScopeType.Output);
                }
                catch (AudioUnitException)
                {
                    error = AudioUnitStatus.FormatNotSupported;
                }

                // Each configuration step runs only if the previous one succeeded.
                if (error == AudioUnitStatus.NoError)
                {
                    error = audioUnit.SetRenderCallback(Render, AudioUnitScopeType.Input);
                }

                if (error == AudioUnitStatus.NoError)
                {
                    error = audioUnit.SetMaximumFramesPerSlice((uint)maxFrames, AudioUnitScopeType.Global);
                }

                if (error == AudioUnitStatus.NoError)
                {
                    error = (AudioUnitStatus)audioUnit.Initialize();
                }

                if (error != AudioUnitStatus.NoError)
                {
                    audioUnit.Dispose();
                    audioUnit = null;
                }

                context.AudioUnit = audioUnit;

                // BUGFIX: bail out when the Audio Unit could not be created or
                // initialized; previously audioUnit.Handle below dereferenced the
                // null reference and threw NullReferenceException.
                if (audioUnit == null)
                {
                    return;
                }

                uint value = 10;
                uint size  = sizeof(uint);
                var  stat  = AudioUnitSetProperty(audioUnit.Handle, AUNGraphicParams.NumberOfBands, AudioUnitScopeType.Global, 0,
                                                  ref value, size);

                // Configure each EQ band's center frequency and un-bypass it.
                for (var i = 0; i < Parent.Bands.Length; i++)
                {
                    var band = Parent.Bands[i];
                    var freq = context.AudioUnit.SetParameter(AudioUnitParameterType.AUNBandEQFrequency + i, band.Center,
                                                              AudioUnitScopeType.Global);
                    var bypass = context.AudioUnit.SetParameter(AudioUnitParameterType.AUNBandEQBypassBand + i, 0,
                                                                AudioUnitScopeType.Global);
                    SetBand(i, band.Gain);
                    Console.WriteLine(freq);
                }
            }
Example #10
0
 /// <summary>
 /// Tap finalize callback; intentionally empty — there is nothing to clean up.
 /// NOTE(review): a method named "Finalize" hides Object.Finalize and triggers
 /// compiler warning CS0465; kept as-is since callback wiring may reference it.
 /// </summary>
 unsafe void Finalize(MTAudioProcessingTap tap)
 {
 }
		/// <summary>
		/// Tap unprepare callback: releases the bandpass filter Audio Unit
		/// created in TapPrepare, if any.
		/// </summary>
		/// <param name="tap">The tap being unprepared (unused).</param>
		unsafe void Unprepare (MTAudioProcessingTap tap)
		{
			/* Release bandpass filter Audio Unit */

			if (context.AudioUnit == null)
				return;

			context.AudioUnit.Dispose ();
			// BUGFIX: clear the reference so later callbacks that check
			// context.AudioUnit != null (e.g. TapProcess) do not use a
			// disposed Audio Unit, and a repeated Unprepare is a no-op.
			context.AudioUnit = null;
		}
		/// <summary>
		/// Tap prepare callback: records processing-format details in the context
		/// and builds the bandpass filter Audio Unit. On any configuration failure
		/// the unit is disposed and context.AudioUnit ends up null.
		/// </summary>
		/// <param name="tap">The tap being prepared (unused).</param>
		/// <param name="maxFrames">Maximum frames per processing slice.</param>
		/// <param name="processingFormat">Stream format the tap will process.</param>
		unsafe void TapPrepare (MTAudioProcessingTap tap, nint maxFrames, ref AudioStreamBasicDescription processingFormat)
		{
			// Store sample rate for CenterFrequency property
			context.SampleRate = processingFormat.SampleRate;

			/* Verify processing format (this is not needed for Audio Unit, but for RMS calculation). */
			VerifyProcessingFormat (processingFormat);

			if (processingFormat.FormatFlags.HasFlag (AudioFormatFlags.IsNonInterleaved))
				context.IsNonInterleaved = true;

			/* Create bandpass filter Audio Unit */

			var audioComponentDescription = AudioComponentDescription.CreateEffect (AudioTypeEffect.BandPassFilter);
			// TODO: https://trello.com/c/GZUGUyH0
			var audioComponent = AudioComponent.FindNextComponent (null, ref audioComponentDescription);
			if (audioComponent == null)
				return;

			AudioUnitStatus error = AudioUnitStatus.NoError;
			AudioUnit.AudioUnit audioUnit = audioComponent.CreateAudioUnit ();
			// Input and output format must both match the processing format.
			try {
				audioUnit.SetAudioFormat (processingFormat, AudioUnitScopeType.Input);
				audioUnit.SetAudioFormat (processingFormat, AudioUnitScopeType.Output);
			} catch (AudioUnitException) {
				error = AudioUnitStatus.FormatNotSupported;
			}

			// Each configuration step runs only if the previous one succeeded.
			if (error == AudioUnitStatus.NoError)
				error = audioUnit.SetRenderCallback (Render, AudioUnitScopeType.Input);

			if (error == AudioUnitStatus.NoError)
				error = audioUnit.SetMaximumFramesPerSlice ((uint)maxFrames, AudioUnitScopeType.Global);

			if (error == AudioUnitStatus.NoError)
				error = (AudioUnitStatus)audioUnit.Initialize ();

			// On any failure, dispose the unit and publish null to the context.
			if (error != AudioUnitStatus.NoError) {
				audioUnit.Dispose ();
				audioUnit = null;
			}

			context.AudioUnit = audioUnit;
		}
		/// <summary>
		/// Tap finalize callback; intentionally empty — there is nothing to release.
		/// NOTE(review): "Finalaze" is a typo, kept because CreateAudioMix assigns
		/// this method to the callbacks' Finalize slot by this exact name.
		/// </summary>
		unsafe void Finalaze (MTAudioProcessingTap tap)
		{
		}
		/// <summary>
		/// Tap initialize callback: resets the processor context to its defaults.
		/// Per-tap storage is unused because all state lives in the context field.
		/// </summary>
		/// <param name="tap">The tap being initialized (unused).</param>
		/// <param name="tapStorage">Receives a null pointer (no per-tap storage).</param>
		unsafe void TapInitialization (MTAudioProcessingTap tap, out void* tapStorage)
		{
			// SampleRate starts as NaN so "not yet prepared" is distinguishable.
			context = new AVAudioTapProcessorContext {
				SupportedTapProcessingFormat = false,
				IsNonInterleaved = false,
				SampleRate = double.NaN,
				SampleCount = 0,
				LeftChannelVolume = 0,
				RightChannelVolume = 0
			};

			// We don't use tapStorage we store all data within context field
			tapStorage = (void*)IntPtr.Zero;
		}
		/// <summary>
		/// Tap process callback: renders through the bandpass filter Audio Unit
		/// when the filter is enabled, otherwise pulls source audio from the tap,
		/// then recomputes the channel volume levels via UpdateVolumes().
		/// </summary>
		/// <param name="tap">The processing tap invoking this callback.</param>
		/// <param name="numberFrames">Number of frames requested.</param>
		/// <param name="flags">Input processing flags (unused here).</param>
		/// <param name="bufferList">Audio buffers to fill and process.</param>
		/// <param name="numberFramesOut">Frames actually produced; 0 on failure.</param>
		/// <param name="flagsOut">Output processing flags.</param>
		unsafe void TapProcess (MTAudioProcessingTap tap, nint numberFrames, MTAudioProcessingTapFlags flags,
		                        AudioBuffers bufferList,
		                        out nint numberFramesOut,
		                        out MTAudioProcessingTapFlags flagsOut)
		{
			numberFramesOut = 0;
			flagsOut = (MTAudioProcessingTapFlags)0;

			// Nothing to do if the prepare step rejected the processing format.
			if (!context.SupportedTapProcessingFormat) {
				Console.WriteLine ("Unsupported tap processing format.");
				return;
			}

			if (!IsBandpassFilterEnabled) {
				// Pull source audio directly from the tap (AudioUnitRender() would
				// otherwise fill bufferList through the render callback).
				CMTimeRange sourceRange;
				var err = tap.GetSourceAudio (numberFrames, bufferList, out flagsOut, out sourceRange, out numberFramesOut);
				if (err != MTAudioProcessingTapError.None) {
					Console.WriteLine ("MTAudioProcessingTapGetSourceAudio: {0}", err);
					return;
				}
			} else if (context.AudioUnit != null) {
				// Render through the bandpass filter Audio Unit.
				var timeStamp = new AudioTimeStamp {
					SampleTime = context.SampleCount,
					Flags = AudioTimeStamp.AtsFlags.SampleTimeValid
				};

				var renderFlags = (AudioUnitRenderActionFlags)0;
				var renderStatus = context.AudioUnit.Render (ref renderFlags, timeStamp, 0, (uint)numberFrames, bufferList);
				if (renderStatus != AudioUnitStatus.NoError) {
					Console.WriteLine ("AudioUnitRender(): {0}", renderStatus);
					return;
				}

				// Advance the running sample position and report the frame count.
				context.SampleCount += numberFrames;
				numberFramesOut = numberFrames;
			}

			UpdateVolumes (bufferList, numberFrames);
		}