Example #1
		public unsafe void ProcessWithEvents (AudioTimeStamp timestamp, int frameCount, AURenderEventEnumerator events)
		{
			var now = (nint)timestamp.SampleTime;
			int framesRemaining = frameCount;
			while (framesRemaining > 0) {
				if (events.IsAtEnd) {
					int bufferOffset = frameCount - framesRemaining;
					Process (framesRemaining, bufferOffset);
					return;
				}

				var framesThisSegment = (int)(events.Current.Head.EventSampleTime - now);

				if (framesThisSegment > 0) {
					int bufferOffset = frameCount - framesRemaining;
					Process (framesThisSegment, bufferOffset);
					framesRemaining -= framesThisSegment;
					now += framesThisSegment;
				}

				foreach (AURenderEvent e in events.EnumeratorCurrentEvents (now))
					HandleOneEvent (e);
				events.MoveNext ();
			}
		}
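
The loop above renders in segments between scheduled event times; Process and HandleOneEvent belong to the surrounding kernel class and are not shown. A minimal sketch of the contract those helpers fulfil (names and behavior here are assumptions, not part of the example):

		// Hypothetical helpers, sketched only to show what the segmented loop relies on.
		void Process (int frameCount, int bufferOffset)
		{
			// Render `frameCount` frames starting `bufferOffset` frames into the
			// current I/O buffers; a real kernel applies its DSP here.
		}

		void HandleOneEvent (AURenderEvent e)
		{
			// Dispatch on e.Head.EventType (parameter change, ramp, MIDI, ...) and
			// update the kernel state so the next Process segment reflects it.
		}
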
Example #2
        static int renderCallback(IntPtr inRefCon,
            ref AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
            ref AudioTimeStamp _inTimeStamp,
            uint _inBusNumber,
            uint _inNumberFrames,
            AudioBufferList _ioData)
        {
            // get the AUGraph instance back from the GCHandle
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (AUGraph)handler.Target;

            // invoke the event handler with an argument
            if (inst.RenderCallback != null)
            {
                var args = new AudioGraphEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst.RenderCallback(inst, args);
            }

            return 0; // noerror
        }
Example #3
        static int renderCallback(IntPtr inRefCon,
            ref AudioUnitRenderActionFlags _ioActionFlags,
            ref AudioTimeStamp _inTimeStamp,
            uint _inBusNumber,
            uint _inNumberFrames,
            AudioBufferList _ioData)
        {
            //System.Diagnostics.Debug.WriteLine(_ioActionFlags);
            // get the AudioUnit instance back from the GCHandle
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (AudioUnit)handler.Target;
            
            // invoke the event handler with an argument
            if (inst._renderEvent != null) 
            {
                var args = new AudioUnitEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst._renderEvent(inst, args);
            }

            return 0; // noerror
        }
Example #4
		public AudioUnitStatus PullInput (ref AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timestamp, uint frameCount, int inputBusNumber, AURenderPullInputBlock pullInputBlock)
		{
			if (pullInputBlock == null)
				return AudioUnitStatus.NoConnection;

			PrepareInputBufferList ();
			AudioUnitStatus s = pullInputBlock (ref actionFlags, ref timestamp, frameCount, inputBusNumber, MutableAudioBufferList);
			return s;
		}
Example #5
        AudioUnitStatus RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			// getting microphone input signal
			audioUnit.Render (ref actionFlags, timeStamp, 1, numberFrames, data);

			// Getting a pointer to a buffer to be filled
			IntPtr outL = data [0].Data;
			IntPtr outR = data [1].Data;

			// Getting the signal level: RMS = sqrt((1/N) * sum(v[i]^2))
			// https://en.wikipedia.org/wiki/Root_mean_square
			float sqrSum = 0;
			for (int j = 0; j < numberFrames; j++) {
				float v = Marshal.ReadInt16(outL, j * sizeof(Int16));
				sqrSum += (v * v);
			}
			SignalLevel = (float)Math.Sqrt (sqrSum / numberFrames);

			if (triggered <= 0 && SignalLevel > Threshold)
				triggered = FramesToPlay;

			// playing sound
			unsafe {
				var outLPtr = (int*)outL.ToPointer ();
				var outRPtr = (int*)outR.ToPointer ();

				for (int i = 0; i < numberFrames; i++) {
					triggered = Math.Max (0, triggered - 1);

					if (triggered > 0) {
						var buf0 = (int*)buffer [0].Data;
						var buf1 = (int*)buffer [numberOfChannels - 1].Data;

						// read the current frame first, then advance; CurrentFrame presumably
						// wraps its backing currentFrame field at the end of the buffer
						*outLPtr++ = buf0 [currentFrame];
						*outRPtr++ = buf1 [currentFrame];
						++CurrentFrame;
					} else {
						// 0-filling
						*outLPtr++ = 0;
						*outRPtr++ = 0;
					}
				}
			}

			return AudioUnitStatus.NoError;
		}
Example #6
        static int device_renderCallback(IntPtr inRefCon,
            ref AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
            ref AudioTimeStamp _inTimeStamp,
            uint _inBusNumber,
            uint _inNumberFrames,
            AudioBufferList _ioData)
        {
            System.Diagnostics.Debug.WriteLine("o");

            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (RemoteOutput)handler.Target;
            var waveDef = inst._waveDef[_inBusNumber];

            double dphai = 2 * Math.PI * waveDef.frequency / waveDef.sampleRate;
            double phase = waveDef.phase;

            // Getting a pointer to a buffer to be filled
            IntPtr outL = _ioData.mBuffers[0].mData;
            IntPtr outR = _ioData.mBuffers[1].mData;

            // fill the sine waveform.
            // AudioUnitSampleType is different between a simulator (float32) and a real device (int32).
            unsafe
            {
                var outLPtr = (int*)outL.ToPointer();
                var outRPtr = (int*)outR.ToPointer();
                for (int i = 0; i < _inNumberFrames; i++)
                {
                    int sample = (int)(Math.Sin(phase) * int.MaxValue / 128); // 8.24 fixed point: int.MaxValue / 128 is about 1 << 24, i.e. full scale 1.0
                    *outLPtr++ = sample;
                    *outRPtr++ = sample;
                    phase += dphai;
                }
            }
            waveDef.phase = phase % (2 * Math.PI);
            return 0;
        }
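
The fill loop above is the fixed-point variant for a real device. On the simulator, where AudioUnitSampleType is float32, the equivalent fill would look roughly like this (a sketch under the same assumptions, writing full-scale [-1, 1] floats instead of 8.24 integers):

            // Hypothetical float32 (simulator) variant of the same fill loop.
            unsafe
            {
                var outLPtrF = (float*)outL.ToPointer();
                var outRPtrF = (float*)outR.ToPointer();
                for (int i = 0; i < _inNumberFrames; i++)
                {
                    float sample = (float)Math.Sin(phase); // full scale is 1.0f here
                    *outLPtrF++ = sample;
                    *outRPtrF++ = sample;
                    phase += dphai;
                }
            }
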
Example #7
		public unsafe void ProcessWithEvents (AudioTimeStamp timestamp, int frameCount, AURenderEventEnumerator events)
		{
			var now = (nint)timestamp.SampleTime;
			int framesRemaining = frameCount;
			AURenderEvent* theEvent = events.UnsafeFirst;
			while (framesRemaining > 0) {
				if (theEvent == null) {
					int bufferOffset = frameCount - framesRemaining;
					Process (framesRemaining, bufferOffset);
					return;
				}

				int framesThisSegment = (int)(theEvent->Head.EventSampleTime - now);

				if (framesThisSegment > 0) {
					int bufferOffset = frameCount - framesRemaining;
					Process (framesThisSegment, bufferOffset);
					framesRemaining -= framesThisSegment;
					now += framesThisSegment;
				}

				PerformAllSimultaneousEvents (now, &theEvent);
			}
		}
Example #8
		uint TapProc (AudioQueueProcessingTap audioQueueTap, uint numberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data)
		{
			AudioQueueProcessingTapFlags source_flags;
			uint source_frames;

			if (audioQueueTap.GetSourceAudio (numberOfFrames, ref timeStamp, out source_flags, out source_frames, data) != AudioQueueStatus.Ok)
				throw new ApplicationException ();

			preRenderData = data [0].Data;
			data.SetData (0, IntPtr.Zero);

			var renderTimeStamp = new AudioTimeStamp ();
			renderTimeStamp.Flags = AudioTimeStamp.AtsFlags.SampleTimeValid;
			AudioUnitRenderActionFlags action_flags = 0;

			var res = genericOutputUnit.Render (ref action_flags, renderTimeStamp, 0, numberOfFrames, data);
			if (res != AudioUnitStatus.NoError)
				throw new ApplicationException ();

			return source_frames;
		}
Example #9
 public void Render(AudioUnitRenderActionFlags flags, AudioTimeStamp timeStamp, UInt32 outputBusnumber, UInt32 numberFrames, AudioBufferList data)
 {
     int err = AudioUnitRender (_audioUnit,
         ref flags,
         ref timeStamp,
         outputBusnumber,
         numberFrames,
         data);
     if (err != 0)
         throw new InvalidOperationException(String.Format("Error code:{0}", err));
 }
Example #10
 // Assumed P/Invoke declaration: the [DllImport] attribute is omitted in the snippet;
 // AudioUnitRender is exported by the AudioToolbox/AudioUnit framework.
 [DllImport ("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
 static extern AudioUnitStatus AudioUnitRender(IntPtr inUnit, ref AudioUnitRenderActionFlags ioActionFlags, ref AudioTimeStamp inTimeStamp,
                                               uint inOutputBusNumber, uint inNumberFrames, IntPtr ioData);
Example #11
 public AudioUnitStatus Render(ref AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
     return(AudioUnitRender(handle, ref actionFlags, ref timeStamp, busNumber, numberFrames, (IntPtr)data));
 }
Example #12
        AudioUnitStatus _audioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // getting microphone input signal
            _audioUnit.Render(ref actionFlags,
                              timeStamp,
                              1, // Remote input
                              numberFrames,
                              data);

            // Getting a pointer to a buffer to be filled
            IntPtr outL = data[0].Data;
            IntPtr outR = data[1].Data;

            // Getting signal level and trigger detection
            unsafe
            {
                var outLPtr = (int *)outL.ToPointer();
                for (int i = 0; i < numberFrames; i++)
                {
                    // LPF: smooth the rectified signal (fast attack, slower release)
                    float diff = Math.Abs(*outLPtr) - _signalLevel;
                    if (diff > 0)
                    {
                        _signalLevel += diff / 1000f;
                    }
                    else
                    {
                        _signalLevel += diff / 10000f;
                    }

                    diff = Math.Abs(diff);

                    // sound trigger detection
                    if (_triggered <= 0 && diff > _threshold)
                    {
                        _triggered = _playingDuration;
                    }
                }
            }

            // playing sound
            unsafe
            {
                var outLPtr = (int *)outL.ToPointer();
                var outRPtr = (int *)outR.ToPointer();

                for (int i = 0; i < numberFrames; i++)
                {
                    _triggered = Math.Max(0, _triggered - 1);

                    if (_triggered <= 0)
                    {
                        // 0-filling
                        *outLPtr++ = 0;
                        *outRPtr++ = 0;
                    }
                    else
                    {
                        var buf0 = (int *)_buffer[0].Data;
                        var buf1 = (_numberOfChannels == 2) ? (int *)_buffer[1].Data : buf0;

                        if (_currentFrame >= _totalFrames)
                        {
                            _currentFrame = 0;
                        }

                        // read the current frame first, then advance, so the index stays within [0, _totalFrames)
                        *outLPtr++ = buf0[_currentFrame];
                        *outRPtr++ = buf1[_currentFrame];
                        ++_currentFrame;
                    }
                }
            }

            return(AudioUnitStatus.NoError);
        }
Example #13
 protected override void OnInputCompleted(IntPtr audioQueueBuffer, AudioTimeStamp timeStamp, AudioStreamPacketDescription[] packetDescriptions)
 {
     throw new NotImplementedException();
 }
Example #14
		AudioUnitStatus Render (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			// Just return audio buffers from MTAudioProcessingTap.
			MTAudioProcessingTapFlags flags;
			CMTimeRange range;
			nint n;
			var error = (AudioUnitStatus)(int)audioProcessingTap.GetSourceAudio ((nint)numberFrames, data, out flags, out range, out n);
			if (error != AudioUnitStatus.NoError)
				Console.WriteLine ("{0} audioProcessingTap.GetSourceAudio failed", error);
			return error;
		}
Example #15
 AudioUnitStatus ConvertInputRenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
     renderTimeStamp.SampleTime += numberFrames;
     for (int channel = 0; channel < data.Count; channel++)
     {
         data.SetData(channel, preRenderData [channel]);
     }
     return(AudioUnitStatus.NoError);
 }
Example #16
		unsafe void TapProcess (MTAudioProcessingTap tap, nint numberFrames, MTAudioProcessingTapFlags flags,
		                        AudioBuffers bufferList,
		                        out nint numberFramesOut,
		                        out MTAudioProcessingTapFlags flagsOut)
		{
			numberFramesOut = 0;
			flagsOut = (MTAudioProcessingTapFlags)0;

			// Skip processing when format not supported.
			if (!context.SupportedTapProcessingFormat) {
				Console.WriteLine ("Unsupported tap processing format.");
				return;
			}

			if (IsBandpassFilterEnabled) {
				// Apply bandpass filter Audio Unit.
				if (context.AudioUnit != null) {
					var audioTimeStamp = new AudioTimeStamp {
						SampleTime = context.SampleCount,
						Flags = AudioTimeStamp.AtsFlags.SampleTimeValid
					};

					var f = (AudioUnitRenderActionFlags)0;
					var status = context.AudioUnit.Render (ref f, audioTimeStamp, 0, (uint)numberFrames, bufferList);
					if (status != AudioUnitStatus.NoError) {
						Console.WriteLine ("AudioUnitRender(): {0}", status);
						return;
					}

					// Increment sample count for audio unit.
					context.SampleCount += numberFrames;

					// Set number of frames out.
					numberFramesOut = numberFrames;
				}
			} else {
				// Get actual audio buffers from MTAudioProcessingTap (AudioUnitRender() will fill bufferListInOut otherwise).
				CMTimeRange tr;
				MTAudioProcessingTapError status = tap.GetSourceAudio (numberFrames, bufferList, out flagsOut, out tr, out numberFramesOut);
				if (status != MTAudioProcessingTapError.None) {
					Console.WriteLine ("MTAudioProcessingTapGetSourceAudio: {0}", status);
					return;
				}
			}

			UpdateVolumes (bufferList, numberFrames);
		}
Example #17
        AudioUnitStatus _audioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            if (_processBlockEventHandler != null)
            {
                if (_processBlockArgs == null ||
                    _processBlockArgs.ChannelCount != _audioFormat.ChannelsPerFrame ||
                    _processBlockArgs.NumberOfFrames != numberFrames)
                {
                    _processBlockArgs = new NativeProcessBlockEventArgs((int)numberFrames, _audioFormat.ChannelsPerFrame);
                }

                _processBlockArgs.AudioBuffers = data;
                _processBlockEventHandler(this, _processBlockArgs);

                //_processBlockArgs.ReturnInterleavedBuffer ();
                _processBlockArgs.ReturnNonInterleavedBuffer();
            }

            return(AudioUnitStatus.NoError);
        }
Example #18
        /// <summary>
        /// Render callback for the output node. Can simultaneously write to a file.
        /// </summary>
        /// <returns>The render delegate.</returns>
        /// <param name="actionFlags">Action flags.</param>
        /// <param name="timeStamp">Time stamp.</param>
        /// <param name="busNumber">Bus number.</param>
        /// <param name="numberFrames">Number frames.</param>
        /// <param name="data">Data.</param>
        AudioUnitStatus OutputRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // propagate tempo change on start of a new cycle
            if (_tempoChanged)
            {
                PropagateTempoChange(_tempoChangeRatio);

                _tempoChanged = false;
            }

            var e = MixerNode.Render(ref actionFlags, timeStamp, busNumber, numberFrames, data);

            cycle++;

            // check for a queued layer change
            if (Metronome.Instance.NeedToChangeLayer == true)
            {
                Metronome.Instance.CycleToChange     = cycle;
                Metronome.Instance.NeedToChangeLayer = false;
                Metronome.Instance.ChangeLayerTurnstyle.Set();
            }
            else if (Metronome.Instance.NeedToChangeLayer == null)
            {
                // top off the fast-forward
                double cycleDiff = cycle - Metronome.Instance.CycleToChange;

                Metronome.Instance.FastForwardChangedLayers(cycleDiff);

                foreach (KeyValuePair <int, Layer> pair in Metronome.Instance.LayersToChange)
                {
                    Layer copy = pair.Value;
                    Layer real = Metronome.Instance.Layers[pair.Key];

                    int  numberRemoved = 0;
                    bool isMuted       = false;
                    // remove old sources
                    foreach (IStreamProvider src in real.GetAllStreams())
                    {
                        RemoveStream(src);
                        src.Dispose();
                        numberRemoved++;
                        isMuted = src.IsMuted;
                    }

                    // transfer sources to real layer
                    real.AudioSources    = copy.AudioSources;
                    real.BaseAudioSource = copy.BaseAudioSource;
                    real.PitchSource     = copy.PitchSource;
                    real.BaseSourceName  = copy.BaseSourceName;
                    real.HasHiHatOpen    = copy.HasHiHatOpen;
                    real.HasHiHatClosed  = copy.HasHiHatClosed;
                    real.Beat            = copy.Beat;

                    foreach (IStreamProvider src in real.GetAllStreams().OrderBy(x => x.Info.HiHatStatus != StreamInfoProvider.HiHatStatuses.Down))
                    {
                        src.IsMuted = isMuted;
                        src.Layer   = real;
                        if (numberRemoved <= 0)
                        {
                            // it crashes if we try to add a render callback for a preexisting bus
                            Metronome.Instance.AddAudioSource(src);
                        }
                        else
                        {
                            Streams.Add(src);
                        }

                        numberRemoved--;
                    }

                    copy.AudioSources    = null;
                    copy.BaseAudioSource = null;
                    copy.PitchSource     = null;
                    copy.Beat            = null;
                    Metronome.Instance.Layers.Remove(copy);

                    foreach (IStreamProvider src in Streams)
                    {
                        // keep muting consistent when shuffling buffer indexes
                        if (src.IsMuted)
                        {
                            EnableInput(src, false);
                        }
                        else
                        {
                            EnableInput(src, true);
                        }

                        SetPan(src, src.Pan);
                        SetInputVolume(src, (float)src.Volume);
                    }
                }

                Metronome.Instance.LayersToChange.Clear();
                Metronome.Instance.NeedToChangeLayer = false;


                // trigger beat changed event
                AppKit.NSApplication.SharedApplication.BeginInvokeOnMainThread(
                    () => { Metronome.Instance.OnBeatChanged(null); });
            }

            // check if recording to file
            if (_fileRecordingQueued)
            {
                // convert the buffer
                using (AudioBuffers convBuffer = new AudioBuffers(1))
                {
                    convBuffer[0] = new AudioBuffer()
                    {
                        DataByteSize   = data[0].DataByteSize,
                        NumberChannels = 1,
                        Data           = Marshal.AllocHGlobal(sizeof(float) * data[0].DataByteSize)
                    };

                    _converter.ConvertComplexBuffer((int)numberFrames, data, convBuffer);

                    _file.Write(numberFrames, convBuffer);

                    // free the per-buffer memory we allocated above; disposing the
                    // AudioBuffers releases the list structure but not this allocation
                    Marshal.FreeHGlobal(convBuffer[0].Data);
                }
            }

            return(AudioUnitStatus.OK);
        }
Example #19
        /// <summary>
        /// Renders the mixer node. Orchestrates dynamic changes to tempo and beatcode.
        /// </summary>
        /// <returns>The render delegate.</returns>
        /// <param name="actionFlags">Action flags.</param>
        /// <param name="timeStamp">Time stamp.</param>
        /// <param name="busNumber">Bus number.</param>
        /// <param name="numberFrames">Number frames.</param>
        /// <param name="data">Data.</param>
        unsafe AudioUnitStatus MixerRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            if (busNumber >= Streams.Count)
            {
                // this prevents the buffer from doubling up with unused buses
                return(AudioUnitStatus.InvalidElement);
            }

            var outLeft  = (float *)data[0].Data;
            var outRight = (float *)data[1].Data;

            // if there's a count-off, we read from the count-off source
            if (CountOffSampleDuration > 0)
            {
                // skip all inputs but the last one so that the non-count-off cycle starts with bus 0
                if (busNumber != Streams.Count - 1)
                {
                    return(AudioUnitStatus.InvalidElement);
                }

                var stream = Streams[(int)busNumber];

                //if (stream.IsMuted)
                //{
                //    EnableInput(stream, true);
                //}

                _countOff.Read(outLeft, outRight, numberFrames);

                CountOffSampleDuration -= numberFrames;

                // set elapsed bpm and cycles to 0
                if (CountOffSampleDuration == 0)
                {
                    Metronome.Instance.ElapsedBpm -= Metronome.Instance.ConvertSamplesToBpm(_countOffTotal);
                    cycle = -1;
                    EnableInput(stream, !stream.IsMuted);
                }

                return(AudioUnitStatus.OK);
            }

            IStreamProvider source = Streams[(int)busNumber];

            source.Read(outLeft, outRight, numberFrames);

            return(AudioUnitStatus.OK);
        }
Example #20
 AudioUnitStatus InputCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, global::AudioUnit.AudioUnit audioUnit)
 {
     inputCallbackEvent.Set();
     return(AudioUnitStatus.NoError);
 }
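
The callback here only signals a wait handle; the test code that uses it presumably declares and waits on the event along these lines (an illustrative assumption, not part of the example):

     // Assumed declaration and usage: the callback fires once audio input starts flowing.
     readonly System.Threading.AutoResetEvent inputCallbackEvent = new System.Threading.AutoResetEvent(false);
     // ... after starting the audio unit:
     bool gotInput = inputCallbackEvent.WaitOne(TimeSpan.FromSeconds(5));
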
Example #21
        static AudioUnitStatus RenderCallbackImpl(IntPtr clientData, ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, IntPtr data)
        {
            GCHandle gch = GCHandle.FromIntPtr(clientData);
            var      au  = (AUGraph)gch.Target;

            RenderDelegate callback;

            if (!au.nodesCallbacks.TryGetValue(busNumber, out callback))
            {
                return(AudioUnitStatus.InvalidParameter);
            }

            using (var buffers = new AudioBuffers(data)) {
                return(callback(actionFlags, timeStamp, busNumber, numberFrames, buffers));
            }
        }
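
The callback recovers its managed AUGraph through the GCHandle packed into clientData. A sketch of the registration side this pattern assumes (illustrative, not the actual binding internals):

            // Keep the managed instance reachable from native code and obtain
            // the IntPtr that later round-trips through GCHandle.FromIntPtr above.
            var gch = GCHandle.Alloc(this);
            IntPtr clientData = GCHandle.ToIntPtr(gch);
            // clientData is then registered as the inRefCon of the node's render
            // callback; gch.Free() must be called when the graph is disposed,
            // otherwise the managed instance is kept alive forever.
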
Example #22
        public AudioUnitStatus InternalRenderBlockProc(ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timestamp, uint frameCount, nint outputBusNumber, AudioBuffers outputData, AURenderEventEnumerator realtimeEventListHead, AURenderPullInputBlock pullInputBlock)
        {
            var transportStateFlags = (AUHostTransportStateFlags)0;

            double currentSamplePosition  = 0;
            double cycleStartBeatPosition = 0;
            double cycleEndBeatPosition   = 0;

            var callBack = TransportStateBlock;

            if (callBack != null)
            {
                callBack(ref transportStateFlags, ref currentSamplePosition, ref cycleStartBeatPosition, ref cycleEndBeatPosition);
            }

            var state = Kernel;
            var input = inputBus;

            var             pullFlags = (AudioUnitRenderActionFlags)0;
            AudioUnitStatus err       = input.PullInput(ref pullFlags, timestamp, frameCount, 0, pullInputBlock);

            if (err != AudioUnitStatus.NoError)
            {
                return(err);
            }

            AudioBuffers inAudioBufferList = input.MutableAudioBufferList;

            if (outputData [0].Data == IntPtr.Zero)
            {
                for (int i = 0; i < outputData.Count; i++)
                {
                    outputData.SetData(i, inAudioBufferList [i].Data);
                }
            }

            state.SetBuffers(inAudioBufferList, outputData);
            state.ProcessWithEvents(timestamp, (int)frameCount, realtimeEventListHead);

            return(AudioUnitStatus.NoError);
        }
Example #23
        void StreamDownloaded(IAsyncResult result)
        {
            var  request = result.AsyncState as HttpWebRequest;
            bool pushed  = false;

            try {
                var    response       = request.EndGetResponse(result);
                var    responseStream = response.GetResponseStream();
                Stream inputStream;
                var    buffer = new byte [8192];
                int    l = 0, n;

                InvokeOnMainThread(delegate {
                    viewController.PushViewController(playController, true);
                });

                pushed = true;

                if (saveCopy)
                {
                    inputStream = MakeQueueStream(responseStream);
                }
                else
                {
                    inputStream = responseStream;
                }

                //
                // Create StreamingPlayer; the using statement will automatically
                // force the resources to be disposed and the playback to stop.
                //
                using (player = new StreamingPlayback()){
                    AudioQueueTimeline timeline   = null;
                    double             sampleRate = 0;

                    player.OutputReady += delegate {
                        timeline   = player.OutputQueue.CreateTimeline();
                        sampleRate = player.OutputQueue.SampleRate;
                    };
                    InvokeOnMainThread(delegate {
                        if (updatingTimer != null)
                        {
                            updatingTimer.Invalidate();
                        }

                        updatingTimer = NSTimer.CreateRepeatingScheduledTimer(0.5, delegate {
                            var queue = player.OutputQueue;
                            if (queue == null || timeline == null)
                            {
                                return;
                            }

                            bool disc           = false;
                            AudioTimeStamp time = new AudioTimeStamp();
                            queue.GetCurrentTime(timeline, ref time, ref disc);

                            playbackTime.Text = FormatTime(time.SampleTime / sampleRate);
                        });
                    });
                    while ((n = inputStream.Read(buffer, 0, buffer.Length)) != 0)
                    {
                        l += n;
                        player.ParseBytes(buffer, n, false, l == (int)response.ContentLength);

                        InvokeOnMainThread(delegate {
                            progress.Progress = l / (float)response.ContentLength;
                        });
                    }
                }
            } catch (Exception e) {
                InvokeOnMainThread(delegate {
                    if (pushed)
                    {
                        viewController.PopToRootViewController(true);
                        pushed = false;
                    }
                    status.Text = "Error fetching response stream\n" + e;
                    Console.WriteLine(e);
                });
            }

            //
            // Restore the default AudioSession; this allows the iPhone
            // to go to sleep now that we are done playing the audio
            //
            AudioSession.Category = AudioSessionCategory.MediaPlayback;
            if (pushed)
            {
                viewController.PopToRootViewController(true);
                status.Text = "Finished playback";
            }
        }
Example #24
		uint TapProc (AudioQueueProcessingTap audioQueueTap, uint inNumberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data)
		{
			AudioQueueProcessingTapFlags sourceFlags;
			uint sourceFrames;

			if (audioQueueTap.GetSourceAudio (inNumberOfFrames, ref timeStamp, out sourceFlags, out sourceFrames, data) != AudioQueueStatus.Ok)
				throw new ApplicationException ();

			for (int channel = 0; channel < data.Count; channel++) {
				preRenderData[channel] = data [channel].Data;
				data.SetData (channel, IntPtr.Zero);
			}

			renderTimeStamp.Flags = AudioTimeStamp.AtsFlags.SampleTimeValid;
			AudioUnitRenderActionFlags actionFlags = 0;

			AudioUnitStatus res = genericOutputUnit.Render (ref actionFlags, renderTimeStamp, 0, inNumberOfFrames, data);
			if (res != AudioUnitStatus.NoError)
				throw new ApplicationException ();

			return sourceFrames;
		}
Example #25
        static AudioUnitStatus RenderCallbackImpl(IntPtr clientData, ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, IntPtr data)
        {
            GCHandle gch = GCHandle.FromIntPtr(clientData);
            var      au  = (AudioUnit)gch.Target;

            return(au.render(actionFlags, timeStamp, busNumber, numberFrames, new AudioBuffers(data)));
        }
Example #26
		AudioUnitStatus ConvertInputRenderCallback (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			renderTimeStamp.SampleTime += numberFrames;
			for (int channel = 0; channel < data.Count; channel++) {
				data.SetData (channel, preRenderData [channel]);
			}
			return AudioUnitStatus.NoError;
		}
Example #27
 // Assumed P/Invoke declaration: the [DllImport] attribute is omitted in the snippet.
 [DllImport ("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
 static extern int AudioUnitRender(IntPtr inUnit,
                                   ref AudioUnitRenderActionFlags ioActionFlags,
                                   ref AudioTimeStamp inTimeStamp,
                                   int inOutputBusNumber,
                                   int inNumberFrames,
                                   AudioBufferList ioData);
Example #28
 AudioUnitStatus ConvertInputRenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
     data.SetData(0, preRenderData);
     return(AudioUnitStatus.NoError);
 }
Example #29
 AudioUnitStatus MixerRenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
     mixerRenderCallbackCount++;
     return(AudioUnitStatus.NoError);
 }
Example #30
        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
        {
            MemoryStream ms = new MemoryStream();

            String s = "a000";
            byte[] bufWriter = Encoding.ASCII.GetBytes(s.ToCharArray(), 0, 4);
            ms.Write(bufWriter, 0, 4);

            bufWriter = BitConverter.GetBytes(AudioSessionId);
            if (BitConverter.IsLittleEndian) Array.Reverse(bufWriter);
            ms.Write(bufWriter, 0, 4);

            long time = (long) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;

            //Console.WriteLine ((time - lasttime) + " ms delay");
            lasttime = time;
            bufWriter = BitConverter.GetBytes(time);
            if (BitConverter.IsLittleEndian) Array.Reverse(bufWriter);
            ms.Write(bufWriter, 0, 8);

            var buffer = new AudioBuffer()
                {
                    NumberChannels = 1,
                    DataByteSize = (int)numberFrames * 2,
                    Data = System.Runtime.InteropServices.Marshal.AllocHGlobal((int)numberFrames * 2)
                };

            var bufferList = new AudioBuffers(1);
            bufferList[0] = buffer;

            var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);

            var send = new byte[buffer.DataByteSize];
            System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);

            ms.Write (send, 0, send.Length);

            Console.Write("\n Buffer: ");
            foreach (byte b in send)
                Console.Write("\\x" + b);
            Console.Write("\n");

            System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);

            byte[] sendbuf = ms.ToArray();
            if (sendbuf.Length > 4096) throw new Exception("Packet size too large!");
            Task tk = Task.Factory.StartNew(() =>
                {
                    try
                    {
                        var aSender = audioCaller.BeginSend(sendbuf, sendbuf.Length, null, null);
                        aSender.AsyncWaitHandle.WaitOne(TimeSpan.FromSeconds(3));
                        if (aSender.IsCompleted) audioCaller.EndSend(aSender);
                    }
                    catch
                    {

                    }
                });

            return AudioUnitStatus.OK;
        }
Example #31
 // Assumed P/Invoke declaration: the [DllImport] attribute is omitted in the snippet.
 [DllImport ("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
 static extern int AudioUnitRender(IntPtr inUnit,
     ref AudioUnitRenderActionFlags ioActionFlags,
     ref AudioTimeStamp inTimeStamp,
     UInt32 inOutputBusNumber,
     UInt32 inNumberFrames,
     AudioBufferList ioData
     );
Example #32
		private void RepeatingAction (AudioQueueTimeline timeline, double sampleRate)
		{
			var queue = player.OutputQueue;
			if (queue == null || timeline == null)
				return;

			bool disc = false;
			var time = new AudioTimeStamp ();
			queue.GetCurrentTime (timeline, ref time, ref disc);

			playbackTime.Text = FormatTime (time.SampleTime / sampleRate);
		}
Example #33
        unsafe AudioUnitStatus AudioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags,
                                                        AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            _clock.Restart();
            AudioUnitStatus err = AudioUnitStatus.OK;

            if (_audioInputIsAvailable && _numInputChannels > 0)
            {
                err = _audioUnit.Render(ref actionFlags, timeStamp, 1, numberFrames, data);
            }

            var dataPtr = data[0].Data;

            if (_callback != null)
            {
                if (numberFrames > _sampleBuffer.NumSamples)
                {
                    PrepareFloatBuffers((int)numberFrames);
                }

                if (_audioInputIsAvailable && _numInputChannels > 0)
                {
                    var shortData = (short *)dataPtr.ToPointer();
                    if (_numInputChannels >= 2)
                    {
                        float *leftInput  = _inputChannels[0];
                        float *rightInput = _inputChannels[1];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *leftInput++  = *shortData++ * ShortToFloat;
                            *rightInput++ = *shortData++ * ShortToFloat;
                        }
                    }
                    else
                    {
                        float *leftInput = _inputChannels[0];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *leftInput++ = *shortData++ * ShortToFloat;
                            ++shortData; // skip the other channel of the interleaved stereo data
                        }
                    }
                }
                else
                {
                    for (var i = _numInputChannels; --i >= 0;)
                    {
                        _sampleBuffer.ClearChannel(i);
                    }
                }

                _callback.AudioDeviceIOCallback(
                    _inputChannels, _numInputChannels,
                    _outputChannels, _numOutputChannels,
                    (int)numberFrames
                    );

                if (_out != null)
                {
                    byte *bytes = (byte *)_outputChannels[0];
                    for (var i = 0; i < numberFrames * sizeof(float); ++i)
                    {
                        _out.WriteByte(*bytes++);
                    }
                }

                {
                    var    shortData   = (short *)dataPtr.ToPointer();
                    float *leftOutput  = _outputChannels[0];
                    float *rightOutput = _outputChannels[1];
                    if (_numOutputChannels >= 2)
                    {
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *shortData++ = (short)(*leftOutput++ * FloatToShort);
                            *shortData++ = (short)(*rightOutput++ * FloatToShort);
                        }
                    }
                    else if (_numOutputChannels == 1)
                    {
                        float *output = _outputChannels[0];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            short sample = (short)(*output++ * FloatToShort);
                            *shortData++ = sample;
                            *shortData++ = sample;
                        }
                    }
                    else
                    {
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *shortData++ = 0;
                            *shortData++ = 0;
                        }
                    }
                }
            }
            else
            {
                var shortData = (short *)dataPtr.ToPointer();
                for (var i = 0; i < numberFrames; ++i)
                {
                    *shortData++ = 0;
                    *shortData++ = 0;
                }
            }

            _clock.Stop();

            return(err);
        }
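
The ShortToFloat and FloatToShort factors referenced above are not shown in the snippet; presumably they are the usual 16-bit PCM scale constants, along the lines of:

            // Assumed definitions: map 16-bit PCM samples to [-1, 1] floats and back.
            const float ShortToFloat = 1f / 32768f;
            const float FloatToShort = 32767f;
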
Example #34
        AudioUnitStatus renderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            var err = rioUnit.Render (ref actionFlags, timeStamp, 1, numberFrames, data);

            if (err != AudioUnitStatus.OK) {
                return err;
            }

            if (FFTBufferManager == null)
                return AudioUnitStatus.OK;

            if (FFTBufferManager.NeedsNewAudioData) {
                FFTBufferManager.GrabAudioData (data);
            }

            Silence (data, (int)numberFrames);

            return AudioUnitStatus.OK;
        }
Example #35
		AudioUnitStatus ConvertInputRenderCallback (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			data.SetData (0, preRenderData);
			return AudioUnitStatus.NoError;
		}
Example #36
		void StreamDownloaded (IAsyncResult result)
		{
			var request = result.AsyncState as HttpWebRequest;
			bool pushed = false;
			try {
				var response = request.EndGetResponse (result);
				var responseStream = response.GetResponseStream ();
				Stream inputStream;
				var buffer = new byte [8192];
				int l = 0, n;
				
				InvokeOnMainThread (delegate {
					viewController.PushViewController (playController, true);
				});
				
				pushed = true;
				
				if (saveCopy)
					inputStream = MakeQueueStream (responseStream);
				else
					inputStream = responseStream;
				
				// 
				// Create StreamingPlayer; the using statement will automatically
				// force the resources to be disposed and the playback to stop.
				//
				using (player = new StreamingPlayback ()){
					AudioQueueTimeline timeline = null;
					double sampleRate = 0;
					
					player.OutputReady += delegate {
						timeline = player.OutputQueue.CreateTimeline ();
						sampleRate = player.OutputQueue.SampleRate;
					};
					InvokeOnMainThread (delegate {
						if (updatingTimer != null)
							updatingTimer.Invalidate ();
								
						updatingTimer = NSTimer.CreateRepeatingScheduledTimer (0.5, delegate {
							var queue = player.OutputQueue;
							if (queue == null || timeline == null)
								return;

							bool disc = false;
							AudioTimeStamp time = new AudioTimeStamp ();
							queue.GetCurrentTime (timeline, ref time, ref disc);
							
							playbackTime.Text = FormatTime (time.SampleTime / sampleRate);
						});
					});
					while ((n = inputStream.Read (buffer, 0, buffer.Length)) != 0){
						l += n;
						player.ParseBytes (buffer, n, false, l == (int)response.ContentLength);
						
						InvokeOnMainThread (delegate {
							progress.Progress = l / (float) response.ContentLength;
						});
					}
					
				}
			} catch (Exception e){
				InvokeOnMainThread (delegate {
					if (pushed){
						viewController.PopToRootViewController (true);
						pushed = false;
					}
					status.Text = "Error fetching response stream\n" + e;
					Console.WriteLine (e);
				});
			}
	
			//
			// Restore the default AudioSession; this allows the iPhone
			// to go to sleep now that we are done playing the audio
			//
			AudioSession.Category = AudioSessionCategory.MediaPlayback;
			if (pushed){
				viewController.PopToRootViewController (true);
				status.Text = "Finished playback";
			}
		}
Example #37
		unsafe AudioUnitStatus HandleRenderDelegate (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			var sndbuf = soundBuffer [busNumber];

			var sample = sndbuf.SampleNum;      // frame number to start from
			var bufSamples = sndbuf.TotalFrames;  // total number of frames in the sound buffer
			var input = (int*) sndbuf.Data;

			var outA = (int*) data [0].Data; // output audio buffer for L channel
			var outB = (int*) data [1].Data; // output audio buffer for R channel

			// for demonstration purposes we've configured 2 stereo input busses for the mixer unit
			// but only provide a single channel of data from each input bus when asked and silence for the other channel
			// alternating as appropriate when asked to render bus 0 or bus 1's input
			for (var i = 0; i < numberFrames; ++i) {

				if (busNumber == 1) {
					outA [i] = 0;
					outB [i] = input [sample++];
				} else {
					outA [i] = input[sample++];
					outB [i] = 0;
				}

				if (sample >= bufSamples) {
					// start over from the beginning of the data, our audio simply loops;
					// >= keeps the read index inside the buffer (with > the next pass would read one frame past the end)
					Debug.Print ("Looping data for bus {0} after {1} source frames rendered", busNumber, sample);
					sample = 0;
				}
			}

			// keep track of where we are in the source data buffer
			sndbuf.SampleNum = sample;

			return AudioUnitStatus.OK;
		}
Example #38
        unsafe AudioUnitStatus HandleRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            var sndbuf = soundBuffer [busNumber];

            var sample     = sndbuf.SampleNum;              // frame number to start from
            var bufSamples = sndbuf.TotalFrames;            // total number of frames in the sound buffer
            var input      = (int *)sndbuf.Data;

            var outA = (int *)data [0].Data;             // output audio buffer for L channel
            var outB = (int *)data [1].Data;             // output audio buffer for R channel

            // for demonstration purposes we've configured 2 stereo input busses for the mixer unit
            // but only provide a single channel of data from each input bus when asked and silence for the other channel
            // alternating as appropriate when asked to render bus 0 or bus 1's input
            for (var i = 0; i < numberFrames; ++i)
            {
                if (busNumber == 1)
                {
                    outA [i] = 0;
                    outB [i] = input [sample++];
                }
                else
                {
                    outA [i] = input[sample++];
                    outB [i] = 0;
                }

                if (sample >= bufSamples)
                {
                    // start over from the beginning of the data, our audio simply loops;
                    // >= keeps the read index inside the buffer (with > the next pass would read one frame past the end)
                    Debug.Print("Looping data for bus {0} after {1} source frames rendered", busNumber, sample);
                    sample = 0;
                }
            }

            // keep track of where we are in the source data buffer
            sndbuf.SampleNum = sample;

            return(AudioUnitStatus.OK);
        }
Example #39
        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
        {
            var buffer = new AudioBuffer()
                {
                    NumberChannels = 1,
                    DataByteSize = (int)numberFrames * 2,
                    Data = System.Runtime.InteropServices.Marshal.AllocHGlobal((int)numberFrames * 2)
                };

            var bufferList = new AudioBuffers(1);
            bufferList[0] = buffer;

            var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);

            var send = new byte[buffer.DataByteSize];
            System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);

            var handler = DataAvailable;
            if (handler != null)
                handler(this, send);

            Console.Write("\n Buffer: ");
            foreach (byte b in send)
                Console.Write("\\x" + b);
            Console.Write("\n");

            System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);

            return AudioUnitStatus.OK;
        }
Example #40
		AudioUnitStatus _audioUnit_RenderCallback (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			// getting microphone input signal
			_audioUnit.Render (ref actionFlags,
                timeStamp,
                1, // Remote input
                numberFrames,
                data);

			// Getting a pointer to a buffer to be filled
			IntPtr outL = data [0].Data;
			IntPtr outR = data [1].Data;

			// Getting signal level and trigger detection
			unsafe {
				var outLPtr = (int*)outL.ToPointer ();
				for (int i = 0; i < numberFrames; i++) {
					// LPF: smooth the rectified signal (fast attack, slower release)
					float diff = Math.Abs (*outLPtr) - _signalLevel;
					if (diff > 0)
						_signalLevel += diff / 1000f;
					else
						_signalLevel += diff / 10000f;
                    
					diff = Math.Abs (diff);
                    
					// sound trigger detection
					if (_triggered <= 0 && diff > _threshold) {
						_triggered = _playingDuration;
					}
				}
			}                        

			// playing sound
			unsafe {
				var outLPtr = (int*)outL.ToPointer ();
				var outRPtr = (int*)outR.ToPointer ();                
                
				for (int i = 0; i < numberFrames; i++) {                    
					_triggered = Math.Max (0, _triggered - 1);

					if (_triggered <= 0) {
						// 0-filling
						*outLPtr++ = 0;
						*outRPtr++ = 0;
					} else {
						var buf0 = (int*)_buffer [0].Data;
						var buf1 = (_numberOfChannels == 2) ? (int*)_buffer [1].Data : buf0;

						if (_currentFrame >= _totalFrames) {
							_currentFrame = 0;
						}
                        
						// read the current frame first, then advance, so the index stays within [0, _totalFrames)
						*outLPtr++ = buf0 [_currentFrame];
						*outRPtr++ = buf1 [_currentFrame];
						++_currentFrame;
					}
				}
			}

			return AudioUnitStatus.NoError;
		}