void PrepareAudioQueue(MonoTouch.CoreFoundation.CFUrl url)
        {
            AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate = _samplingRate,
                Format = AudioFormatType.LinearPCM,
                FormatFlags = AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsBigEndian | AudioFormatFlags.LinearPCMIsPacked,
                FramesPerPacket = 1,
                ChannelsPerFrame = 1, // mono
                BitsPerChannel = 16, // 16-bit
                BytesPerPacket = 2,
                BytesPerFrame = 2,
                Reserved = 0
            };

            _audioFile = AudioFile.Create(url, AudioFileType.AIFF, audioFormat, AudioFileFlags.EraseFlags);
            
            _queue = new InputAudioQueue(audioFormat);
            _queue.InputCompleted += new EventHandler<InputCompletedEventArgs>(_queue_InputCompleted);

            _startingPacketCount = 0;
            _numPacketsToWrite = 1024;
            _bufferByteSize = (int)(_numPacketsToWrite * audioFormat.BytesPerPacket);

            // preparing queue buffer
            IntPtr bufferPtr;
            for (int index = 0; index < 3; index++)
            {
                //_queue.AllocateBuffer(_bufferByteSize, out bufferPtr);
                _queue.AllocateBufferWithPacketDescriptors(_bufferByteSize, _numPacketsToWrite, out bufferPtr);
                _queue.EnqueueBuffer(bufferPtr, _bufferByteSize, null);
            }
        }
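
The _queue_InputCompleted handler attached above is not part of this snippet. A minimal sketch of what it could look like, assuming the fields set up in PrepareAudioQueue plus a hypothetical _isRecording flag: write the completed packets to the AIFF file, then hand the buffer back so capture continues.

        // Sketch only: _isRecording is a hypothetical field; the others come
        // from PrepareAudioQueue above.
        void _queue_InputCompleted(object sender, InputCompletedEventArgs e)
        {
            var buffer = (AudioQueueBuffer)System.Runtime.InteropServices.Marshal.PtrToStructure(e.IntPtrBuffer, typeof(AudioQueueBuffer));

            if (_isRecording)
            {
                // Linear PCM is a constant-bitrate format, so e.PacketDescriptions may be null here.
                _audioFile.WritePackets(false, _startingPacketCount, e.PacketDescriptions, e.IntPtrBuffer, (int)buffer.AudioDataByteSize);
                _startingPacketCount += (int)buffer.AudioDataByteSize / 2; // 2 bytes per packet
            }

            // Return the buffer to the queue so recording continues.
            _queue.EnqueueBuffer(e.IntPtrBuffer, _bufferByteSize, e.PacketDescriptions);
        }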
        public void StartRecording(Action<byte[]> callback)
        {
            if (IsRecording)
            {
                return; // already running; don't start again
            }
            IsRecording = true;

            var audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate       = 11025,
                Format           = AudioFormatType.LinearPCM,
                FormatFlags      = AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsPacked,
                FramesPerPacket  = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel   = 16, // 16-bit samples, consistent with BytesPerFrame = 2
                BytesPerPacket   = 2,
                BytesPerFrame    = 2,
                Reserved         = 0
            };

            audioQueue = new InputAudioQueue(audioFormat);
            audioQueue.InputCompleted += (sender, e) =>
            {
                var buffer = (AudioQueueBuffer)System.Runtime.InteropServices.Marshal.PtrToStructure(e.IntPtrBuffer, typeof(AudioQueueBuffer));
                var send   = new byte[buffer.AudioDataByteSize];
                System.Runtime.InteropServices.Marshal.Copy(buffer.AudioData, send, 0, (int)buffer.AudioDataByteSize);
                callback(send);
            };

            var bufferByteSize = bufferLength * audioFormat.BytesPerPacket;

            IntPtr bufferPtr;

            for (var index = 0; index < 3; index++)
            {
                audioQueue.AllocateBufferWithPacketDescriptors(bufferByteSize, this.bufferLength, out bufferPtr);
                audioQueue.EnqueueBuffer(bufferPtr, bufferByteSize, null);
            }

            audioQueue.Start();
        }
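
A hypothetical call site: the callback receives each completed buffer as a managed byte[] of 16-bit mono PCM at 11,025 Hz.

        // Usage sketch; "recorder" stands in for an instance of this class.
        recorder.StartRecording(pcm =>
        {
            System.Diagnostics.Debug.WriteLine("received {0} bytes of PCM", pcm.Length);
        });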
        /// <summary>
        /// Stops the audio stream.
        /// </summary>
        public Task Stop()
        {
            audioQueue.InputCompleted -= QueueInputCompleted;

            var result = audioQueue.Stop(true);

            audioQueue.Dispose();
            audioQueue = null;

            if (result == AudioQueueStatus.Ok)
            {
                OnActiveChanged?.Invoke(this, false);
            }
            else
            {
                System.Diagnostics.Debug.WriteLine("AudioStream.Stop() :: audioQueue.Stop returned non OK result: {0}", result);
            }

            return Task.FromResult(true);
        }
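
Stop completes synchronously and wraps its result with Task.FromResult, so a caller can still await it like any asynchronous implementation:

        // Hypothetical call site, inside an async method:
        await audioStream.Stop();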
        public void autoStart()
        {
            AudioStreamBasicDescription basic = new AudioStreamBasicDescription();

            basic.SampleRate = 44100;

            basic.Format      = AudioFormatType.LinearPCM;
            basic.FormatFlags = AudioFormatFlags.LinearPCMIsBigEndian |
                                AudioFormatFlags.LinearPCMIsSignedInteger |
                                AudioFormatFlags.LinearPCMIsPacked;
            basic.BytesPerPacket   = 2;
            basic.BytesPerFrame    = 2;
            basic.FramesPerPacket  = 1;
            basic.ChannelsPerFrame = 1;
            basic.BitsPerChannel   = 16;

            mAudioQueue = new InputAudioQueue(basic);
            mAudioQueue.Start();
            mAudioQueue.EnableLevelMetering = true;

            started = true;
        }
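
With EnableLevelMetering set, the input level can then be polled (for example from a timer) via the queue's CurrentLevelMeterDB property. A short sketch, assuming the mono format configured above:

        // Sketch only: read the average and peak power for channel 0.
        var levels = mAudioQueue.CurrentLevelMeterDB;
        if (levels != null && levels.Length > 0)
        {
            Console.WriteLine("avg {0} dB, peak {1} dB", levels[0].AveragePower, levels[0].PeakPower);
        }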
        /// <summary>
        /// Starts the recording.
        /// </summary>
        /// <param name="rate">The rate.</param>
        private void StartRecording(int rate)
        {
            if (Active)
            {
                Clear();
            }

            SampleRate = rate;

            var audioFormat = new AudioStreamBasicDescription
            {
                SampleRate  = SampleRate,
                Format      = AudioFormatType.LinearPCM,
                FormatFlags =
                    AudioFormatFlags.LinearPCMIsSignedInteger
                    | AudioFormatFlags.LinearPCMIsPacked,
                FramesPerPacket  = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel   = BitsPerSample,
                BytesPerPacket   = 2,
                BytesPerFrame    = 2,
                Reserved         = 0
            };

            _audioQueue = new InputAudioQueue(audioFormat);
            _audioQueue.InputCompleted += QueueInputCompleted;

            var bufferByteSize = _bufferSize * audioFormat.BytesPerPacket;

            IntPtr bufferPtr;

            for (var index = 0; index < 3; index++)
            {
                _audioQueue.AllocateBufferWithPacketDescriptors(bufferByteSize, _bufferSize, out bufferPtr);
                _audioQueue.EnqueueBuffer(bufferPtr, bufferByteSize, null);
            }

            _audioQueue.Start();
        }
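
The QueueInputCompleted handler is not included with this example. A minimal sketch under the same assumptions, with a hypothetical OnBuffer callback for the captured samples:

        // Sketch only: OnBuffer is a hypothetical Action<byte[]> field.
        private void QueueInputCompleted(object sender, InputCompletedEventArgs e)
        {
            var buffer = (AudioQueueBuffer)System.Runtime.InteropServices.Marshal.PtrToStructure(e.IntPtrBuffer, typeof(AudioQueueBuffer));
            var samples = new byte[buffer.AudioDataByteSize];
            System.Runtime.InteropServices.Marshal.Copy(buffer.AudioData, samples, 0, samples.Length);

            OnBuffer?.Invoke(samples);

            // Hand the buffer back so the queue keeps filling it.
            _audioQueue.EnqueueBuffer(e.IntPtrBuffer, (int)buffer.AudioDataBytesCapacity, null);
        }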
        public void ProcessingTap()
        {
            var aq = new InputAudioQueue(AudioStreamBasicDescription.CreateLinearPCM());
            AudioQueueStatus ret;
            bool             called = false;

            using (var tap = aq.CreateProcessingTap(
                       delegate(AudioQueueProcessingTap audioQueueTap, uint numberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data) {
                called = true;
                return 33;
            }, AudioQueueProcessingTapFlags.PreEffects, out ret)) {
                Assert.AreEqual(AudioQueueStatus.Ok, ret, "#1");

                unsafe {
                    AudioQueueBuffer *buffer;
                    Assert.AreEqual(AudioQueueStatus.Ok, aq.AllocateBuffer(5000, out buffer), "#2");
                    Assert.AreEqual(AudioQueueStatus.Ok, aq.EnqueueBuffer(buffer), "#3");
                    //Assert.AreEqual (AudioQueueStatus.Ok, aq.Start (), "#4");
                }
            }

            //Assert.That (called, Is.True, "#10");
        }
        void InitAudioQueue()
        {
            // create our audio queue & configure buffers
            var audioFormat = AudioStreamBasicDescription.CreateLinearPCM(SampleRate, (uint)ChannelCount, (uint)BitsPerSample);

            audioQueue = new InputAudioQueue(audioFormat);
            audioQueue.InputCompleted += QueueInputCompleted;

            // calculate our buffer size and make sure it's not too big
            var bufferByteSize = (int)(TargetMeasurementTime / 1000F /*ms to sec*/ * SampleRate * audioFormat.BytesPerPacket);

            bufferByteSize = bufferByteSize < MaxBufferSize ? bufferByteSize : MaxBufferSize;

            for (var index = 0; index < CountAudioBuffers; index++)
            {
                var bufferPtr = IntPtr.Zero;

                BufferOperation(() => audioQueue.AllocateBuffer(bufferByteSize, out bufferPtr), () =>
                {
                    BufferOperation(() => audioQueue.EnqueueBuffer(bufferPtr, bufferByteSize, null), () => Debug.WriteLine("AudioQueue buffer enqueued :: {0} of {1}", index + 1, CountAudioBuffers));
                });
            }
        }
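
BufferOperation is a small helper that is not shown with this example; it evidently wraps a queue call and branches on its AudioQueueStatus. A plausible sketch, matching how it is called above:

        // Sketch only: the real helper may differ.
        void BufferOperation(Func<AudioQueueStatus> bufferOperation, Action successAction = null)
        {
            var status = bufferOperation();

            if (status == AudioQueueStatus.Ok)
            {
                successAction?.Invoke();
            }
            else
            {
                Debug.WriteLine("AudioQueue buffer operation failed :: {0}", status);
            }
        }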
        private void Init()
        {
            var audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate = this.SampleRate,
                Format = AudioFormatType.LinearPCM,
                FormatFlags = AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsPacked,
                FramesPerPacket = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel = this.BitsPerSample,
                BytesPerPacket = 2,
                BytesPerFrame = 2,
                Reserved = 0
            };

            audioQueue = new InputAudioQueue(audioFormat);
            audioQueue.InputCompleted += QueueInputCompleted;

            var bufferByteSize = this.bufferSize * audioFormat.BytesPerPacket;

            IntPtr bufferPtr;
            for (var index = 0; index < 3; index++)
            {
                audioQueue.AllocateBufferWithPacketDescriptors(bufferByteSize, this.bufferSize, out bufferPtr);
                audioQueue.EnqueueBuffer(bufferPtr, bufferByteSize, null);
            }
        }
		/// <summary>
		/// Clears this instance.
		/// </summary>
		private void Clear()
		{
			if (_audioQueue != null)
			{
				_audioQueue.Stop(true);
				_audioQueue.InputCompleted -= QueueInputCompleted;
				_audioQueue.Dispose();
				_audioQueue = null;
			}
		}
        public void Dispose()
        {
            _isRecording = false;

            _queue.Stop(true);
            _audioFile.Dispose();
            _queue.Dispose();

            _queue = null;
            _audioFile = null;
        }
        private void startTalking(UdpClient audioCaller)
        {
            // Previous NAudio-based (Windows) implementation, kept for reference:
            //    recorder.WaveFormat = new WaveFormat(16000, 16, 1);
            //    recorder.BufferMilliseconds = 50;
            //    recorder.DataAvailable += SendAudio;

            recorder = new InputAudioQueue (playerFormat);

            for (int i = 0; i < 3; i++) {
                IntPtr audioBuffer;
                recorder.AllocateBuffer (1280, out audioBuffer);
                recorder.EnqueueBuffer (audioBuffer, 1280, null);
            }
            isTalking = true;
            recorder.InputCompleted += SendAudio;
            recorder.Start ();
        }
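
The SendAudio handler referenced above is not part of this example. A sketch, assuming the UdpClient is kept in a field named audioCaller and is already connected to the remote peer (both hypothetical details):

        // Sketch only: copy the captured PCM out, send it, re-enqueue the buffer.
        void SendAudio (object sender, InputCompletedEventArgs e)
        {
            var buffer = (AudioQueueBuffer)System.Runtime.InteropServices.Marshal.PtrToStructure (e.IntPtrBuffer, typeof(AudioQueueBuffer));
            var datagram = new byte[buffer.AudioDataByteSize];
            System.Runtime.InteropServices.Marshal.Copy (buffer.AudioData, datagram, 0, datagram.Length);

            if (isTalking) {
                audioCaller.Send (datagram, datagram.Length);
            }

            // Re-enqueue so the recorder keeps capturing.
            recorder.EnqueueBuffer (e.IntPtrBuffer, 1280, null);
        }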