示例#1
0
        /// <summary>
        /// Start recording a conversation: create a common recording bridge,
        /// add the initiating channel to it, start bridge recording and open
        /// a snoop channel for the initiating leg.
        /// </summary>
        /// <param name="initialChannelId">Channel that triggered the recording.</param>
        /// <param name="initialCallId">Call id of the initiating leg.</param>
        /// <param name="extension">Extension associated with the channel.</param>
        /// <param name="role">Role of the channel in the conversation.</param>
        /// <param name="mainBridgeId">Id of the main (talk) bridge.</param>
        /// <param name="lineId">Optional line id to attach to the audio record.</param>
        protected async Task StartCallRecording(string initialChannelId, Guid initialCallId, string extension, ChannelRoleType role, string mainBridgeId, Guid? lineId = null)
        {
            // Recording can be disabled globally via configuration.
            if (!_asteriskOptions.RecordingEnabled)
            {
                return;
            }

            var recordsBridgeId = GetCommonRecordingBridgeId(mainBridgeId);

            // Persist the "full conversation" record first. CallId is null
            // because this record covers the whole bridge, not a single leg.
            var fullAudioRecord = new AudioRecord
            {
                FileName = $"{recordsBridgeId}",
                LineId   = lineId,
                CallId   = null
            };
            await AudioRecordRepository.AddAudioRecord(fullAudioRecord);

            await AriClient.CreateBridge(recordsBridgeId);

            await AddChannelToCommonRecordingBridge(initialChannelId, extension, role, mainBridgeId, initialCallId, lineId);

            var result = await AriClient.StartRecordingBridge(recordsBridgeId);

            if (result.IsFailure)
            {
                Logger.Warning($"StartCallRecording. Failed to start call recording. MainBridgeId: {mainBridgeId}");
                return;
            }

            await CreateSnoopChannelForRecording(initialChannelId, extension, role, false, initialCallId, lineId);

            // BUG FIX: the success log used the prefix "StartRecordingBridge."
            // although this method is StartCallRecording, which made log
            // correlation misleading.
            Logger.Information($"StartCallRecording. Bridge recording started. {mainBridgeId}");
        }
        /// <summary>
        /// Stop the audio recorder, release its native resources and convert
        /// the temporary raw capture file into a .wav file on external storage.
        /// </summary>
        /// <exception cref="System.ArgumentException">
        /// Wraps any failure during stop/convert (wrapper type preserved for
        /// existing callers).
        /// </exception>
        public void StopRecording()
        {
            isRecording = false;
            try
            {
                // BUG FIX: guard against double-stop — audioRecord is nulled
                // below, so a second call must not dereference it.
                if (audioRecord != null && audioRecord.State == State.Initialized)
                {
                    // Stop Audio Recorder
                    audioRecord.Stop();
                    audioRecord.Release();
                    audioRecord = null;
                    // Stop thread. NOTE(review): Thread.Abort is deprecated and
                    // unreliable; a cooperative stop flag would be safer.
                    audioThread.Abort();
                    delegateThreadStart = null;
                    audioThread         = null;
                }

                // Create file path for .wav file
                wavFile = Android.OS.Environment.ExternalStorageDirectory.Path + "/AudioRecorderFile.wav";
                ConvertRawFileToWavFile(rawFile, wavFile);
                // Delete temp file
                new Java.IO.File(rawFile).Delete();
            }
            catch (Exception e)
            {
                // NOTE(review): re-wrapping drops the original stack trace;
                // kept as-is because callers catch ArgumentException.
                throw new System.ArgumentException(e.Message);
            }
        }
示例#3
0
        /// <summary>
        /// Start recording a single snoop channel: persist an audio record
        /// for the call leg, then ask ARI to record the channel.
        /// </summary>
        /// <param name="snoopChannelId">Snoop channel to record; also used as the file name.</param>
        /// <param name="callId">Call this recording belongs to.</param>
        /// <param name="lineId">Optional line id to attach to the record.</param>
        /// <returns>Success, or a failure result when recording is disabled or ARI refuses.</returns>
        protected async Task <Result> StartRecordingChannel(string snoopChannelId, Guid callId, Guid?lineId)
        {
            // Honour the global recording switch.
            if (!_asteriskOptions.RecordingEnabled)
            {
                return Result.Failure(ErrorCodes.RecordingError);
            }

            // Persist the per-leg record before starting the actual recording.
            var record = new AudioRecord
            {
                CallId   = callId,
                LineId   = lineId,
                FileName = snoopChannelId
            };
            await AudioRecordRepository.AddAudioRecord(record);

            var recordingResult = await AriClient.StartRecordingChannel(snoopChannelId);
            if (recordingResult.IsFailure)
            {
                Logger.Warning($"StartRecordingChannel. Could not start recording channel. CallId: {callId}");
                return recordingResult;
            }

            Logger.Information($"StartRecordingChannel. Channel recording started. {snoopChannelId}");
            return Result.Success();
        }
        /// <summary>
        /// Records microphone audio to a WAV file until recording is stopped
        /// or the maximum frame count is reached, running simple per-buffer
        /// analysis (min/max/avg/RMS/FFT) along the way, then raises
        /// OnAudioReadyForUpload with the finished file.
        /// </summary>
        private async Task RecordAudioAsync()
        {
            wavPath = Path.Combine(audioDir, Guid.NewGuid().ToString() + "_audio.wav");

            byte[] audioBuffer = new byte[8000];

            audioRecord = new AudioRecord(
                AudioSource.Mic,   // Hardware source of recording.
                sampleRate,        // Frequency
                channelIn,         // Mono or stereo
                encoding,          // Audio encoding
                audioBuffer.Length // Length of the audio clip.
                );

            var id = audioRecord.AudioSessionId;

            audioRecord.StartRecording();

            int totalAudioLen = 0;

            isRecording = true;

            using (System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.Create))
                using (BinaryWriter bWriter = new BinaryWriter(outputStream))
                {
                    // Write a header up front; the real length is patched later.
                    WriteWaveFileHeader(bWriter, maxAudioFreamesLength);

                    // Keep reading the buffer while there is audio input.
                    while (isRecording && totalAudioLen <= maxAudioFreamesLength)
                    {
                        int bytesRead = await audioRecord.ReadAsync(audioBuffer, 0, audioBuffer.Length);

                        // ReadAsync returns a negative Android error code on
                        // failure — abort the capture loop in that case.
                        if (bytesRead < 0)
                        {
                            break;
                        }
                        if (bytesRead == 0)
                        {
                            continue;
                        }

                        totalAudioLen += bytesRead;

                        // BUG FIX: write only the bytes actually read this
                        // iteration. The original wrote the entire buffer,
                        // appending stale/garbage bytes on short reads.
                        bWriter.Write(audioBuffer, 0, bytesRead);

                        // Per-buffer analysis.
                        var intbuffer = ByteArrayTo16Bit(audioBuffer);
                        var min       = intbuffer.Min();
                        var max       = intbuffer.Max();
                        var avg       = intbuffer.Average(x => (double)x);
                        var sos       = intbuffer.Select(x => (long)x)
                                        .Aggregate((prev, next) => prev + next * next);
                        var rms = Math.Sqrt((double)1 / intbuffer.Length * sos);
                        var fft = FFT(intbuffer);
                    }

                    isRecording = false;

                    outputStream.Close();
                    bWriter.Close();
                }

            audioRecord.Stop();
            audioRecord.Dispose();

            // The file is now fully written and can be sent to the server.
            OnAudioReadyForUpload(new AudioUploadEventArgs(DateTime.Now.ToUniversalTime(), wavPath));
        }
        /// <summary>
        /// Stop an in-progress recording, release the recorder and produce
        /// the final output file from the temporary capture file.
        /// </summary>
        /// <exception cref="MicrophoneServiceException">
        /// Thrown when recording was never started, or when stopping fails.
        /// </exception>
        public void StopRecording()
        {
            // IMPROVEMENT: guard clauses moved out of the try block — the
            // original threw these inside the try, immediately caught them
            // below and re-wrapped the message in a second, identical
            // exception. Callers still see the same type and message.
            if (this._audioRecord == null)
            {
                throw new MicrophoneServiceException("You have to start recording first !");
            }

            if (this.outputFilename == null)
            {
                throw new MicrophoneServiceException("You have to start recording first !");
            }

            try
            {
                this._audioRecord.Stop();

                this._isRecording = false;
                this._cancellationToken.Cancel();

                this._audioRecord.Release();
                this._audioRecord = null;

                CreateOutputFileFromTempFile();
            }
            catch (Exception ex)
            {
                // Wrap in the service exception type callers expect.
                // NOTE(review): the original stack trace is lost here.
                throw new MicrophoneServiceException(ex.Message);
            }
        }
        /// <summary>
        /// Begin capturing audio and return a task that completes when the
        /// recording finishes (result delivered through the internal
        /// TaskCompletionSource by the worker tasks).
        /// </summary>
        /// <param name="options">Recording options; defaults to <c>AudioRecordOptions.Empty</c>.</param>
        /// <returns>The pending recording result, or an immediate failure result
        /// when the recorder cannot be initialised.</returns>
        public Task <AudioRecordResult> Record(AudioRecordOptions options = null)
        {
            _options = options ?? AudioRecordOptions.Empty;

            // IMPROVEMENT: RunContinuationsAsynchronously prevents the worker
            // thread that completes the TCS from inlining caller continuations.
            _tcs = new TaskCompletionSource <AudioRecordResult>(TaskCreationOptions.RunContinuationsAsynchronously);

            _bufferSize = AudioRecord.GetMinBufferSize(_options.SampleRate, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING) * 3;

            _recorder = new AudioRecord(AudioSource.VoiceRecognition, _options.SampleRate, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, _bufferSize);

            if (_recorder.State == State.Initialized)
            {
                _recorder.StartRecording();
            }
            else
            {
                return Task.FromResult(new AudioRecordResult($"AudioRecord initialisation returned unexpected state ({_recorder.State})"));
            }

            _isRecording  = true;
            _timeoutToken = new CancellationTokenSource();

            // Fire-and-forget workers; their outcome flows back through _tcs.
            _ = Task.Run(() => RecordAudio());
            _ = Task.Run(() => Timeout());

            return _tcs.Task;
        }
示例#7
0
        //@Override
        //@SuppressLint({ "InlinedApi", "NewApi" })
        /// <summary>
        /// Configure an AudioRecord + MediaCodec AAC-LATM encoder pair, start
        /// both, and feed the encoded output into the RTP packetizer.
        /// </summary>
        protected override void encodeWithMediaCodec()
        {
            // Double the minimum buffer size for headroom against underruns.
            int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;

            ((AACLATMPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);

            mAudioRecord = new AudioRecord(Android.Media.AudioSource.Mic, mQuality.samplingRate, Android.Media.ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, bufferSize);
            mMediaCodec  = MediaCodec.CreateEncoderByType("audio/mp4a-latm");
            MediaFormat format = new MediaFormat();

            // AAC-LC, mono, at the configured bitrate/sample rate.
            format.SetString(MediaFormat.KeyMime, "audio/mp4a-latm");
            format.SetInteger(MediaFormat.KeyBitRate, mQuality.bitRate);
            format.SetInteger(MediaFormat.KeyChannelCount, 1);
            format.SetInteger(MediaFormat.KeySampleRate, mQuality.samplingRate);
            format.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
            mMediaCodec.Configure(format, null, null, MediaCodecConfigFlags.Encode);
            mAudioRecord.StartRecording();
            mMediaCodec.Start();

            // Wraps the codec's encoded output as an InputStream for the packetizer.
            MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);

            // NOTE(review): inputBuffers is unused in this method; Run() fetches
            // its own input buffers.
            Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers();

            // Worker thread pushes raw PCM into the codec — see Run().
            mThread = new Thread(this);


            mThread.Start();

            // The packetizer encapsulates this stream in an RTP stream and send it over the network
            mPacketizer.setInputStream(inputStream);
            mPacketizer.start();

            mStreaming = true;
        }
示例#8
0
        /// <summary>
        /// Open the output WAV stream, create the recorder and start a
        /// synchronous capture via SaveBinaryAudio.
        /// </summary>
        public void StartRecording()
        {
            // Ownership of outputStream passes to SaveBinaryAudio below;
            // it is intentionally not disposed here.
            System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.Create);
            bWriter     = new BinaryWriter(outputStream);
            // 44100 samples/sec * 5. NOTE(review): the original comment said
            // "* 10 sek (max time)" but the multiplier is actually 5 —
            // confirm the intended maximum duration.
            audioBuffer = new byte[44100 * 5];

            // Mic input, 44.1 kHz, mono, 16-bit PCM.
            audRecorder = new AudioRecord(AudioSource.Mic,
                                          44100,
                                          ChannelIn.Mono,
                                          Android.Media.Encoding.Pcm16bit,
                                          audioBuffer.Length);

            IsRecording = true;
            audRecorder.StartRecording();

            SaveBinaryAudio(outputStream);
        }
示例#9
0
 /// <summary>
 /// Start recording: play the accompaniment (backing track) and open the
 /// microphone for capture.
 /// </summary>
 public void BeginRecording()
 {
     // Start the accompaniment.
     accompany.Play();
     // Open the microphone and begin recording.
     AudioRecord.BeginRecording(recording, duration, loop, frequency);
 }
示例#10
0
        /// <summary>
        /// Initialise the recorder and start capturing microphone audio into
        /// an in-memory WAV stream.
        /// </summary>
        /// <returns>A completed task: true if capture started, false if the
        /// recorder failed to initialise.</returns>
        public Task <bool> BeginCaptureAudio()
        {
            // Dispose any recorder left over from a previous capture.
            if (_recorder != null)
            {
                _recorder.Release();
                _recorder.Dispose();
            }

            _audioCaptureStream = new MemoryStream();
            MediaHelper.WriteWavHeader(_audioCaptureStream, MediaHelper.DefaultAudioSamplingRate);

            _recorder = new AudioRecord(AudioSource.Mic, MediaHelper.DefaultAudioSamplingRate, ChannelIn.Mono, Encoding.Pcm16bit, _audioBuffer.Length);

            if (_recorder.State != State.Initialized)
            {
                // BUG FIX: release the native recorder before dropping the
                // reference — the original leaked it on this failure path.
                _recorder.Release();
                _recorder.Dispose();
                _recorder = null;
                return(Task.FromResult(false));
            }

            _recorder.StartRecording();
            _trimAudioZeros = true;

            ReadAudioBufferAsync();
            return(Task.FromResult(true));
        }
        /// <summary>
        /// Create the recorder, start capturing and spawn the worker thread
        /// that writes raw audio data to file.
        /// </summary>
        private void StartRecording()
        {
            WvlLogger.Log(LogType.TraceAll, "StartRecording()");

            recorder = new AudioRecord(AudioSource.Mic, RECORDER_SAMPLERATE, (ChannelIn)RECORDER_CHANNELS, (Android.Media.Encoding)RECORDER_AUDIO_ENCODING, bufferSize);

            // IMPROVEMENT: compare against the enum instead of the magic
            // number 1 (Android STATE_INITIALIZED).
            if (recorder.State == State.Initialized)
            {
                recorder.StartRecording();
            }

            // NOTE(review): set even when the recorder failed to initialise,
            // matching the original behaviour; the writer thread relies on it.
            isRecording = true;

            // Write captured audio off the calling thread.
            recordingThread = new System.Threading.Thread(new ThreadStart(
                                                              WriteAudioDataToFile
                                                              ));
            recordingThread.Start();
        }
示例#12
0
        /// <summary>
        /// Finish capturing: drain the last recorder buffer, finalise the WAV
        /// header and return the audio stream positioned at its start.
        /// Returns null when no capture is in progress.
        /// </summary>
        public Stream EndCaptureAudio()
        {
            // Nothing to do when capture was never started.
            if (_recorder == null)
            {
                return null;
            }

            // Drain whatever is left in the recorder before stopping it.
            var read   = _recorder.Read(_audioBuffer, 0, _audioBuffer.Length);
            var offset = TrimAudioZeros(read);

            _recorder.Stop();

            var audioStream = _audioCaptureStream;
            _audioCaptureStream = null;

            if (read > offset)
            {
                audioStream.Write(_audioBuffer, offset, read - offset);
            }

            _recorder.Release();
            _recorder.Dispose();
            _recorder = null;

            // Patch the header now that the total length is known.
            MediaHelper.UpdateWavHeader(audioStream);

            audioStream.Seek(0, SeekOrigin.Begin);
            return audioStream;
        }
示例#13
0
 // Periodic AudioRecord callback: drains the capture buffer, appends it to
 // the output file and tracks the peak amplitude seen so far.
 // NOTE(review): this snippet is truncated in the visible source — the
 // catch block below is cut off mid-way.
 public void onPeriodicNotification(AudioRecord recorder)
 {
     audioRecorder.read(buffer, 0, buffer.length); // Fill buffer
     try {
         randomAccessWriter.write(buffer);         // Write buffer to file
         payloadSize += buffer.length;
         if (bSamples == 16)
         {
             for (int i = 0; i < buffer.length / 2; i++) // 16bit sample size
             {
                 // Little-endian pair -> signed 16-bit sample.
                 short curSample = getShort(buffer[i * 2], buffer[i * 2 + 1]);
                 if (curSample > cAmplitude) // Check amplitude
                 {
                     cAmplitude = curSample;
                 }
             }
         }
         else // 8bit sample size
         {
             for (int i = 0; i < buffer.length; i++)
             {
                 if (buffer[i] > cAmplitude) // Check amplitude
                 {
                     cAmplitude = buffer[i];
                 }
             }
         }
     } catch (IOException e) {
         Log.e(ExtAudioRecorder.class.getName(), "Error occured in updateListener, recording is aborted");
示例#14
0
 /// <summary>
 /// Creates a microphone wrapper and probes which standard sample rates
 /// the device's AudioRecord supports (mono, 16-bit PCM).
 /// </summary>
 /// <param name="bufferSize">Requested buffer size; -1 lets the recorder decide.</param>
 public Microphone(int bufferSize = -1)
 {
     // A positive GetMinBufferSize result means the rate is usable.
     var candidateRates = new[] { 8000, 11025, 16000, 22050, 44100 };
     SupportedSampleRates = candidateRates
                            .Where(rate => AudioRecord.GetMinBufferSize(rate, ChannelIn.Mono, Encoding.Pcm16bit) > 0)
                            .ToList();
     _bufferSize = bufferSize;
 }
示例#15
0
    /// <summary>
    /// Start the audio recorder (44.1 kHz mono 16-bit PCM) and the
    /// background thread that reads captured samples.
    /// </summary>
    public void start()
    {
        if (running)
        {
            Logging.warn("Audio recorder is already running.");
            return;
        }
        stopRecording = false;
        running       = true;

        // BUG FIX: size the buffer with AudioRecord.GetMinBufferSize and
        // ChannelIn — the original queried AudioTrack/ChannelOut, i.e. the
        // *playback* API, to size a *capture* buffer.
        bufferSize = AudioRecord.GetMinBufferSize(44100, ChannelIn.Mono, Encoding.Pcm16bit) * 10;

        audioRecorder = new AudioRecord(
            // Hardware source of recording.
            AudioSource.Mic,
            // Frequency
            44100,
            // Mono or stereo
            ChannelIn.Mono,
            // Audio encoding
            Encoding.Pcm16bit,
            // Length of the audio clip.
            bufferSize
            );


        audioRecorder.StartRecording();

        // Consume captured data off this thread — see readLoop().
        Thread recordingThread = new Thread(readLoop);

        recordingThread.Start();
    }
示例#16
0
        /**
         * Returns the minimum AudioRecord buffer size for the configured
         * sample rate, channel and format, scaled by 10 for headroom.
         */
        protected int getMinBufferSize()
        {
            return AudioRecord.getMinBufferSize(SAMPLE_RATE, CHANNEL, FORMAT) * 10;
        }
示例#17
0
        /// <summary>
        /// Stop and release the recorder and the player when they are active,
        /// then raise a "Stopped" message. Thread-safe via stateLocker.
        /// </summary>
        public void Stop()
        {
            lock (stateLocker)
            {
                // Tear down the recorder only when it is actually recording.
                if (recorder != null && recorder.RecordingState == RecordState.Recording)
                {
                    recorder.Stop();
                    recorder.Release();
                    recorder        = null;
                    recordingThread = null;
                }

                // Tear down the player only when it is actually playing.
                if (player != null && player.PlayState == PlayState.Playing)
                {
                    player.Stop();
                    player.Release();
                    player = null;
                }

                OnMessage?.Invoke(this, "Stopped");
            }
        }
        /// <summary>
        /// Initialise the recorder (11025 Hz mono 16-bit PCM), start
        /// capturing and spin up a worker thread so the UI thread is not
        /// blocked by audio reads.
        /// </summary>
        protected void StartRecorder()
        {
            endRecording = false;
            isRecording  = true;

            audioBuffer = new Byte[100000];

            // Mic input, 11025 Hz, mono, 16-bit PCM, buffer-sized clip.
            audioRecord = new AudioRecord(AudioSource.Mic, 11025, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, audioBuffer.Length);

            audioRecord.StartRecording();

            // Read audio off the UI thread.
            var readerThread = new Thread(new ThreadStart(ReadAudio));
            readerThread.Start();
        }
示例#19
0
    /// <summary>
    /// Create and start the voice-communication recorder, then attach the
    /// hardware echo canceller and noise suppressor when the device offers
    /// them.
    /// </summary>
    private void initRecorder()
    {
        Encoding pcmEncoding = Encoding.Pcm16bit;

        shortsBuffer = new short[bufferSize];
        buffer       = new byte[bufferSize];

        // VoiceCommunication source; the recorder buffer holds 5 read blocks.
        audioRecorder = new AudioRecord(AudioSource.VoiceCommunication, sampleRate, ChannelIn.Mono, pcmEncoding, bufferSize * 5);
        audioRecorder.StartRecording();

        // Hardware audio effects are optional per device.
        if (AcousticEchoCanceler.IsAvailable)
        {
            echoCanceller = AcousticEchoCanceler.Create(audioRecorder.AudioSessionId);
        }
        if (NoiseSuppressor.IsAvailable)
        {
            noiseSuppressor = NoiseSuppressor.Create(audioRecorder.AudioSessionId);
        }
    }
        /// <summary>
        /// Lazily create the recorder, start it and pump OnNext() for as long
        /// as recording is in progress.
        /// </summary>
        /// <exception cref="InvalidOperationException">The device does not support AudioRecord.</exception>
        public void StartRecord()
        {
            if (audioRecord == null)
            {
                audioRecord = new AudioRecord(AudioSource.Mic, 44100, ChannelIn.Mono, Encoding.Pcm16bit, 2048 * sizeof(byte));
                if (audioRecord.State != State.Initialized)
                {
                    throw new InvalidOperationException("This device doesn't support AudioRecord");
                }
            }

            //audioRecord.SetRecordPositionUpdateListener()

            audioRecord.StartRecording();

            while (audioRecord.RecordingState == RecordState.Recording)
            {
                // BUG FIX: the original wrapped this call in a catch that did
                // `throw ex;`, which destroys the stack trace. The try/catch
                // added nothing else, so exceptions now propagate unchanged.
                OnNext();
            }
        }
示例#21
0
        /// <summary>
        /// Codec feeder loop: reads raw PCM from the AudioRecord directly
        /// into MediaCodec input buffers until the thread is interrupted.
        /// </summary>
        public void Run()
        {
            int len = 0, bufferIndex = 0;

            try
            {
                Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers();
                // Same sizing rule as the encoder setup: 2x the minimum.
                int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;
                while (!Thread.Interrupted())
                {
                    // Wait up to 10 ms (argument is in microseconds) for a
                    // free codec input buffer; negative index means none.
                    bufferIndex = mMediaCodec.DequeueInputBuffer(10000);
                    if (bufferIndex >= 0)
                    {
                        inputBuffers[bufferIndex].Clear();
                        // Read PCM straight into the codec's input buffer.
                        len = mAudioRecord.Read(inputBuffers[bufferIndex], bufferSize);
                        if ((len == (int)RecordStatus.ErrorInvalidOperation) || (len == (int)RecordStatus.ErrorBadValue))
                        {
                            Log.Error(TAG, "An error occured with the AudioRecord API !");
                        }
                        else
                        {
                            //Log.v(TAG,"Pushing raw audio to the decoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
                            // Presentation timestamp in microseconds.
                            mMediaCodec.QueueInputBuffer(bufferIndex, 0, len, Java.Lang.JavaSystem.NanoTime() / 1000, 0);
                        }
                    }
                }
            }
            catch (RuntimeException e)
            {
                e.PrintStackTrace();
            }
        }
示例#22
0
    /// <summary>
    /// Background capture loop: reads PCM chunks from the recorder and
    /// dispatches each one to OnSoundDataReceived until stopped, then tears
    /// the recorder down.
    /// </summary>
    void readLoop()
    {
        byte[] buffer = new byte[bufferSize];
        while (!stopRecording)
        {
            try
            {
                int num_bytes = audioRecorder.Read(buffer, 0, buffer.Length);

                // BUG FIX: Read can return 0 or a negative error code. The
                // original then allocated new byte[num_bytes] with a negative
                // length and aborted via the catch below with a confusing
                // exception. Fail fast on errors, skip empty reads.
                if (num_bytes < 0)
                {
                    Logging.error("AudioRecord read failed with code: " + num_bytes);
                    break;
                }

                if (num_bytes > 0)
                {
                    byte[] data_to_send = new byte[num_bytes];
                    Array.Copy(buffer, data_to_send, num_bytes);

                    Task.Run(() =>
                    {
                        OnSoundDataReceived(data_to_send);
                    });
                }
            }
            catch (Exception e)
            {
                Logging.error("Exception occured while recording audio stream: " + e);
                break;
            }
            Thread.Sleep(10);
        }
        audioRecorder.Stop();
        audioRecorder.Release();
        audioRecorder.Dispose();
        audioRecorder = null;
        running       = false;
    }
示例#23
0
        /// <summary>
        /// Start the recorder (11025 Hz mono 16-bit PCM), notify listeners of
        /// the state change and asynchronously read audio until recording
        /// ends.
        /// </summary>
        protected async Task StartRecorderAsync()
        {
            endRecording = false;
            isRecording  = true;

            RaiseRecordingStateChangedEvent();

            audioBuffer = new Byte[100000];

            // Mic input, 11025 Hz, mono, 16-bit PCM, buffer-sized clip.
            audioRecord = new AudioRecord(AudioSource.Mic, 11025, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, audioBuffer.Length);

            audioRecord.StartRecording();

            // Await the read loop so the UI thread is not blocked.
            await ReadAudioAsync();
        }
        /// <summary>
        /// Wires up a chat-room session: registers this user with the remote
        /// peer, then streams webcam frames, microphone audio and (optionally)
        /// screen-demonstration frames through the client connection.
        /// </summary>
        public ChatRoomViewModel(MainViewModel mainVM, string ip, int remotePort, Server server)
        {
            this.mainVM = mainVM;
            this.server = server;
            // Mask the low 2 bits so uniqueID+0/+1/+2 below address distinct
            // sub-channels without colliding with another instance's id.
            uniqueID    = (int)(GetHashCode() & 0xFFFFFFFC);

            myModel = new UserModel(uniqueID, mainVM.Name);
            client  = new Client(ip, remotePort, uniqueID, this);
            // Sub-channel +0: user metadata.
            client.SendData(ConvertClass.ObjectToByteArray(myModel), 0, uniqueID + 0);
            users            = new List <UserModel>();
            webcam           = new Webcam((new FilterInfoCollection(FilterCategory.VideoInputDevice))[0]);
            webcam.NewFrame += (sender, e) =>
            {
                // Sub-channel +1: webcam frames, suppressed while screen sharing.
                if (myModel.IsScreenDemonstration != true)
                {
                    client.SendData(ConvertClass.ConvertBitmapToByte((Bitmap)e.Frame.Clone()), 0, uniqueID + 1);
                }
            };
            audioRecord = new AudioRecord(0);
            audioRecord.DataAvailable += (sender, e) =>
            {
                // Sub-channel +2: microphone audio buffers.
                client.SendData(e.Buffer, 0, uniqueID + 2);
            };
            demonstration           = new ScreenDemonstration();
            demonstration.NewFrame += (sender, e) =>
            {
                // Screen frames reuse the video sub-channel (+1).
                client.SendData(ConvertClass.ConvertBitmapToByte((Bitmap)e.Frame.Clone()), 0, uniqueID + 1);
            };
        }
示例#25
0
        /// <summary>
        /// Locate a usable AudioRecord configuration, size the capture buffer
        /// from the recorder's frame count and begin recording.
        /// </summary>
        private void InitialiseAudioRecording()
        {
            audioRecorder = FindAudioRecordNew();

            // NOTE(review): BufferSizeInFrames requires Android API 23+ —
            // confirm the app's minimum SDK level.
            audioBuffer = new byte[audioRecorder.BufferSizeInFrames];
            audioRecorder.StartRecording();
        }
示例#26
0
文件: Audio.cs 项目: s1080170/android
        /*********************************************************************************
        * Record-button handler: create the microphone recorder, register the
        * per-frame callback and start capturing.
        *********************************************************************************/
        public void ButtonRec_Click(object sender, EventArgs e)
        {
            Int32 bufferSize = AudioRecord.GetMinBufferSize(mSamplingRate, ChannelIn.Mono, mFormat);

            System.Diagnostics.Debug.WriteLine("AudioRecord : GetMinBufferSize={0}", bufferSize);

            RecordBuffer.Instance.Frames = mFrameSize;

            // Alternative sources kept for experimentation:
            mAudioRecord = new AudioRecord(
                //AudioSource.Default,
                //AudioSource.Camcorder,
                AudioSource.Mic,
                //AudioSource.VoiceCommunication,
                //AudioSource.VoiceRecognition,
                //AudioSource.VoiceUplink,
                mSamplingRate,
                ChannelIn.Mono,
                mFormat,
                bufferSize);

            // How many samples to process at a time (= samples per frame).
            mAudioRecord.SetPositionNotificationPeriod(RecordBuffer.Instance.Frames);

            // Register the position-update callback.
            mAudioRecord.SetRecordPositionUpdateListener(new OnRecordPositionUpdateListener());

            mAudioRecord.StartRecording();

            // Prime the recorder with a dummy read so notifications start.
            Byte[] dummy = new Byte[1];
            mAudioRecord.Read(dummy, 0, dummy.Length);
        }
示例#27
0
 /// <summary>
 /// Returns the minimum AudioRecord buffer size for the given sample rate
 /// and encoding (mono input).
 /// </summary>
 private int determineMinimumBufferSize(int sampleRate, Encoding encoding)
 {
     return AudioRecord.GetMinBufferSize(sampleRate, ChannelIn.Mono, encoding);
 }
示例#28
0
        /// <summary>
        /// Returns the highest standard sample rate this device's AudioRecord
        /// supports for mono 16-bit PCM capture.
        /// </summary>
        /// <exception cref="InvalidOperationException">No candidate rate is supported.</exception>
        private int GetRate()
        {
            // BUG FIX: GetMinBufferSize signals failure with -1 (ERROR) as
            // well as -2 (ERROR_BAD_VALUE); the original only excluded -2,
            // so a -1 result was treated as a valid rate. A positive buffer
            // size is the only success case.
            var rate = new int[] { 4000, 8000, 11025, 16000, 22050, 44100 }
            .Where(k => AudioRecord.GetMinBufferSize(k, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit) > 0)
            .Last();

            return(rate);
        }
示例#29
0
 /// <summary>
 /// Asynchronously stops the audio source and clears the reference.
 /// </summary>
 public Task Stop()
 {
     // Local function instead of an inline lambda for readability.
     void StopCore()
     {
         this.audioSource.Stop();
         this.audioSource = null;
     }

     return Task.Run(StopCore);
 }
示例#30
0
        /// <summary>
        /// Handles an incoming PCM buffer: computes per-channel dB levels
        /// (signed 16-bit little-endian, 2 channels) and prints them.
        /// </summary>
        private static void Record_DataReceived(AudioRecord record, byte[] audioData)
        {
            List <double> levels;

            Audio.CalculateDb(audioData, 2, PCMFormats.S16_LE, out levels);

            Console.WriteLine("左声道 = {0}%, 右声道 = {1}%", levels[0], levels[1]);
        }
		/// <summary>
		/// Create and validate the AudioRecord used for capture, then start
		/// the capture thread.
		/// NOTE(review): this snippet mixes Java-style APIs (getMinBufferSize,
		/// release, AudioFormat constants) with C# syntax — it appears to be
		/// mechanically converted Java and may not compile as-is.
		/// </summary>
		public override bool initCapturer()
		{

			// get the minimum buffer size that can be used
			int minRecBufSize = AudioRecord.getMinBufferSize(m_captureSettings.SampleRate, NUM_CHANNELS_CAPTURING == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT);

			// double size to be more safe
			int recBufSize = minRecBufSize * 2;

			// release the object
			if (m_audioRecord != null)
			{
				m_audioRecord.release();
				m_audioRecord = null;
			}

			try
			{
				m_audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION, m_captureSettings.SampleRate, NUM_CHANNELS_CAPTURING == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, recBufSize);

			}
			catch (Exception e)
			{
				Log.e(LOG_TAG, e.Message);
				return false;
			}

			// check that the audioRecord is ready to be used
			if (m_audioRecord.State != AudioRecord.STATE_INITIALIZED)
			{
				Log.i(LOG_TAG, "Audio capture is not initialized " + m_captureSettings.SampleRate);

				return false;
			}

			m_shutdownCaptureThread = false;
			(new Thread(m_captureThread)).Start();

			return true;
		}
		/// <summary>
		/// Tear down the capturer under the capture lock: release the
		/// recorder and wake the capture thread so it can observe the
		/// shutdown flag and exit.
		/// NOTE(review): m_audioRecord is released without a null check, and
		/// the shutdown flag is set only after release — verify the capture
		/// thread cannot touch the recorder in between.
		/// </summary>
		public override bool destroyCapturer()
		{
			m_captureLock.@lock();
			// release the object
			m_audioRecord.release();
			m_audioRecord = null;
			m_shutdownCaptureThread = true;
			m_captureEvent.signal();

			m_captureLock.unlock();
			return true;
		}