        public CloneableAudioRecord([GeneratedEnum] AudioSource audioSource,
                                    int sampleRateInHz,
                                    [GeneratedEnum] ChannelIn channelConfig,
                                    [GeneratedEnum] Android.Media.Encoding audioFormat,
                                    int bufferSizeInBytes)
            : base(audioSource, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes)
        {
        }
        public string Prepare()
        {
            ChannelIn ch = ChannelIn.Mono;

            if (CHANNELS == 2)
            {
                ch = ChannelIn.Stereo;
            }

            bufferSize = AudioRecord.GetMinBufferSize(SAMPLING_RATE, ch, Android.Media.Encoding.Pcm16bit) * 3;

            var recordingId = Guid.NewGuid().ToString();
            var fileName    = $"{recordingId}.{FILE_EXTENSION}";

            endRecording = false;

            audioBuffer = new byte[bufferSize];

            try
            {
                filePath = GetFullPathNameForRecording(fileName);

                audioRecord = new AudioRecord(
                    // Hardware source of recording.
                    AudioSource.Mic,
                    // Frequency
                    SAMPLING_RATE,
                    // Mono or stereo
                    ch,
                    // Audio encoding
                    Android.Media.Encoding.Pcm16bit,
                    // Size of the capture buffer in bytes.
                    audioBuffer.Length
                    );

                output = new AudioFile()
                {
                    FileName         = fileName,
                    BitDepth         = BIT_RATE,
                    SampleRate       = SAMPLING_RATE,
                    NumberOfChannels = CHANNELS,
                    ContentType      = MIME_TYPE,
                };

                currentState = State.Prepared;
                return recordingId;
            }
            catch (IllegalStateException e)
            {
                throw new RecordingException(e.ToString());
            }
        }
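        // Illustrative sketch (not from the original source): one way to drive the recorder
        // prepared above. audioRecord, audioBuffer, endRecording and filePath are the fields
        // used in Prepare(); the write loop and file handling here are assumptions.
        audioRecord.StartRecording();

        using (var fileStream = System.IO.File.OpenWrite(filePath))
        {
            while (!endRecording)
            {
                // Read() blocks until PCM data is available; a negative return value is an error code.
                int bytesRead = audioRecord.Read(audioBuffer, 0, audioBuffer.Length);
                if (bytesRead > 0)
                {
                    fileStream.Write(audioBuffer, 0, bytesRead);
                }
            }
        }

        audioRecord.Stop();
        audioRecord.Release();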
Example #3
        /// <summary>
        /// Initializes a new instance of the <see cref="AudioStream"/> class.
        /// </summary>
        /// <param name="sampleRate">Sample rate.</param>
        /// <param name="channels">The <see cref="ChannelIn"/> value representing the number of channels to record.</param>
        /// <param name="audioFormat">The format of the recorded audio.</param>
        public AudioStream(int sampleRate = 44100, ChannelIn channels = ChannelIn.Mono, Encoding audioFormat = Encoding.Pcm16bit)
        {
            bufferSize = AudioRecord.GetMinBufferSize(sampleRate, channels, audioFormat);

            if (bufferSize < 0)
            {
                throw new Exception("Invalid buffer size calculated; audio settings used may not be supported on this device");
            }

            SampleRate       = sampleRate;
            this.channels    = channels;
            this.audioFormat = audioFormat;
        }
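        // Illustrative usage sketch (assumption, not part of the original class): request an
        // explicit configuration and fall back to the constructor defaults if the device
        // rejects it.
        AudioStream stream;
        try
        {
            // 16 kHz mono 16-bit PCM is a common speech-capture configuration.
            stream = new AudioStream(16000, ChannelIn.Mono, Encoding.Pcm16bit);
        }
        catch (Exception)
        {
            // The requested settings are unsupported; use the defaults (44.1 kHz, mono, 16-bit PCM).
            stream = new AudioStream();
        }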
        public static AudioRecord FindAudioRecord(ref int sampleRate, ref Android.Media.Encoding audioFormat, ref ChannelIn channelConfig, ref int bufferSize)
        {
            foreach (int sr in _sampleRates)
            {
                foreach (var af in new Android.Media.Encoding[] { Android.Media.Encoding.Pcm16bit, Android.Media.Encoding.Pcm8bit })
                {
                    foreach (var cc in new ChannelIn[] { ChannelIn.Stereo, ChannelIn.Mono })
                    {
                        try
                        {
                            // Probe this combination of sample rate, encoding and channel configuration.
                            int bs = AudioRecord.GetMinBufferSize(sr, cc, af);

                            if (bs > 0)
                            {
                                // check if we can instantiate and have a success
                                AudioRecord recorder = new AudioRecord(AudioSource.Default, sr, cc, af, bs);

                                if (recorder.State == State.Initialized)
                                {
                                    bufferSize = bs;
                                    sampleRate = sr;
                                    audioFormat = af;
                                    channelConfig = cc;

                                    return recorder;
                                }

                                // Release the native recorder when this configuration fails to initialize.
                                recorder.Release();
                            }
                        }
                        catch (Exception)
                        {
                            // This configuration is not supported on the device; keep trying.
                        }
                    }
                }
            }
            return null;
        }
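        // Illustrative call (assumption, not from the source): the ref arguments are seeded with
        // placeholder values and overwritten with the first configuration the device accepts.
        int sampleRate = 44100;
        var audioFormat = Android.Media.Encoding.Pcm16bit;
        var channelConfig = ChannelIn.Stereo;
        int bufferSize = 0;

        AudioRecord recorder = FindAudioRecord(ref sampleRate, ref audioFormat, ref channelConfig, ref bufferSize);
        if (recorder == null)
        {
            throw new InvalidOperationException("No supported AudioRecord configuration was found on this device.");
        }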
        public static Supermortal.Common.PCL.Enums.ChannelConfiguration AndroidChannelConfigurationToChannelConfiguration(ChannelIn channelConfig)
        {
            if (channelConfig == ChannelIn.Mono)
                return Supermortal.Common.PCL.Enums.ChannelConfiguration.Mono;
            if (channelConfig == ChannelIn.Stereo)
                return Supermortal.Common.PCL.Enums.ChannelConfiguration.Stereo;

            return Supermortal.Common.PCL.Enums.ChannelConfiguration.Unknown;
        }
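        // Illustrative follow-up (assumption): map the ChannelIn value detected above to the
        // platform-neutral PCL enum for consumers that do not reference Android.Media.
        var pclChannels = AndroidChannelConfigurationToChannelConfiguration(channelConfig);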
Example #6
        /// <summary>
        /// Starts the recording process.
        /// </summary>
        private async Task StartRecording()
        {
            if (isRecording)
            {
                // Already recording
                return;
            }
            else
            {
                #region UWP
#if NETFX_CORE
                // Prepare MediaRecorder and encoding profile
                await SetupRecording();

                var profile = MediaEncodingProfile.CreateWav(AudioEncodingQuality.Auto);
                profile.Audio = AudioEncodingProperties.CreatePcm((uint)Parameters.SamplingRate, (uint)Parameters.Channels, 16);

                // Start recording
                isRecording = true;
                await audioCapture.StartRecordToStreamAsync(profile, buffer);

                return;
#endif
                #endregion
                #region ANDROID
#if __ANDROID__
                // Setup recorder
                ChannelIn channels = Parameters.Channels == 1 ? ChannelIn.Mono : ChannelIn.Stereo;
                recorder = new AudioRecord(
                    AudioSource.Mic,
                    (int)Parameters.SamplingRate,
                    channels,
                    Android.Media.Encoding.Pcm16bit,
                    bufferLimit
                    );

                // Start recording
                isRecording = true;
                int totalBytesRead = 0;
                buffer = new byte[bufferLimit];

                // Lock the buffer so that no other thread can access it (i.e. replay while recording)
                // and create inconsistent audio data.
                lock (bufferLock)
                {
                    recorder.StartRecording();
                    // Record audio until the buffer is full
                    while (totalBytesRead < bufferLimit)
                    {
                        try
                        {
                            // Append to the buffer at the current offset; a negative value is an error code.
                            int bytesRead = recorder.Read(buffer, totalBytesRead, bufferLimit - totalBytesRead);
                            if (bytesRead < 0)
                            {
                                throw new Exception(String.Format("AudioRecord.Read returned error code {0}", bytesRead));
                            }

                            totalBytesRead += bytesRead;
                        }
                        catch (Exception)
                        {
                            // Invalidate the audio buffer
                            buffer = null;
                            break;
                        }
                    }
                }
#endif
                #endregion
            }
        }
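        // Sketch (assumption, not taken from the original class): one way bufferLimit for the
        // Android branch could be sized so that it holds a fixed number of seconds of 16-bit PCM
        // and is never smaller than the device's minimum AudioRecord buffer.
        private static int ComputeBufferLimit(int samplingRate, int channelCount, int seconds)
        {
            ChannelIn channelMask = channelCount == 1 ? ChannelIn.Mono : ChannelIn.Stereo;

            int minSize = AudioRecord.GetMinBufferSize(samplingRate, channelMask, Android.Media.Encoding.Pcm16bit);
            if (minSize <= 0)
            {
                throw new InvalidOperationException("The requested audio settings are not supported on this device.");
            }

            // 2 bytes per sample (16-bit PCM) * channels * sample rate * duration in seconds.
            int desired = 2 * channelCount * samplingRate * seconds;
            return Math.Max(desired, minSize);
        }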