Example #1
    private int lastStoryTagTurn = -1; // Used for ensuring only 1 Story Tag is added per turn

    void Awake()
    {
        print("GameManager Awake");

        if (instance != null && instance != this)
        {
            print("Destroying duplicate GameManager");
            Destroy(gameObject);
            return;
        }
        instance = this;

        turnsPlayed          = -1;
        currentRound         = 0;
        storyTagNegativeIter = 0;
        storyTagPositiveIter = 0;

        audioRecorder = new AudioRecorder();
        playerList    = new List<NetworkPlayerScript>();

        story = new Story();

        if (isStoryView)
        {
            isActivePlayer = false;
        }

        LoadScene(SceneConstants.OFFLINE); // OFFLINE currently same as START
    }
Example #2
 public AudioService(IOptionsService optionsService)
 {
     _optionsService = optionsService;
     _audioRecorder  = new AudioRecorder();
     _audioRecorder.RecordingStatusChangeEvent += AudioRecorderOnRecordingStatusChangeHandler;
     _audioRecorder.ProgressEvent += AudioRecorderOnProgressHandler;
 }
Example #3
        public override View Draw(FormMode mode, NetworkMode network)
        {
            recorder      = new AudioRecorder(this);
            this.XControl = recorder;

            return base.Draw(mode, network);
        }
        private void Recorder_Stopped(IAudioRecorder arg1, ErrorEventArgs arg2)
        {
            Recorder.Stopped -= Recorder_Stopped;
            Directory.CreateDirectory(System.IO.Path.GetDirectoryName(PathToCurrentAudioSegment));             // make sure audio directory exists
            int millisecondsToTrimFromEndForMouseClick = 100;

            try
            {
                var minimum = TimeSpan.FromMilliseconds(300);           // this is arbitrary
                AudioRecorder.TrimWavFile(PathToTemporaryWav, PathToCurrentAudioSegment, new TimeSpan(), TimeSpan.FromMilliseconds(millisecondsToTrimFromEndForMouseClick), minimum);
                RobustFile.Delete(PathToTemporaryWav);                  // Otherwise, these continue to clutter up the temp directory.
            }
            catch (Exception error)
            {
                Logger.WriteEvent(error.Message);
                RobustFile.Copy(PathToTemporaryWav, PathToCurrentAudioSegment, true);
            }

            //We don't actually need the mp3 now, so let people play with recording even without LAME (previously it could crash BL-3159).
            //We could put this off entirely until we make the ePUB.
            //I'm just gating this for now because maybe the thought was that it's better to do it a little at a time?
            //That's fine so long as it doesn't make the UI unresponsive on slow machines.
            if (LameEncoder.IsAvailable())
            {
                _mp3Encoder.Encode(PathToCurrentAudioSegment, PathToCurrentAudioSegment.Substring(0, PathToCurrentAudioSegment.Length - 4), new NullProgress());
                // Note: we need to keep the .wav file as well as the mp3 one. The mp3 format (or alternative mp4)
                // is required for ePUB. The wav file is a better permanent record of the recording; also,
                // it is used for playback.
            }
        }
Example #5
        public override void HotkeyTriggered()
        {
            if (audioInputDevice != null)
            {
                if (recorder == null)
                {
                    if (audioInputDevice.Recorders.Count > 0)
                    {
                        recorder = audioInputDevice.Recorders[0];
                    }
                }

                if (recorder == null)
                {
                    return;
                }

                byte[] memorySoundData = recorder.TempMemoryRecordedBytes;

                if (audioOutputDevice != null)
                {
                    audioOutputDevice.AssociatedEngine.PlayRawWaveSound(memorySoundData, recorder.WaveSource.WaveFormat);
                }

                if (audioOutputDeviceTwo != null)
                {
                    audioOutputDeviceTwo.AssociatedEngine.PlayRawWaveSound(memorySoundData,
                                                                           recorder.WaveSource.WaveFormat);
                }
            }
        }
        /// <summary>
        /// Initializes the recorder.
        /// Raises "FileFormatUpdated" and "RecordingQualityUpdated" so other application modules can react.
        /// </summary>
        public void Init()
        {
            var bitrateType = AudioBitRateType.Medium;
            var fileFormat  = FileFormatType.MP4;

            try
            {
                Directory.CreateDirectory(PATH_TO_RECORDINGS);
                var formatCodec = FILE_FORMATS_DICTIONARY[fileFormat];
                _recorder = new AudioRecorder(formatCodec.Item2, formatCodec.Item1)
                {
                    AudioBitRate  = RECORDING_QUALITY_DICTIONARY[bitrateType],
                    AudioChannels = (int)AudioChannelType.Stereo
                };
                _recorder.Prepare();
            }
            catch (Exception exception)
            {
                ErrorHandler(exception.Message);
                return;
            }

            FileFormatUpdated?.Invoke(this, fileFormat);
            RecordingQualityUpdated?.Invoke(this, bitrateType);
        }
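
Per the summary, Init() raises FileFormatUpdated and RecordingQualityUpdated so that other modules can refresh their state. A minimal subscriber sketch; the _recorderService field, the label controls and the EventHandler<T>-style signatures are assumptions inferred from the ?.Invoke(this, ...) calls above.

        // Hypothetical consumer wiring; only the event names come from the snippet above.
        private void WireRecorderEvents()
        {
            _recorderService.FileFormatUpdated       += (sender, format)  => _formatLabel.Text  = format.ToString();
            _recorderService.RecordingQualityUpdated += (sender, quality) => _qualityLabel.Text = quality.ToString();
        }
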
Example #7
        public override void HotkeyTriggered()
        {
            if (recorder == null && audioDevice != null)
            {
                if (audioDevice.Recorders.Count > 0)
                {
                    recorder = audioDevice.Recorders[0];
                }
                else
                {
                    // ExtraData3 optionally carries the number of seconds to wait; default to 30.
                    int finalSecondsToWait;
                    if (!int.TryParse(ExtraData3, out finalSecondsToWait))
                    {
                        finalSecondsToWait = 30;
                    }
                    recorder = new AudioRecorder(audioDevice, finalSecondsToWait);
                }
            }

            if (recorder == null)
            {
                return;
            }

            recorder.StartRecording();
        }
    public void TestAudio()
    {
        const string fileName = "foo.wav";

        // Test recording, saving and loading an audio clip.
        StartCoroutine(audioRecorder.RecordForDuration(6, (clip) => {
            AudioRecorder.SaveAudioAtPath(fileName, clip);
            Logger.Log("in recorder callback");
            StartCoroutine(this.speechAceManager.AnalyzeTextSample(
                               fileName, "there once was a toad named toad",
                               (speechAceResult) => {
                Logger.Log("in SpeechACE callback");
                if (Constants.USE_ROS)
                {
                    this.rosManager.SendSpeechAceResultAction(speechAceResult).Invoke();
                }
                Logger.Log("happens immediately after recorder callback");
                //            AudioClip loadedClip = AudioRecorder.LoadAudioLocal(fileName);
                //            this.storyManager.audioManager.LoadAudio(loadedClip);
                //            this.storyManager.audioManager.PlayAudio();
                Logger.Log("pushing to S3 now");
                this.assetManager.S3UploadChildAudio(fileName);
                Logger.Log("happens immediately after calling upload");
            }));
        }));
    }
Example #9
        private void InitializeInputDevices()
        {
            // Initialize Input Device Combo Box
            AudioRecorder recorder          = m_ProjectView.TransportBar.Recorder;
            string        defaultInputName  = "";
            string        defaultOutputName = "";

            m_cb_InputDevice.Items.Clear();
            m_cb_OutPutDevice.Items.Clear();
            foreach (InputDevice input in recorder.InputDevices)
            {
                m_cb_InputDevice.Items.Add(input.Name);
            }
            if (m_cb_InputDevice.Items.Count > 0)
            {
                m_cb_InputDevice.SelectedIndex = 0;
            }

            // Initialize Output Device Combo Box
            AudioPlayer player = m_ProjectView.TransportBar.AudioPlayer;

            foreach (OutputDevice output in player.OutputDevices)
            {
                m_cb_OutPutDevice.Items.Add(output.Name);
            }

            if (m_cb_OutPutDevice.Items.Count > 0)
            {
                m_cb_OutPutDevice.SelectedIndex = 0;
            }
        }
Example #10
    // Use this for initialization
    void Start()
    {
        audioRecorder = GetComponent<AudioRecorder>();

        dataStorageHandler = new DataStorageHandler<RecordingData>();
        audioClipConverter = new AudioClipConverter();
    }
        /// <summary>
        /// Prepares the audio recorder.
        /// </summary>
        /// <remarks>
        /// This method must be called before start().
        /// </remarks>
        /// <param name="preset">
        /// The preset to specify the quality of audio recording.
        /// </param>
        /// <exception cref="InvalidOperationException">If it is called after start().</exception>
        /// <exception cref="ArgumentNullException">preset is null</exception>
        public void Prepare(AudioRecorderPreset preset)
        {
            if (preset == null)
            {
                throw new ArgumentNullException(nameof(preset));
            }

            if (_recorder == null)
            {
                _recorder = new AudioRecorder(preset.Codec, preset.FileFormat);
                _recorder.StateChanged           += OnStateChanged;
                _recorder.ErrorOccurred          += OnErrorOccurred;
                _recorder.RecordingStatusChanged += OnRecordingStatusChanged;
                _recorder.RecordingLimitReached  += OnRecordingLimitReached;
            }
            else
            {
                _recorder.SetFormatAndCodec(preset.Codec, preset.FileFormat);
            }

            _recorder.AudioBitRate    = preset.BitRate;
            _recorder.AudioSampleRate = preset.SampleRate;

            _recorder.AudioDevice = RecorderAudioDevice.Mic;

            _recorder.Prepare();
        }
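
The remarks above fix the call order: Prepare(preset) has to run before the recorder is started. A minimal usage sketch under that contract; the object-initializer form of AudioRecorderPreset, the _recorderService field and its Start() wrapper are assumptions, not the original API.

            // Hypothetical caller; enum members follow Tizen.Multimedia naming, the rest is illustrative.
            var preset = new AudioRecorderPreset
            {
                Codec      = RecorderAudioCodec.Aac,
                FileFormat = RecorderFileFormat.Mp4,
                BitRate    = 128000,
                SampleRate = 44100
            };
            _recorderService.Prepare(preset);   // must precede any start, per the remarks above
            _recorderService.Start();           // assumed thin wrapper around the recorder's Start()
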
Example #12
        private void InitAudioDevices()
        {
            if (_audioRecorder != null)
            {
                _audioRecorder.Dispose();
                _audioRecorder = null;
            }

            switch (_config.OutputFormat)
            {
            case "wav":
                _audioRecorder = new WaveRecorder();
                break;

            case "mp3":
                _audioRecorder = new MP3Recorder(_config.MP3EncodingPreset);
                break;
            }

            devicesComboBox.Items.Clear();
            foreach (MMDevice device in _audioRecorder.Devices)
            {
                devicesComboBox.Items.Add(device);
            }

            devicesComboBox.SelectedIndex = _audioRecorder.DefaultDeviceNumber;
        }
Example #13
        /// <summary>
        /// Builds a relative panel containing the details of a voice memo
        /// </summary>
        /// <param name="voiceMemo">The voice memo to build the panel for</param>
        /// <param name="audioRecorder">The object that will play the voice memo's audio</param>
        /// <param name="DeleteCallBack">the callback function for when the voice memo's delete button is clicked</param>
        /// <returns></returns>
        public static RelativePanel BuildVoiceMemoPanel(VoiceMemo voiceMemo, AudioRecorder audioRecorder, Action DeleteCallBack = null)
        {
            var panel = new RelativePanel();

            panel.Margin = new Thickness(0, 10, 0, 10);
            var ellipse           = BuildMemoEllipse();
            var titleBlock        = BuildTitleBlock(voiceMemo);
            var durationBlock     = BuildDurationBlock(voiceMemo);
            var dateRecordedBlock = BuildDateRecordedBlock(voiceMemo);
            var deleteButton      = BuildDeleteButton(voiceMemo, audioRecorder, DeleteCallBack);
            var playbackButton    = BuildPlayBackButton(voiceMemo, audioRecorder);

            panel.Children.Add(ellipse);
            panel.Children.Add(titleBlock);
            panel.Children.Add(durationBlock);
            panel.Children.Add(dateRecordedBlock);
            panel.Children.Add(deleteButton);
            panel.Children.Add(playbackButton);
            // position the elements within the panel
            RelativePanel.SetRightOf(titleBlock, ellipse);
            RelativePanel.SetAlignVerticalCenterWith(titleBlock, ellipse);
            RelativePanel.SetBelow(durationBlock, titleBlock);
            RelativePanel.SetAlignLeftWith(durationBlock, titleBlock);
            RelativePanel.SetBelow(dateRecordedBlock, durationBlock);
            RelativePanel.SetAlignLeftWith(dateRecordedBlock, durationBlock);
            RelativePanel.SetBelow(deleteButton, dateRecordedBlock);
            RelativePanel.SetAlignBottomWithPanel(deleteButton, true);
            RelativePanel.SetAlignLeftWithPanel(deleteButton, true);
            RelativePanel.SetBelow(playbackButton, dateRecordedBlock);
            RelativePanel.SetAlignBottomWithPanel(playbackButton, true);
            RelativePanel.SetAlignRightWithPanel(playbackButton, true);
            return panel;
        }
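
A short usage sketch for the builder above: one panel per memo, appended to a parent container and rebuilt when a memo is deleted. The VoiceMemoList StackPanel, the GetAllVoiceMemos() helper and the _audioRecorder field are hypothetical stand-ins for whatever the calling page actually uses.

        // Hypothetical page code-behind; only BuildVoiceMemoPanel comes from the helper above.
        private void PopulateVoiceMemoPanels()
        {
            VoiceMemoList.Children.Clear();
            foreach (var memo in GetAllVoiceMemos())
            {
                VoiceMemoList.Children.Add(
                    BuildVoiceMemoPanel(memo, _audioRecorder, () => PopulateVoiceMemoPanels()));
            }
        }
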
Example #14
 /// <summary>
 /// Create a recording session for a project starting from a given node.
 /// </summary>
 /// <param name="presentation">The presentation in which we are recording.</param>
 /// <param name="recorder">The audio recorder from the project.</param>
 /// <param name="settings">Application settings, including the local recording directory.</param>
 public RecordingSession(ObiPresentation presentation, AudioRecorder recorder, Settings settings)
 {
     mPresentation = presentation;
     mRecorder     = recorder;
     if (!string.IsNullOrEmpty(settings.Audio_LocalRecordingDirectory))
     {
         mRecorder.RecordingDirectory = settings.Audio_LocalRecordingDirectory;
     }
     else
     {
         mRecorder.RecordingDirectory =
             presentation.DataProviderManager.DataFileDirectoryFullPath;
     }
     if (!Directory.Exists(mRecorder.RecordingDirectory))
     {
         Directory.CreateDirectory(mRecorder.RecordingDirectory);
     }
     mSessionOffset                    = 0;
     mPhraseMarks                      = null;
     mSectionMarks                     = null;
     mAudioList                        = new List<ManagedAudioMedia>();
     mRecordingUpdateTimer             = new Timer();
     mRecordingUpdateTimer.Tick       += new System.EventHandler(mRecordingUpdateTimer_tick);
     mRecordingUpdateTimer.Interval    = 1000;
     m_Settings                        = settings;
     m_PhraseIndexesToDelete           = new List<int>();
     mRecorder.PcmDataBufferAvailable += new AudioLib.AudioRecorder.PcmDataBufferAvailableHandler(DetectPhrasesOnTheFly);
 }
Example #15
        public void StartRecording()
        {
            // First make sure recording microphone is only on MP4
            recordMicrophone &= container == Container.MP4;
            // Create recording configurations (video width clamped to 720)
            var width       = 720;
            var height      = width * Screen.height / Screen.width;
            var framerate   = container == Container.GIF ? 10 : 30;
            var videoFormat = new VideoFormat(width, (int)height, framerate);
            var audioFormat = recordMicrophone ? AudioFormat.Unity : AudioFormat.None;

            // Start recording
            NatCorder.StartRecording(container, videoFormat, audioFormat, OnReplay);
            videoRecorder = CameraRecorder.Create(Camera.main);
            // If recording GIF, skip a few frames to give a real GIF look
            if (container == Container.GIF)
            {
                videoRecorder.recordEveryNthFrame = 5;
            }
            // Start microphone and create audio recorder
            if (recordMicrophone)
            {
                StartMicrophone();
                audioRecorder = AudioRecorder.Create(microphoneSource, true);
            }
        }
        /// <summary>
        /// Stops speech recognition.
        /// </summary>
        /// <returns></returns>
        public async Task StopAsync()
        {
            //recorder.Stop();
            //recorder.Save();
            //recorder.Close();

            timer.Stop();
            this._audioRecorder.StopRecording();
            string filename = "speechfile_end.wav";

            await this._audioRecorder.SaveAudioToFile(filename);

            this._audioRecorder.Close();
            this._audioRecorder = new AudioRecorder();

            string text = "";

            try
            {
                text = await SpeechRecognizeAsync(filename);
            }
            catch (Exception ex)
            {
                string str = ex.Message;
            }

            OnReceiveText(this, text); // raise the event

            // Delete the temporary file once recognition is done
            DelWavFile(filename);
            _is_recognizing = false;
        }
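
StopAsync() above stops the timer and the recorder, saves the WAV, recognizes it, raises OnReceiveText and then deletes the file. A hedged sketch of the matching start side; StartRecording() on this AudioRecorder wrapper is an assumption mirroring the StopRecording() call, and the timer restart presumes the timer stopped above enforces a recording limit.

        // Hypothetical counterpart to StopAsync(); not taken from the original source.
        public void Start()
        {
            if (_is_recognizing)
            {
                return;                              // a capture/recognition cycle is already running
            }
            _is_recognizing = true;
            this._audioRecorder.StartRecording();    // assumed mirror of StopRecording() above
            timer.Start();                           // restart the timer that StopAsync() stops
        }
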
        /// <summary>
        /// Releases all resources used by the current instance
        /// </summary>
        protected virtual void Dispose(bool disposing)
        {
            if (!_disposed)
            {
                if (disposing)
                {
                    if (_recorder != null)
                    {
                        if (_recorder.State == RecorderState.Paused || _recorder.State == RecorderState.Recording)
                        {
                            _recorder.Cancel();
                        }
                        _recorder.StateChanged           -= OnStateChanged;
                        _recorder.ErrorOccurred          -= OnErrorOccurred;
                        _recorder.RecordingStatusChanged -= OnRecordingStatusChanged;
                        _recorder.RecordingLimitReached  -= OnRecordingLimitReached;
                        _recorder.Unprepare();
                        _recorder.Dispose();
                        _recorder = null;
                    }
                }

                _recorder = null;
                _disposed = true;
            }
        }
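
The protected Dispose(bool) above follows the standard .NET dispose pattern, so it is normally paired with a public Dispose() entry point; the sketch below shows that conventional pairing, which the original snippet does not include.

        /// <summary>
        /// Releases all resources used by the current instance.
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);   // nothing left for a finalizer once managed cleanup has run
        }
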
Example #18
        static void Main(string[] args)
        {
            AudioRecorder test = new AudioRecorder();

            test.BeginMonitoring(0);

            double loveGaugeRunningAverage = 0;             // exponential moving average of the gauge
            int    loveCount = 0;

            while (true)
            {
                double average = test.getLoveGagueAverage();
                loveGaugeRunningAverage += (average - loveGaugeRunningAverage) * 0.05;

                if (test.getLoveGagueAverage() - loveGaugeRunningAverage >= 40)
                {
                    //Console.WriteLine("BCS3 detected your love!! <3");
                    loveCount += 1;
                    if (loveCount >= 100)
                    {
                        break;
                    }

                    Console.Write("[");
                    for (int i = 0; i < 100; i++)
                    {
                        Console.Write(i < loveCount ? "-" : " ");
                    }
                    Console.WriteLine("]");
                }
                Thread.Sleep(100);
            }

            Console.WriteLine("YOU DID IT!!!");
        }
Example #19
 public void NotifyUpdateVuMeter(AudioRecorder Recorder, UpdateVuMeterFromRecorder Update)
 {
     if (UpdateVuMeterEvent != null)
     {
         UpdateVuMeterEvent(Recorder, Update);
     }
 }
        /// ------------------------------------------------------------------------------------
        public SessionRecorderDlgViewModel()
        {
            // This code was used to do some testing of what NAudio returns. At some point,
            // in general, it may prove to lead to something useful for getting the supported
            // formats for a recording device.
            //var devices = new MMDeviceEnumerator();
            //var defaultDevice = devices.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
            //var recDev = RecordingDevice.Devices.First();
            //recDev.Capabilities = WaveIn.GetCapabilities(0);
            //recDev.GenericName = defaultDevice.FriendlyName;
            //Recorder = new AudioRecorder();
            //Recorder.SelectedDevice = recDev;

            Recorder = new AudioRecorder(60);             // 1 hour
            Recorder.SelectedDevice = RecordingDevice.Devices.First();
            Recorder.Stopped       += (sender, e) =>
            {
                if (UpdateAction != null)
                {
                    UpdateAction(sender, e);
                }
            };
            _path = Path.Combine(Path.GetTempPath(),
                                 string.Format("SayMoreSessionRecording_{0}.wav",
                                               DateTime.Now.ToString("yyyyMMdd_HHmmss")));

            if (File.Exists(_path))
            {
                try { File.Delete(_path); }
                catch { }
            }
        }
Example #21
        private void CMBBXpostType_SelectionChanged(object sender, SelectionChangedEventArgs e)
        {
            switch (CMBBXpostType.SelectedIndex)
            {
            case 0:
            {
                HoleType = HoleInfo.TYPE_TEXT;
                STKPNaudio.Visibility = Visibility.Collapsed;
                STKPNimage.Visibility = Visibility.Collapsed;
            } break;

            case 1:
            {
                HoleType = HoleInfo.TYPE_IMAGE;
                STKPNimage.Visibility = Visibility.Visible;
                STKPNaudio.Visibility = Visibility.Collapsed;
            } break;

            case 2:
            {
                HoleType = HoleInfo.TYPE_AUDIO;
                STKPNimage.Visibility = Visibility.Collapsed;
                STKPNaudio.Visibility = Visibility.Visible;
                recorder = new AudioRecorder();
            } break;

            default: break;
            }
        }
 /// <summary>
 /// Initializes a new instance of the <see cref="VoiceInputPage"/> class.
 /// </summary>
 /// <param name="currentUser">The current user.</param>
 public VoiceInputPage(User currentUser)
 {
     InitializeComponent();
     this.CurrentUser  = currentUser;
     this.recorder     = new AudioRecorder();
     this.parser       = new VoiceToCommandParser();
     this.actionParser = new ActionParser(this.CurrentUser);
 }
 public SpeechRecognizer(string url_server = "http://127.0.0.1:20000/", string token_client = "qwertasd")
 {
     url   = url_server;
     token = token_client;
     pool_audioRecorder[0] = new AudioRecorder();
     pool_audioRecorder[1] = new AudioRecorder();
     this._audioRecorder   = pool_audioRecorder[0];
 }
Example #24
 /// <summary>
 /// Plays back the passed <paramref name="VoiceMemoToPlay"/> using the passed <paramref name="audioRecorder"/>
 /// </summary>
 /// <param name="VoiceMemoToPlay"></param>
 /// <param name="audioRecorder"></param>
 private static async void PlayVoiceMemo(VoiceMemo VoiceMemoToPlay, AudioRecorder audioRecorder)
 {
     //don't let playback if in recording session
     if (!audioRecorder.IsRecording)
     {
         await audioRecorder.PlayFromDisk(VoiceMemoToPlay.FileName);
     }
 }
Example #25
        /// <summary>
        /// Builds and returns a button that deletes the passed <paramref name="VoiceMemoToAdd"/> from the database and the file system
        /// </summary>
        /// <param name="VoiceMemoToAdd"></param>
        /// <param name="audioRecorder"></param>
        /// <param name="Callback"></param>
        /// <returns></returns>
        private static Button BuildDeleteButton(VoiceMemo VoiceMemoToAdd, AudioRecorder audioRecorder, Action Callback = null)
        {
            var deleteButton = new Button();

            deleteButton.Content = "Delete";
            deleteButton.Click  += (sender, arguments) => DeleteVoiceMemoAsync(VoiceMemoToAdd, audioRecorder, Callback);
            return deleteButton;
        }
Example #26
    public override async void Start()
    {
        base.Start();
        audioRecorder = audioRecorderInstance.GetComponent<AudioRecorder>();

        //await InitAnchorSession();
        //FindAnchors();
    }
Example #27
 public VoiceMemosPage()
 {
     this.InitializeComponent();
     this.HideInitialControls();
     //used to record, stop, and play voice note
     this._audioRecorder = new AudioRecorder();
     this.PopulateListOfVoiceMemos();
 }
Example #28
        /// <summary>
        /// Builds and returns a button that plays back the passed <paramref name="VoiceMemoToAdd"/> using the passed <paramref name="audioRecorder"/>
        /// </summary>
        /// <param name="VoiceMemoToAdd"></param>
        /// <param name="audioRecorder"></param>
        /// <returns></returns>
        private static Button BuildPlayBackButton(VoiceMemo VoiceMemoToAdd, AudioRecorder audioRecorder)
        {
            var playbackButton = new Button();

            playbackButton.Content = "Playback";
            playbackButton.Click  += (sender, arguments) => PlayVoiceMemo(VoiceMemoToAdd, audioRecorder);
            return playbackButton;
        }
Example #29
        public AudioService(IMapper mapper)
        {
            _mapper = mapper;

            _audioRecorder = new AudioRecorder();
            _audioRecorder.RecordingStatusChangeEvent += AudioRecorderOnRecordingStatusChangeHandler;
            _audioRecorder.ProgressEvent += AudioRecorderOnProgressHandler;
        }
 public AudioService()
 {
     _recorder = AudioRecorderBuilder.With(Application.Context)
                 .FileName("AudioFile")
                 .Config(AudioRecorder.MediaRecorderConfig.Default)
                 .Loggable()
                 .Build();
 }
Example #31
        public ProcessorWaveProvider(string sourceName, IWaveProvider sourceWaveProvider, string waveFilePath, WaveFormat outFormat, Common.ProcessRadioSignalingItemDelegate sigDelegate, Action<bool> hasPropertyChanged, bool recordEnabled, Common.SignalRecordingType recordType, int recordKickTime, Common.NoiseFloor noiseFloor, int customNoiseFloor, bool removeNoise, bool decodeMDC1200, bool decodeGEStar, bool decodeFleetSync, bool decodeP25)
            : base(sourceWaveProvider, waveFilePath)
        {
            LastValidStreamTitle = string.Empty;
            _sourceName = sourceName;
            _sourceFormat = sourceWaveProvider.WaveFormat;
            _outFormat = outFormat;
            _hasPropertyChanged = hasPropertyChanged;

            _silenceHelper = new SilenceHelper(outFormat.AverageBytesPerSecond / (outFormat.BitsPerSample / 8), noiseFloor, removeNoise, customNoiseFloor);

            if (outFormat.Equals(sourceWaveProvider.WaveFormat))
            {
                _resampleStream = null;
                _useResampler = false;
            }
            else
            {
                if (Common.AppSettings.Instance.DiagnosticMode)
                {
                    Common.ConsoleHelper.ColorWriteLine(ConsoleColor.Magenta, "{0}: Source Format <> Out Format [{1}] <> [{2}]", sourceName, sourceWaveProvider.WaveFormat, outFormat);
                }
                _resampleStream = new NAudio.Wave.Compression.AcmStream(sourceWaveProvider.WaveFormat, outFormat);
                _useResampler = true;
            }
            if (decodeMDC1200)
            {
                _mdc = new Decoders.MDC1200(outFormat.SampleRate, ProcessMDC1200, sourceName);
            }
            else
            {
                _mdc = null;
            }
            if (decodeGEStar)
            {
                _star = new Decoders.STAR(outFormat.SampleRate, ProcessSTAR, Decoders.STAR.star_format.star_format_1_16383, sourceName);
            }
            else
            {
                _star = null;
            }
            _rootDecoder = new Decoders.RootDecoder(outFormat.SampleRate, decodeFleetSync, decodeP25, ProcessRootDecoder);

            _recorder = new AudioRecorder(sourceName, recordType, recordKickTime, outFormat, AudioProcessingGlobals.DefaultSaveFileWaveFormat, recordEnabled);
            _bytesPerSample = outFormat.BitsPerSample / 8;
            _encoding = outFormat.Encoding;
            _sigDelegate = sigDelegate;
        }
 //----------------------
 //functions
 //----------------------
 public audioServer()
 {
     audioPlayer = new AudioRecorder();
 }
        protected override void OnCreate(Bundle bundle)
        {
            base.OnCreate(bundle);
            SetContentView(Resource.Layout.CreateAudioMessage);
            ImageView btns = FindViewById<ImageView>(Resource.Id.imgNewLoginHeader);
            TextView header = FindViewById<TextView>(Resource.Id.txtFirstScreenHeader);
            RelativeLayout relLayout = FindViewById<RelativeLayout>(Resource.Id.relativeLayout1);
            ImageHelper.setupTopPanel(btns, header, relLayout, header.Context);
            Header.headertext = Application.Context.Resources.GetString(Resource.String.audioTitle);
            Header.fontsize = 36f;
            ImageHelper.fontSizeInfo(header.Context);
            header.SetTextSize(Android.Util.ComplexUnitType.Dip, Header.fontsize);
            header.Text = Header.headertext;
            CurrentStepInc = base.Intent.GetIntExtra("CurrentStep", 1);
            isPlayback = base.Intent.GetBooleanExtra("playback", false);
            filename = base.Intent.GetStringExtra("filename");
            time = 15;
            up = 0;
            btnRecord = FindViewById<ImageButton>(Resource.Id.btnRecord);
            btnPlay = FindViewById<ImageButton>(Resource.Id.btnPlay);
            Button btnSend = FindViewById<Button>(Resource.Id.btnSend);
            Button btnAdd = FindViewById<Button>(Resource.Id.btnAdd);

            progress = FindViewById<ProgressBar>(Resource.Id.progressBar1);
            context = progress.Context;
            mmg = wowZapp.LaffOutOut.Singleton.mmg;
            timer = new System.Timers.Timer();
            timer.Interval = 1000;
            timer.Elapsed += new System.Timers.ElapsedEventHandler(timer_Elapsed);
            isRecording = false;

            string filename2 = Path.Combine(wowZapp.LaffOutOut.Singleton.ContentDirectory, "voice_msg_");
            filename2 += CurrentStepInc.ToString() + ".3gp";
            path = filename2;   // the step number is already baked into the file name

            if (string.IsNullOrEmpty(filename))
                filename = path;

            if (File.Exists(filename))
                File.Delete(filename);

            ar = new AudioRecorder(filename);
            ap = new AudioPlayer(context);

            #if DEBUG
            System.Diagnostics.Debug.WriteLine("Filename audio being saved as = {0}", filename);
            #endif

            btnRecord.Click += new EventHandler(btnRecord_Click);

            btnSend.Click += delegate
            {
                SendAudioMessage();
            };

            btnAdd.Click += delegate
            {
                AddToMessages();
            };

            dbm = wowZapp.LaffOutOut.Singleton.dbm;

            if (isPlayback)
            {
                btnRecord.Visibility = ViewStates.Gone;
                btnPlay.Click += new EventHandler(btnPlay_Click);
            }
            else
            {
                btnPlay.Click += new EventHandler(btnStop_Click);
                btnPlay.SetBackgroundResource(Resource.Drawable.stopbutton);
            }

            rectime = 16;

            ImageButton btnBack = FindViewById<ImageButton>(Resource.Id.btnBack);
            btnBack.Tag = 0;
            ImageButton btnHome = FindViewById<ImageButton>(Resource.Id.btnHome);
            btnHome.Tag = 1;

            btnBack.Click += delegate
            {
                ar.cancelOut();
                Finish();
            };
            btnHome.Click += delegate
            {
                ar.cancelOut();
                Intent i = new Intent(this, typeof(Main.HomeActivity));
                i.AddFlags(ActivityFlags.ClearTop);
                StartActivity(i);
            };

            ImageButton[] buttons = new ImageButton[2];
            LinearLayout bottom = FindViewById<LinearLayout>(Resource.Id.bottomHolder);
            buttons[0] = btnBack;
            buttons[1] = btnHome;
            ImageHelper.setupButtonsPosition(buttons, bottom, context);
        }
Example #34
 public Form1()
 {
     InitializeComponent();
     recorder = new AudioRecorder(0);
 }