public AudioRecordingStopsAfterRecording(
     AudioRecording anAudioRecording,
     Microphone aMicrophone)
 {
     audioRecording = anAudioRecording;
     microphone = aMicrophone;
 }
		protected override async void OnNavigatedTo( NavigationEventArgs e )
		{
			base.OnNavigatedTo( e );

			this.microphone = new Microphone();
			await this.microphone.InitializeAsync();
		}
 public AudioRecordingSucceedsWithMicrophone(
     AudioRecording anAudioRecording,
     Microphone aMicrophone)
 {
     audioRecording = anAudioRecording;
     microphone = aMicrophone;
 }
        public void ShouldBeRecordingWhenUsedForAudioRecording(
            Microphone microphone,
            ActivityId audioRecording)
        {
            microphone.UseForAudioRecording(audioRecording);

            microphone.IsInRole(RoleInActivity.Recording(audioRecording)).ShouldBeTrue();
        }
		/// <summary>
		/// Initializes a new instance of the <see cref="XnaMicrophone"/> class.
		/// </summary>
		public XnaMicrophone()
		{
			_microphone = Microphone.Default;

			_timer = new DispatcherTimer { Interval = TimeSpan.FromMilliseconds(50) };

			_timer.Tick += (s, e) => FrameworkDispatcher.Update();
		}
Example 6
 public MainPage()
 {
     microphone = Microphone.GetMicrophone();
     audioPlayer = new AudioPlayer();
     camera = Camera.GetCamera();
     InitializeComponent();
     this.Loaded += new RoutedEventHandler(MainPage_Loaded);
 }
        public void CannotUseForAudioRecordingWhenAlreadyRecording(
            Microphone microphone,
            ActivityId previousAudioRecording,
            ActivityId newAudioRecording)
        {
            microphone.UseForAudioRecording(previousAudioRecording);

            microphone.UseForAudioRecording(newAudioRecording).ShouldBeFalse();
        }
 public VideoRecordingFailsWithoutCamera(
     VideoRecording aVideoRecording,
     VideoCamera aCamera,
     Microphone aMicrophone)
 {
     videoRecording = aVideoRecording;
     camera = aCamera;
     microphone = aMicrophone;
 }
 public VideoRecordingSucceedsWithCameraAndMicrophone(
     VideoRecording aVideoRecording,
     VideoCamera aCamera,
     Microphone aMicrophone)
 {
     videoRecording = aVideoRecording;
     camera = aCamera;
     microphone = aMicrophone;
 }
 public AudioRecordingFailsWithoutMicrophone(
     AudioRecording aPreviousAudioRecording,
     AudioRecording aNewAudioRecording,
     Microphone aMicrophone)
 {
     previousAudioRecording = aPreviousAudioRecording;
     audioRecording = aNewAudioRecording;
     microphone = aMicrophone;
 }
Example 11
        public void ShouldBeAvailableWhenStoppingUseForAudioRecording(
            Microphone microphone,
            ActivityId audioRecording)
        {
            microphone.UseForAudioRecording(audioRecording);

            microphone.StopUsingForAudioRecording(audioRecording);

            microphone.IsAvailable().ShouldBe(true);
        }
Example 12
        /// <summary>
        /// Handles making and receiving calls.
        /// </summary>
        /// <param name="registerName">The SIP ID that will be registered with your PBX</param>
        /// <param name="domainHost">The address of your PBX</param>
        public CallHandlerSample(string registerName, string domainHost)
        {
            microphone = Microphone.GetDefaultDevice();
            speaker = Speaker.GetDefaultDevice();
            connector = new MediaConnector();
            mediaSender = new PhoneCallAudioSender();
            mediaReceiver = new PhoneCallAudioReceiver();

            InitializeSoftPhone(registerName, domainHost);
        }
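A short usage sketch for the handler above. The SIP ID and PBX address are placeholder values, and the device-to-call wiring is assumed to happen inside InitializeSoftPhone, which is not listed here.

        // Hypothetical values: "1000" is a SIP extension, "192.168.1.10" the PBX address.
        var callHandler = new CallHandlerSample("1000", "192.168.1.10");

        // The constructor already picked the default microphone and speaker; connecting
        // them to mediaSender/mediaReceiver is assumed to be done in InitializeSoftPhone.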
Example 13
        public void ShouldNotFindDeviceByIdWhenDeviceIsNotRegistered(
            Devices devices,
            Microphone aDevice,
            DeviceId anyDeviceId)
        {
            devices.RegisterNewDevice(aDevice);
            Device notFoundDevice = null;

            devices.FindDeviceById(anyDeviceId, out notFoundDevice).ShouldBeFalse();
            notFoundDevice.ShouldBeNull();
        }
Example 14
        public void ShouldFindDevicesByType(
            Devices devices,
            Microphone aDevice,
            Device anotherDevice)
        {
            devices.RegisterNewDevice(aDevice);
            devices.RegisterNewDevice(anotherDevice);

            var foundDevices = devices.FindDevicesByType<Microphone>();

            foundDevices.ShouldNotBeEmpty();
            foundDevices.ShouldContain(aDevice);
        }
Example 15
        public void ShouldFindDeviceByIdWhenDeviceIsRegistered(
            Devices devices,
            Microphone aDevice,
            Device anotherDevice)
        {
            devices.RegisterNewDevice(aDevice);
            devices.RegisterNewDevice(anotherDevice);
            Device foundDevice = null;

            devices.FindDeviceById(aDevice.Id, out foundDevice).ShouldBeTrue();

            foundDevice.ShouldBe(aDevice);
        }
Example 16
    void Update()
    {
        if (!recording)
        {
            return;
        }

        forceTransmit -= Time.deltaTime;

        if (Input.GetKeyUp(toggleToTalkKey))
        {
            transmitToggled = !transmitToggled;
        }

        bool transmit        = transmitToggled || Input.GetKey(pushToTalkKey);
        int  currentPosition = Microphone.GetPosition(Device);

        // This means we wrapped around
        if (currentPosition < previousPosition)
        {
            while (sampleIndex < recordFrequency)
            {
                ReadSample(transmit);
            }

            sampleIndex = 0;
        }

        // Read non-wrapped samples
        previousPosition = currentPosition;

        while (sampleIndex + recordSampleSize <= currentPosition)
        {
            ReadSample(transmit);
        }
    }
Example 17
    private float[] GetSamples()
    {
        int pos = Microphone.GetPosition(DEVICE_NAME);

        float[] samples;
        if (pos - lastSample > 0)
        {
            // The write head moved forward without wrapping: read the new span directly.
            int start = lastSample;
            int end   = pos;

            samples = new float[(end - start) * micRecording.channels];
            micRecording.GetData(samples, start);
        }
        else if (pos - lastSample != 0)
        {
            // The write head wrapped around the looping clip: read the tail of the
            // buffer, then the newly written head, and join the two pieces.
            int     start1   = lastSample;
            int     end1     = micRecording.samples - 1;
            float[] samples1 = new float[(end1 - start1) * micRecording.channels];
            micRecording.GetData(samples1, start1);
            int     start2   = 0;
            int     end2     = pos;
            float[] samples2 = new float[(end2 - start2) * micRecording.channels];
            micRecording.GetData(samples2, start2);

            samples = new float[samples1.Length + samples2.Length];
            samples1.CopyTo(samples, 0);
            samples2.CopyTo(samples, samples1.Length);
        }
        else
        {
            // No new samples since the last read.
            return(null);
        }

        lastSample = pos;
        return(samples);
    }
Example 18
 //Use this for initialization
 void Start()
 {
     //Check if there is at least one microphone connected
     if (Microphone.devices.Length <= 0)
     {
         //Throw a warning message at the console if there isn't
         Debug.LogWarning("Microphone not connected!");
     }
     else //At least one microphone is present
     {
         //Set 'micConnected' to true
         micConnected = true;
         //Get the default microphone recording capabilities
         Microphone.GetDeviceCaps(null, out minFreq, out maxFreq);
         //According to the documentation, if minFreq and maxFreq are zero, the microphone supports any frequency...
         if (minFreq == 0 && maxFreq == 0)
         {
             //...meaning 44100 Hz can be used as the recording sampling rate
             maxFreq = 44100;
         }
         //Get the attached AudioSource component
         goAudioSource = this.GetComponent <AudioSource>();
     }
 }
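A possible follow-up to the Start() above, shown as a sketch that reuses the micConnected, maxFreq and goAudioSource fields: begin a looping capture at the highest supported rate and monitor it through the AudioSource.

 void StartLoopingCapture()
 {
     if (!micConnected)
     {
         return;
     }

     // Record into a 10-second looping clip from the default microphone
     goAudioSource.clip = Microphone.Start(null, true, 10, maxFreq);
     goAudioSource.loop = true;

     // Wait until the microphone has produced its first samples, then monitor it
     while (!(Microphone.GetPosition(null) > 0))
     {
     }
     goAudioSource.Play();
 }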
Example 19
    public void StopRecord()
    {
        // Get the microphone's current recording position
        int position = Microphone.GetPosition(deviceName: micName);

        // Force the microphone recording to stop
        Microphone.End(deviceName: micName);

        // The clip length is still maxDuration regardless of when recording stopped (the rest of the buffer is silence)
        Debug.Log("Recording length before trimming: " + audioClip.length);

        // Allocate a temporary buffer and copy the data out of audioClip
        float[] soundData = new float[audioClip.samples * audioClip.channels];
        audioClip.GetData(soundData, 0);

        // Allocate a new buffer sized to hold only the recorded portion (up to position)
        float[] newData = new float[position * audioClip.channels];

        // Copy only the recorded samples
        for (int i = 0; i < newData.Length; i++)
        {
            newData[i] = soundData[i];
        }

        // Create a new AudioClip instance and set the trimmed audio data
        AudioClip newClip = AudioClip.Create(audioClip.name, position, audioClip.channels, audioClip.frequency, false);

        newClip.SetData(newData, 0);

        // Swap audioClip for the trimmed clip
        AudioClip.Destroy(audioClip);
        audioClip = newClip;

        // Playback length after trimming
        Debug.Log("Recording length after trimming: " + audioClip.length);
    }
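The same trimming logic, factored into a reusable helper as a sketch; the name TrimClip is illustrative.

    AudioClip TrimClip(AudioClip source, int sampleCount)
    {
        // Copy only the first sampleCount samples per channel into a new clip
        float[] data = new float[sampleCount * source.channels];
        source.GetData(data, 0);

        AudioClip trimmed = AudioClip.Create(source.name, sampleCount, source.channels, source.frequency, false);
        trimmed.SetData(data, 0);
        return trimmed;
    }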
Example 20
    // Start is called before the first frame update
    void Start()
    {
        grad_used.SetKeys(new GradientColorKey[] { new GradientColorKey(new Color(0.0f, 0.0f, 0.0f), 0.0f),
                                                   new GradientColorKey(new Color(0.0f, 0.01f, 0.0f), 1.0f) },
                          new GradientAlphaKey[] { new GradientAlphaKey(0.0f, 0.0f), new GradientAlphaKey(0.0f, 0.0f),
                                                   new GradientAlphaKey(0.0f, 0.53f),
                                                   new GradientAlphaKey(0.0f, 1.0f) });

        used = new AnimationCurve(new Keyframe[] { new Keyframe(0f, 0.0f),
                                                   new Keyframe(0.5f, 0.0f),
                                                   new Keyframe(1f, 0.0f) });
        used2 = new AnimationCurve(new Keyframe[] { new Keyframe(0f, 0.0f),
                                                    new Keyframe(0.5f, 0.0f),
                                                    new Keyframe(1f, 0.0f) });

        ps  = this.GetComponent <ParticleSystem>();
        ps1 = fire1.GetComponent <ParticleSystem>();
        ps2 = fire2.GetComponent <ParticleSystem>();
        ps3 = fire3.GetComponent <ParticleSystem>();
        ps4 = fire4.GetComponent <ParticleSystem>();
        ps5 = fire5.GetComponent <ParticleSystem>();
        ps6 = fire6.GetComponent <ParticleSystem>();
        ps7 = fire7.GetComponent <ParticleSystem>();
        ps8 = fire8.GetComponent <ParticleSystem>();



        _audio      = GetComponent <AudioSource>();
        _audio.clip = Microphone.Start(null, true, 10, 44100); // deviceName, loop flag, seconds, frequency
        _audio.loop = true;
        _audio.mute = true;
        while (!(Microphone.GetPosition(null) > 0))
        {
        }
        _audio.Play();
    }
    void DoMicInput()
    {
        if (Microphone.devices.Length == 0)
        {
            return;
        }

        string recordingDeviceName = Microphone.devices[0];

        if (Input.GetMouseButton(0))
        {
            if (!Microphone.IsRecording(recordingDeviceName))
            {
                /*m_MicAudio = */ Microphone.Start(recordingDeviceName, true, 10, 44100);
            }
        }
        else if (Input.GetMouseButtonUp(0))
        {
            if (Microphone.IsRecording(recordingDeviceName))
            {
                Microphone.End(recordingDeviceName);
            }
        }
    }
Example 22
 // Update is called once per frame
 void Update()
 {
     if (Input.GetKeyDown(KeyCode.Z))
     {
         GoBack();
     }
     else if (Input.GetKeyDown(KeyCode.Q))
     {
         GoTo("jars");
     }
     else if (Input.GetKeyDown(KeyCode.E))
     {
         GoTo("instruments");
     }
     else if (Input.GetKeyDown(KeyCode.R))
     {
         GoTo("desk");
     }
     else if (Input.GetKeyDown(KeyCode.Space))
     {
         Debug.Log("is recording? " + Microphone.IsRecording(null));
         aud.Play();
     }
 }
Example 23
        void StartInput()
        {
            var sampleRate = AudioSettings.outputSampleRate;

            // Create a clip which is assigned to the default microphone.
            audioSource.clip = Microphone.Start(null, true, 1, sampleRate);

            if (audioSource.clip != null)
            {
                // Wait until the microphone gets initialized.
                //  int delay = 0;
                //   while (delay <= 0) delay = Microphone.GetPosition(null);

                // Start playing.
                audioSource.Play();

                // Estimate the latency.
                //  estimatedLatency = (float)delay / sampleRate;
            }
            else
            {
                Debug.LogWarning("GenericAudioInput: Initialization failed.");
            }
        }
Example 24
    public static IEnumerator record()
    {
        Debug.Log(String.Format("recording"));
        while (true)
        {
            aud.clip = Microphone.Start("Built-in Microphone", false, recordForSecs, 44100 / sampleQuality);
            yield return(new WaitForSeconds(recordForSecs));

            AudioClip cutClip1  = AudioClip.Create("playback", aud.clip.samples, aud.clip.channels, aud.clip.frequency, false, false);
            float[]   soundData = new float[aud.clip.samples * aud.clip.channels];
            aud.clip.GetData(soundData, 0);

            // create a byte array and copy the floats into it...
            var byteArray = new byte[soundData.Length * 4];
            Buffer.BlockCopy(soundData, 0, byteArray, 0, byteArray.Length);

            byteArray = Encrypt_Compress.Compress(byteArray);

            byte[][] IV_Key = Encrypt_Compress.genIVKey("giannis", 6465456);             //Connection.getConnectionTime ());//pass must be smaller than 32 char. pass<32
            //myAes.IV = IV_Key [0];
            //myAes.Key = IV_Key [1];

            // Encrypt the string to an array of bytes.
            byte[] encrypted = Encrypt_Compress.EncryptStringToBytes_Aes(Convert.ToBase64String(byteArray),              //Encrypt(byteArray,//
                                                                         IV_Key [1], IV_Key [0]);

            Connection.sendByteArr(encrypted);

            /*for(int i=0;i<encrypted.Length;i++){
             *      if (UnityEngine.Random.Range (0,100000)>99998) {
             *              encrypted [i] = encrypted [i-1];
             *      }
             * }*/
            //playSound(encrypted);
        }
    }
Example 25
    public void Initialize(int sampleCount = 1024, int micIndex = 0)
    {
        sampleCount_ = sampleCount;
        data_        = new float[sampleCount];

        // Check if microphone exists
        if (Microphone.devices.Length <= 0)
        {
            Debug.LogWarning("Microphone not connected!");
            return;
        }
        else
        {
            int maxIndex = Microphone.devices.Length - 1;
            if (micIndex > maxIndex)
            {
                Debug.LogWarning("MIC_INDEX:" + micIndex + " are changed to " + maxIndex + ".");
                micIndex = maxIndex;
            }
            Debug.Log("Use:" + Microphone.devices[micIndex]);
            micName_ = Microphone.devices[micIndex];
        }

        // Get default microphone min/max frequencies
        Microphone.GetDeviceCaps(micName_, out minFreq_, out maxFreq_);
        Debug.Log("MIC_FREQ:" + minFreq_.ToString() + ", " + maxFreq_.ToString());
        if (minFreq_ == 0 && maxFreq_ == 0)
        {
            maxFreq_ = 44100;
        }
        else if (maxFreq_ > 44100)
        {
            maxFreq_ = 44100;
        }
        initialized_ = true;
    }
Example 26
    /// <summary>
    /// Ends the recording session.
    /// </summary>
    public static IEnumerator StopRecording()
    {
#if UNITY_WSA || UNITY_STANDALONE_WIN
        if (!IsListening || IsTransitioning)
        {
            Debug.LogWarning("Unable to stop recording");
            yield break;
        }

        IsListening     = false;
        IsTransitioning = true;

        if (hasListener)
        {
            InputManager.Instance.PopModalInputHandler();
            hasListener = false;
        }

        Microphone.End(DeviceName);

        if (dictationRecognizer.Status == SpeechSystemStatus.Running)
        {
            dictationRecognizer.Stop();
        }

        while (dictationRecognizer.Status == SpeechSystemStatus.Running)
        {
            yield return(null);
        }

        PhraseRecognitionSystem.Restart();
        IsTransitioning = false;
#else
        return(null);
#endif
    }
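A brief caller sketch, run from a MonoBehaviour; DictationController is a hypothetical name for whichever class declares the static StopRecording() coroutine above.

    void Update()
    {
        // Stop dictation on demand (the key binding is illustrative)
        if (Input.GetKeyDown(KeyCode.Escape))
        {
            StartCoroutine(DictationController.StopRecording());
        }
    }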
Example 27
    private const int SampleFrequency = 44100;  // Common sampling rate for recording analog audio.

    // Start on awake so that it's set up before any other scripts that are dependent on this one.
    void Awake()
    {
        // Add an audio source Unity component to our game object.
        gameObject.AddComponent <AudioSource>();

        // Throw an error message if no microphone is detected and exit out of the script.
        if (Microphone.devices.Length == 0)
        {
            Debug.Log("No microphone detected.");
            return;
        }

        // Start recording from the microphone.
        GetComponent <AudioSource>().clip = Microphone.Start(null, false, 3600, SampleFrequency);

        // Set the audio mixer to our custom made silent mixer (this prevents audio playback).
        GetComponent <AudioSource>().outputAudioMixerGroup = audioOutput;

        // Check to make sure microphone is recording.
        if (Microphone.IsRecording(""))
        {
            // Wait until recording actually starts.
            while (Microphone.GetPosition(null) == 0)
            {
                ;
            }

            // Play our audio clip (plays the microphone recording in real-time).
            GetComponent <AudioSource>().Play();
        }
        // If microphone isn't recording, throw an error message.
        else
        {
            Debug.Log("Problem with microphone: " + Microphone.devices[0]);
        }
    }
Example 28
        private void Awake()
        {
            source = GetComponent <AudioSource>();
            if (source == null)
            {
                Debug.LogWarning("No AudioSource Component Provided!");
            }
            if (Microphone.devices.Length <= 0)
            {
                Debug.LogWarning("No Microphone Connected!");
                return;
            }

            Microphone.GetDeviceCaps(null, out minimumFrequency, out maximumFrequency);
            if (minimumFrequency == 0 && maximumFrequency == 0)
            {
                maximumFrequency = 44100;
            }

            CoreServices.InputSystem?.RegisterHandler <IMixedRealityInputHandler>(this);
            indicator.gameObject.SetActive(false);
            service = new DialogFlowService();
            isMicrophoneConnected = true;
        }
    // Read the most recent samples from the microphone AudioClip and return the peak level
    float LevelMax()
    {
        float levelMax = 0;

        float[] waveData    = new float[_sampleWindow];
        int     micPosition = Microphone.GetPosition(null) - (_sampleWindow + 1); // null means the first microphone

        if (micPosition < 0)
        {
            return(0);
        }
        _clipRecord.GetData(waveData, micPosition);
        // Getting a peak on the last 128 samples
        for (int i = 0; i < _sampleWindow; i++)
        {
            float wavePeak = waveData[i] * waveData[i];
            if (levelMax <= wavePeak)
            {
                levelMax = wavePeak;
                Debug.Log("LEVEL MAX: " + levelMax);
            }
        }
        return(levelMax);
    }
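A usage sketch for LevelMax() above, assuming it sits on a MonoBehaviour whose _clipRecord field is fed by Microphone.Start(); the indicator Transform and the scaling factor are illustrative.

    void Start()
    {
        // Default microphone, 10-second looping buffer at 44100 Hz
        _clipRecord = Microphone.Start(null, true, 10, 44100);
    }

    void Update()
    {
        // Map the squared peak to a 0..1 range for display
        float level = Mathf.Clamp01(LevelMax() * 100f);
        indicator.localScale = Vector3.one * (0.1f + level);
    }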
Example 30
        public static void Save(string fileName = "test")
        {
            while (!(Microphone.GetPosition(null) > 0))
            {
            }
            samplesData = new float[audioSource.clip.samples * audioSource.clip.channels];
            audioSource.clip.GetData(samplesData, 0);
            string filePath = Path.Combine(Application.streamingAssetsPath, fileName + ".wav");

            // Delete the file if it exists.
            if (File.Exists(filePath))
            {
                File.Delete(filePath);
            }
            try
            {
                WriteWAVFile(audioSource.clip, filePath);
                Debug.Log("File Saved Successfully at StreamingAssets/" + fileName + ".wav");
            }
            catch (DirectoryNotFoundException)
            {
                Debug.LogError("Please, Create a StreamingAssets Directory in the Assets Folder");
            }
        }
Example 31
        public void StopRecording()
        {
            if (!isRecording)
            {
                return;
            }

            LeanTween.alphaCanvas(recordingInfoCanvasGroup, 0, 0.3f);

            //End the recording when the mouse comes back up
            Microphone.End("");

            //Trim the audioclip by the length of the recording
            AudioClip recordingNew = AudioClip.Create(gameObject.name, (int)((Time.time - startRecordingTime) * recordingAudioClip.frequency), recordingAudioClip.channels, recordingAudioClip.frequency, false);

            float[] data = new float[(int)((Time.time - startRecordingTime) * recordingAudioClip.frequency)];
            recordingAudioClip.GetData(data, 0);
            recordingNew.SetData(data, 0);
            this.recordingAudioClip = recordingNew;


            this.isRecording = false;
            SoundManager.Instance.enableMusic();
        }
    // Read the most recent samples from the microphone AudioClip and return the peak level
    float LevelMax()
    {
        float levelMax = 0;

        float[] waveData    = new float[sample_];
        int     micPosition = Microphone.GetPosition(null) - (sample_ + 1); // null is the first microphone

        if (micPosition < 0)
        {
            return(0);
        }
        clipRecord_.GetData(waveData, micPosition);

        //get a peak on the last 128 samples
        for (int i = 0; i < sample_; i++)
        {
            float wavePeak = waveData[i] * waveData[i];
            if (levelMax < wavePeak)
            {
                levelMax = wavePeak;
            }
        }
        return(levelMax);
    }
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////



// For the code above: controls starting and stopping the microphone


    public IEnumerator StartMicrophone()
    {
        audioSource.clip = Microphone.Start(selectedDevice, true, bufferTime, setFrequency); //Starts recording

        while (!(Microphone.GetPosition(selectedDevice) > 0))                                // Wait until the device starts delivering samples, then begin playback
        {
            if (debug)
            {
                Debug.Log("Waiting for connection:" + Time.deltaTime);
            }
            yield return(0);
        }
        if (debug)
        {
            Debug.Log("started" + ", Freq (Hz): " + setFrequency + ", samples: " + amountSamples + ", sensitivity: " + sensitivity);
        }

        audioSource.Play(); // Play the audio source!

        if (debug)
        {
            Debug.Log("Receiving data");
        }
    }
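A caller sketch for the coroutine above, run from the same MonoBehaviour; selectedDevice, bufferTime and setFrequency are the fields the coroutine already assumes.

    void Start()
    {
        if (Microphone.devices.Length == 0)
        {
            Debug.LogWarning("No microphone found, not starting capture.");
            return;
        }

        selectedDevice = Microphone.devices[0]; // pick the first available device
        StartCoroutine(StartMicrophone());
    }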
        /// <inheritdoc />
        public override void Update()
        {
            using (UpdatePerfMarker.Auto())
            {
                if (!Application.isPlaying || Service == null || dictationRecognizer == null)
                {
                    return;
                }

                base.Update();

                if (!isTransitioning && IsListening && !Microphone.IsRecording(deviceName) && dictationRecognizer.Status == SpeechSystemStatus.Running)
                {
                    // If the microphone stops as a result of timing out, make sure to manually stop the dictation recognizer.
                    StopRecording();
                }

                if (!hasFailed && dictationRecognizer.Status == SpeechSystemStatus.Failed)
                {
                    hasFailed = true;
                    Service.RaiseDictationError(inputSource, "Dictation recognizer has failed!");
                }
            }
        }
Example 35
    void Start()
    {
        //look for connected microphone and print their name in the console
        micTake = GetComponent <AudioSource>();
        foreach (var device in Microphone.devices)
        {
            Debug.Log("Name: " + device);
        }

        // Start recording with built-in Microphone and play the recorded audio right away

        //check if there isn't a microphone connected, and take the first microphone and start it as audiosource micTake
        if (ActiveMic == null)
        {
            ActiveMic = Microphone.devices[0];
            Debug.Log(ActiveMic);
            micTake.clip = Microphone.Start(ActiveMic, true, 100, 44100);
            while (!(Microphone.GetPosition(ActiveMic) > 0))
            {
            }

            micTake.Play();
        }
    }
Example 36
        private float LevelMax()
        {
            float levelMax = 0;

            float[] waveData           = new float[sampleWindow];
            int     microphonePosition = Microphone.GetPosition(null) - (sampleWindow + 1); // null means the first microphone

            if (microphonePosition < 0 || clipRecord == null)
            {
                return(0);
            }

            clipRecord.GetData(waveData, microphonePosition);
            // Getting a peak on the last 128 samples
            for (int i = 0; i < sampleWindow; i++)
            {
                float wavePeak = waveData[i] * waveData[i];
                if (levelMax < wavePeak)
                {
                    levelMax = wavePeak;
                }
            }
            return(levelMax);
        }
Example 37
    /// <summary>
    /// Gets the current microphone input level (peak of the most recent samples).
    /// </summary>
    /// <returns>The squared peak sample value.</returns>
    public float GetLevelMax()
    {
        float levelMax = 0;

        float[] waveData    = new float[_sampleWindow];
        int     micPosition = Microphone.GetPosition(null) - (_sampleWindow + 1); // null means the first microphone

        if (micPosition < 0)
        {
            return(0);
        }
        recordedClip.GetData(waveData, micPosition);

        // Getting a peak on the last 128 samples
        for (int i = 0; i < _sampleWindow; i++)
        {
            float wavePeak = waveData[i] * waveData[i];
            if (levelMax < wavePeak)
            {
                levelMax = wavePeak;
            }
        }
        return(levelMax);
    }
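An optional companion sketch for GetLevelMax() above: convert the squared peak it returns into decibels for display. The 1e-7 floor is an assumption to avoid taking the log of zero.

    float PeakToDecibels(float squaredPeak)
    {
        float amplitude = Mathf.Sqrt(squaredPeak);              // GetLevelMax() returns a squared sample value
        return 20f * Mathf.Log10(Mathf.Max(amplitude, 1e-7f));  // clamp to avoid -infinity
    }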
Example 38
    void StartMicrophone() // start the microphone
    {
        audioSource.Stop();

        // Start recording (device name, loop flag, length in seconds, sample rate)
        audioSource.clip = Microphone.Start(microphone, true, 10, 44100);
        audioSource.loop = true;

        Debug.Log(Microphone.IsRecording(microphone).ToString());

        if (Microphone.IsRecording(microphone))
        { // check that the microphone is recording (loop until data arrives)
            while (!(Microphone.GetPosition(microphone) > 0))
            {
            }

            Debug.Log("Recording started: " + microphone);
            audioSource.Play();
        }
        else
        {
            Debug.Log("Recording failed: " + microphone);
        }
    }
Example 39
    void Start()
    {
        string microphone = "Built-in Microphone";

        foreach (var device in Microphone.devices)
        {
            print(device);
            if (device.Contains("Webcam")) //for testing, to use with HTC Vive change search string to HTC
            {
                print("Found microphone");
                microphone = device;
            }
        }

        AudioSource audioSource = GetComponent <AudioSource>();

        print("Starting recording");
        audioSource.clip = Microphone.Start(microphone, true, 1, 44100); //loops every 1 second, capturing the audio and overwriting the previous clip
        audioSource.loop = false;                                        //playback loop is set to false so the same clip isn't looped
        while (!(Microphone.GetPosition(null) > 0))
        {
        }
        //audioSource.Play();
    }
Example 40
        protected override void OnEnable()
        {
            base.OnEnable();

            if (Microphone.devices.Length == 0)
            {
                Debug.LogFormat("Microphone device not found");
                return;
            }

            var deviceName = Microphone.devices[deviceIndex];

            Microphone.GetDeviceCaps(deviceName, out int minFreq, out int maxFreq);
            var micClip = Microphone.Start(deviceName, true, 1, 48000);

            // set the latency to “0” samples before the audio starts to play.
            while (!(Microphone.GetPosition(deviceName) > 0))
            {
            }

            audioSource.clip = micClip;
            audioSource.loop = true;
            audioSource.Play();
        }
Example 41
    private IEnumerator TimeDown()
    {
        //Debug.Log(" IEnumerator TimeDown()");
        int time = 0;

        while (time < RECORD_TIME)
        {
            if (!Microphone.IsRecording(null))
            { // not recording any more
                Debug.Log("IsRecording false");
                yield break;
            }
            //Debug.Log("yield return new WaitForSeconds " + time);
            yield return(new WaitForSeconds(1));

            time++;
        }
        if (time >= RECORD_TIME)
        {
            //Debug.Log("RECORD_TIME is out! stop record!");
            StopRecord();
        }
        yield return(0);
    }
Example 42
        public void useMic(string deviceName)
        {
            try
            {
                if (Microphone.IsRecording(micInUse.name))
                {
                    Microphone.End(micInUse.name);
                }
                //micInUse.name = deviceName;
                micInUse = MicrophoneList.Where(x => x.name == deviceName).First();
                //gameObject.GetComponent<AudioSource>().clip = //recording;
                recording =
                    Microphone.Start(micInUse.name, false, MaxLength, micInUse.usedFreq);
                //recording = gameObject.GetComponent<AudioSource>().clip;
#if UNITY_EDITOR && SHOW_DEBUG
                Debug.Log(micInUse.name + " is using frequency: " + micInUse.usedFreq);
#endif
                currentPos = 0;
            }
            catch (Exception e)
            {
                Debug.Log(e.Message);
            }
        }
Example 43
    void OnGUI()
    {
        if (bShowGUI)
        {
            Vector3 pos2d = Camera.main.WorldToViewportPoint(transform.position);
            if (pos2d.z > 0)
            {
                GUI.color = Color.white;
                string strDisplay = name;
                int    x          = (int)(pos2d.x * Screen.width);
                int    y          = (int)(Screen.height - pos2d.y * Screen.height);
                GUI.Box(new Rect(x, y, 200, 60), "AudioTrunk");
                GUI.color = GetComponent <AudioSource>().isPlaying ? Color.green : Color.white;
                if (GUI.Button(new Rect(x, y + 30, 50, 30), "play"))
                {
                    Play();
                }
                GUI.color = Color.white;
                if (GUI.Button(new Rect(x + 50, y + 30, 50, 30), "stop"))
                {
                    Stop();
                }
                GUI.color = Microphone.IsRecording("") ? Color.red : Color.white;
                if (GUI.Button(new Rect(x + 100, y + 30, 50, 30), "rec"))
                {
                    Record();
                }

                GUI.color = GetComponent <AudioSource>().loop ? Color.red : Color.white;
                if (GUI.Button(new Rect(x + 150, y + 30, 50, 30), "loop"))
                {
                    GetComponent <AudioSource>().loop = !GetComponent <AudioSource>().loop;
                }
            }
        }
    }
Example 44
    private IEnumerator SetUp()
    {
        var length = Microphone.devices.Length;

        if (!(m_source != null && length > 0)) // need an AudioSource and at least one microphone
        {
            Debug.LogError("AudioSource or microphone not found");
            yield break;
        }

        var text = "Microphone List";

        for (int i = 0; i < length; i++)
        {
            text += "\n" + (i + 1).ToString() + "." + Microphone.devices[i];
        }

        setText.text = text;

        var devName = "";
        var num     = -1;

        setField.ActivateInputField();

        while (true)
        {
            if (Input.GetKeyDown(KeyCode.Return))
            {
                if (int.TryParse(setField.text, out num))
                {
                    if (1 <= num && num <= length)
                    {
                        devName = Microphone.devices[num - 1];
                        break;
                    }
                    else
                    {
                        Debug.LogError("リストの範囲外の数字が入力されている");
                    }
                }
                else
                {
                    Debug.LogError("整数以外が入力されている");
                }
            }

            yield return(null);
        }

        m_source.loop = true;                                                             // loop playback
        m_source.clip = Microphone.Start(devName, true, recordingSec, samplingFrequency); // assign the microphone clip to the source
        while (!(Microphone.GetPosition(devName) > 0))
        {
        } // wait until valid samples are available

        Microphone.GetPosition(null);
        m_source.Play();

        API = GetComponent <JsonTest>();

        setText.gameObject.SetActive(false);
        setField.gameObject.SetActive(false);
        GetReady();
    }
Example 45
        } //Some work

        public void StartRecord(string MicDeviceName)
        {
            RecordClip = Microphone.Start(MicDeviceName, isLoopingRecord, RecordTimeSec, sampleRate);
            isRecord   = true;
            Debug.Log("Recording Started");
        } //Start Mic Record
Example 46
        /// <summary>
        /// Initializes a softphone object with a SIP PBX and registers a SIP account, which is needed for the SIP PBX service. It registers this SIP
        /// account with the SIP PBX through an 'IPhoneLine' object, which represents the telephone line.
        /// If the registration succeeds, the softphone is ready to make and receive calls. In this example the softphone can be reached by dialing the number 891.
        /// </summary>
        private void InitializeSoftPhone()
        {
            try
            {
                if (Ozeki.VoIP.SDK.Protection.LicenseManager.Instance.LicenseType != Ozeki.VoIP.SDK.Protection.LicenseType.Activated)
                    Ozeki.VoIP.SDK.Protection.LicenseManager.Instance.SetLicense(m_OzekiLicenseId, m_OzekiLicenseKey);

                using (BrightPlatformEntities objDbModel = new BrightPlatformEntities(UserSession.EntityConnection)) {
                    int? _SipAcctId = objDbModel.users.FirstOrDefault(i => i.id == UserSession.CurrentUser.UserId).sip_id;
                    if (!_SipAcctId.HasValue) {
                        //MessageBox.Show(
                        //    string.Format("Your account is not yet configured for calling.{0}Please contact your administrator.", Environment.NewLine),
                        //    "Bright Sales",
                        //    MessageBoxButtons.OK,
                        //    MessageBoxIcon.Information
                        //);
                        BrightVision.Common.UI.NotificationDialog.Error(
                            "Bright Sales",
                            string.Format("Your account is not yet configured for calling.{0}Please contact your administrator.", Environment.NewLine)
                        );
                        return;
                    }

                    sip_accounts sip = objDbModel.sip_accounts.FirstOrDefault(i => i.id == _SipAcctId);
                    if (sip != null)
                        objDbModel.Detach(sip);

                    if (m_UserAudioSetting == null)
                        m_UserAudioSetting = AudioSettingUtility.GetUserAudioSetting();

                    m_UserMicrophone = AudioSettingUtility.GetDefaultDeviceMicrophone();
                    m_UserSpeaker = AudioSettingUtility.GetDefaultDeviceSpeaker();
                    m_UserMicrophone.Volume = (float)m_UserAudioSetting.mic_volume / 10;
                    m_UserSpeaker.Volume = (float)m_UserAudioSetting.speaker_volume / 10;

                    try {
                        softPhone = SoftPhoneFactory.CreateSoftPhone(SoftPhoneFactory.GetLocalIP(), 5700, 5750, 5780);
                    }
                    catch {
                    }

                    this.DisableUnwantedCodec();
                    softPhone.IncomingCall -= new EventHandler<VoIPEventArgs<IPhoneCall>>(softPhone_IncomingCall);
                    softPhone.IncomingCall += new EventHandler<VoIPEventArgs<IPhoneCall>>(softPhone_IncomingCall);
                    SIPAccount acc = new SIPAccount(
                       true,
                       sip.display_name.Trim(),
                       sip.username.Trim(),
                       sip.username.Trim(),
                       sip.password,
                       sip.sip_url.Trim(),
                       5060,
                       ""
                    );
                    // var acc = new SIPAccount(true, sip.display_name, sip.username, sip.username, sip.password, sip.sip_url, 5060,"");
                    //  NatConfiguration newNatConfiguration = new NatConfiguration(NatTraversalMethod.Auto, new NatRemoteServer("stun.ozekiphone.com", "", ""));
                    phoneLine = softPhone.CreatePhoneLine(acc, Ozeki.Network.TransportType.Udp, SRTPMode.None);
                    phoneLine.PhoneLineStateChanged -= new EventHandler<VoIPEventArgs<PhoneLineState>>(phoneLine_PhoneLineInformation);
                    phoneLine.PhoneLineStateChanged += new EventHandler<VoIPEventArgs<PhoneLineState>>(phoneLine_PhoneLineInformation);
                    softPhone.RegisterPhoneLine(phoneLine);
                    objDbModel.Dispose();
                }
            }
            catch (Exception ex) {
            }
        }
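A minimal sketch of the same registration flow, stripped of the database and UI handling above. The account values are placeholders; only calls already used in InitializeSoftPhone are reused.

        private void RegisterSoftPhoneMinimal()
        {
            // Placeholder credentials; in the method above these come from the database.
            SIPAccount account = new SIPAccount(true, "891", "891", "891", "password", "pbx.example.com", 5060, "");

            softPhone = SoftPhoneFactory.CreateSoftPhone(SoftPhoneFactory.GetLocalIP(), 5700, 5750, 5780);
            softPhone.IncomingCall += new EventHandler<VoIPEventArgs<IPhoneCall>>(softPhone_IncomingCall);

            phoneLine = softPhone.CreatePhoneLine(account, Ozeki.Network.TransportType.Udp, SRTPMode.None);
            phoneLine.PhoneLineStateChanged += new EventHandler<VoIPEventArgs<PhoneLineState>>(phoneLine_PhoneLineInformation);
            softPhone.RegisterPhoneLine(phoneLine);
        }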
Example 47
        /// <summary>
        /// Creates the incoming and outgoing media handlers, such as the microphone and speaker.
        /// </summary>
        private void CreateMediaHandlers()
        {
            MediaHandlerFactory factory = new MediaHandlerFactory();
            activeAudioCallListener = factory.CreateSoftPhoneCallListener();
            activeVideoCallListener = factory.CreateSoftPhoneVideoCallListener();

            phoneCallAudioReceiver = activeAudioCallListener.GetComponent("AudiReceiver") as PhoneCallAudioReceiver;
            phoneCallAudioSender = activeAudioCallListener.GetComponent("AudioSender") as PhoneCallAudioSender;

            phonecallVideoSender = activeVideoCallListener.GetComponent("VideoSender") as PhoneCallVideoSender;
            phonecallVideoReceiver = activeVideoCallListener.GetComponent("VideoReceiver") as PhoneCallVideoReceiver;

            mediaConnector = activeAudioCallListener.MediaConnector;

            microphone = activeAudioCallListener.GetComponent("Microphone") as Microphone;
            if (microphone != null)
            {
                microphone.LevelChanged += (Microphone_LevelChanged);
            }

            speaker = activeAudioCallListener.GetComponent("Speaker") as Speaker;
            if (speaker != null)
            {
                speaker.LevelChanged += (Speaker_LevelChanged);
            }

            incomingDataMixer = activeAudioCallListener.GetComponent("SpeakerMixer") as AudioMixerMediaHandler;
            camera = activeVideoCallListener.GetComponent("WebCamera") as WebCamera;

            remoteImageHandler = activeVideoCallListener.GetComponent("RemoteImageHandler") as ImageProvider<Image>;
            localImageHandler = activeVideoCallListener.GetComponent("LocalImageHandler") as ImageProvider<Image>;

            AudioProcessor = activeAudioCallListener.GetComponent("AudioProcessor") as AudioQualityEnhancer;
            outgoingDataMixer = activeAudioCallListener.GetComponent("OutGoingDataMixer") as AudioMixerMediaHandler;
            RecordDataMixer = activeAudioCallListener.GetComponent("RecordDataMixer") as AudioMixerMediaHandler;

            dtmfEventWavePlayer = activeAudioCallListener.GetComponent("DTMF") as DtmfEventWavePlayer;
            ringtoneWavePlayer = activeAudioCallListener.GetComponent("Ringtones") as PhoneCallStateWavePlayer;

            Stream basicRing = Assembly.GetExecutingAssembly().GetManifestResourceStream(
                "OzekiDemoSoftphone.Resources.basic_ring.wav"
                );

            ringtoneWavePlayer.UpdateIncomingStateStream(CallState.Ringing, @"..\..\Resources\basic_ring.wav");
            ringtoneWavePlayer.UpdateOutgoingStateStream(CallState.Ringing, @"..\..\Resources\ringback.wav");
        }
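A hedged follow-up sketch: once the handlers exist they are wired together through the MediaConnector. Only the microphone-to-AudioProcessor link is taken from this codebase (it also appears in MicrophoneSettingsHasChanged below); the receive-side wiring is not shown here and is left out.

        private void ConnectMediaHandlers()
        {
            // Route microphone audio into the audio processor; the speaker-side
            // connections would be made the same way with the receive components.
            mediaConnector.Connect(microphone, AudioProcessor);
        }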
Example 48
 public void ShouldBeAvailableWhenConstructed(Microphone microphone)
 {
     microphone.IsAvailable().ShouldBeTrue();
 }
Example 49
        /// <summary>
        /// Occurs when the recording settings have been changed.
        /// </summary>
        /// <param name="info">Contains information about the settings.</param>
        public void MicrophoneSettingsHasChanged(AudioSettingsInfo info)
        {
            if (microphone == null) return;

            microphone.Volume = info.Volume;
            microphone.Muted = info.Mute;

            if (microphone.DeviceInfo.DeviceID != info.SelectedDevice)
            {
                  var list = Microphone.GetDevices();
                  foreach (var audioDeviceInfo in list)
                  {
                      if (audioDeviceInfo.DeviceID == info.SelectedDevice)
                      {
                          microphone.Stop();
                          microphone.LevelChanged -= (Microphone_LevelChanged);
                          mediaConnector.Disconnect(microphone, AudioProcessor);
                          microphone = Microphone.GetDevice(audioDeviceInfo);
                          mediaConnector.Connect(microphone,AudioProcessor);
                          microphone.LevelChanged += (Microphone_LevelChanged);
                          microphone.Start();
                      }
                  }
            }
        }
Example 50
 /// <summary>
 /// Specifies an audio stream sent over the NetStream object, from a Microphone object passed as the source.
 /// </summary>
 public void attachAudio(Microphone microphone)
 {
 }
Example 51
        private void SetAudioSettings()
        {
            BrightPlatformEntities objDbModel = new BrightPlatformEntities(UserSession.EntityConnection);
            audio_settings audioSetting = objDbModel.audio_settings.FirstOrDefault(e => e.user_id == UserSession.CurrentUser.UserId);

            speaker = AudioSettingUtility.GetDefaultDeviceSpeaker(); //this.GetDefaultDeviceSpeaker(audioSetting);
            if (speaker != null) {
                if (speaker.DeviceInfo.DeviceID != null)
                    lookUpEditSpeaker.EditValue = speaker.DeviceInfo.DeviceID;
            }

            mic = AudioSettingUtility.GetDefaultDeviceMicrophone(); //this.GetDefaultDeviceMicrophone(audioSetting);
            if (mic != null) {
                if (mic.DeviceInfo.DeviceID != null)
                    lookUpEditMicrophone.EditValue = mic.DeviceInfo.DeviceID;
            }

            if (AudioSettingUtility.SelectedSpeaker != null)
                speaker = AudioSettingUtility.SelectedSpeaker;
            if (AudioSettingUtility.SelectedMicrophone != null)
                mic = AudioSettingUtility.SelectedMicrophone;

            if (speaker == null)
                NotificationDialog.Error("Bright Sales", "No speaker device found. Please connect a speaker device first.");
            else if (mic == null)
                NotificationDialog.Error("Bright Sales", "No microphone device found. Please connect a microphone device first.");

            if (audioSetting == null) {
                if (speaker != null)
                    zoomTrackBarControlSpeakerVolume.EditValue = speaker.Volume * 10;
                if (mic != null)
                    zoomTrackBarControlMicVolume.EditValue = mic.Volume * 10;
            }
            else {
                comboBoxEditMode.SelectedIndex = audioSetting.mode;
                zoomTrackBarControlSpeakerVolume.EditValue = audioSetting.speaker_volume;
                zoomTrackBarControlMicVolume.EditValue = audioSetting.mic_volume;
                checkEditSpeakerAutoAdjust.EditValue = audioSetting.speaker_auto_adjust;
                checkEditMicAutoAdjust.EditValue = audioSetting.mic_auto_adjust;
            }

            //if (speaker != null)
            //    lookUpEditSpeaker.EditValue = speaker.DeviceInfo.DeviceID;

            //if (mic != null)
            //    lookUpEditMicrophone.EditValue = mic.DeviceInfo.DeviceID;
        }
Example 52
    static int _CreateMicrophone(IntPtr L)
    {
        int count = LuaDLL.lua_gettop(L);

        if (count == 0)
        {
            Microphone obj = new Microphone();
            LuaScriptMgr.PushObject(L, obj);
            return 1;
        }
        else
        {
            LuaDLL.luaL_error(L, "invalid arguments to method: Microphone.New");
        }

        return 0;
    }
 public IMicrophone CreateMirophone(string make, string model, decimal price, bool hasCable)
 {
     IMicrophone microphone = new Microphone(make, model, price, hasCable);
     return microphone;
 }
 public WebCameraEngine(VideoDeviceInfo device)
 {
     _camera = WebCamera.GetDevice(device);
     _microphone = Microphone.GetDefaultDevice();
 }
Example 55
 public void ShouldRegisterANewDevice(
     Devices devices,
     Microphone newDevice)
 {
     devices.RegisterNewDevice(newDevice).ShouldBeTrue();
 }
Example 56
        public void ShouldNotRegisterAnAlreadyRegisteredDevice(
            Devices devices,
            Microphone newDevice)
        {
            devices.RegisterNewDevice(newDevice);

            devices.RegisterNewDevice(newDevice).ShouldBeFalse();
        }