Exemplo n.º 1
0
        /// <summary>
        /// Creates the talk-target implementation matching the camera's configured
        /// audio model; anything unrecognized falls back to local playback.
        /// </summary>
        public static ITalkTarget GetTalkTarget(objectsCamera cam, IAudioSource source)
        {
            var settings = cam.settings;
            switch (settings.audiomodel)
            {
                case "Foscam":
                    return new TalkFoscam(settings.audioip, settings.audioport,
                        settings.audiousername, settings.audiopassword, source);
                case "iSpyServer":
                    return new TalkiSpyServer(settings.audioip, settings.audioport, source);
                case "NetworkKinect":
                    return new TalkNetworkKinect(settings.audioip, settings.audioport, source);
                case "Axis":
                    return new TalkAxis(settings.audioip, settings.audioport,
                        settings.audiousername, settings.audiopassword, source);
                case "IP Webcam (Android)":
                    return new TalkIPWebcamAndroid(new Uri(settings.videosourcestring), source);
                default: // local playback
                    return new TalkLocal(source);
            }
        }
Exemplo n.º 2
0
        /// <summary>
        ///   Starts capturing from the chosen audio input interface
        /// </summary>
        void btnStart_Click(object sender, EventArgs e)
        {
            // The combobox items are AudioDeviceInfo entries; null means no selection.
            var info = comboBox1.SelectedItem as AudioDeviceInfo;
            if (info == null)
            {
                MessageBox.Show("No audio devices available.");
                return;
            }

            // Capture 2048-sample frames at 22050 Hz from the selected device.
            source = new AudioCaptureDevice(info);
            source.DesiredFrameSize = 2048;
            source.SampleRate = 22050;

            // Subscribe for incoming frames and error notifications.
            source.NewFrame += source_NewFrame;
            source.AudioSourceError += source_AudioSourceError;

            source.Start();
        }
Exemplo n.º 3
0
        /// <summary>
        /// Starts capturing from the selected device and wires up the beat detector.
        /// </summary>
        void Button1Click(object sender, EventArgs e)
        {
            // Get the device currently selected in the combobox.
            AudioDeviceInfo info = comboBox1.SelectedItem as AudioDeviceInfo;

            if (info == null)
            {
                MessageBox.Show("No audio devices available.");
                return;
            }

            initializing = true;
            lbStatus.Text = "Waiting for soundcard...";

            // Create the capture device once (the original code constructed it twice,
            // leaking the first instance): 5000-sample frames at 44100 Hz.
            source = new AudioCaptureDevice(info.Guid);
            source.SampleRate = 44100;
            source.DesiredFrameSize = 5000;
            source.NewFrame += source_NewFrame;

            // Energy-based beat detector fed from the incoming frames.
            detector = new EnergyBeatDetector(43);
            detector.Beat += new EventHandler(detector_Beat);

            sample = new List<ComplexSignal>();

            source.Start();
        }
Exemplo n.º 4
0
 /// <summary>
 /// Seeks this composite stream to an absolute sample position by locating the
 /// underlying source that contains it and positioning within that source.
 /// Throws when the position lies beyond every source.
 /// </summary>
 private void SetPosition(long value)
 {
     // Already at the requested position — nothing to do.
     if (value == _samplePos)
         return;
     long sourceStart = 0;
     for (int iSource = 0; iSource < cueSheet._sources.Count; iSource++)
     {
         // Does the requested position fall inside this source's span?
         if (value >= sourceStart && value < sourceStart + cueSheet._sources[iSource].Length)
         {
             if (iSource != currentSource)
             {
                 // Switching sources: close the previous audio and open the new one.
                 if (currentAudio != null)
                     currentAudio.Close();
                 currentSource = iSource;
                 currentAudio = cueSheet.GetAudioSource(currentSource, false);
                 // Absolute position at which the next source begins.
                 nextPos = sourceStart + cueSheet._sources[currentSource].Length;
             }
             // Translate the absolute position into a source-local position,
             // accounting for the source's own offset.
             currentAudio.Position = value - sourceStart + cueSheet._sources[currentSource].Offset;
             _samplePos = value;
             return;
         }
         sourceStart += cueSheet._sources[iSource].Length;
     }
     throw new Exception("Invalid position");
 }
 /// <summary>Encodes the given source to FLAC at the requested compression level.</summary>
 public CueToolsFlacEncoder(IAudioSource audioSource, string targetFilename, AudioFileTag tags, int compressionLevel, TrackGain trackGain, DrMeter drMeter)
     : base(audioSource, targetFilename, tags, trackGain, drMeter)
 {
     var writer = new FLACWriter(targetFilename, audioSource.PCM);
     writer.CompressionLevel = compressionLevel;
     this.AudioDest = writer;
 }
 /// <summary>Encodes the given source to VBR MP3 on a remote host.</summary>
 public RemoteMp3VbrEncoder(IPAddress remoteAddress, IAudioSource audioSource, string targetFilename, AudioFileTag tags, int vbrQuality, TrackGain trackGain, DrMeter drMeter)
     : base(audioSource, targetFilename, tags, trackGain, drMeter)
 {
     var writer = new RemoteMp3VbrWriter(remoteAddress, targetFilename, audioSource.PCM);
     writer.CompressionLevel = vbrQuality;
     this.AudioDest = writer;
 }
 /// <summary>Closes the wrapped audio source; subsequent calls are no-ops.</summary>
 public void Dispose()
 {
     if (this.audioSource == null)
         return;
     this.audioSource.Close();
     this.audioSource = null;
 }
Exemplo n.º 8
0
 /// <summary>Caches the Axis connection settings and the source used for talk-back.</summary>
 public TalkAxis(string server, int port, string username, string password, IAudioSource audioSource)
 {
     _server = server;
     _port = port;
     _username = username;
     _password = password;
     _audioSource = audioSource;
 }
Exemplo n.º 9
0
 /// <summary>Encodes the given source to VBR MP3 locally via LAME.</summary>
 public LocalMp3Encoder(IAudioSource audioSource, string targetFilename, AudioFileTag tags, int vbrQuality, TrackGain trackGain, DrMeter drMeter)
     : base(audioSource, targetFilename, tags, trackGain, drMeter)
 {
     var writer = new LameWriter(targetFilename, audioSource.PCM);
     writer.Settings = LameWriterSettings.CreateVbr(vbrQuality);
     this.AudioDest = writer;
 }
Exemplo n.º 10
0
 /// <summary>
 /// Creates a ReplayGain analyzer for the source, or null when the source is
 /// missing or its PCM format is not supported by the scanner.
 /// </summary>
 public static TrackGain CreateTrackGain(IAudioSource audioSource)
 {
     if (audioSource == null)
         return null;
     var pcm = audioSource.PCM;
     if (!ReplayGain.IsSupportedFormat(pcm.SampleRate, pcm.BitsPerSample))
         return null;
     return new TrackGain(pcm.SampleRate, pcm.BitsPerSample);
 }
Exemplo n.º 11
0
 /// <summary>Encodes the given source to FLAC on a remote host.</summary>
 public RemoteFlacEncoder(IPAddress remoteAddress, IAudioSource audioSource, string targetFilename, AudioFileTag tags, int compressionLevel, TrackGain trackGain, DrMeter drMeter)
     : base(audioSource, targetFilename, tags, trackGain, drMeter)
 {
     var writer = new RemoteFlacWriter(remoteAddress, targetFilename, audioSource.PCM);
     writer.CompressionLevel = compressionLevel;
     this.AudioDest = writer;
 }
Exemplo n.º 12
0
 /// <summary>
 /// Wraps an audio source in a pipe of the given buffer size, mirroring the
 /// source's length and current position.
 /// </summary>
 public AudioPipe(IAudioSource source, int size, bool own, ThreadPriority priority)
     : this(source.PCM, size)
 {
     this.own = own;
     this.priority = priority;
     _source = source;
     _sampleLen = source.Length;
     _samplePos = source.Position;
 }
Exemplo n.º 13
0
 /// <summary>
 /// Adds a source/sink combination to this muxer
 /// </summary>
 /// <param name="source">the audio source of the new member</param>
 /// <param name="sink">the audio sink of the new member</param>
 /// <returns>the membership object registered with this muxer</returns>
 public PushPullObject AddInputOutputSource(IAudioSource source, IAudioSink sink)
 {
     var entry = new PushPullObject(source, sink);
     // Serialize access to the member list.
     lock (MemberLock)
     {
         Members.Add(entry);
     }
     return entry;
 }
Exemplo n.º 14
0
 /// <summary>
 /// Creates a composite audio view over a cue sheet; total length is the sum
 /// of all source lengths. No source is opened until the first read/seek.
 /// </summary>
 public CUESheetAudio(CUESheet cueSheet)
 {
     this.cueSheet = cueSheet;
     this.currentAudio = null;
     this._samplePos = 0;
     this._sampleLen = 0;
     this.currentSource = -1;
     this.nextPos = 0;
     foreach (var src in cueSheet._sources)
     {
         this._sampleLen += src.Length;
     }
 }
Exemplo n.º 15
0
        /// <summary>
        /// Connects to the audio backend and enumerates every available output device.
        /// </summary>
        public AudioPlayer(IAudioSource source)
        {
            Source = source;
            api.Connect();
            api.FlushEvents();

            for (int index = 0; index < api.OutputDeviceCount; index++)
            {
                Devices.Add(api.GetOutputDevice(index));
            }
        }
Exemplo n.º 16
0
        /// <summary>
        /// Plays the vocabulary audio for the current word (when one is assigned),
        /// triggers the angry look, and returns the playing source (or null).
        /// </summary>
        public IAudioSource SpeakWord()
        {
            IAudioSource playing = null;
            if (wordData != null && wordData.Id != null)
            {
                playing = MakeFriendsConfiguration.Instance.Context.GetAudioManager().PlayVocabularyData(wordData, true);
            }
            LookAngry();
            return playing;
        }
Exemplo n.º 17
0
 /// <summary>
 /// Keeps audio playback in sync with visibility: audible when visible,
 /// paused when hidden.
 /// </summary>
 protected virtual void OnAudioSourceCreated(TxKitEars creator, IAudioSource src)
 {
     if (_visible)
     {
         TargetEars.ResumeAudio();
     }
     else
     {
         TargetEars.PauseAudio();
     }
 }
Exemplo n.º 18
0
        /// <summary>
        /// Wraps the audio source, exposes read-only microphone/speaker collections,
        /// performs an initial refresh, and wires the refresh command.
        /// </summary>
        public AudioSourceViewModel(IAudioSource AudioSource)
        {
            _audioSource = AudioSource;

            AvailableMicrophones = new ReadOnlyObservableCollection<IAudioItem>(_microphones);
            AvailableSpeakers = new ReadOnlyObservableCollection<IAudioItem>(_speakers);

            // Populate the device lists once up front.
            Refresh();

            RefreshCommand = new DelegateCommand(Refresh);
        }
Exemplo n.º 19
0
    /// <summary>
    /// Injected-constructor example: stores the audio source and the beep clip,
    /// then scales this object up to 5x.
    /// </summary>
    public TestableExample(TestableGameObject parent,
        IAudioSource source,
        Sphere sphere,
        [Resource("audio/beep")] AudioClip beep)
        : base(parent)
    {
        // NOTE(review): the `sphere` parameter is not used here — presumably kept
        // for DI graph construction; confirm before removing.
        this.source = source;
        this.beep = beep;

        this.Obj.transform.localScale = new Vector3(5, 5, 5);
    }
Exemplo n.º 20
0
        /// <summary>
        /// Tears down the WebRTC session: stops the AEC dump and the RTC event log,
        /// then disposes the audio source, video capturer, video source and the
        /// peer-connection factory — in that order — before notifying listeners and
        /// shutting down internal tracing. The ordering matters; see inline notes.
        /// </summary>
        private void CloseInternal()
        {
            // AEC dump is owned by the factory, so stop it while the factory is alive.
            if (_factory != null && _parameters.AecDump)
            {
                _factory.StopAecDump();
            }

            _logger.Debug(TAG, "Closing peer connection.");
            if (_rtcEventLog != null)
            {
                // RtcEventLog should stop before the peer connection is disposed.
                _rtcEventLog.Stop();
                _rtcEventLog = null;
            }

            _logger.Debug(TAG, "Closing audio source.");
            if (_audioSource != null)
            {
                _audioSource.Dispose();
                _audioSource = null;
            }

            _logger.Debug(TAG, "Stopping capturer.");
            if (_videoCapturer != null)
            {
                // Stop capture before disposing so no frames arrive on a dead source.
                _videoCapturer.StopCapture();
                _videoCapturer.Dispose();
                _videoCapturerStopped = true;
                _videoCapturer        = null;
            }

            _logger.Debug(TAG, "Closing video source.");
            if (_videoSource != null)
            {
                _videoSource.Dispose();
                _videoSource = null;
            }

            // Renderers are dropped without disposal — presumably owned elsewhere;
            // NOTE(review): confirm renderer ownership.
            _localRenderer  = null;
            _remoteRenderer = null;
            _logger.Debug(TAG, "Closing peer connection factory.");
            if (_factory != null)
            {
                _factory.Dispose();
                _factory = null;
            }

            _logger.Debug(TAG, "Closing peer connection done.");
            _peerConnectionEvents.OnPeerConnectionClosed();
            PeerConnectionFactory.StopInternalTracingCapture();
            PeerConnectionFactory.ShutdownInternalTracer();

            _executor.Release();
        }
Exemplo n.º 21
0
 /// <summary>
 /// Stores the injected services used when listing console capture options.
 /// </summary>
 public ConsoleLister(WebcamModel Webcam,
                      IAudioSource AudioSource,
                      IPlatformServices PlatformServices,
                      FFmpegWriterProvider FfmpegWriterProvider,
                      SharpAviWriterProvider SharpAviWriterProvider)
 {
     _webcam = Webcam;
     _audioSource = AudioSource;
     _platformServices = PlatformServices;
     _ffmpegWriterProvider = FfmpegWriterProvider;
     _sharpAviWriterProvider = SharpAviWriterProvider;
 }
        /// <summary>
        /// Grows the audio source's audible sphere at the speed of sound and reports
        /// whether it has exceeded its maximum size and should be deleted.
        /// </summary>
        protected virtual void UpdateAudioSource(IAudioSource audioSource, out bool deleteSource)
        {
            var t = audioSource.gameObject.transform;

            // / 60f because it's updated through fixed update (60x a sec)
            t.localScale += Vector3.one * (LosManager.instance.settings.speedOfSoundUnitsPerSecond / 60f) * audioSource.growthSizeFactor;

            deleteSource = t.localScale.x > audioSource.maxGrowthSize;
        }
Exemplo n.º 23
0
        /// <summary>
        /// Rewires the synthesizer output chain: wraps the given source in a new
        /// intercept filter, points the recorder at it, and returns the recorder
        /// as the stream callers should consume.
        /// </summary>
        private IAudioSource InitOutput(IAudioSource source)
        {
            // Detach the handler from any previous interceptor first, so the old
            // filter is not kept alive by the subscription and events are not
            // raised twice.
            if (SynthesizerIntercepter != null)
            {
                SynthesizerIntercepter.Intercepted -= SynthesizerIntercepter_Intercepted;
            }

            SynthesizerIntercepter              = new InterceptFilter(source);
            SynthesizerRecorder.Source          = SynthesizerIntercepter;
            SynthesizerIntercepter.Intercepted += SynthesizerIntercepter_Intercepted;

            return(SynthesizerRecorder);
        }
Exemplo n.º 24
0
        /// <summary>
        /// Rebuilds the audio-question queue from the current question data and
        /// remembers the callback to fire when playback ends.
        /// </summary>
        public void PlayAudioQuestion(Action endCallback)
        {
            audioSource = null;
            endAudioQuestion = endCallback;

            lLDAudioQuestion.Clear();
            foreach (var question in questionData)
            {
                lLDAudioQuestion.Add(question);
            }
        }
Exemplo n.º 25
0
        /// <summary>
        /// Converts a duration in "raw elements" to a duration in milliseconds.
        /// "Raw elements" are whatever unit the <see cref="IAudioSource"/>
        /// implementation uses (e.g. a <see cref="IWaveSource"/> counts bytes, a
        /// <see cref="ISampleSource"/> counts samples). Conversion is delegated to
        /// <see cref="TimeConverterFactory"/> via <see cref="GetTime"/>.
        /// </summary>
        /// <param name="source">The <see cref="IAudioSource"/> whose <see cref="WaveFormat"/> drives the conversion.</param>
        /// <param name="elementCount">The duration in "raw elements".</param>
        /// <returns>The duration in milliseconds.</returns>
        /// <exception cref="System.ArgumentNullException">source is null.</exception>
        /// <exception cref="System.ArgumentOutOfRangeException">elementCount is less than zero.</exception>
        public static long GetMilliseconds(this IAudioSource source, long elementCount)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (elementCount < 0)
                throw new ArgumentOutOfRangeException("elementCount");

            TimeSpan time = GetTime(source, elementCount);
            return (long)time.TotalMilliseconds;
        }
Exemplo n.º 26
0
        /// <summary>
        /// Converts a duration in milliseconds to a duration in "raw elements".
        /// "Raw elements" are whatever unit the <see cref="IAudioSource"/>
        /// implementation uses (e.g. a <see cref="IWaveSource"/> counts bytes, a
        /// <see cref="ISampleSource"/> counts samples). Conversion is delegated to
        /// the <see cref="TimeSpan"/>-based overload.
        /// </summary>
        /// <param name="source">The <see cref="IAudioSource"/> whose <see cref="WaveFormat"/> drives the conversion.</param>
        /// <param name="milliseconds">Duration in milliseconds.</param>
        /// <returns>Duration in "raw elements".</returns>
        /// <exception cref="System.ArgumentNullException">source is null.</exception>
        /// <exception cref="System.ArgumentOutOfRangeException">milliseconds is less than zero.</exception>
        public static long GetRawElements(this IAudioSource source, long milliseconds)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (milliseconds < 0)
                throw new ArgumentOutOfRangeException("milliseconds");

            TimeSpan duration = TimeSpan.FromMilliseconds(milliseconds);
            return GetRawElements(source, duration);
        }
Exemplo n.º 27
0
        /// <summary>
        /// Pairs a lossy audio stream with its LossyWAV correction stream; both must
        /// have identical length and PCM format.
        /// </summary>
        public LossyWAVReader(IAudioSource audioSource, IAudioSource lwcdfSource)
        {
            _audioSource = audioSource;
            _lwcdfSource = lwcdfSource;

            if (_audioSource.Length != _lwcdfSource.Length)
                throw new Exception("Data not same length");

            bool formatMismatch =
                _audioSource.PCM.BitsPerSample != _lwcdfSource.PCM.BitsPerSample ||
                _audioSource.PCM.ChannelCount != _lwcdfSource.PCM.ChannelCount ||
                _audioSource.PCM.SampleRate != _lwcdfSource.PCM.SampleRate;
            if (formatMismatch)
                throw new Exception("FMT Data mismatch");

            scaling_factor = 1.0; // !!!! Need to read 'fact' chunks or tags here
        }
Exemplo n.º 28
0
        /// <summary>
        /// Loads game assets: slices the 6x1 sprite sheet into 16x16 tiles and opens
        /// the two background-music streams.
        /// </summary>
        private Resources()
        {
            var sprites = Texture2D.LoadAndSplitFrom("./resources/sprites.png", 6, 1, VectorInt.One * 16);

            TextureSnakeHead = sprites[0];
            TextureSnakeBody = sprites[1];
            TextureFruit = sprites[2];
            TextureFruitGold = sprites[3];
            TextureBug1 = sprites[4];
            TextureBug2 = sprites[5];

            BgmMain = new VorbisAudioSource("./resources/bgm_main.ogg");
            BgmTitle = new VorbisAudioSource("./resources/bgm_title.ogg");
        }
Exemplo n.º 29
0
        /// <summary>
        /// Base setup for a file encoder: validates the source, stores the encoding
        /// context, and ensures the target directory exists.
        /// </summary>
        public FileEncoderBase(IAudioSource audioSource, string targetFilename, AudioFileTag tags, TrackGain trackGain, DrMeter drMeter)
        {
            if (audioSource == null)
                throw new SkipEncodingItemException("Unsupported audio source.");

            this.audioSource = audioSource;
            this.targetFilename = targetFilename;
            this.tags = tags;
            this.trackGain = trackGain;
            this.drMeter = drMeter;

            // Make sure the output directory exists before any writer opens the file.
            Directory.CreateDirectory(Path.GetDirectoryName(this.targetFilename));
        }
Exemplo n.º 30
0
        /// <summary>
        /// Base setup for a file encoder: validates the source, creates the target
        /// directory, and stores the encoding context.
        /// </summary>
        public FileEncoderBase(IAudioSource audioSource, string targetFilename, AudioFileTag tags, TrackGain trackGain, DrMeter drMeter)
        {
            if (audioSource == null)
                throw new SkipEncodingItemException("Unsupported audio source.");

            this.targetFilename = targetFilename;
            this.audioSource = audioSource;
            // The writer opens the file later; the directory must exist by then.
            Directory.CreateDirectory(Path.GetDirectoryName(this.targetFilename));
            this.tags = tags;
            this.trackGain = trackGain;
            this.drMeter = drMeter;
        }
Exemplo n.º 31
0
        //private void OnDisable()
        //{
        //    Poof(m_oParticleTime);
        //}

        /// <summary>
        /// Stops the poof sound, raises the death event, and cancels pending invokes.
        /// </summary>
        void OnDestroy()
        {
            if (poofSound != null)
            {
                poofSound.Stop();
                poofSound = null;
            }

            // Notify listeners before cancelling any scheduled Invoke calls.
            OnDeath?.Invoke();

            CancelInvoke();
        }
Exemplo n.º 32
0
        /// <summary>
        /// Spawns an enlarged explosion at the player's position, grabs its audio
        /// source (which plays on awake), and hides the player ship.
        /// </summary>
        public override void OnActivate()
        {
            var explosion = _gameEngineInterface.Clone(_sourceExplosion, string.Empty);
            explosion.Transform.Position = _playerShip.Transform.Position;
            explosion.Transform.Scale(1.1f, 1.1f, 1.1f);
            explosion.SetActive(true);

            //_audioSourceLoseTransition.Play();

            // This audio source has PlayOnAwake = true, so it starts by itself.
            _audioSourceExplosion = explosion.GetComponent<IAudioSource>();

            _playerShip.SetActive(false);
        }
Exemplo n.º 33
0
        /// <summary>
        /// Replies with the title of the track currently playing in this guild's
        /// voice session, or a notice when nothing is playing.
        /// </summary>
        public async Task AnnounceNowplayAsync()
        {
            GuildVoiceState voiceState = audioService.GetVoiceState(Context.Guild);
            IAudioSource    np         = voiceState.Player.PlayingTrack;

            if (np == null)
            {
                await ReplyAsync("Not playing anything.");
            }
            else
            {
                // Use the snapshot taken above; re-reading Player.PlayingTrack here
                // (as the original did) could race with the player advancing to the
                // next track and throw on a null deref.
                await ReplyAsync(":musical_note: Now playing: " + np.Info.Title);
            }
        }
Exemplo n.º 34
0
        /// <summary>
        /// Initializes the audio provider with the given audio source.
        /// Only WaveOut-backed sources are accepted.
        /// </summary>
        /// <param name="audioSource">The AudioSource.</param>
        /// <exception cref="InvalidOperationException">The source is not a <c>WaveOutAudioSource</c>.</exception>
        public void Initialize(IAudioSource audioSource)
        {
            var waveOutSource = audioSource as WaveOutAudioSource;
            if (waveOutSource == null)
                throw new InvalidOperationException("The specified audio source does not match the AudioProvider type.");

            _currentAudioSource = audioSource;
            _waveOut.Device = _device;
            _waveOut.Initialize(waveOutSource.WaveData, waveOutSource.WaveFormat);
            _userStop = false;
        }
Exemplo n.º 35
0
        /// <summary>
        /// Builds the output format for the given source: sample rate, bit depth and
        /// sub-format are preserved; the channel count and speaker mask come from
        /// this instance's <c>OutputChannelCount</c> / <c>OutputMask</c>.
        /// </summary>
        /// <exception cref="System.ArgumentNullException">audioSource is null.</exception>
        internal WaveFormat BuildOutputWaveFormat(IAudioSource audioSource)
        {
            if (audioSource == null)
            {
                // Fixed: the exception previously named a non-existent parameter ("source").
                throw new ArgumentNullException("audioSource");
            }

            return(new WaveFormatExtensible(
                       audioSource.WaveFormat.SampleRate,
                       audioSource.WaveFormat.BitsPerSample,
                       OutputChannelCount,
                       WaveFormatExtensible.SubTypeFromWaveFormat(audioSource.WaveFormat),
                       OutputMask));
        }
Exemplo n.º 36
0
        /// <summary>
        /// Converts a duration in raw elements to a <see cref="TimeSpan"/> value. For more information about "raw elements" see remarks.
        /// </summary>
        /// <param name="source">The <see cref="IAudioSource"/> instance which provides the <see cref="WaveFormat"/> used
        /// to convert the duration in "raw elements" to a <see cref="TimeSpan"/> value.</param>
        /// <param name="elementCount">The duration in "raw elements" to convert to a <see cref="TimeSpan"/> value.</param>
        /// <returns>The duration as a <see cref="TimeSpan"/> value.</returns>
        /// <exception cref="System.ArgumentNullException">source is null.</exception>
        /// <exception cref="System.ArgumentOutOfRangeException">elementCount is less than zero.</exception>
        /// <remarks>
        /// The term "raw elements" describes the elements, an audio source uses.
        /// What type of unit an implementation of the <see cref="IAudioSource"/> interface uses, depends on the implementation itself.
        /// For example, a <see cref="IWaveSource"/> uses bytes while a <see cref="ISampleSource"/> uses samples.
        /// That means that a <see cref="IWaveSource"/> provides its position, length,... in bytes
        /// while a <see cref="ISampleSource"/> provides its position, length,... in samples.
        /// <para></para>
        /// To get the length or the position of a <see cref="IAudioSource"/> as a <see cref="TimeSpan"/> value, use the
        /// <see cref="GetLength"/> or the <see cref="GetPosition"/> property.
        /// <para></para><para></para>
        /// Internally this method uses the <see cref="TimeConverterFactory"/> class.
        /// </remarks>
        public static TimeSpan GetTime(this IAudioSource source, long elementCount)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }
            if (elementCount < 0)
            {
                // Fixed: a negative count previously threw ArgumentNullException,
                // inconsistent with GetMilliseconds/GetRawElements in this class.
                throw new ArgumentOutOfRangeException("elementCount");
            }

            return(TimeConverterFactory.Instance.GetTimeConverterForSource(source)
                   .ToTimeSpan(source.WaveFormat, elementCount));
        }
Exemplo n.º 37
0
        /// <summary>
        /// While the scanner is being dragged: entering a scan-start/scan-end zone
        /// arms pronunciation for the letter under the scanner (highlighting it and
        /// playing its vocabulary audio), and touching Antura scares it away.
        /// </summary>
        void OnTriggerStay(Collider other)
        {
            if ((other.tag == ScannerGame.TAG_SCAN_START || other.tag == ScannerGame.TAG_SCAN_END) && isDragging)
            {
                // Arm only once per pass; willPronounce is presumably reset elsewhere
                // when the scan completes — TODO confirm.
                if (!willPronounce)
                {
                    scanStartPos  = transform.position;
                    playTime      = 0;
                    willPronounce = true;

                    // Un-highlight the previously scanned letter, if any.
                    if (LL)
                    {
                        LL.setColor(Color.white);
                    }

                    // The letter component sits on the trigger's parent object.
                    LL        = other.transform.parent.GetComponent <ScannerLivingLetter>();
                    dataAudio = game.Context.GetAudioManager().PlayVocabularyData(LL.LLController.Data, true);

                    LL.setColor(Color.green);

                    // Advance the tutorial on the first successful scan.
                    if (game.tut.tutStep == 1)
                    {
                        game.tut.setupTutorial(2, LL);
                    }
                }

                /*if (timeDelta == 0 || lastTag == other.tag)
                 *              {
                 *                      timeDelta = Time.time;
                 *                      lastTag = other.tag;
                 *              }
                 *              else
                 *              {
                 *                      ScannerLivingLetter LL = other.transform.parent.GetComponent<ScannerLivingLetter>();
                 *                      timeDelta = Time.time - timeDelta;
                 *                      game.PlayWord(timeDelta, LL);
                 *                      timeDelta = 0;
                 *
                 *  if(game.tut.tutStep == 1)
                 *      game.tut.setupTutorial(2, LL);
                 * }*/
            }


            // Dragging the scanner into Antura scares it.
            if (other.gameObject.name.Equals("Antura") && isDragging)
            {
                game.antura.GetComponent <ScannerAntura>().beScared();
            }
        }
Exemplo n.º 38
0
        /// <summary>
        /// Stops, releases and unregisters the sound effect with the given id,
        /// if it exists and is currently playing.
        /// </summary>
        public void StopEffect(uint uid)
        {
            // (Removed an unused — and undisposed — dictionary enumerator that the
            // original obtained and never advanced.)
            IAudioSource ae;
            if (m_fxAudio.TryGetValue(uid, out ae))
            {
                // NOTE(review): non-playing entries are intentionally left registered
                // here, matching the original behavior — confirm that is desired.
                if (ae.isPlaying)
                {
                    ae.Stop();
                    ae.Release();
                    m_fxAudio.Remove(uid);
                }
            }
        }
Exemplo n.º 39
0
        /// <summary>
        /// Per-frame tick: updates the clock overlay, plays the hurry-up warning once
        /// when less than 4 seconds remain, and advances the game timer.
        /// </summary>
        public void Update(float delta)
        {
            game.Context.GetOverlayWidget().SetClockTime(gameTime.Time);

            if (!hurryUpSfx && gameTime.Time < 4f)
            {
                // Latch so the warning sound is only triggered once.
                hurryUpSfx = true;
                timesUpAudioSource = game.Context.GetAudioManager().PlaySound(Sfx.DangerClockLong);
            }

            gameTime.Update(delta);
        }
Exemplo n.º 40
0
        /// <summary>
        ///     Sets the position of a <see cref="IAudioSource"/> as a <see cref="TimeSpan"/> value.
        /// </summary>
        /// <param name="source">The source to set the new position for.</param>
        /// <param name="position">The new position as a <see cref="TimeSpan"/> value.</param>
        /// <remarks>
        /// The source must support seeking to get or set the position.
        /// Use the <see cref="IAudioSource.CanSeek"/> property to determine whether the stream supports seeking.
        /// Otherwise a call to this method may result in an exception.
        /// </remarks>
        public static void SetPosition(this IAudioSource source, TimeSpan position)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (position.TotalMilliseconds < 0)
                throw new ArgumentOutOfRangeException("position");

            // Translate the TimeSpan into the source's native unit before seeking.
            source.Position = GetRawElements(source, position);
        }
        /// <summary>
        /// Decides whether the listener can hear the audio source: the volume must
        /// reach the listener's hearing threshold, and a line-of-"sound" check is run
        /// from the source to a fan of points around the listener — any unobstructed
        /// (or listener-hitting) line counts as hearable.
        /// </summary>
        public bool IsHearable(IListener listener, IAudioSource source, AudioSourceInfo info)
        {
            if (info.volume >= listener.config.minHearingVolume)
            {
                // Offsets around the listener to probe, so a single blocking object
                // does not fully mute the source.
                var raycastPosition = new Vector3[]
                {
                    new Vector3(0f, 0f),  // Middle
                    new Vector3(0f, 1f),  // Up
                    new Vector3(0f, -1f), // Down
                    new Vector3(-1f, 0f), // Left side
                    new Vector3(1f, 0f),  // Right side
                };

                // With levels = 1 the fan is probed once at scaling 1.5x the offsets.
                const int   levels  = 1;
                const float scaling = 1.5f;
                for (int i = 1; i < levels + 1; i++)
                {
                    foreach (var pos in raycastPosition)
                    {
                        var raycastToPosition = listener.transform.position + (pos * i * scaling);

                        RaycastHit hitInfo;
                        bool       hit = _raycaster.Linecast(source.transform.position, raycastToPosition, out hitInfo, _config.raycastLayer);
                        // If nothing was hit the object is visible.
                        if (hit == false)
                        {
                            // Synthesize a hit on the listener so the debug draw and
                            // target check below have consistent data.
                            hitInfo.gameObject = listener.gameObject;
                            hitInfo.point      = raycastToPosition;
                            hitInfo.normal     = Vector3.zero;
                        }

                        // Direct path from audio source to listener.
                        if (_config.debug)
                        {
                            LosDebugUtility.DrawDebugLine(source.gameObject, hitInfo.gameObject, listener.gameObject, source.transform.position, hitInfo.point, raycastToPosition, Color.magenta, Color.black);
                        }

                        // If we did hit something, is it the target?
                        if (LosUtility.IsValidTarget(listener.gameObject, hitInfo.gameObject) || hit == false)
                        {
                            return(true);
                        }
                    }
                }
            }

            return(false);
        }
Exemplo n.º 42
0
        /// <summary>
        /// Streams <paramref name="duration"/> worth of audio from the source into
        /// <paramref name="stream"/> as a 16-bit PCM WAV file (44-byte RIFF header
        /// followed by the data chunk), pulling float samples in chunks of
        /// <paramref name="bufferSize"/>. The stream is left open.
        /// </summary>
        public unsafe static Task WriteAsync(Stream stream, IAudioSource source, TimeSpan duration, int bufferSize = 44100)
        {
            const int headerSize = 44;
            const int bitDepth   = 16;
            // Total payload bytes: samples/sec * channels * seconds * bytes-per-sample.
            int       dataSize   = (int)(source.Format.SampleRate * source.Format.Channels * duration.TotalSeconds * (bitDepth / 8));
            int       totalSize  = headerSize + dataSize;

            return(Task.Run(() =>
            {
                // leaveOpen: true — the caller owns the stream.
                using (var bw = new BinaryWriter(stream, Encoding.ASCII, true))
                {
                    // RIFF header; the size field excludes the 8 bytes of "RIFF" + size.
                    bw.Write(Encoding.ASCII.GetBytes("RIFF"));
                    bw.Write(totalSize - 8);
                    bw.Write(Encoding.ASCII.GetBytes("WAVE"));
                    // fmt chunk: 16-byte body, format tag 1 = uncompressed PCM.
                    bw.Write(Encoding.ASCII.GetBytes("fmt "));
                    bw.Write(16);
                    bw.Write((short)1);
                    bw.Write((short)source.Format.Channels);
                    bw.Write(source.Format.SampleRate);
                    // Byte rate, then block align, then bits per sample.
                    bw.Write(source.Format.SampleRate * (bitDepth / 8) * source.Format.Channels);
                    bw.Write((short)(bitDepth / 8 * source.Format.Channels));
                    bw.Write((short)bitDepth);

                    // Data Chunk
                    bw.Write(Encoding.ASCII.GetBytes("data"));
                    bw.Write(dataSize);

                    // Remaining sample count (dataSize is in bytes, 2 bytes/sample).
                    int left = dataSize / (bitDepth / 8);
                    float[] buffer;
                    short[] writeBuffer = new short[bufferSize];
                    while (left > 0)
                    {
                        int write = Math.Min(left, bufferSize);
                        buffer = source.Next(write);

                        // Convert [-1, 1] floats to 16-bit signed samples.
                        // NOTE(review): assumes Next(write) returns at least `write`
                        // samples — confirm the IAudioSource contract.
                        fixed(float *buf = buffer)
                        {
                            for (int i = 0; write > i; i++)
                            {
                                bw.Write((short)(buf[i] * short.MaxValue));
                            }
                        }

                        left -= write;
                    }
                }
            }));
        }
Exemplo n.º 43
0
        /// <summary>Rewinds the timer and pauses it</summary>
        public void Rewind()
        {
            // Bail out if the UI element is not in a valid state.
            if (!Validate("MinigamesUITimer"))
            {
                return;
            }

            // Silence and release any alarm sound still playing.
            var alarm = alarmSfxSource;
            if (alarm != null)
            {
                alarm.Stop();
                alarmSfxSource = null;
            }

            // Reset every tween back to its starting state.
            shakeTween.Rewind();
            endTween.Rewind();
            timerTween.Rewind();
        }
Exemplo n.º 44
0
        /// <summary>
        /// Starts playing the given song source and arms the completion callback.
        /// Does nothing if a song is already in progress.
        /// </summary>
        /// <param name="source">Audio source for the song.</param>
        /// <param name="onSongCompleted">Callback invoked when the song ends.</param>
        public void PlaySong(IAudioSource source, System.Action onSongCompleted)
        {
            // Only one song at a time.
            if (playingSong)
            {
                return;
            }

            playingSong          = true;
            songSource           = source;
            this.onSongCompleted = onSongCompleted;

            // Make every bar shine as its target approaches while the song plays.
            for (int barIndex = 0; barIndex < bars.Count; ++barIndex)
            {
                bars[barIndex].shineWhenNearTarget = true;
            }
        }
Exemplo n.º 45
0
        /// <summary>
        /// Reproduce an audio stream in a channel.
        /// </summary>
        /// <param name="audio">Path of the WAV file to read.</param>
        /// <param name="source">Optional audio source associated with the reproduction.</param>
        /// <returns>The reproduction session, or null if reading/sending failed.</returns>
        public ReproductionSession ReproduceWav(string audio, IAudioSource source = null)
        {
            byte[] buffer = null;

            try
            {
                // Read the WAV data from the file path; the reader is disposed immediately.
                // (Previous comment claimed an MP3FileReader — this is a WaveFileReader.)
                using (var waveReader = new WaveFileReader(audio))
                    buffer = Reproduce(waveReader);

                return(SendAudio(buffer, source));
            }
            catch (Exception) { } // best-effort: any failure yields a null session

            return(null);
        }
Exemplo n.º 46
0
 /// <summary>
 /// Wires the media pipeline together: registers this client as the media
 /// packet parser on the protocol switch and stores the supplied audio
 /// components for later use.
 /// </summary>
 public MediaClient(
     ProtocolSwitch protocolSwitch,
     IAudioSource audioSource,
     IAudioPlayer audioPlayer,
     IAudioCodec audioCodec,
     IJitterBuffer jitterBuffer,
     IClientSettings clientSettings)
 {
     _protocolSwitch = protocolSwitch;
     // This client handles incoming media packets for the switch.
     _protocolSwitch.SetMediaPacketParser(this);
     _audioSource    = audioSource;
     _audioPlayer    = audioPlayer;
     _audioCodec     = audioCodec;
     _jitterBuffer   = jitterBuffer;
     _clientSettings = clientSettings;
 }
Exemplo n.º 47
0
        // Update is called once per frame.
        // Handles: (1) replaying the letter's vocabulary audio while E is held,
        // (2) movement for the current status, (3) belt fall-off and mid-point
        // detection.
        void Update()
        {
            if (Input.GetKey(KeyCode.E) && LLController.Data != null)
            {
                // Start the vocabulary sound, or restart it once it stops playing.
                if (wordSound == null)
                {
                    wordSound = game.Context.GetAudioManager().PlayVocabularyData(LLController.Data, true);
                }
                if (!wordSound.IsPlaying)
                {
                    wordSound = game.Context.GetAudioManager().PlayVocabularyData(LLController.Data, true);
                }

                // Hold the playhead at the start while the key is down.
                wordSound.Position = 0;
            }
            if (wordSound != null)
            {
                // Slowly advance the playhead, capped at the first half second.
                wordSound.Position = Mathf.Clamp(wordSound.Position + Time.deltaTime / 10, 0, 0.5f);
            }

            // Move the letter according to its current state.
            if (status == LLStatus.Sliding)
            {
                // Slide diagonally: forward, and down at half the rate.
                transform.Translate(slideSpeed * Time.deltaTime, -slideSpeed * Time.deltaTime / 2, 0);
            }
            else if (status == LLStatus.StandingOnBelt)
            {
                // Carried along by the conveyor belt.
                transform.Translate(game.beltSpeed * Time.deltaTime, 0, 0);
            }
            else if (status == LLStatus.Flying)
            {
                transform.Translate(Vector2.up * flightSpeed * Time.deltaTime);
            }
            else if (status == LLStatus.Falling)
            {
                transform.Translate(Vector2.down * flightSpeed * Time.deltaTime);
            }

            // Past the belt end while on it: fall off. Past the midpoint (once): notify.
            if (livingLetter.transform.position.x > fallOffX && status == LLStatus.StandingOnBelt)
            {
                StartCoroutine(co_FallOff());
            }
            else if (livingLetter.transform.position.x > midPointX && !passedMidPoint)
            {
                passedMidPoint = true;
                onPassedMidPoint(this);
            }
        }
        /// <summary>
        /// Picks the MP3 encoder for a worker thread: the first
        /// <c>localConcurrency</c> threads encode locally; remaining threads are
        /// mapped onto discovered remote servers in order of their capacity.
        /// </summary>
        /// <exception cref="ArgumentException">No local or remote slot exists for the thread.</exception>
        protected override IEncoder CreateEncoderInternal(int threadNumber, FileEncodeTask task, IAudioSource audioSource)
        {
            if (threadNumber < this.localConcurrency)
            {
                return new LocalMp3Encoder(audioSource, task.TargetFilename, task.Tag, vbrQuality, task.TrackGain, task.DrMeter);
            }

            // BUGFIX: subtract the same local slot count compared against above.
            // Previously this subtracted Environment.ProcessorCount, which skews
            // the remote mapping whenever localConcurrency != ProcessorCount.
            // Matches the FLAC encoder factory's logic.
            threadNumber -= this.localConcurrency;

            foreach (DiscoveryServerDescriptor server in this.servers)
            {
                if (threadNumber < server.ThreadCount)
                {
                    return new RemoteMp3VbrEncoder(server.Address, audioSource, task.TargetFilename, task.Tag, this.vbrQuality, task.TrackGain, task.DrMeter);
                }
                threadNumber -= server.ThreadCount;
            }

            throw new ArgumentException("threadNumber is too large.");
        }
Exemplo n.º 49
0
        /// <summary>
        /// Removes the first registered member that wraps exactly the given
        /// audio source and sink pair, if one exists. Thread-safe.
        /// </summary>
        /// <param name="source">Audio source of the member to remove.</param>
        /// <param name="sink">Audio sink of the member to remove.</param>
        public void RemoveInputOutputSource(IAudioSource source, IAudioSink sink)
        {
            lock (MemberLock)
            {
                PushPullObject match = null;

                // Locate the member with this exact source/sink combination.
                foreach (PushPullObject member in Members)
                {
                    if (member.AudioSource == source && member.AudioSink == sink)
                    {
                        match = member;
                        break;
                    }
                }

                if (match == null)
                {
                    return;
                }

                Members.Remove(match);
            }
        }
        /// <summary>
        /// Picks the FLAC encoder for a worker thread: the first
        /// <c>concurrencylevel</c> threads encode locally; remaining threads are
        /// mapped onto discovered remote servers in order of their capacity.
        /// </summary>
        /// <exception cref="ArgumentException">No local or remote slot exists for the thread.</exception>
        protected override IEncoder CreateEncoderInternal(int threadNumber, FileEncodeTask task, IAudioSource audioSource)
        {
            // Local slots come first.
            if (threadNumber < this.concurrencylevel)
            {
                return new NativeFlacEncoder(audioSource, task.TargetFilename, task.Tag, compressionLevel, task.TrackGain, task.DrMeter);
            }

            // Map the remaining index onto the remote servers' thread slots.
            int remoteIndex = threadNumber - this.concurrencylevel;

            foreach (DiscoveryServerDescriptor server in this.servers)
            {
                if (remoteIndex < server.ThreadCount)
                {
                    return new RemoteFlacEncoder(server.Address, audioSource, task.TargetFilename, task.Tag, this.compressionLevel, task.TrackGain, task.DrMeter);
                }
                remoteIndex -= server.ThreadCount;
            }

            throw new ArgumentException("threadNumber is too large.");
        }
Exemplo n.º 51
0
        /// <summary>
        /// Starts capturing audio from the device selected in the combo box,
        /// wiring up frame/error handlers and a Hamming analysis window.
        /// </summary>
        void Button1Click(object sender, EventArgs e)
        {
            var device = comboBox1.SelectedItem as AudioDeviceInfo;
            if (device == null)
            {
                MessageBox.Show("No audio devices available.");
                return;
            }

            // Capture at 22.05 kHz in 2048-sample frames.
            source = new AudioCaptureDevice(device.Guid)
            {
                DesiredFrameSize = 2048,
                SampleRate = 22050
            };
            source.NewFrame += source_NewFrame;
            source.AudioSourceError += source_AudioSourceError;

            // Analysis window sized to match the capture frame.
            window = RaisedCosineWindow.Hamming(source.DesiredFrameSize);

            source.Start();
        }
Exemplo n.º 52
0
        /// <summary>
        /// Closes and releases the audio source and destination.
        /// Each close is best-effort: failures are swallowed so cleanup of one
        /// side cannot prevent cleanup of the other.
        /// </summary>
        public void Dispose()
        {
            // Close the input side first.
            try
            {
                if (this.audioSource != null)
                {
                    this.audioSource.Close();
                    // Nulled only after a successful Close, so a failed close
                    // leaves the field set (a later Dispose would retry).
                    this.audioSource = null;
                }
            }
            catch { }

            // Then the output side.
            try
            {
                if (this.AudioDest != null)
                {
                    this.AudioDest.Close();
                    this.AudioDest = null;
                }
            }
            catch { }
        }
Exemplo n.º 53
0
 /// <summary>
 /// Creates a talk target that sends audio from the given source to an
 /// IP Webcam (Android) server.
 /// </summary>
 /// <param name="server">Base URI of the IP Webcam server.</param>
 /// <param name="audioSource">Source whose audio will be transmitted.</param>
 public TalkIPWebcamAndroid(Uri server,IAudioSource audioSource)
 {
     _server = server;
     _audioSource = audioSource;
 }
Exemplo n.º 54
0
 /// <summary>
 /// Builds a DrMeter matching the PCM layout (channels, sample rate,
 /// bit depth) of the given audio source.
 /// </summary>
 public static DrMeter CreateDrMeter(IAudioSource audioSource)
 {
     var pcm = audioSource.PCM;
     return new DrMeter(pcm.ChannelCount, pcm.SampleRate, pcm.BitsPerSample);
 }
Exemplo n.º 55
0
 /// <summary>
 /// Creates a buffered pipe over <paramref name="source"/> with the given
 /// buffer size, owning the source and using a below-normal-priority
 /// worker thread.
 /// </summary>
 public AudioPipe(IAudioSource source, int size)
     : this(source, size, true, ThreadPriority.BelowNormal)
 {
 }
Exemplo n.º 56
0
 //public new void Dispose()
 //{
 //    _buffer.Clear();
 //}
 /// <summary>
 /// Shuts the pipe down: signals the worker thread to exit, joins it,
 /// closes the underlying source (only if owned) and drops both buffers.
 /// </summary>
 public void Close()
 {
     // NOTE(review): this instance doubles as the worker thread's monitor
     // (lock(this) / Monitor.Pulse(this)) — the worker presumably waits on
     // the same object; confirm before changing the lock target.
     lock (this)
     {
         _close = true;
         Monitor.Pulse(this);
     }
     if (_workThread != null)
     {
         // Wait for the worker to exit before releasing resources it may use.
         _workThread.Join();
         _workThread = null;
     }
     if (_source != null)
     {
         // Only close the source if this pipe owns it.
         if (own) _source.Close();
         _source = null;
     }
     if (_readBuffer != null)
     {
         //_readBuffer.Clear();
         _readBuffer = null;
     }
     if (_writeBuffer != null)
     {
         //_writeBuffer.Clear();
         _writeBuffer = null;
     }
 }
Exemplo n.º 57
0
		/// <summary>
		/// Loads the playlist entry at the given row into the playback deck:
		/// opens its CUE sheet, positions the source at the selected track and
		/// wraps it in a buffered pipe. On any failure the deck is left empty.
		/// </summary>
		/// <param name="row">Row index into the playlist data set.</param>
		internal void LoadDeck(int row)
		{
			CUEConfig _config = (MdiParent as frmCUEPlayer).Config;
			DataSet1 dataSet = (MdiParent as frmCUEPlayer).DataSet;
			Playlist playlist = (MdiParent as frmCUEPlayer).wndPlaylist;
			string path = dataSet.Playlist[row].path;
			int track = dataSet.Playlist[row].track;

			try
			{
				playingCue = new CUESheet(_config);
				playingCue.Open(path);
				playingSource = new CUESheetAudio(playingCue);
				// 588 samples per CD frame; seek to the start of the track.
				playingSource.Position = (long)playingCue.TOC[track].Start * 588;
				playingSource = new AudioPipe(playingSource, 0x2000);
				playingStart = playingSource.Position;
				playingFinish = playingStart + (long)playingCue.TOC[track].Length * 588;
				playingRow = row;
				//playlist.List.Items[playingRow].BackColor = Color.AliceBlue;
				needUpdate = true;
				UpdateDeck();
			}
			catch (Exception) // exception variable was unused (CS0168); dropped
			{
				// Opening failed: reset the deck so nothing stale is played.
				playingStart = playingFinish = 0;
				playingCue = null;
				playingSource = null;
				return;
			}
		}
 /// <summary>
 /// Stores the audio source and the DSP analyzers (track gain, DR meter)
 /// this encoder will feed. Presumably measurement-only (no encoded
 /// output) — confirm against the class's encode loop.
 /// </summary>
 public DspCalculatorEncoder(IAudioSource audioSource, TrackGain trackGain, DrMeter drMeter)
 {
     this.audioSource = audioSource;
     this.trackGain = trackGain;
     this.drMeter = drMeter;
 }
Exemplo n.º 59
0
		/// <summary>
		/// Stops playback. When a player thread is running it is signalled to
		/// stop and joined; otherwise the source and CUE sheet are closed
		/// directly and the deck display is reset.
		/// </summary>
		private void buttonStop_Click(object sender, EventArgs e)
		{
			if (playThread != null)
			{
				// Ask the player thread to stop and wait for it to finish.
				stopNow = true;
				playThread.Join();
				return;
			}

			// No player thread: tear playback state down ourselves.
			if (playingSource != null)
			{
				playingSource.Close();
				playingSource = null;
			}
			if (playingCue != null)
			{
				playingCue.Close();
				playingCue = null;
			}
			playingFinish = 0;
			playingStart = 0;
			playingRow = -1;
			needUpdate = true;
			UpdateDeck();
		}
Exemplo n.º 60
0
        /// <summary>
        /// Enables the microphone/volume-level control: builds the audio source
        /// for the configured type (USB, iSpy server, VLC, FFMPEG, camera feed,
        /// or cloned microphone), wires up audio events, and starts capture.
        /// Re-entrant calls and cross-thread calls are handled up front.
        /// </summary>
        public void Enable()
        {
            // Guard against re-entry while a previous Enable is still running.
            if (_enabling)
                return;
            // Marshal onto the UI thread if needed.
            if (InvokeRequired)
            {
                Invoke(new Delegates.EnableDelegate(Enable));
                return;
            }

            lock (_lockobject)
            {
                if (IsEnabled)
                    return;
                IsEnabled = true;
            }
            _enabling = true;

            try
            {

                // Dock this control directly beneath its camera control, and
                // make sure the camera itself is enabled.
                if (CameraControl != null)
                {
                    Width = CameraControl.Width;
                    Location = new Point(CameraControl.Location.X, CameraControl.Location.Y + CameraControl.Height);
                    Width = Width; // NOTE(review): self-assignment — looks like a no-op; confirm intent
                    Height = 50;
                    if (!CameraControl.IsEnabled)
                    {
                        CameraControl.Enable();
                    }
                }

                IsEnabled = true;
                IsReconnect = false;

                int channels = Micobject.settings.channels;
                int sampleRate = Micobject.settings.samples;
                const int bitsPerSample = 16;

                // Clamp invalid settings to sane minimums (also persisted back).
                if (channels < 1)
                {
                    channels = Micobject.settings.channels = 1;

                }
                if (sampleRate < 8000)
                {
                    sampleRate = Micobject.settings.samples = 8000;
                }

                // Clone = reusing another device's feed rather than opening our own.
                IsClone = Micobject.settings.typeindex==5 || (CameraControl != null && CameraControl.Camobject.settings.sourceindex == 10 &&
                          Micobject.settings.typeindex == 4);

                switch (Micobject.settings.typeindex)
                {
                    case 0: //usb
                        AudioSource = new LocalDeviceStream(Micobject.settings.sourcename)
                                      {RecordingFormat = new WaveFormat(sampleRate, bitsPerSample, channels)};
                        break;
                    case 1: //ispy server (fixed waveformat at the moment...)
                        AudioSource = new iSpyServerStream(Micobject.settings.sourcename)
                                      {RecordingFormat = new WaveFormat(8000, 16, 1)};
                        break;
                    case 2: //VLC listener
                        List<String> inargs = Micobject.settings.vlcargs.Split(Environment.NewLine.ToCharArray(),
                            StringSplitOptions.RemoveEmptyEntries).
                            ToList();
                        //switch off video output
                        inargs.Add(":sout=#transcode{vcodec=none}:Display");

                        AudioSource = new VLCStream(Micobject.settings.sourcename, inargs.ToArray())
                                      {
                                          RecordingFormat = new WaveFormat(sampleRate, bitsPerSample, channels),
                                          TimeOut = Micobject.settings.timeout
                                      };
                        break;
                    case 3: //FFMPEG listener
                        AudioSource = new FFMPEGAudioStream(Micobject.settings.sourcename)
                                      {
                                          RecordingFormat = new WaveFormat(sampleRate, bitsPerSample, channels),
                                          AnalyseDuration = Micobject.settings.analyzeduration,
                                          Timeout = Micobject.settings.timeout
                                      };
                        break;
                    case 4: //From Camera Feed
                        AudioSource = null;
                        if (CameraControl != null)
                        {
                            if (CameraControl.Camera != null)
                            {
                                // The video source may expose audio too.
                                AudioSource = CameraControl.Camera.VideoSource as IAudioSource;
                                if (AudioSource == null)
                                {
                                    if (IsClone)
                                    {
                                        //cloned feed
                                        int icam = Convert.ToInt32(CameraControl.Camobject.settings.videosourcestring);

                                            var cw = MainForm.InstanceReference.GetCameraWindow(icam);
                                            if (cw != null)
                                            {
                                                if (CameraControl != null && CameraControl.VolumeControl != null &&
                                                    cw.VolumeControl != null && cw.VolumeControl.AudioSource != null)
                                                {
                                                    AudioSource = cw.VolumeControl.AudioSource;
                                                }
                                            }
                                    }
                                }
                                // Adopt the actual recording format of the borrowed source.
                                if (AudioSource != null && AudioSource.RecordingFormat != null)
                                {
                                    Micobject.settings.samples = AudioSource.RecordingFormat.SampleRate;
                                    Micobject.settings.channels = AudioSource.RecordingFormat.Channels;

                                }
                            }
                            if (AudioSource == null)
                            {
                                SetErrorState("Mic source offline");
                                AudioSourceErrorState = true;
                                _requestRefresh = true;
                            }
                        }
                        break;
                    case 5:
                        // Clone of another microphone, identified by index.
                        int imic;
                        if (Int32.TryParse(Micobject.settings.sourcename, out imic))
                        {

                                var vl = MainForm.InstanceReference.GetVolumeLevel(imic);
                                if (vl != null)
                                {
                                    AudioSource = vl.AudioSource;

                                    if (AudioSource != null && AudioSource.RecordingFormat != null)
                                    {
                                        Micobject.settings.samples = AudioSource.RecordingFormat.SampleRate;
                                        Micobject.settings.channels = AudioSource.RecordingFormat.Channels;
                                    }
                                    // Unsubscribe first so re-enabling never double-registers.
                                    vl.AudioDeviceDisabled -= MicrophoneDisabled;
                                    vl.AudioDeviceEnabled -= MicrophoneEnabled;
                                    vl.AudioDeviceReConnected -= MicrophoneReconnected;

                                    vl.AudioDeviceDisabled += MicrophoneDisabled;
                                    vl.AudioDeviceEnabled += MicrophoneEnabled;
                                    vl.AudioDeviceReConnected += MicrophoneReconnected;
                                }

                        }
                        if (AudioSource == null)
                        {
                            SetErrorState("Mic source offline");
                            AudioSourceErrorState = true;
                            _requestRefresh = true;
                        }
                        break;
                }

                if (AudioSource != null)
                {
                    // Output device: named DirectSound device if configured, else default.
                    WaveOut = !String.IsNullOrEmpty(Micobject.settings.deviceout)
                        ? new DirectSoundOut(new Guid(Micobject.settings.deviceout), 100)
                        : new DirectSoundOut(100);

                    // Unsubscribe/resubscribe to avoid duplicate handlers.
                    AudioSource.AudioFinished -= AudioDeviceAudioFinished;
                    AudioSource.DataAvailable -= AudioDeviceDataAvailable;
                    AudioSource.LevelChanged -= AudioDeviceLevelChanged;

                    AudioSource.AudioFinished += AudioDeviceAudioFinished;
                    AudioSource.DataAvailable += AudioDeviceDataAvailable;
                    AudioSource.LevelChanged += AudioDeviceLevelChanged;

                    // Prime the level meter with silence for each channel.
                    var l = new float[Micobject.settings.channels];
                    for (int i = 0; i < l.Length; i++)
                    {
                        l[i] = 0.0f;
                    }
                    AudioDeviceLevelChanged(this, new LevelChangedEventArgs(l));

                    // Only start sources we own; clones and camera-video sources
                    // are started by their owners.
                    if (!AudioSource.IsRunning && !IsClone && !(AudioSource is IVideoSource))
                    {
                        lock (_lockobject)
                        {
                            AudioSource.Start();
                        }
                    }
                }

                // Reset detection/alert state for the fresh session.
                SoundDetected = false;
                _soundRecentlyDetected = false;
                Alerted = false;
                FlashCounter = DateTime.MinValue;
                ReconnectCount = 0;
                Listening = false;
                LastSoundDetected = Helper.Now;
                UpdateFloorplans(false);
                Micobject.settings.active = true;

                MainForm.NeedsSync = true;
                _requestRefresh = true;

                if (AudioDeviceEnabled != null)
                    AudioDeviceEnabled(this, EventArgs.Empty);
            }
            catch (Exception ex)
            {
                if (ErrorHandler != null)
                    ErrorHandler(ex.Message);
            }
            _enabling = false;
        }