Example #1
        public FFTDataProvider(IWaveSource source)
        {
            if (source == null)
                throw new ArgumentNullException("source");

            CreateFFTAggregator(source);
        }
Example #2
        public override bool MusicDeliver(PluginMusicDeliveryArgs args)
        {
            if (_client == null)
            {
                return false;
            }

            if (_providerConverted == null)
            {
                _volumeProvider = new VolumeSource(args.Source.ChangeSampleRate(48000).ToSampleSource());
                _providerConverted = new BufferSource(_volumeProvider.ToWaveSource(16), _volumeProvider.WaveFormat.BytesPerSecond * 4);
            }

            _volumeProvider.Volume = MusicPlayer.Current.Volume;

            byte[] buffer = new byte[_volumeProvider.WaveFormat.BytesPerSecond];
            int byteCount = _providerConverted.Read(buffer, 0, buffer.Length);

            if (byteCount > 0)
            {
                _voiceClient?.Send(buffer, 0, byteCount);
            }

            return true;
        }
Example #3
        public static ISampleSource CreateConverter(IWaveSource source)
        {
            if (source == null)
                throw new ArgumentNullException("source");

            int bps = source.WaveFormat.BitsPerSample;
            if (source.WaveFormat.IsPCM())
            {
                switch (bps)
                {
                    case 8:
                        return new Pcm8BitToSample(source);

                    case 16:
                        return new Pcm16BitToSample(source);

                    case 24:
                        return new Pcm24BitToSample(source);

                    default:
                        throw new NotSupportedException("Waveformat is not supported. Invalid BitsPerSample value.");
                }
            }
            else if (source.WaveFormat.IsIeeeFloat() && bps == 32)
            {
                return new IeeeFloatToSample(source);
            }
            else
            {
                throw new NotSupportedException("Waveformat is not supported. Invalid WaveformatTag.");
            }
        }
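For context, a minimal sketch of how such a converter is usually reached from calling code via CSCore's ToSampleSource() extension rather than by calling the factory directly; the file path is a placeholder and the usual CSCore/CSCore.Codecs namespaces are assumed to be imported:
        // Decode a file and convert the raw IWaveSource into 32-bit float samples.
        // Internally this goes through a PCM/IEEE-float dispatch like the one shown above.
        IWaveSource waveSource = CodecFactory.Instance.GetCodec("track.mp3"); // placeholder path
        ISampleSource samples = waveSource.ToSampleSource();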
Example #4
        /// <summary>
        /// Initializes a new instance of the <see cref="DmoChannelResampler"/> class.
        /// </summary>
        /// <param name="source">Underlying source which has to get resampled.</param>
        /// <param name="channelMatrix"><see cref="ChannelMatrix" /> which defines how to map each channel.</param>
        /// <param name="outputFormat">Waveformat, which specifies the new format. Note, that by far not all formats are supported.</param>
        /// <exception cref="System.ArgumentNullException">
        /// source
        /// or
        /// channelMatrix
        /// or
        /// outputFormat
        /// </exception>
        /// <exception cref="System.ArgumentException">The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.</exception>
        public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix, WaveFormat outputFormat)
            : base(source, outputFormat)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (channelMatrix == null)
                throw new ArgumentNullException("channelMatrix");
            if(outputFormat == null)
                throw new ArgumentNullException("outputFormat");

            if (source.WaveFormat.Channels != channelMatrix.InputChannelCount)
            {
                throw new ArgumentException(
                    "The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.");
            }

            var inputFormat = new WaveFormatExtensible(
                source.WaveFormat.SampleRate,
                source.WaveFormat.BitsPerSample,
                source.WaveFormat.Channels,
                WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat),
                channelMatrix.InputMask);

            Outputformat = new WaveFormatExtensible(
                outputFormat.SampleRate,
                outputFormat.BitsPerSample,
                outputFormat.Channels,
                WaveFormatExtensible.SubTypeFromWaveFormat(outputFormat),
                channelMatrix.OutputMask);

            Initialize(inputFormat, Outputformat);
            _channelMatrix = channelMatrix;
            CommitChannelMatrixChanges();
        }
Example #5
        private void btnStart_Click(object sender, EventArgs e)
        {
            if (deviceslist.SelectedItems.Count <= 0)
                return;

            SaveFileDialog sfd = new SaveFileDialog();
            sfd.Filter = "WAV (*.wav)|*.wav";
            sfd.Title = "Speichern";
            sfd.FileName = String.Empty;
            if (sfd.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                _waveIn = new WaveInEvent(new WaveFormat(44100, 16, _selectedDevice.Channels));
                _waveIn.Device = deviceslist.SelectedItems[0].Index;

                _waveIn.Initialize();
                _waveIn.Start();

                var waveInToSource = new SoundInSource(_waveIn);

                _source = waveInToSource;
                var notifyStream = new SingleBlockNotificationStream(_source);
                notifyStream.SingleBlockRead += OnNotifyStream_SingleBlockRead;

                _source = notifyStream.ToWaveSource(16);
                _writerBuffer = new byte[_source.WaveFormat.BytesPerSecond];

                _writer = new WaveWriter(File.OpenWrite(sfd.FileName), _source.WaveFormat);
                waveInToSource.DataAvailable += OnNewData;

                btnStart.Enabled = false;
                btnStop.Enabled = true;
            }
        }
Example #6
        /// <summary>
        /// Record sound made in Mic and save it to a wave file
        /// </summary>
        /// <param name="wavefile">name of the wave file with extension</param>
        public void CaptureMicToWave(string wavefile)
        {
            int i = 0;
            string extension = ".wav";

            foreach (var device in WaveIn.Devices)
            {
                _waveIn = new WaveInEvent(new WaveFormat(44100, 16, device.Channels));
                _waveIn.Device = i++;

                _waveIn.Initialize();
                _waveIn.Start();

                var waveInToSource = new SoundInSource(_waveIn);

                _source = waveInToSource;
                var notifyStream = new SingleBlockNotificationStream(_source);

                _source = notifyStream.ToWaveSource(16);
                _writerBuffer = new byte[_source.WaveFormat.BytesPerSecond];

                wavefile = string.Format("{0}{1}{2}", wavefile.Remove(wavefile.LastIndexOf(extension) - (i > 1 ? 1 : 0)), i, extension);
                _writer = new WaveWriter(wavefile, _source.WaveFormat);
                waveInToSource.DataAvailable += (s, e) =>
                {
                    int read = 0;
                    while ((read = _source.Read(_writerBuffer, 0, _writerBuffer.Length)) > 0)
                    {
                        _writer.Write(_writerBuffer, 0, read);
                    }
                };
            }
        }
Example #7
File: Program.cs Project: opcon/cscore
        private static void PlayWithoutStreaming(IWaveSource waveSource)
        {
            using (var xaudio2 = XAudio2.CreateXAudio2())
            using (var masteringVoice = xaudio2.CreateMasteringVoice()) //ALWAYS create at least one masteringVoice.
            using (var sourceVoice = xaudio2.CreateSourceVoice(waveSource.WaveFormat))
            {
                var buffer = waveSource.ToByteArray();
                using (var sourceBuffer = new XAudio2Buffer(buffer.Length))
                {
                    using (var stream = sourceBuffer.GetStream())
                    {
                        stream.Write(buffer, 0, buffer.Length);
                    }

                    sourceVoice.SubmitSourceBuffer(sourceBuffer);
                }

                sourceVoice.Start();

                Console.WriteLine("Press any key to exit.");
                Console.ReadKey();

                sourceVoice.Stop();
            }
        }
Example #8
        public virtual void OnCleanup()
        {
            PrintCurrentTestClass();

            SourceToTest.Dispose();
            _sourceToTest = null;
        }
Example #9
        /// <summary>
        /// Creates a new instance of the <see cref="DmoAggregator"/> class.
        /// </summary>
        /// <param name="source">Base source of the <see cref="DmoAggregator"/>.</param>
        public DmoAggregator(IWaveSource source)
        {
            if (source == null)
                throw new ArgumentNullException("source");

            _source = source;
        }
Example #10
        /// <summary>
        ///     Creates a new instance of <see cref="WaveAggregatorBase"/> class.
        /// </summary>
        /// <param name="baseSource">Underlying base stream.</param>
        protected WaveAggregatorBase(IWaveSource baseSource)
            : this()
        {
            if (baseSource == null)
                throw new ArgumentNullException("baseSource");

            _baseSource = baseSource;
        }
Example #11
 /// <summary>
 /// Initializes a new instance of the <see cref="Pcm24BitToSample"/> class.
 /// </summary>
 /// <param name="source">The underlying 24-bit POCM <see cref="IWaveSource"/> instance which has to get converted to a <see cref="ISampleSource"/>.</param>
 /// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
 /// <exception cref="ArgumentException">The format of the <paramref name="source"/> is not 24-bit PCM.</exception>
 public Pcm24BitToSample(IWaveSource source)
     : base(source)
 {
     if (source == null)
         throw new ArgumentNullException("source");
      if (!source.WaveFormat.IsPCM() || source.WaveFormat.BitsPerSample != 24)
          throw new InvalidOperationException("Invalid format. Format has to be 24-bit PCM.");
 }
Example #12
 public Pcm8BitToSample(IWaveSource source)
     : base(source, 8, AudioEncoding.Pcm)
 {
     if (source == null)
         throw new ArgumentNullException("source");
      if (!source.WaveFormat.IsPCM() || source.WaveFormat.BitsPerSample != 8)
          throw new InvalidOperationException("Invalid format. Format has to be 8-bit PCM.");
 }
Example #13
        internal CsCoreData(ISoundOut soundOut, IWaveSource source)
        {
            this.soundOut = soundOut;
            this.source = source;
            locker = new object();

            soundOut.Initialize(source);
        }
Example #14
 public IeeeFloatToSample(IWaveSource source)
     : base(source, 32, AudioEncoding.IeeeFloat)
 {
     if (source == null)
         throw new ArgumentNullException("source");
     if (!source.WaveFormat.IsIeeeFloat() ||
         source.WaveFormat.BitsPerSample != 32)
         throw new InvalidOperationException("Invalid format. Format has to be 32 bit IeeeFloat");
 }
Example #15
        public WaveToSampleBase(IWaveSource source, int bits, AudioEncoding encoding)
        {
            if (source == null) throw new ArgumentNullException("source");

            _source = source;
            _waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32,
                                source.WaveFormat.Channels, AudioEncoding.IeeeFloat);
            _bpsratio = 32.0 / bits;
        }
Example #16
        /// <summary>
        /// Initializes a new instance of the <see cref="WaveToSampleBase"/> class.
        /// </summary>
        /// <param name="source">The underlying <see cref="IWaveSource"/> instance which has to get converted to a <see cref="ISampleSource"/>.</param>
        /// <exception cref="ArgumentNullException">The <paramref name="source"/> argument is null.</exception>
        protected WaveToSampleBase(IWaveSource source)
        {
            if (source == null)
                throw new ArgumentNullException("source");

            Source = source;
            _waveFormat = (WaveFormat) source.WaveFormat.Clone();
            _waveFormat.BitsPerSample = 32;
            _waveFormat.SetWaveFormatTagInternal(AudioEncoding.IeeeFloat);
        }
Example #17
        /// <summary>
        /// Initializes a new instance of the <see cref="CachedSoundSource"/> class.
        /// </summary>
        /// <param name="source">Source which will be copied to a cache.</param>
        public CachedSoundSource(IWaveSource source)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (source.Length > Int32.MaxValue)
                throw new ArgumentException("Length is of source is too large.");

            _waveFormat = source.WaveFormat;

            CacheSource(source);
        }
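A minimal usage sketch for the constructor above, assuming a decoded source whose length fits the Int32.MaxValue check; the file path is a placeholder:
        // The whole source is copied into memory, so keep this to short sounds.
        var cached = new CachedSoundSource(CodecFactory.Instance.GetCodec("effect.wav")); // placeholder path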
Example #18
        public IWaveSource InitializeVisualization(IWaveSource source)
        {
            source = new FFTDataProvider(source) { Bands = 512 };
            FFTDataProvider = source as FFTDataProvider;

            var sampleDataProvider = new SampleDataProvider(source);
            sampleDataProvider.Mode = SampleDataProviderMode.LeftAndRight;
            SampleDataProvider = sampleDataProvider;

            return sampleDataProvider.ToWaveSource(16);
        }
Example #19
        private void CreateFFTAggregator(IWaveSource source)
        {
            if (_fftaggregator != null)
            {
                _fftaggregator.FFTCalculated -= OnNewData;
                _fftaggregator = null;
            }

            _fftaggregator = new FFTAggregator(source);
            _fftaggregator.FFTCalculated += OnNewData;
            BaseStream = _fftaggregator;
        }
Example #20
 ISoundOut CreateSoundOut(ref IWaveSource source)
 {
     ISoundOut soundOut;
     if (WasapiOut.IsSupportedOnCurrentPlatform)
         soundOut = new WasapiOut(true, AudioClientShareMode.Shared, 50);
     else
     {
         soundOut = new DirectSoundOut() { Latency = 100 };
         if (source.WaveFormat.BitsPerSample > 16)
             source = source.ToSampleSource().ToWaveSource(16);
     }
     return soundOut;
 }
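A minimal sketch of how a helper like this might be called; because the parameter is passed by ref, the helper may replace the source with a 16-bit version, so the possibly reassigned source is what gets initialized. The file path is a placeholder:
        IWaveSource source = CodecFactory.Instance.GetCodec("track.mp3"); // placeholder path
        ISoundOut soundOut = CreateSoundOut(ref source);                  // may swap in a 16-bit converted source
        soundOut.Initialize(source);
        soundOut.Play();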
Example #21
File: AudioPlayer.cs Project: opcon/cscore
        public void OpenFile(string filename)
        {
            Stop();

            Vector3 center = new Vector3(0);

            _waveSource = CodecFactory.Instance.GetCodec(filename).ToMono();
            _masteringVoice = _xaudio2.CreateMasteringVoice(XAudio2.DefaultChannels, XAudio2.DefaultSampleRate);
            _streamingSourceVoice = new StreamingSourceVoice(_xaudio2, _waveSource, 150);

            object defaultDevice = _xaudio2.DefaultDevice;
            ChannelMask channelMask;
            if (_xaudio2.Version == XAudio2Version.XAudio2_7)
            {
                var xaudio27 = (XAudio2_7) _xaudio2;
                var deviceDetails = xaudio27.GetDeviceDetails((int) defaultDevice);
                channelMask = deviceDetails.OutputFormat.ChannelMask;
                _destinationChannels = deviceDetails.OutputFormat.Channels;
            }
            else
            {
                channelMask = _masteringVoice.ChannelMask;
                _destinationChannels = _masteringVoice.VoiceDetails.InputChannels;
            }
            _sourceChannels = _waveSource.WaveFormat.Channels;

            _x3daudio = new X3DAudioCore(channelMask);

            _listener = new Listener()
            {
                Position = center,
                OrientFront = new Vector3(0, 0, 1),
                OrientTop = new Vector3(0, 1, 0),
                Velocity = new Vector3(0, 0, 0)
            };

            _emitter = new Emitter()
            {
                ChannelCount = _sourceChannels,
                CurveDistanceScaler = float.MinValue,
                OrientFront = new Vector3(0, 0, 1),
                OrientTop = new Vector3(0, 1, 0),
                Position = new Vector3(0, 0, 0),
                Velocity = new Vector3(0, 0, 0)
            };

            StreamingSourceVoiceListener.Default.Add(_streamingSourceVoice);
            _streamingSourceVoice.Start();

            _isPlaying = true;
        }
Example #22
        void UpdateMetadata(IWaveSource source)
        {
            // The duration of the last track imported from a CUE sheet is not initially known;
            // update it now that the audio source is decoded; this update is only valid for the last track!
            if (_duration == TimeSpan.Zero)
            {
                var duration = source.GetLength();
                _duration = duration - Offset;
                SetDuration(_duration);
            }

            kHz = source.WaveFormat.SampleRate / 1000;
            kbps = source.WaveFormat.BytesPerSecond * 8 / 1000;
        }
Example #23
        public static async Task<float[][]> GetData(IWaveSource waveSource)
        {
            if (waveSource == null)
                throw new ArgumentNullException("waveSource");

            return await Task.Run(() =>
            {
                var sampleSource = new InterruptDisposeChainSource(waveSource).ToSampleSource();

                var channels = sampleSource.WaveFormat.Channels;
                var blockSize = (int) (sampleSource.Length / channels / NumberOfPoints);
                var waveformDataChannels = new WaveformDataChannel[channels];
                for (var i = 0; i < channels; i++)
                {
                    waveformDataChannels[i] = new WaveformDataChannel(blockSize);
                }

                var buffer = new float[sampleSource.WaveFormat.BlockAlign * 5];
                var sampleCount = 0;

                var flag = true;
                while (flag)
                {
                    var samplesToRead = buffer.Length;
                    var read = sampleSource.Read(buffer, 0, samplesToRead);
                    for (var i = 0; i < read; i += channels)
                    {
                        for (var n = 0; n < channels; n++)
                        {
                            waveformDataChannels[n].AddSample(buffer[i + n]);
                            sampleCount++;
                        }
                    }

                    if (read == 0)
                        flag = false;
                }

                foreach (var waveformDataChannel in waveformDataChannels)
                {
                    waveformDataChannel.Finish();
                }

                Length = sampleCount;


                return waveformDataChannels.Select(x => x.GetData()).ToArray();
            });
        }
Example #24
        /// <summary>
        /// Initializes a new instance of the <see cref="BufferSource"/> class.
        /// </summary>
        /// <param name="source">The <see cref="IWaveSource"/> to buffer.</param>
        /// <param name="bufferSize">Size of the buffer.</param>
        /// <exception cref="System.ArgumentOutOfRangeException"><paramref name="bufferSize"/> is out of range.</exception>
        public BufferSource(IWaveSource source, int bufferSize)
            : base(source)
        {
            if (bufferSize <= 0 || bufferSize % source.WaveFormat.BlockAlign != 0)
                throw new ArgumentOutOfRangeException("bufferSize");

            _buffer = new FixedSizeBuffer<byte>(bufferSize);
            _lockObject = new Object();

            _bufferThread = new Thread(BufferProc)
            {
                Priority = ThreadPriority.Normal,
                IsBackground = false
            };
            _bufferThread.Start();
        }
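A minimal sketch of constructing the class with a buffer size that passes the BlockAlign check above (roughly four seconds of audio); the file path is a placeholder:
        IWaveSource source = CodecFactory.Instance.GetCodec("music.mp3"); // placeholder path
        // BytesPerSecond equals SampleRate * BlockAlign, so this size is always a whole number of blocks.
        var buffered = new BufferSource(source, source.WaveFormat.BytesPerSecond * 4);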
Example #25
        public SoundEffect(string a_soundFile, float a_vol, Event ev)
        {
            soundFileName = ev.SoundName;
            IWaveSource input = CodecFactory.Instance.GetCodec(a_soundFile);
            IWaveSource final = input;

            if (ev.UseEcho)
            {
                DmoEchoEffect echo = new DmoEchoEffect(input);
                echo.LeftDelay  = ev.EchoAmount;
                echo.RightDelay = ev.EchoAmount;
                echo.WetDryMix  = 40;
                final           = echo;
            }

            soundOut1        = new DirectSoundOut(100, System.Threading.ThreadPriority.AboveNormal);
            soundOut1.Device = Helper.Out;
            soundOut1.Initialize(final);
            soundOut1.Volume = a_vol;
            soundOut1.Play();

            if (Helper.Mic != Helper.Out)
            {
                IWaveSource waveSource2 = CodecFactory.Instance.GetCodec(a_soundFile);
                IWaveSource final2      = waveSource2;

                if (ev.UseEcho)
                {
                    DmoEchoEffect echo = new DmoEchoEffect(waveSource2);
                    echo.LeftDelay  = ev.EchoAmount;
                    echo.RightDelay = ev.EchoAmount;
                    echo.WetDryMix  = 40;
                    final2          = echo;
                }

                soundOut2        = new DirectSoundOut(100, System.Threading.ThreadPriority.AboveNormal);
                soundOut2.Device = Helper.Mic;
                soundOut2.Initialize(final2);
                soundOut2.Volume = a_vol;
                soundOut2.Play();
            }

            timer          = new Timer(1000);
            timer.Elapsed += Timer_Elapsed;
            timer.Start();
        }
Example #26
        internal StreamingSourceVoice(XAudio2 xaudio2, IWaveSource waveSource, VoiceCallback voiceCallback, int bufferSize)
            : base(CreateSourceVoice(xaudio2, waveSource, voiceCallback), xaudio2.Version)
        {
            _voiceCallback = voiceCallback;
            _waveSource = waveSource;

            var maxBufferBytes = (int)waveSource.WaveFormat.MillisecondsToBytes(bufferSize);
            _buffer = new byte[maxBufferBytes];

            for (int i = 0; i < _buffers.Length; i++)
            {
                var buffer = new XAudio2Buffer(maxBufferBytes);
                _buffers[i] = buffer;
            }

            InitializeForStreaming();
        }
Example #27
 public static byte[] ToByteArray(this IWaveSource source)
 {
     if (source == null)
     {
         throw new ArgumentNullException(nameof(source));
     }
     using (var buffer = new MemoryStream())
     {
         int read;
         var temporaryBuffer = new byte[source.WaveFormat.BytesPerSecond];
         while ((read = source.Read(temporaryBuffer, 0, temporaryBuffer.Length)) > 0)
         {
             buffer.Write(temporaryBuffer, 0, read);
         }
         return(buffer.ToArray());
     }
 }
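A minimal usage sketch for the extension above; it reads the entire source into memory, so it is only suitable for short sources. The file path is a placeholder:
        using (IWaveSource source = CodecFactory.Instance.GetCodec("clip.wav")) // placeholder path
        {
            byte[] rawBytes = source.ToByteArray();
        }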
Example #28
        /// <summary>
        /// Load a sound file to be played
        /// </summary>
        /// <param name="file">File location</param>
        public void Initialise(string file)
        {
            this.SoundSource = CodecFactory.Instance.GetCodec(file);

            if (WasapiOut.IsSupportedOnCurrentPlatform)
            {
                // only support since vista
                this.SoundOut = new WasapiOut();
            }
            else
            {
                // if not running vista or above
                this.SoundOut = new DirectSoundOut();
            }

            this.SoundOut.Initialize(this.SoundSource);
        }
Example #29
        /// <summary>
        ///     Initializes a new instance of the <see cref="StreamingSourceVoice" /> class.
        /// </summary>
        /// <param name="ptr">Pointer to a <see cref="XAudio2SourceVoice" /> object.</param>
        /// <param name="voiceCallback">
        ///     <see cref="VoiceCallback" /> instance which receives notifications from the
        ///     <see cref="XAudio2SourceVoice" /> which got passed as a pointer (see the <paramref name="ptr" /> argument).
        /// </param>
        /// <param name="waveSource"><see cref="IWaveSource" /> which provides the audio data to stream.</param>
        /// <param name="bufferSize">
        ///     Buffer size of the internally used buffers in milliseconds. Values in the range from 70 ms to
        ///     200 ms are recommended.
        /// </param>
        /// <remarks>It is recommended to use the <see cref="Create" /> method instead of this constructor.</remarks>
        public StreamingSourceVoice(IntPtr ptr, VoiceCallback voiceCallback, IWaveSource waveSource, int bufferSize)
        {
            BasePtr = ptr;
            _voiceCallback = voiceCallback;
            _waveSource = waveSource;

            var maxBufferBytes = (int) waveSource.WaveFormat.MillisecondsToBytes(bufferSize);
            _buffer = new byte[maxBufferBytes];

            for (int i = 0; i < _buffers.Length; i++)
            {
                var buffer = new XAudio2Buffer(maxBufferBytes);
                _buffers[i] = buffer;
            }

            InitializeForStreaming();
        }
Example #30
        // Play a source
        private void play(ref IWaveSource source, int priority)
        {
            if (source == null)
            {
                Logging.Debug("Source is null; skipping");
                return;
            }

            using (EventWaitHandle waitHandle = new EventWaitHandle(false, EventResetMode.AutoReset))
            {
                ISoundOut soundOut = GetSoundOut();
                try
                {
                    try
                    {
                        soundOut.Initialize(source);
                    }
                    catch (System.Runtime.InteropServices.COMException ce)
                    {
                        Logging.Error("Failed to speak; missing media pack?", ce);
                        return;
                    }
                    soundOut.Stopped += (s, e) => waitHandle.Set();

                    TimeSpan waitTime = source.GetTime(source.Length);

                    Logging.Debug("Starting speech");
                    StartSpeech(ref soundOut, priority);
                    Logging.Debug("Waiting for speech - " + waitTime);
                    // Wait for the appropriate amount of time before stopping the speech. This is a belt-and-braces approach,
                    // as we should receive the stopped signal when the buffer runs out, but there is a suspicion that the stopped
                    // signal does not always show up in time.
                    waitHandle.WaitOne(waitTime);
                    Logging.Debug("Finished waiting for speech");
                    StopCurrentSpeech();
                }
                finally
                {
                    if (soundOut != null)
                    {
                        soundOut.Dispose();
                    }
                }
            }
        }
Example #31
        private async Task <IWaveSource> GetSoundSource(IPlaySource track, long position)
        {
            _soundSourceLoadingToken?.Cancel();
            _soundSourceLoadingToken = new CancellationTokenSource();
            var         token  = _soundSourceLoadingToken.Token;
            IWaveSource result = null;

            TrackBitrate = track.Bitrate;

            try
            {
                switch (track.Type)
                {
                case PlaySourceType.LocalFile:
                    result =
                        await
                        Task.Run(() => CodecFactory.Instance.GetCodec(((LocalFilePlaySource)track).Path), token);

                    break;

                case PlaySourceType.Http:
                    result =
                        await Task.Run(() => CodecFactory.Instance.GetCodec(((HttpPlaySource)track).WebUri), token);

                    break;

                case PlaySourceType.Stream:
                    result = new DmoMp3Decoder(((StreamPlaySource)track).Stream);
                    break;

                default:
                    throw new ArgumentOutOfRangeException();
                }

                // ReSharper disable once AccessToDisposedClosure
                await Task.Run(() => result.Position = position, token);
            }
            catch (TaskCanceledException)
            {
                result?.Dispose();
                return(null);
            }

            return(token.IsCancellationRequested ? null : result);
        }
Example #32
 /// <summary>
 /// Load an audio file. Unfortunately, Linux can't load from the file system YET.
 /// </summary>
 /// <param name="name">The audio name.</param>
 /// <param name="filePath">Path to the file.</param>
 /// <param name="loop">Loop. Will try and find one with the same name and in the same location if null.</param>
 public void LoadAudioFile(string name, string filePath, Loop loop = null)
 {
     if (loop == null && FileSystem.FileExists(Path.GetFileNameWithoutExtension(filePath) + ".dbli"))
     {
         loop = (Loop)FileSystem.OpenFile <Loop>(Path.GetFileNameWithoutExtension(filePath) + ".dbli");
     }
     if (IsLinux)
     {
         var        c   = CSCore.Linux.Codecs.CodecFactory.Instance.GetCodec(filePath);
         var        src = new AudioSource(c, loop);
         ALSoundOut o   = new ALSoundOut();
         o.Initialize(src);
         if (!LinuxBuffer.ContainsKey(name))
         {
             LinuxBuffer.Add(name, o);
         }
         else
         {
             LinuxBuffer[name] = o;
         }
     }
     else
     {
         IWaveSource c = null;
         if (WindowsCodecs.ContainsKey(Path.GetExtension(filePath)))
         {
             c = WindowsCodecs[Path.GetExtension(filePath)](FileSystem.OpenFileStream(filePath));
         }
         if (c == null)
         {
             c = new CSCore.MediaFoundation.MediaFoundationDecoder(FileSystem.OpenFileStream(filePath));
         }
         var       src = new AudioSource(c, loop);
         WasapiOut o   = new WasapiOut();
         o.Initialize(src);
         if (!WindowsBuffer.ContainsKey(name))
         {
             WindowsBuffer.Add(name, o);
         }
         else
         {
             WindowsBuffer[name] = o;
         }
     }
 }
Example #33
        private IWaveSource GetCodec(string filename)
        {
            IWaveSource waveSource       = null;
            bool        useFfmpegDecoder = true;

            // FfmpegDecoder doesn't support WMA lossless. If Windows Media Foundation is available,
            // we can use MediaFoundationDecoder for WMA files, which supports WMA lossless.
            if (this.supportsWindowsMediaFoundation && Path.GetExtension(filename).ToLower().Equals(FileFormats.WMA))
            {
                try
                {
                    waveSource       = new MediaFoundationDecoder(filename);
                    useFfmpegDecoder = false;
                }
                catch (Exception)
                {
                }
            }

            if (useFfmpegDecoder)
            {
                // waveSource = new FfmpegDecoder(this.filename);

                // On some systems, files with special characters (e.g. "æ", "ø") can't be opened by FfmpegDecoder.
                // This exception is thrown: avformat_open_input returned 0xfffffffe: No such file or directory.
                // StackTrace: at CSCore.Ffmpeg.FfmpegCalls.AvformatOpenInput(AVFormatContext** formatContext, String url)
                // This issue can't be reproduced for now, so we're using a stream as it works in all cases.
                // See: https://github.com/digimezzo/Dopamine/issues/746
                // And: https://github.com/filoe/cscore/issues/344
                this.audioStream = File.OpenRead(filename);
                waveSource       = new FfmpegDecoder(this.audioStream);
            }

            // If the SampleRate < 32000, make it 32000. The Equalizer's maximum frequency is 16000Hz.
            // The sample rate has to be bigger than 2 * frequency.
            if (waveSource.WaveFormat.SampleRate < 32000)
            {
                waveSource = waveSource.ChangeSampleRate(32000);
            }

            return(waveSource
                   .ToSampleSource()
                   .AppendSource(this.Create10BandEqualizer, out this.equalizer)
                   .ToWaveSource());
        }
Example #34
        public bool OpenFile(string filename, Func <IWaveSource, IWaveSource> oninitcallback)
        {
            if (String.IsNullOrWhiteSpace(filename))
            {
                throw new ArgumentException("filename");
            }

            try
            {
                var source = CodecFactory.Instance.GetCodec(filename);
                source = new LoopStream(source);
                (source as LoopStream).EnableLoop = false;

                if (source.WaveFormat.Channels == 1)
                {
                    source = new MonoToStereoSource(source).ToWaveSource(16);
                }
                _panSource = new PanSource(source)
                {
                    Pan = this.Pan
                };
                var _notification = new SimpleNotificationSource(_panSource);
                _notification.DataRead += OnNotification;

                source = _notification.ToWaveSource(16);
                //source = new BufferSource(source, source.WaveFormat.BytesPerSecond * 2);

                _source = source;

                if (oninitcallback != null)
                {
                    SoundOutManager.Initialize(oninitcallback(source));
                }
                else
                {
                    SoundOutManager.Initialize(source);
                }
            }
            catch (Exception)
            {
                return(false);
            }
            RaiseUpdated();
            return(true);
        }
Example #35
    void Start()
    {
        // This uses the wasapi api to get any sound data played by the computer
        capture = new WasapiLoopbackCapture();

        capture.Initialize();

        // Get our capture as a source
        IWaveSource source = new SoundInSource(capture);


        // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

        // This is the typical size, you can change this for higher detail as needed
        fftSize = FftSize.Fft4096;

        // Actual fft data
        fftBuffer = new float[(int)fftSize];

        // These are the actual classes that give you spectrum data
        // The specific vars of lineSpectrum are changed below in the editor so most of these aren't that important here
        spectrumProvider = new BasicSpectrumProvider(capture.WaveFormat.Channels,
                                                     capture.WaveFormat.SampleRate, fftSize);

        lineSpectrum = new LineSpectrum(fftSize)
        {
            SpectrumProvider = spectrumProvider,
            UseAverage       = true,
            BarCount         = numBars,
            BarSpacing       = 2,
            IsXLogScale      = false,
            ScalingStrategy  = ScalingStrategy.Linear
        };

        // Tells us when data is available to send to our spectrum
        var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

        notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

        // We use this to request data so it actually flows through (figuring this out took forever...)
        finalSource = notificationSource.ToWaveSource();

        capture.DataAvailable += Capture_DataAvailable;
        capture.Start();
    }
Example #36
        /*
         * Initializes WASAPI, initializes the sample handler, and sends captured data to it.
         */
        private void StartCapture()
        {
            // Initialize hardware capture
            wasapiCapture = new WasapiLoopbackCapture(25);
            wasapiCapture.Initialize();

            // Initialize sample handler
            SampleHandler = new SampleHandler(wasapiCapture.WaveFormat.Channels);

            // Configure per-block reads rather than per-sample reads
            notificationSource = new SingleBlockNotificationStream(new SoundInSource(wasapiCapture).ToSampleSource());
            notificationSource.SingleBlockRead += (s, e) => SampleHandler.Add(e.Left, e.Right);
            finalSource = notificationSource.ToWaveSource();
            wasapiCapture.DataAvailable += (s, e) => finalSource.Read(e.Data, e.Offset, e.ByteCount);

            // Start capture
            wasapiCapture.Start();
        }
Example #37
        /// <summary>
        ///     Initializes a new instance of the <see cref="StreamingSourceVoice" /> class.
        /// </summary>
        /// <param name="ptr">Pointer to a <see cref="XAudio2SourceVoice" /> object.</param>
        /// <param name="voiceCallback">
        ///     <see cref="VoiceCallback" /> instance which receives notifications from the
        ///     <see cref="XAudio2SourceVoice" /> which got passed as a pointer (see the <paramref name="ptr" /> argument).
        /// </param>
        /// <param name="waveSource"><see cref="IWaveSource" /> which provides the audio data to stream.</param>
        /// <param name="bufferSize">
        ///     Buffer size of the internally used buffers in milliseconds. Values in the range from 70 ms to
        ///     200 ms are recommended.
        /// </param>
        /// <remarks>It is recommended to use the <see cref="Create(XAudio2,IWaveSource,int)" /> method instead of this constructor.</remarks>
        internal StreamingSourceVoice(IntPtr ptr, VoiceCallback voiceCallback, IWaveSource waveSource, int bufferSize)
        {
            BasePtr        = ptr;
            _voiceCallback = voiceCallback;
            _waveSource    = waveSource;

            var maxBufferBytes = (int)waveSource.WaveFormat.MillisecondsToBytes(bufferSize);

            _buffer = new byte[maxBufferBytes];

            for (int i = 0; i < _buffers.Length; i++)
            {
                var buffer = new XAudio2Buffer(maxBufferBytes);
                _buffers[i] = buffer;
            }

            InitializeForStreaming();
        }
Example #38
 private void CleanupPlayback()
 {
     if (PositionUpdateThread != null)
     {
         PositionUpdateThread.Abort();
         PositionUpdateThread = null;
     }
     if (_soundOut != null)
     {
         _soundOut.Dispose();
         _soundOut = null;
     }
     if (_waveSource != null)
     {
         _waveSource.Dispose();
         _waveSource = null;
     }
 }
Example #39
        private void InitializeSoundOut(IWaveSource soundSource)
        {
            // SoundOut implementation which plays the sound
            this.soundOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);
            ((WasapiOut)this.soundOut).StreamRoutingOptions = StreamRoutingOptions.All;

            // Initialize the soundOut
            this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
            this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

            // Create the FFT provider
            this.fftProvider = new FftProvider(this.soundOut.WaveSource.WaveFormat.Channels, FftSize.Fft2048);

            this.notificationSource.SingleBlockRead += this.InputStream_Sample;
            this.soundOut.Stopped += this.SoundOutStoppedHandler;

            this.soundOut.Volume = this.volume;
        }
Example #40
 public void Dispose()
 {
     if (_soundOut != null)
     {
         _soundOut.Stop();
         _soundOut.Dispose();
         if (_soundOut is ALSoundOut)
         {
             CSCore.SoundOut.AL.ALDevice.DefaultDevice.Dispose();
         }
         _soundOut = null;
     }
     if (_soundSource != null)
     {
         _soundSource.Dispose();
         _soundSource = null;
     }
 }
Example #41
        internal StreamingSourceVoice(XAudio2 xaudio2, IWaveSource waveSource, VoiceCallback voiceCallback, int bufferSize)
            : base(CreateSourceVoice(xaudio2, waveSource, voiceCallback), xaudio2.Version)
        {
            _voiceCallback = voiceCallback;
            _waveSource    = waveSource;

            var maxBufferBytes = (int)waveSource.WaveFormat.MillisecondsToBytes(bufferSize);

            _buffer = new byte[maxBufferBytes];

            for (int i = 0; i < _buffers.Length; i++)
            {
                var buffer = new XAudio2Buffer(maxBufferBytes);
                _buffers[i] = buffer;
            }

            InitializeForStreaming();
        }
Example #42
        /// <summary>
        /// Opens an audio file.
        /// </summary>
        /// <param name="filename">Name of the audio file.</param>
        public void Open(string filename)
        {
            CleanupPlayback();

            _waveSource = CodecFactory.Instance.GetCodec(filename);

            _soundOut = new WasapiOut()
            {
                Latency = 100, Device = _device
            };
            _soundOut.Initialize(_waveSource);

            _soundOut.Volume = Math.Min(1.0f, Math.Max(_volume / 100f, 0f));
            if (PlaybackStopped != null)
            {
                _soundOut.Stopped += PlaybackStopped;
            }
        }
Example #43
        private void openAudioFileToolStripMenuItem_Click(object sender, EventArgs e)
        {
            if (ofd_music.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            waveSource = CodecFactory.Instance.GetCodec(ofd_music.FileName)
                         .ToSampleSource()
                         .ToStereo()
                         .ToWaveSource();

            soundOut = GetSoundOut();
            soundOut.Initialize(waveSource);
            tbar_pos.Maximum = (int)waveSource.GetLength().TotalMilliseconds;

            sfd_lrc.FileName = Path.GetFileNameWithoutExtension(ofd_music.FileName) + ".lrc";
        }
Example #44
        private static WaveFormat GetOutputWaveFormat(IWaveSource source, int sampleRate, ChannelMatrix channelMatrix)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }
            if (channelMatrix == null)
            {
                throw new ArgumentNullException("channelMatrix");
            }

            return(new WaveFormatExtensible(
                       sampleRate,
                       source.WaveFormat.BitsPerSample,
                       channelMatrix.OutputChannelCount,
                       WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat),
                       channelMatrix.OutputMask));
        }
Example #45
        /// <summary>
        /// Initializes a new instance of the <see cref="BufferSource"/> class.
        /// </summary>
        /// <param name="source">The <see cref="IWaveSource"/> to buffer.</param>
        /// <param name="bufferSize">Size of the buffer.</param>
        /// <exception cref="System.ArgumentOutOfRangeException"><paramref name="bufferSize"/> is out of range.</exception>
        public BufferSource(IWaveSource source, int bufferSize)
            : base(source)
        {
            if (bufferSize <= 0 || bufferSize % source.WaveFormat.BlockAlign != 0)
            {
                throw new ArgumentOutOfRangeException("bufferSize");
            }

            _buffer     = new FixedSizeBuffer <byte>(bufferSize);
            _lockObject = new Object();

            _bufferThread = new Thread(BufferProc)
            {
                Priority     = ThreadPriority.Normal,
                IsBackground = false
            };
            _bufferThread.Start();
        }
Example #46
        private void CanHandleEOFTestInternal(ISoundOut soundOut, IWaveSource source)
        {
            int sourceLength = (int)source.GetLength().TotalMilliseconds;

            Debug.WriteLine(soundOut.GetType().FullName);
            for (int i = 0; i < BasicIterationCount; i++)
            {
                soundOut.Initialize(source);
                soundOut.Play();

                Thread.Sleep(sourceLength + 500);
                Assert.AreEqual(source.Length, source.Position, "Source is not EOF");

                soundOut.Stop();
                source.Position = 0;

                soundOut.Initialize(source);
                soundOut.Play();

                Thread.Sleep(sourceLength + 500);
                Assert.AreEqual(source.Length, source.Position, "Source is not EOF");

                soundOut.Pause();
                soundOut.Resume();

                Thread.Sleep(10);

                soundOut.Stop();
                source.Position = 0;

                soundOut.Initialize(source);
                soundOut.Play();

                Thread.Sleep(sourceLength + 1000);
                Assert.AreEqual(source.Length, source.Position, "Source is not EOF");
                Assert.AreEqual(PlaybackState.Stopped, soundOut.PlaybackState);

                source.Position = 0;
                soundOut.Play();

                Thread.Sleep(sourceLength + 500);
                Assert.AreEqual(source.Length, source.Position, "Source is not EOF");
            }
        }
Example #47
        public void Initialize()
        {
            MMDevice   captureDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            WaveFormat deviceFormat  = captureDevice.DeviceFormat;

            _audioEndpointVolume = AudioEndpointVolume.FromDevice(captureDevice);

            //DarthAffe 07.02.2018: This is a really stupid workaround to (hopefully) finally fix the surround driver issues
            for (int i = 1; i < 13; i++)
            {
                try { _capture = new WasapiLoopbackCapture(100, new WaveFormat(deviceFormat.SampleRate, deviceFormat.BitsPerSample, i)); } catch { /* We're just trying ... */ }
            }

            if (_capture == null)
            {
                throw new NullReferenceException("Failed to initialize WasapiLoopbackCapture");
            }

            _capture.Initialize();

            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            _source = _soundInSource.WaveFormat.SampleRate == 44100
                          ? _soundInSource.ToStereo()
                          : _soundInSource.ChangeSampleRate(44100).ToStereo();

            _stream = new SingleBlockNotificationStream(_source.ToSampleSource());
            _stream.SingleBlockRead += StreamOnSingleBlockRead;

            _source = _stream.ToWaveSource();

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            _soundInSource.DataAvailable += (s, aEvent) =>
            {
                while ((_source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _capture.Start();
        }
Example #48
        private void buttonLoadFile_Click(object sender, EventArgs e)
        {
            openFileDialog.Filter = "Cursor Files|*.mp3;*.wav";
            if (openFileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                for (int i = 0; i < openFileDialog.FileNames.Length; i++)
                {
                    fileSounds.Add(openFileDialog.FileNames[i]);
                    WiveEditorList.Add(new WaveEditor());
                    if (WiveEditorList.Count <= 1)
                    {
                        WiveEditorList[WiveEditorList.Count - 1].Location = new Point(0, 0);
                    }
                    else
                    {
                        WiveEditorList[WiveEditorList.Count - 1].Location = new Point(0, WiveEditorList[WiveEditorList.Count - 2].Location.Y + WiveEditorList[WiveEditorList.Count - 2].Height);
                    }

                    WiveEditorList[WiveEditorList.Count - 1].Size = new Size(new Point(Params.NewSamplesWidth, mEditor.Size.Height * Params.CoefNewSamplesToMainSample));

                    this.Controls.Add(WiveEditorList[WiveEditorList.Count - 1]);
                    WiveEditorList[WiveEditorList.Count - 1].Parent = samplesPanel;

                    this.Update();
                    // resource 1
                    soundSource1 = soundSource.InitializationWaveSource(fileSounds[fileSounds.Count - 1]);
                    // now we will play resource 1
                    soundSource.InitializationSoundOut(soundSource1);
                    soundOut.Volume = (float)0.3;
                    try
                    {
                        WiveEditorList[WiveEditorList.Count - 1].OpenWaveFile(openFileDialog.FileNames[i], (MMDevice)comboBox1.SelectedItem);
                        trackBarVolume.Value = WiveEditorList[WiveEditorList.Count - 1].Player.Volume;
                        WiveEditorList[WiveEditorList.Count - 1].Focus();
                    }
                    catch (Exception ex)
                    {
                        MessageBox.Show("Could not open file: " + ex.Message);
                    }
                }
                MessageBox.Show("загружено");
                WiveEditorList[WiveEditorList.Count - 1].Focus();
            }
        }
Example #49
        public void LoadFile(IWaveSource source, IFileFormat fileFormat, bool ClearPlaylist = false, object AudioStruct = null)
        {
            if (ClearPlaylist)
            {
                audioListView.Items.Clear();
            }

            AudioFileFormats.Add(fileFormat);

            AudioFile file = new AudioFile();

            file.Title = fileFormat.FileName;

            if (AudioStruct is ID3v1)
            {
                var mp3 = (ID3v1)AudioStruct;

                file.Title  = mp3.Title;
                file.Artist = mp3.Artist;
            }

            AudioChannel audioChannel = new AudioChannel();

            audioChannel.Name = $"Channel [0]";
            file.Channels.Add(audioChannel);
            audioChannel.audioPlayer.Open(source, activeDevice);

            audioChannel.audioPlayer.PlaybackStopped += (s, args) =>
            {
                //WasapiOut uses SynchronizationContext.Post to raise the event
                //There might already be a new WasapiOut instance in the background when the async Post method brings the PlaybackStopped event to us.
                if (audioChannel.audioPlayer.PlaybackState != PlaybackState.Stopped)
                {
                }
            };

            audioListView.AddObject(file);
            //   audioListView.UpdateObject(file);

            if (audioListView.Items.Count != 0)
            {
                audioListView.SelectedIndex = 0;
            }
        }
Example #50
        static void StereoToMono(string input, string sDest)
        {
            CSCore.DSP.ChannelMatrix cm = CSCore.DSP.ChannelMatrix.StereoToMonoMatrix;
            var sarasa = CodecFactory.Instance.GetCodec(input);

            if (sarasa.WaveFormat.Channels == 2)
            {
                IWaveSource waveSource = sarasa.AppendSource(x => new CSCore.Streams.CachedSoundSource(x))
                                         .AppendSource(x => new DmoChannelResampler(x, cm))        //append a channelresampler with the channelmatrix
                                         .ToSampleSource()
                                         .ToWaveSource(16);
                waveSource.WriteToFile(sDest);
            }
            else
            {
                sarasa.WriteToFile(sDest);
            }
            sarasa.Dispose();
        }
Example #51
        /// <summary>
        ///     Changes the SampleRate of an already existing wave source.
        /// </summary>
        /// <param name="input">Already existing wave source whose sample rate has to be changed.</param>
        /// <param name="destinationSampleRate">Destination sample rate.</param>
        /// <returns>Wave source with the specified <paramref name="destinationSampleRate" />.</returns>
        public static IWaveSource ChangeSampleRate(this IWaveSource input, int destinationSampleRate)
        {
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }

            if (destinationSampleRate <= 0)
            {
                throw new ArgumentOutOfRangeException("destinationSampleRate");
            }

            if (input.WaveFormat.SampleRate == destinationSampleRate)
            {
                return(input);
            }

            return(new DmoResampler(input, destinationSampleRate));
        }
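A minimal usage sketch for the extension above, resampling a decoded file to 48 kHz; if the source already runs at the destination rate it is returned unchanged. The file path is a placeholder:
        IWaveSource source = CodecFactory.Instance.GetCodec("track.flac"); // placeholder path
        IWaveSource resampled = source.ChangeSampleRate(48000);            // wraps the source in a DmoResampler when needed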
Example #52
        public void Stop(bool force)
        {
            if (_soundOut != null)
            {
                _soundOut.Stop();
                PlayStopped();

                if (force)
                {
                    _resourceItemQueue.Clear();
                }
            }

            if (_waveSource != null)
            {
                _waveSource.Dispose();
                _waveSource = null;
            }
        }
Example #53
    public void BeginCapture(MMDevice device1, MMDevice device2)
    {
        // This uses the wasapi api to get any sound data played by the computer
        capture        = new WasapiLoopbackCapture();
        capture.Device = device1;
        capture.Initialize();

        actualSource = new SoundInSource(capture);

        dataSource = new PureDataSource(new WaveFormat(device2.DeviceFormat.SampleRate, 8, 2), actualSource.ToSampleSource());

        capture.Start();

        games        = new WasapiOut();
        games.Device = device2;
        games.Initialize(dataSource.ToWaveSource());

        isSetup = true;
    }
Example #54
        /// <summary>
        ///     Encodes the whole <paramref name="source" /> with the specified <paramref name="encoder" />. The encoding process
        ///     stops as soon as the <see cref="IReadableAudioSource{T}.Read" /> method of the specified <paramref name="source" />
        ///     returns 0.
        /// </summary>
        /// <param name="encoder">The encoder which should be used to encode the audio data.</param>
        /// <param name="source">The <see cref="IWaveSource" /> which provides the raw audio data to encode.</param>
        public static void EncodeWholeSource(MediaFoundationEncoder encoder, IWaveSource source)
        {
            if (encoder == null)
            {
                throw new ArgumentNullException("encoder");
            }
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }
            var buffer = new byte[source.WaveFormat.BytesPerSecond * 4];
            int read;

            while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
            {
                Debug.WriteLine(String.Format("{0:#00.00}%", source.Position / (double)source.Length * 100));
                encoder.Write(buffer, 0, read);
            }
        }
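A minimal sketch of driving the helper above, assuming CSCore's MediaFoundationEncoder.CreateMP3Encoder(WaveFormat, string) factory is available in the installed version; the paths are placeholders:
        using (IWaveSource source = CodecFactory.Instance.GetCodec("input.wav")) // placeholder path
        using (var encoder = MediaFoundationEncoder.CreateMP3Encoder(source.WaveFormat, "output.mp3")) // placeholder path
        {
            EncodeWholeSource(encoder, source);
        }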
Example #55
        public void Open(string filename, MMDevice device)
        {
            CleanupPlayback();

            _waveSource =
                CodecFactory.Instance.GetCodec(filename)
                .ToSampleSource()
                .ToMono()
                .ToWaveSource();
            _soundOut = new WasapiOut()
            {
                Latency = 100, Device = device
            };
            _soundOut.Initialize(_waveSource);
            if (PlaybackStopped != null)
            {
                _soundOut.Stopped += PlaybackStopped;
            }
        }
Example #56
        public override void Initialize()
        {
            CleanupPlayback();

            _waveSource = CodecFactory.Instance.GetCodec(_source)
                          .ToSampleSource()
                          .ToMono()
                          .ToWaveSource();
            _soundOut = new WasapiOut()
            {
                Latency = 100
            };
            _soundOut.Initialize(_waveSource);

            if (PlaybackStopped != null)
            {
                _soundOut.Stopped += PlaybackStopped;
            }
        }
Example #57
        public void StartRecordingDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                Console.WriteLine("No devices found.");
                return;
            }

            StopRecording();

            soundIn = new CSCore.SoundIn.WasapiLoopbackCapture()
            {
                Device = recordingDevice
            };

            soundIn.Initialize();
            soundInSource = new SoundInSource(soundIn)
            {
                FillWithZeros = false
            };
            convertedSource              = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
            convertedSource              = convertedSource.ToStereo();
            soundInSource.DataAvailable += OnDataAvailable;
            soundIn.Start();

            waveFormat = convertedSource.WaveFormat;

            buffer0 = new BufferBlock()
            {
                Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
            };
            buffer1 = new BufferBlock()
            {
                Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
            };

            enabled = true;

            eventThread              = new Thread(EventThread);
            eventThread.Name         = "Loopback Event Thread";
            eventThread.IsBackground = true;
            eventThread.Start(new WeakReference<LoopbackRecorder>(this));
        }
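
A sketch of the OnDataAvailable handler wired up above, following the common CSCore pattern of draining the converted source whenever the loopback capture reports data; ProcessCapturedBlock is a hypothetical consumer standing in for the buffer handling done elsewhere in this class.

        // Sketch only: read everything the converted source currently has and pass it on.
        private void OnDataAvailable(object sender, DataAvailableEventArgs e)
        {
            byte[] readBuffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
            int read;
            while ((read = convertedSource.Read(readBuffer, 0, readBuffer.Length)) > 0)
            {
                ProcessCapturedBlock(readBuffer, read);   // hypothetical consumer of the captured block
            }
        }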
Example #58
        public static void WriteToFile(string filename, IWaveSource source, bool deleteIfExists, int maxlength = -1)
        {
            if (deleteIfExists && File.Exists(filename))
                File.Delete(filename);

            int read = 0;
            int r = 0;
            byte[] buffer = new byte[source.WaveFormat.BytesPerSecond];
            using (var w = new WaveWriter(filename, source.WaveFormat))
            {
                while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    w.Write(buffer, 0, read);
                    r += read;
                    if (maxlength != -1 && r > maxlength)
                        break;
                }
            }
        }
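
A brief usage sketch for WriteToFile, assuming CSCore's CodecFactory; it dumps roughly the first ten seconds of a decoded file to a WAV file, with placeholder paths.

        // Sketch only: decode a file and write about ten seconds of it to disk.
        using (IWaveSource source = CodecFactory.Instance.GetCodec(@"input.mp3"))
        {
            WriteToFile(@"dump.wav", source, deleteIfExists: true,
                maxlength: source.WaveFormat.BytesPerSecond * 10);
        }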
Example #59
        public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix, int destSampleRate)
            : base(source, destSampleRate)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (channelMatrix == null)
                throw new ArgumentNullException("channelMatrix");

            if (source.WaveFormat.Channels != channelMatrix.InputChannelCount)
                throw new ArgumentException("The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.");

            // Keep a reference to the matrix; it is used to build the formats below and to configure the resampler.
            _channelMatrix = channelMatrix;

            WaveFormatExtensible inputformat = new WaveFormatExtensible(source.WaveFormat.SampleRate, source.WaveFormat.BitsPerSample,
                source.WaveFormat.Channels, WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat), _channelMatrix.InputMask);

            // The output format is hard-coded to six channels (5.1); _channelMatrix.OutputMask must describe the same layout.
            _outputformat = new WaveFormat(destSampleRate, source.WaveFormat.BitsPerSample, 6, source.WaveFormat.WaveFormatTag, source.WaveFormat.ExtraSize);
            WaveFormatExtensible outputformat = new WaveFormatExtensible(_outputformat.SampleRate, _outputformat.BitsPerSample,
                _outputformat.Channels, WaveFormatExtensible.SubTypeFromWaveFormat(_outputformat), _channelMatrix.OutputMask);

            Init(inputformat, outputformat);
            _resampler.ResamplerProps.SetUserChannelMtx(_channelMatrix.GetMatrix());
        }
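
A hedged wiring sketch for this constructor; inputSource and someChannelMatrix are placeholders (building a ChannelMatrix depends on the desired speaker mapping and is not shown here).

        // Sketch only: resample to 48 kHz and remap channels in one step.
        // inputSource and someChannelMatrix are assumed to exist and to have matching input channel counts.
        IWaveSource remapped = new DmoChannelResampler(inputSource, someChannelMatrix, 48000);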
Example #60
        public LORSceneCarol(IEnumerable<string> args)
        {
            hours.AddRange("5:00 pm", "9:00 pm");

            reindeer.Value = true;
            airSnowman.Value = true;
            airR2D2.Value = true;
            airSanta.Value = true;
            packages.Value = true;

            hours
                .ControlsMasterPower(packages)
                .ControlsMasterPower(airSnowman)
                .ControlsMasterPower(airR2D2)
                .ControlsMasterPower(airSanta)
                .ControlsMasterPower(reindeer);

            lorImport.LoadFromFile(@"..\..\..\Test Files\David Foster - Carol of the Bells.lms");

            lorImport.MapDeviceRGB("E - 1", "D# - 2", "D - 3", lightNote1);
            lorImport.MapDeviceRGB("C# - 4", "C - 5", "B - 6", lightNote2);
            lorImport.MapDeviceRGB("A# - 7", "A - 8", "G# - 9", lightNote3);
            lorImport.MapDeviceRGB("G - 10", "F# - 11", "F - 12", lightNote4);
            lorImport.MapDeviceRGB("E - 13", "D# - 14", "D - 15", lightNote5);
            lorImport.MapDeviceRGB("C# - 16", "C - 1", "B - 2", lightNote6);
            lorImport.MapDeviceRGB("A# - 3", "A - 4", "G# - 5", lightNote7);
            lorImport.MapDeviceRGB("G - 6", "F# - 7", "F - 8", lightNote8);
            lorImport.MapDeviceRGB("E - 9", "D# - 10", "D - 11", lightNote9);
            lorImport.MapDeviceRGB("C# - 12", "C - 13", "B - 14", lightNote10);
            lorImport.MapDevice("A# - 15", lightNote11);
            lorImport.MapDevice("A - 16", lightNote12);

            lorImport.MapDevice("Sky 1", lightNet1);
            lorImport.MapDevice("Sky 2", lightNet2);
            lorImport.MapDevice("Sky 3", lightNet3);
            lorImport.MapDevice("Sky 4", lightNet4);
            lorImport.MapDevice("Sky 5", lightNet5);

            lorImport.MapDevice("Sky 1", lightNet6);
            lorImport.MapDevice("Sky 2", lightNet7);
            lorImport.MapDevice("Sky 3", lightNet8);
            lorImport.MapDevice("Sky 4", lightNet9);
            lorImport.MapDevice("Sky 5", lightNet10);

            lorImport.MapDevice("Rooftop", snowmanKaggen);

            lorImport.MapDevice("Star1", lightStar1);
            lorImport.MapDevice("Star2", lightStar2);
            lorImport.MapDevice("Star3", lightStar3);
            lorImport.MapDevice("Star extra", lightStarExtra);

            lightREdge.OutputBrightness.Subscribe(x =>
                {
                    pixelsRoofEdge.SetBrightness(x, null);
                });
            lightREdge.OutputColor.Subscribe(x =>
                {
                    pixelsRoofEdge.SetAllOnlyColor(x);
                });

            lightBottom.OutputBrightness.Subscribe(x =>
                {
                    //pixelsVideo.SetBrightness(x, null);
                });
            lightBottom.OutputColor.Subscribe(x =>
                {
                    //pixelsVideo.SetAllOnlyColor(x);
                });

            lorImport.MapDeviceRGBW("R-Edge R", "R-Edge G", "R-Edge B", "R-Edge W", lightREdge);
            lorImport.MapDeviceRGBW("R-Bottom", "G-Bottom", "B-Bottom", "W-Bottom", lightBottom);
            lorImport.MapDeviceRGBW("Garage R", "Garage G", "Garage B", "Garage W", lightGarage);
            lorImport.MapDeviceRGBW("Rwindo R", "Rwindo G", "Rwindo B", "Rwindo W", lightRWindow);
            lorImport.MapDeviceRGBW("Cwindo R", "Cwindo G", "Cwindo B", "Cwindo W", lightCWindow);
            lorImport.MapDeviceRGBW("Lwindo R", "Lwindo G", "Lwindo B", "Lwindo W", lightLWindow);
            lorImport.MapDeviceRGBW("Ft door R", "Ft door G", "Ft door B", "FT door W", lightFrontDoor);
            lorImport.MapDeviceRGBW("Bush - red", "Bush - green", "Bush - blue", "Bush - white", lightBush);

            lorImport.MapDevice("Tree - A", lightSnowman);
            lorImport.MapDevice("Tree - B", lightSanta);

            lorImport.MapDevice("Spoke 1a", lightHat1);
            lorImport.MapDevice("Spoke 2a", lightHat2);
            lorImport.MapDevice("Spoke 3a", lightHat3);
            lorImport.MapDevice("Spoke  4a", lightHat4);
            lorImport.MapDevice("Spoke 5a", lightR2D2);
            // lorImport.MapDevice("Spoke 6a", lightTest1);

            // lorImport.MapDevice("Spoke 7a", light);
            // lorImport.MapDevice("Spoke 8a", light);
            // lorImport.MapDevice("Spoke 9a", light);
            // lorImport.MapDevice("Spoike 10a", light);
            // lorImport.MapDevice("Spoke  11a", light);
            // lorImport.MapDevice("Spoke  12a", light);
            // lorImport.MapDevice("Spoke  13a", light);
            // lorImport.MapDevice("Spoke  14a", light);
            // lorImport.MapDevice("Spoke  15a", light);
            // lorImport.MapDevice("Spoke  16a", light);
            // lorImport.MapDevice("Pillar L8", light);
            // lorImport.MapDevice("Pillar L7", light);
            // lorImport.MapDevice("Pillar L6", light);
            // lorImport.MapDevice("Pillar L5", light);
            // lorImport.MapDevice("Pillar L4", light);
            // lorImport.MapDevice("Pillar L3", light);
            // lorImport.MapDevice("Pillar L2", light);
            // lorImport.MapDevice("Pillar L1", light);
            // lorImport.MapDevice("Pillar R8", light);
            // lorImport.MapDevice("Pillar R7", light);
            // lorImport.MapDevice("Pillar R6", light);
            // lorImport.MapDevice("Pillar R5", light);
            // lorImport.MapDevice("Pillar R4", light);
            // lorImport.MapDevice("Pillar R3", light);
            // lorImport.MapDevice("Pillar R2", light);
            // lorImport.MapDevice("Pillar R1", light);
            // lorImport.MapDevice("8  MiniTree 1r", light);
            // lorImport.MapDevice("8  MiniTree 2r", light);
            // lorImport.MapDevice("8  MiniTree 3r", light);
            // lorImport.MapDevice("8  MiniTree 4r", light);
            // lorImport.MapDevice("8  MiniTree 5r", light);
            // lorImport.MapDevice("8  MiniTree 6r", light);
            // lorImport.MapDevice("8  MiniTree 7r", light);
            // lorImport.MapDevice("8  MiniTree 8r", light);
            // lorImport.MapDevice("8  MiniTree 9r", light);
            // lorImport.MapDevice("8  MiniTree 10r", light);
            // lorImport.MapDevice("8  MiniTree 11r", light);
            // lorImport.MapDevice("8  MiniTree 12r", light);
            // lorImport.MapDevice("8  MiniTree 13r", light);
            // lorImport.MapDevice("8  MiniTree 14r", light);
            // lorImport.MapDevice("8  MiniTree 15r", light);
            // lorImport.MapDevice("8  MiniTree 16r", light);
            // lorImport.MapDevice("MiniTree 1g", light);
            // lorImport.MapDevice("MiniTree 2g", light);
            // lorImport.MapDevice("MiniTree 3g", light);
            // lorImport.MapDevice("MiniTree 4g", light);
            // lorImport.MapDevice("MiniTree 5g", light);
            // lorImport.MapDevice("MiniTree 6g", light);
            // lorImport.MapDevice("MiniTree 7g", light);
            // lorImport.MapDevice("MiniTree 8g", light);
            // lorImport.MapDevice("MiniTree 9g", light);
            // lorImport.MapDevice("MiniTree 10g", light);
            // lorImport.MapDevice("MiniTree 11g", light);
            // lorImport.MapDevice("MiniTree 12g", light);
            // lorImport.MapDevice("MiniTree 13g", light);
            // lorImport.MapDevice("MiniTree 14g", light);
            // lorImport.MapDevice("MiniTree 15g", light);
            // lorImport.MapDevice("MiniTree 16g", light);
            // lorImport.MapDevice("Hray B1", light);
            // lorImport.MapDevice("Hray B2", light);
            // lorImport.MapDevice("Hray B3", light);
            // lorImport.MapDevice("Hray B4", light);
            // lorImport.MapDevice("Hray B5", light);
            // lorImport.MapDevice("Hray B6", light);
            // lorImport.MapDevice("Hray B7", light);
            // lorImport.MapDevice("Hray B8", light);
            // lorImport.MapDevice("Hray R1", light);
            // lorImport.MapDevice("Hray R2", light);
            // lorImport.MapDevice("Hray R3", light);
            // lorImport.MapDevice("Hray R4", light);
            // lorImport.MapDevice("Hray R5", light);
            // lorImport.MapDevice("Hray R6", light);
            // lorImport.MapDevice("Hray R7", light);
            // lorImport.MapDevice("Hray R8", light);
            // lorImport.MapDevice("Vray 1", light);
            // lorImport.MapDevice("Vray 2", light);
            // lorImport.MapDevice("Vray 3", light);
            // lorImport.MapDevice("Vray 4", light);
            // lorImport.MapDevice("Vray 5", light);
            // lorImport.MapDevice("Vray 6", light);
            // lorImport.MapDevice("Vray 7", light);
            // lorImport.MapDevice("Vray 8", light);
            // lorImport.MapDevice("Vray 9", light);
            // lorImport.MapDevice("Vray 10", light);
            // lorImport.MapDevice("Vray 11", light);
            // lorImport.MapDevice("Vray 12", light);
            // lorImport.MapDevice("Vray 13", light);
            // lorImport.MapDevice("Vray 14", light);
            // lorImport.MapDevice("Vray 15", light);
            // lorImport.MapDevice("Vray 16", light);
            // lorImport.MapDevice("Vray 17", light);
            // lorImport.MapDevice("Vray 18", light);
            // lorImport.MapDevice("Vray 19", light);
            // lorImport.MapDevice("Vray 20", light);
            // lorImport.MapDevice("Vray 21", light);
            // lorImport.MapDevice("Vray 22", light);
            // lorImport.MapDevice("Vray 23", light);
            // lorImport.MapDevice("Vray 24", light);
            // lorImport.MapDevice("Vray 25", light);
            // lorImport.MapDevice("Vray 26", light);
            // lorImport.MapDevice("Vray 27", light);
            // lorImport.MapDevice("Vray 28", light);
            // lorImport.MapDevice("Vray 29", light);
            // lorImport.MapDevice("Vray 30", light);
            // lorImport.MapDevice("Vray 31", light);
            // lorImport.MapDevice("Vray 32", light);
            // lorImport.MapDevice("Arch 1-1", light);
            // lorImport.MapDevice("Arch 1-2", light);
            // lorImport.MapDevice("Arch 1-3", light);
            // lorImport.MapDevice("Arch 1-4", light);
            // lorImport.MapDevice("Arch 1-5", light);
            // lorImport.MapDevice("Arch 1-6", light);
            // lorImport.MapDevice("Arch 1-7", light);
            // lorImport.MapDevice("Arch 1-8", light);
            // lorImport.MapDevice("Arch 2-1", light);
            // lorImport.MapDevice("Arch 2-2", light);
            // lorImport.MapDevice("Arch 2-3", light);
            // lorImport.MapDevice("Arch 2-4", light);
            // lorImport.MapDevice("Arch 2-5", light);
            // lorImport.MapDevice("Arch 2-6", light);
            // lorImport.MapDevice("Arch 2-7", light);
            // lorImport.MapDevice("Arch 2-8", light);
            // lorImport.MapDevice("Arch 3-1", light);
            // lorImport.MapDevice("Arch 3-2", light);
            // lorImport.MapDevice("Arch 3-3", light);
            // lorImport.MapDevice("Arch 3-4", light);
            // lorImport.MapDevice("Arch 3-5", light);
            // lorImport.MapDevice("Arch 3-6", light);
            // lorImport.MapDevice("Arch 3-7", light);
            // lorImport.MapDevice("Arch 3-8", light);
            // lorImport.MapDevice("Arch 4-1", light);
            // lorImport.MapDevice("Arch 4-2", light);
            // lorImport.MapDevice("Arch 4-3", light);
            // lorImport.MapDevice("Arch 4-4", light);
            // lorImport.MapDevice("Arch 4-5", light);
            // lorImport.MapDevice("Arch 4-6", light);
            // lorImport.MapDevice("Arch 4-7", light);
            // lorImport.MapDevice("Arch 4-8", light);

            lorImport.Prepare();
            // lorImport.Dump();

            waveSource = CodecFactory.Instance.GetCodec(@"C:\Projects\Other\ChristmasSounds\trk\09 Carol of the Bells (Instrumental).wav");

            soundOut.Initialize(waveSource);

            acnOutput.Connect(new Physical.PixelRope(pixelsRoofEdge, 0, 50), 4, 1);
            acnOutput.Connect(new Physical.PixelRope(pixelsRoofEdge, 50, 100), 5, 1);
            acnOutput.Connect(new Physical.PixelRope(pixelsVideo, 0, 200), 1, 1);

            acnOutput.Connect(new Physical.GenericDimmer(lightStarExtra, 50), SacnUniverseDMX);

            acnOutput.Connect(new Physical.GenericDimmer(reindeer, 10), SacnUniverseDMX);
            acnOutput.Connect(new Physical.GenericDimmer(airSnowman, 11), SacnUniverseDMX);
            acnOutput.Connect(new Physical.GenericDimmer(airSanta, 12), SacnUniverseDMX);
            acnOutput.Connect(new Physical.GenericDimmer(airR2D2, 13), SacnUniverseDMX);

            acnOutput.Connect(new Physical.SmallRGBStrobe(lightBottom, 1), SacnUniverseDMX);
            acnOutput.Connect(new Physical.RGBStrobe(lightNote1, 60), SacnUniverseDMX);
            acnOutput.Connect(new Physical.RGBStrobe(lightNote2, 80), SacnUniverseDMX);
            acnOutput.Connect(new Physical.RGBStrobe(lightNote6, 40), SacnUniverseDMX);
            acnOutput.Connect(new Physical.RGBStrobe(lightNote10, 70), SacnUniverseDMX);
            acnOutput.Connect(new Physical.GenericDimmer(lightHat1, 1), SacnUniverseRenard2);
            acnOutput.Connect(new Physical.GenericDimmer(lightHat2, 2), SacnUniverseRenard2);
            acnOutput.Connect(new Physical.GenericDimmer(lightHat3, 3), SacnUniverseRenard2);
            acnOutput.Connect(new Physical.GenericDimmer(lightHat4, 4), SacnUniverseRenard2);

            acnOutput.Connect(new Physical.GenericDimmer(lightNet4, 5), SacnUniverseRenard2);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet3, 6), SacnUniverseRenard2);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet1, 7), SacnUniverseRenard2);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet2, 8), SacnUniverseRenard2);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet5, 4), SacnUniverseRenard1);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet6, 5), SacnUniverseRenard1);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet7, 6), SacnUniverseRenard1);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet8, 7), SacnUniverseRenard1);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet9, 8), SacnUniverseRenard1);
            acnOutput.Connect(new Physical.GenericDimmer(lightNet10, 9), SacnUniverseRenard1);
            acnOutput.Connect(new Physical.GenericDimmer(lightRWindow, 10), SacnUniverseRenard1);     // Metal reindeers

            acnOutput.Connect(new Physical.AmericanDJStrobe(lightGarage, 5), SacnUniverseDMX);

            acnOutput.Connect(new Physical.GenericDimmer(lightSanta, 1), SacnUniverseArduino);
            acnOutput.Connect(new Physical.GenericDimmer(lightSnowman, 2), SacnUniverseArduino);
            acnOutput.Connect(new Physical.GenericDimmer(snowmanKaggen, 2), SacnUniverseRenard1);

            this.lorImport.Progress.Subscribe(x =>
                {
                    long soundPos = waveSource.GetMilliseconds(waveSource.Position);

                    log.Trace("Sound pos: {0:N0}   Timeline pos: {1:N0}   Diff: {2:N0} ms",
                        soundPos, x, soundPos - x);
                });
        }