Example #1
0
        /// <summary>
        ///     Converts the specified wave source with n channels to a wave source with one channel.
        ///     Note: If the <paramref name="input" /> has two channels, the <see cref="ToMono(CSCore.IWaveSource)" /> extension
        ///     has to convert the <paramref name="input" /> to a <see cref="ISampleSource" /> and back to a
        ///     <see cref="IWaveSource" />.
        /// </summary>
        /// <param name="input">Already existing wave source.</param>
        /// <returns><see cref="IWaveSource" /> instance with one channel.</returns>
        public static IWaveSource ToMono(this IWaveSource input)
        {
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }

            int channelCount = input.WaveFormat.Channels;

            //already mono -> nothing to do
            if (channelCount == 1)
            {
                return input;
            }

            //stereo -> route through the sample based StereoToMonoSource
            if (channelCount == 2)
            {
                return new StereoToMonoSource(input.ToSampleSource()).ToWaveSource();
            }

            //more than two channels: downmixing requires a channel matrix based on the
            //stream's ChannelMask, which is only available for WaveFormatExtensible
            var extensibleFormat = input.WaveFormat as WaveFormatExtensible;
            if (extensibleFormat == null)
            {
                throw new ArgumentException(
                          "The specified input can't be converted to a mono source. The input does not provide a WaveFormatExtensible.",
                          "input");
            }

            ChannelMatrix downmixMatrix = ChannelMatrix.GetMatrix(extensibleFormat.ChannelMask, ChannelMasks.MonoMask);
            return new DmoChannelResampler(input, downmixMatrix);
        }
Example #2
0
        /// <summary>
        ///     Converts the specified wave source with n channels to a wave source with one channel.
        ///     Note: If the <paramref name="input" /> has two channels, the <see cref="ToMono(CSCore.IWaveSource)" /> extension
        ///     has to convert the <paramref name="input" /> to a <see cref="ISampleSource" /> and back to a
        ///     <see cref="IWaveSource" />.
        /// </summary>
        /// <param name="input">Already existing wave source.</param>
        /// <returns><see cref="IWaveSource" /> instance with one channel.</returns>
        public static IWaveSource ToMono(this IWaveSource input)
        {
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }

            switch (input.WaveFormat.Channels)
            {
                case 1:
                    //already mono -> pass through unchanged
                    return input;
                case 2:
                    //stereo -> use the sample based StereoToMonoSource, keeping the original bit depth
                    return new StereoToMonoSource(input.ToSampleSource()).ToWaveSource(input.WaveFormat.BitsPerSample);
            }

            //more than two channels: prefer a matrix based downmix when a ChannelMask is available
            var extensibleFormat = input.WaveFormat as WaveFormatExtensible;
            if (extensibleFormat != null)
            {
                ChannelMatrix downmixMatrix = ChannelMatrix.GetMatrix(extensibleFormat.ChannelMask, ChannelMasks.MonoMask);
                return new DmoChannelResampler(input, downmixMatrix);
            }

            //no ChannelMask -> no matrix possible; resample into a mono clone of the input format
            Debug.WriteLine("MultiChannel stream with no ChannelMask.");

            var monoFormat = (WaveFormat)input.WaveFormat.Clone();
            monoFormat.Channels = 1;
            return new DmoResampler(input, monoFormat);
        }
Example #3
0
        /// <summary>
        /// Initializes a new instance of the <see cref="ChannelConversionSource"/> class.
        /// </summary>
        /// <param name="source">The <see cref="ISampleSource"/> which provides input data.</param>
        /// <param name="channelMatrix">The <see cref="ChannelMatrix"/> which defines the mapping of the input channels to the output channels.</param>
        /// <exception cref="ArgumentNullException"><paramref name="source"/> or <paramref name="channelMatrix"/> is null.</exception>
        public ChannelConversionSource(ISampleSource source, ChannelMatrix channelMatrix)
            : base(ValidateSource(source))
        {
            //bugfix: the original checked source for null AFTER the base constructor had already
            //received it; ValidateSource runs the check before base(...) executes.
            if (channelMatrix == null)
            {
                throw new ArgumentNullException("channelMatrix");
            }

            _channelMatrix = channelMatrix;
            //the output format (channel count, mask, ...) is derived from the matrix
            _waveFormat    = channelMatrix.BuildOutputWaveFormat(source);

            //ratio of output to input channels; used to scale read lengths between the two formats
            _ratio = (float)_waveFormat.Channels / source.WaveFormat.Channels;
        }

        /// <summary>
        /// Validates the <paramref name="source"/> constructor argument before it is forwarded to the base constructor.
        /// </summary>
        /// <param name="source">The source to validate.</param>
        /// <returns><paramref name="source"/> if it is not null.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
        private static ISampleSource ValidateSource(ISampleSource source)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }
            return source;
        }
Example #4
0
        /// <summary>
        /// Initializes a new <see cref="BasicAudioController"/> that mixes audio with the given
        /// channel count and sample rate and plays the result through <paramref name="programSoundOut"/>.
        /// </summary>
        /// <param name="programSoundOut">The sound output used for playback.</param>
        /// <param name="channels">Number of output channels of the mixer.</param>
        /// <param name="sampleRate">Sample rate of the mixer in Hz.</param>
        public BasicAudioController(ISoundOut programSoundOut, int channels, int sampleRate)
        {
            var newMixer = new SoundMixer(channels, sampleRate);
            newMixer.FillWithZeros = true;
            newMixer.DivideResult  = true;
            mixer = newMixer;

            //map the single mono input channel (front center) to both stereo output channels at full volume
            var matrix = new ChannelMatrix(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);
            matrix.SetMatrix(new float[, ] { { 1.0f, 1.0f } });
            monoToStereoChannelMatrix = matrix;

            soundOut = programSoundOut;
        }
        /// <summary>
        /// Initializes a new <see cref="MIDIAudioController"/> with a stereo mixer and plays the
        /// result through <paramref name="programSoundOut"/>.
        /// </summary>
        /// <param name="programSoundOut">The sound output used for playback.</param>
        public MIDIAudioController(ISoundOut programSoundOut)
        {
            frequencies = new List <double>();

            var newMixer = new SoundMixer(2, sampleRate);
            newMixer.FillWithZeros = true;
            newMixer.DivideResult  = true;
            mixer = newMixer;

            //map the single mono input channel (front center) to both stereo output channels at full volume
            var matrix = new ChannelMatrix(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);
            matrix.SetMatrix(new float[, ] { { 1.0f, 1.0f } });
            monoToStereoChannelMatrix = matrix;

            soundOut = programSoundOut;
            midiKeys = new List <MidiKeyPlaying>();
        }
Example #6
0
        /// <summary>
        ///     Converts the specified wave source with n channels to a wave source with one channel.
        ///     Note: If the <paramref name="input" /> has two channels, the <see cref="ToMono(CSCore.IWaveSource)" /> extension
        ///     has to convert the <paramref name="input" /> to a <see cref="ISampleSource" /> and back to a
        ///     <see cref="IWaveSource" />.
        /// </summary>
        /// <param name="input">Already existing wave source.</param>
        /// <returns><see cref="IWaveSource" /> instance with one channel.</returns>
        public static IWaveSource ToMono(this IWaveSource input)
        {
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }

            //already mono -> nothing to do
            if (input.WaveFormat.Channels == 1)
            {
                return(input);
            }
            //stereo -> use the sample based StereoToMonoSource
            if (input.WaveFormat.Channels == 2)
            {
                return(new StereoToMonoSource(input.ToSampleSource()).ToWaveSource());
            }

            WaveFormatExtensible format = input.WaveFormat as WaveFormatExtensible;

            if (format != null)
            {
                //multi channel stream with a ChannelMask -> downmix through a channel matrix
                ChannelMask   channelMask   = format.ChannelMask;
                ChannelMatrix channelMatrix = ChannelMatrix.GetMatrix(channelMask, ChannelMasks.MonoMask);
                return(new DmoChannelResampler(input, channelMatrix));
            }

            //multi channel stream without a ChannelMask -> no channel matrix can be built;
            //instead of failing (the previously commented-out throw), fall back to a plain
            //resampler targeting a mono clone of the input format
            Debug.WriteLine("MultiChannel stream with no ChannelMask.");

            WaveFormat waveFormat = (WaveFormat)input.WaveFormat.Clone();

            waveFormat.Channels = 1;
            return(new DmoResampler(input, waveFormat));
        }
Example #7
0
        /// <summary>
        ///     Determines the <see cref="WaveFormat"/> to initialize the <paramref name="audioClient"/> with.
        ///     If the source format is not supported natively, <c>_source</c> is replaced by a resampler
        ///     that converts to the chosen format.
        /// </summary>
        /// <param name="source">The wave source whose format is tried first.</param>
        /// <param name="audioClient">The audio client queried for format support.</param>
        /// <returns>The format the audio client should be initialized with.</returns>
        /// <exception cref="NotSupportedException">None of the probed formats is supported.</exception>
        private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
        {
            WaveFormat waveFormat = source.WaveFormat;
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;

            //try the source format as-is; if supported, no conversion is needed at all
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                //not supported -> the driver may have suggested a close match via closestMatch
                if (closestMatch == null)
                {
                    //no suggestion -> try the format of the system audio mixer next
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        //mix format unsupported too -> probe a fixed fallback table, ordered from
                        //least to most lossy: keep samplerate; step bit depth down (32f/24/16/8),
                        //first at the original channel count, then stereo, then mono
                        WaveFormatExtensible[] possibleFormats =
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate,    32, waveFormat.Channels,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate,    32,                   2,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24,                   2,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16,                   2,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8,                   2,
                                                     AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate,    32,                   1,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24,                   1,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16,                   1,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8,                   1,
                                                     AudioSubTypes.Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            throw new NotSupportedException("Could not find a supported format.");
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }

                //todo: test channel matrix conversion
                //try to obtain a channel matrix so that channel up-/downmixing keeps the spatial layout
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        //best effort: no matrix for this channel combination -> plain resampling below
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                                        ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                                        : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                //swap the source for the converting resampler; _createdResampler marks it for disposal
                _source           = resampler;
                _createdResampler = true;

                return(finalFormat);
            }

            return(finalFormat);
        }
Example #8
0
        /// <summary>
        ///     Determines the <see cref="WaveFormat"/> to initialize the <paramref name="audioClient"/> with.
        ///     If the source format is not supported natively, <c>_source</c> is replaced by a resampler
        ///     that converts to the chosen format. Exclusive-mode streams probe formats derived from the
        ///     device format; shared-mode streams probe formats derived from the source format.
        /// </summary>
        /// <param name="source">The wave source whose format is tried first.</param>
        /// <param name="audioClient">The audio client queried for format support.</param>
        /// <returns>The format the audio client should be initialized with.</returns>
        /// <exception cref="NotSupportedException">None of the probed formats is supported.</exception>
        private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
        {
            WaveFormat waveFormat = source.WaveFormat;
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;

            //check whether initial format is supported
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                //initial format is not supported -> maybe there was some kind of close match ...
                if (closestMatch == null)
                {
                    //no match ... check whether the format of the windows audio mixer is supported
                    //yes ... this gets executed for shared and exclusive mode streams
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        //mixformat is not supported
                        //start generating possible formats

                        mixformat = null;
                        WaveFormatExtensible[] possibleFormats;
                        if (_shareMode == AudioClientShareMode.Exclusive)
                        {
                            //for exclusive mode streams use the DeviceFormat of the initialized MMDevice
                            //as base for further possible formats
                            var deviceFormat = Device.DeviceFormat;

                            //generate some possible formats based on the samplerate of the DeviceFormat
                            possibleFormats = GetPossibleFormats(deviceFormat.SampleRate, deviceFormat.Channels);
                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                //none of the tested formats were supported
                                //try some different samplerates
                                List <WaveFormatExtensible> waveFormats = new List <WaveFormatExtensible>();
                                foreach (var sampleRate in new[] { 44100, 48000, 96000, 192000 })
                                {
                                    waveFormats.AddRange(GetPossibleFormats(sampleRate, deviceFormat.Channels));
                                }

                                //assign the generated formats with samplerates 44.1kHz, 48kHz, 96kHz and 192kHz to
                                //the possibleFormats array which will be used below
                                possibleFormats = waveFormats.ToArray();
                            }
                        }
                        else
                        {
                            //for shared mode streams, generate some formats based on the initial waveFormat
                            possibleFormats = GetPossibleFormats(waveFormat.SampleRate, waveFormat.Channels);
                        }

                        //mixformat is still null unless the first exclusive-mode probe above succeeded
                        if (mixformat == null)
                        {
                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                throw new NotSupportedException("Could not find a supported format.");
                            }
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }

                //todo: test channel matrix conversion
                //try to obtain a channel matrix so that channel up-/downmixing keeps the spatial layout
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        //best effort: no matrix for this channel combination -> plain resampling below
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                    ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                    : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                //swap the source for the converting resampler; _createdResampler marks it for disposal
                _source           = resampler;
                _createdResampler = true;

                return(finalFormat);
            }

            return(finalFormat);
        }
Example #9
0
        /// <summary>
        ///     Mixes a user selected audio file with two sine waves (700Hz on the left channel only,
        ///     300Hz on the right channel only) and plays the result until a key is pressed.
        /// </summary>
        static void Main(string[] args)
        {
            Console.WriteLine(
                "This example will mix one audio file with \n" +
                "two sine waves (300Hz and 700Hz).\n" +
                "The 300Hz sine wave will play only on the right\n" +
                "channel and the 700Hz sine wave only on the left channel." +
                "\n\n\nPlease select the audio file!\n"
                );

            IWaveSource fileWaveSource = null;

            //keep asking until the user picks a file that can actually be decoded
            do
            {
                //OpenFileDialog is IDisposable -> dispose it each iteration
                using (OpenFileDialog openFileDialog = new OpenFileDialog()
                {
                    Title  = "Select any file to mix into",
                    Filter = CodecFactory.SupportedFilesFilterEn
                })
                {
                    if (openFileDialog.ShowDialog() == DialogResult.OK)
                    {
                        try
                        {
                            fileWaveSource = CodecFactory.Instance.GetCodec(openFileDialog.FileName);
                        }
                        catch (Exception)
                        {
                            //tell the user why we are asking again instead of silently looping
                            Console.WriteLine("The selected file could not be opened. Please select another file.");
                        }
                    }
                }
            } while (fileWaveSource == null);


            const int mixerSampleRate = 44100;              //44.1kHz

            var mixer = new SimpleMixer(2, mixerSampleRate) //output: stereo, 44,1kHz
            {
                FillWithZeros = true,
                DivideResult  = true //you may play around with this
            };

            var monoToLeftOnlyChannelMatrix = new ChannelMatrix(ChannelMask.SpeakerFrontCenter,
                                                                ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);
            var monoToRightOnlyChannelMatrix = new ChannelMatrix(ChannelMask.SpeakerFrontCenter,
                                                                 ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);

            /*
             * Set the channel conversion matrix.
             * The y-axis specifies the input. This is only one channel since the SineGenerator only uses one channel.
             * The x-axis specifies the output. There we have to use two channels since we want stereo output.
             * The first value on the x-axis specifies the volume of the left channel, the second value
             * on the x-axis specifies the volume of the right channel.
             *
             * If we take a look at the left only channel conversion matrix, we can see that we are mapping one channel (y-axis)
             * to two channels (x-axis). The left channel receives a volume of 1.0 (which means 100%) and the right channel
             * receives a volume of 0.0 (which means 0.0% -> muted).
             */
            monoToLeftOnlyChannelMatrix.SetMatrix(
                new[, ]
            {
                { 1.0f, 0.0f }
            });

            monoToRightOnlyChannelMatrix.SetMatrix(
                new[, ]
            {
                { 0.0f, 1.0f }
            });

            VolumeSource volumeSource1, volumeSource2;

            //Add any sound track.
            mixer.AddSource(
                fileWaveSource
                .ChangeSampleRate(mixerSampleRate)
                .ToStereo()
                .ToSampleSource());

            //Add a 700Hz sine with an amplitude of 0.5 which plays only on the left channel.
            mixer.AddSource(
                new SineGenerator(700, 0.5, 0).ToWaveSource()
                .AppendSource(x => new DmoChannelResampler(x, monoToLeftOnlyChannelMatrix, mixerSampleRate))
                .AppendSource(x => new VolumeSource(x.ToSampleSource()), out volumeSource1));

            //Add a 300Hz sine with an amplitude of 0.5 which plays only on the right channel.
            mixer.AddSource(
                new SineGenerator(300, 0.5, 0).ToWaveSource()
                .AppendSource(x => new DmoChannelResampler(x, monoToRightOnlyChannelMatrix, mixerSampleRate))
                .AppendSource(x => new VolumeSource(x.ToSampleSource()), out volumeSource2));

            //Initialize the soundout with the mixer.
            var soundOut = new WasapiOut()
            {
                Latency = 200
            };                                                //better use a quite high latency

            soundOut.Initialize(mixer.ToWaveSource());
            soundOut.Play();

            //adjust the volume of the input signals (default value is 100%):
            volumeSource1.Volume = 0.5f; //set the volume of the 700Hz sine to 50%
            volumeSource2.Volume = 0.7f; //set the volume of the 300Hz sine to 70%

            Console.ReadKey();

            mixer.Dispose();
            soundOut.Dispose();
        }
Example #10
0
        /// <summary>
        ///     Opens the waveOut device with the source format or, failing that, with the closest supported
        ///     format from the device's supported-format list; inserts a resampler when a fallback format
        ///     was chosen, then allocates the playback buffers.
        /// </summary>
        private void InitializeInternal()
        {
            Debug.WriteLine("Initialize, thread id: " + Thread.CurrentThread.ManagedThreadId);
            _callbackThread = null;
            //candidate fallback formats, closest to the source format first
            //(ordered by samplerate distance, then bit depth distance, then channel count distance)
            var supportedFormats = new Queue <WaveFormat>(Device.SupportedFormats
                                                          .OrderBy(x => Math.Abs(x.SampleRate - _source.WaveFormat.SampleRate))
                                                          .ThenBy(x => Math.Abs(x.BitsPerSample - _source.WaveFormat.BitsPerSample))
                                                          .ThenBy(x => Math.Abs(x.Channels - _source.WaveFormat.Channels)));

            var finalFormat = _source.WaveFormat;

            //try to open the device; on BadFormat, retry with the next-closest supported format
            do
            {
                try
                {
                    _waveOutHandle = CreateWaveOutHandle(finalFormat);
                }
                catch (MmException exception)
                {
                    if (exception.Result == MmResult.BadFormat && supportedFormats.Count > 0)
                    {
                        finalFormat = supportedFormats.Dequeue();
                    }
                    else if (exception.Result == MmResult.BadFormat && supportedFormats.Count == 0)
                    {
                        //all candidates exhausted
                        throw new Exception("No valid format could be found.", exception);
                    }
                    else
                    {
                        //any non-format error is fatal -> rethrow preserving the stack trace
                        throw;
                    }
                }
            } while (_waveOutHandle == IntPtr.Zero);

            //NOTE(review): this compares WaveFormat instances with != — presumably reference
            //equality unless WaveFormat overloads the operator; confirm against WaveFormat's definition.
            if (finalFormat != _source.WaveFormat)
            {
                //the original format of the source is not supported
                //we have to convert the source
                //todo: test channel matrix conversion
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        //best effort: no matrix for this channel combination -> plain resampling below
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                    ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                    : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                _source = resampler;
            }

            _failedBuffers.Clear();
            //buffer size derived from the configured latency (milliseconds -> bytes for the final format)
            var bufferSize = (int)WaveSource.WaveFormat.MillisecondsToBytes(_latency);

            _buffers = new WaveOutBuffer[BufferCount];
            for (int i = 0; i < _buffers.Length; i++)
            {
                _buffers[i] = new WaveOutBuffer(_waveOutHandle, bufferSize, (IntPtr)i);
            }
        }