Example #1
        public AACEncoder(WaveFormat sourceFormat, Stream targetStream, int defaultBitrate, Guid containerType)
        {
            if (sourceFormat == null)
                throw new ArgumentNullException("sourceForamt");

            if (targetStream == null)
                throw new ArgumentNullException("targetStream");
            if (!targetStream.CanWrite)
                throw new ArgumentException("Stream is not writeable.");

            if (defaultBitrate <= 0)
                throw new ArgumentOutOfRangeException("defaultBitrate");

            if (containerType == Guid.Empty)
                throw new ArgumentException("containerType must not be Guid.Empty.", "containerType");

            var targetMediaType = FindBestMediaType(MFMediaTypes.MFAudioFormat_AAC,
                sourceFormat.SampleRate, sourceFormat.Channels, defaultBitrate);

            if (targetMediaType == null)
                throw new NotSupportedException("No AAC-Encoder was found. Check whether your system supports AAC encoding.");

            var sourceMediaType = MediaFoundationCore.MediaTypeFromWaveFormat(sourceFormat);

            SetTargetStream(targetStream, sourceMediaType, targetMediaType, containerType);
        }
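A hypothetical call site for this constructor, for orientation (the container GUID below is a placeholder, not a real value; pass whatever container type constant your copy of the library exposes):

    // Hedged usage sketch; WaveFormat and AACEncoder come from the library above.
    var sourceFormat = new WaveFormat(44100, 16, 2); // 44.1 kHz, 16-bit, stereo
    using (var targetStream = File.OpenWrite("output.aac"))
    {
        Guid containerType = Guid.NewGuid(); // placeholder only; use a real container GUID
        var encoder = new AACEncoder(sourceFormat, targetStream, 192000, containerType);
        // PCM data matching sourceFormat would then be written through the encoder.
    }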
Example #2
        // Full constructor takes Capture instance and specific values for
        // channels, bits, and samples.
        internal SoundFormat(Capture captureDevice, SampleRate rate, SampleSize size, short channels)
        {
            if (captureDevice == null)
            {
                throw new ArgumentNullException("captureDevice");
            }

            this._captureDevice = captureDevice;

            try
            {
                // Test the supplied format characteristics.
                this._currentFormat = ConstructFormat((int)rate, (short)size, (short)channels);
            }
            catch (Exception ex)
            {
                string errMsg =
                    string.Format("Sound format not supported: {0} samples/sec, {1} bits/sample, {2} channels.",
                        (int)rate, (short)size, (short)channels);
                throw new Exception(errMsg, ex);
            }

            this._channels = channels;
            this._bitsPerSample = (short)size;
            this._samplesPerSecond = (int)rate;
        }
Example #3
        public void StartCapture(int sampleRate, Capture captureDevice)
        {
            StopCapture();
            EmptyRequest();

            this.sampleRate = sampleRate;
            readPos = 0;
            IsRecording = false;
            record = null;
            recordTime = 0;
            noRecordTime = 0;
            lastSample = null;
            lastSize = 0;

            capture = (captureDevice == null) ? new Capture() : captureDevice;

            WaveFormat waveFormat = new WaveFormat(); // 16-bit mono PCM at the requested sample rate
            waveFormat.BitsPerSample = 16;
            waveFormat.BlockAlign = 2;
            waveFormat.Channels = 1;
            waveFormat.AverageBytesPerSecond = sampleRate * 2;
            waveFormat.SamplesPerSecond = sampleRate;
            waveFormat.FormatTag = WaveFormatTag.Pcm;

            CaptureBufferDescription captureBuffDesc = new CaptureBufferDescription();
            captureBuffDesc.BufferBytes = bufferSize;
            captureBuffDesc.Format = waveFormat;

            captureBuffer = new CaptureBuffer(captureBuffDesc, capture);
            captureBuffer.Start(true);

            captureThread = new Thread(captureLoop);
            captureThread.Start();
            new Thread(EmptyRequest).Start();
        }
Example #4
 public MonoToStereoSource(IWaveStream source)
     : base(source)
 {
     if (source.WaveFormat.Channels != 1)
         throw new ArgumentException("format of source has to be stereo(1 channel)", "source");
     _waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 2, AudioEncoding.IeeeFloat);
 }
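For reference, a minimal sketch of the mono-to-stereo expansion such a source performs (illustrative only, not the library's actual Read implementation):

    // Every mono sample is copied to both output channels.
    static int MonoToStereo(float[] monoBuffer, float[] stereoBuffer, int monoSamples)
    {
        for (int i = 0; i < monoSamples; i++)
        {
            stereoBuffer[2 * i] = monoBuffer[i];     // left
            stereoBuffer[2 * i + 1] = monoBuffer[i]; // right
        }
        return monoSamples * 2; // stereo samples written
    }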
Example #5
		public unsafe XAudio2Renderer()
		{
			waveFormat = new WaveFormat();
			waveFormat.FormatTag = WaveFormatTag.Pcm;
			xAudio = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.AnyProcessor);
			masteringVoice = new MasteringVoice(xAudio, 2, 44100);
		}
Example #6
        public void WriteWave(Stream Stream, Action Writer, WaveFormat WaveFormat)
        {
            this.Stream = Stream;
            this.BinaryWriter = new BinaryWriter(Stream);

            WriteChunk("RIFF", () =>
            {
                Stream.Write(Encoding.ASCII.GetBytes("WAVE"), 0, 4);
                WriteChunk("fmt ", () =>
                {
                    //Stream.WriteStruct(WaveFormat);
                    BinaryWriter.Write(WaveFormat.CompressionCode);
                    BinaryWriter.Write(WaveFormat.NumberOfChannels);
                    BinaryWriter.Write(WaveFormat.SampleRate);
                    BinaryWriter.Write(WaveFormat.BytesPerSecond);
                    BinaryWriter.Write(WaveFormat.BlockAlignment);
                    BinaryWriter.Write(WaveFormat.BitsPerSample);
                    BinaryWriter.Write(WaveFormat.Padding);
                });
                WriteChunk("data", () =>
                {
                    Writer();
                });
            });
        }
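The example relies on a WriteChunk helper that is not shown. A plausible implementation, under the assumption that chunk sizes are back-patched after the body is written:

    void WriteChunk(string name, Action writeBody)
    {
        BinaryWriter.Write(Encoding.ASCII.GetBytes(name)); // 4-byte chunk ID
        long sizePosition = Stream.Position;
        BinaryWriter.Write(0);                             // size placeholder
        writeBody();
        long endPosition = Stream.Position;
        Stream.Position = sizePosition;
        BinaryWriter.Write((int)(endPosition - sizePosition - 4)); // patch the real size
        Stream.Position = endPosition;
    }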
Example #7
 public NormalSourceVoice(SoundSystem system, WaveFormat format)
 {
     this.system = system;
     sourceVoice = new SourceVoice(system.AudioDevice, format);
     sourceVoice.StreamEnd += new EventHandler(sourceVoice_StreamEnd);
     defaultOutputMatrix = sourceVoice.GetOutputMatrix(sourceVoice.VoiceDetails.InputChannels, system.DeviceDetails.OutputFormat.Channels);
 }
Example #8
        public void CanPlayBuffers()
        {
            using (var dsound = CreateDirectSound8())
            {
                dsound.SetCooperativeLevel(DSUtils.GetDesktopWindow(), DSCooperativeLevelType.DSSCL_NORMAL);
                WaveFormat waveFormat = new WaveFormat(44100, 16, 2);
                using (var primaryBuffer = new DirectSoundPrimaryBuffer(dsound))
                using (var secondaryBuffer = new DirectSoundSecondaryBuffer(dsound, waveFormat, (int)waveFormat.MillisecondsToBytes(10000)))
                {
                    primaryBuffer.Play(DSBPlayFlags.DSBPLAY_LOOPING);
                    var caps = secondaryBuffer.BufferCaps;

                    var data = GenerateData(caps.dwBufferBytes / 2, waveFormat);

                    if (secondaryBuffer.Write(data, 0, data.Length))
                    {
                        secondaryBuffer.Play(DSBPlayFlags.DSBPLAY_LOOPING);
                    }
                    else
                    {
                        Assert.Fail("Could not write data.");
                    }
                    Thread.Sleep(1);
                }
            }
        }
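The buffer size above comes from MillisecondsToBytes. A back-of-the-envelope version of that conversion (an approximation; the real extension also aligns the result to whole blocks):

    static int MillisecondsToBytes(int sampleRate, int channels, int bitsPerSample, int ms)
    {
        int bytesPerSecond = sampleRate * channels * (bitsPerSample / 8);
        return (int)(bytesPerSecond * (ms / 1000.0));
    }
    // 10,000 ms of 44.1 kHz 16-bit stereo: 176,400 B/s * 10 s = 1,764,000 bytes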
Example #9
        /// <summary>
        ///     Creates a MediaType based on a given WaveFormat. Don't forget to call Free() for the returned MediaType.
        /// </summary>
        /// <param name="waveFormat">WaveFormat to create a MediaType from.</param>
        /// <returns>Dmo MediaType</returns>
        public static MediaType FromWaveFormat(WaveFormat waveFormat)
        {
            if (waveFormat == null)
                throw new ArgumentNullException("waveFormat");

            var mediaType = new MediaType();
            NativeMethods.MoInitMediaType(ref mediaType, Marshal.SizeOf(waveFormat));

            mediaType.MajorType = AudioSubTypes.MediaTypeAudio;
            mediaType.SubType = WaveFormatExtensible.SubTypeFromWaveFormat(waveFormat);
            mediaType.FixedSizeSamples = (mediaType.SubType == AudioSubTypes.IeeeFloat ||
                                          mediaType.SubType == AudioSubTypes.Pcm)
                ? 1
                : 0;
            mediaType.FormatType = FORMAT_WaveFormatEx;

            IntPtr hWaveFormat = Marshal.AllocHGlobal(Marshal.SizeOf(waveFormat));
            if (hWaveFormat == IntPtr.Zero)
                throw new InvalidOperationException("hWaveFormat == IntPtr.Zero");

            Marshal.StructureToPtr(waveFormat, hWaveFormat, false);

            if (mediaType.CbFormat < Marshal.SizeOf(waveFormat))
                throw new InvalidOperationException("No memory for Format reserved");
            mediaType.PtrFormat = hWaveFormat;

            return mediaType;
        }
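As the summary notes, the caller must free the returned MediaType. A cleanup sketch, assuming the NativeMethods wrapper also exposes the DMO API's MoFreeMediaType (this is not shown in the code above):

    public static void Free(ref MediaType mediaType)
    {
        // Releases the format block reserved via MoInitMediaType/AllocHGlobal.
        NativeMethods.MoFreeMediaType(ref mediaType);
    }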
Example #10
 void InitDirectSound(IntPtr handle)
 {            
     //Create the device
     _SoundDevice = new DirectSound();
     _SoundDevice.SetCooperativeLevel(handle, CooperativeLevel.Priority);
     //Create the wave format; it will be mono, 44100 Hz, 16-bit PCM
     //TODO: support more wave formats 
     WaveFormat wav = new WaveFormat();
     wav.FormatTag = WaveFormatTag.Pcm;
     wav.SamplesPerSecond = 44100;
     wav.Channels = 1;//mono
     wav.BitsPerSample = 16;
     wav.AverageBytesPerSecond = 88200;//wav.SamplesPerSecond * wav.Channels * (wav.BitsPerSample / 8);
     wav.BlockAlignment = 2;//(wav.Channels * wav.BitsPerSample / 8);
     BufferSize = 88200 * 5;
     //Description
     SoundBufferDescription des = new SoundBufferDescription();
     des.Format = wav;
     des.SizeInBytes = BufferSize;
     des.Flags = BufferFlags.GlobalFocus | BufferFlags.Software;
     //buffer
     buffer = new SecondarySoundBuffer(_SoundDevice, des);
     DATA = new byte[BufferSize];
     buffer.Play(0, PlayFlags.Looping);
     //channels
     InitChannels();
 }
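The two magic numbers in the comments follow from standard PCM bookkeeping (mono, 44100 Hz, 16-bit gives a block alignment of 2 and 88200 bytes per second):

    static short BlockAlign(short channels, short bitsPerSample)
        => (short)(channels * bitsPerSample / 8);
    static int AverageBytesPerSecond(int samplesPerSecond, short blockAlign)
        => samplesPerSecond * blockAlign;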
Example #11
        /// <summary>
        /// Initializes a new instance of the <see cref="DmoChannelResampler"/> class.
        /// </summary>
        /// <param name="source">Underlying source which has to get resampled.</param>
        /// <param name="channelMatrix"><see cref="ChannelMatrix" /> which defines how to map each channel.</param>
        /// <param name="outputFormat">Waveformat, which specifies the new format. Note, that by far not all formats are supported.</param>
        /// <exception cref="System.ArgumentNullException">
        /// source
        /// or
        /// channelMatrix
        /// or
        /// outputFormat
        /// </exception>
        /// <exception cref="System.ArgumentException">The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.</exception>
        public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix, WaveFormat outputFormat)
            : base(source, outputFormat)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (channelMatrix == null)
                throw new ArgumentNullException("channelMatrix");
            if(outputFormat == null)
                throw new ArgumentNullException("outputFormat");

            if (source.WaveFormat.Channels != channelMatrix.InputChannelCount)
            {
                throw new ArgumentException(
                    "The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.");
            }

            var inputFormat = new WaveFormatExtensible(
                source.WaveFormat.SampleRate,
                source.WaveFormat.BitsPerSample,
                source.WaveFormat.Channels,
                WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat),
                channelMatrix.InputMask);

            Outputformat = new WaveFormatExtensible(
                outputFormat.SampleRate,
                outputFormat.BitsPerSample,
                outputFormat.Channels,
                WaveFormatExtensible.SubTypeFromWaveFormat(outputFormat),
                channelMatrix.OutputMask);

            Initialize(inputFormat, Outputformat);
            _channelMatrix = channelMatrix;
            CommitChannelMatrixChanges();
        }
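For orientation, this is what applying a channel matrix means (an illustrative sketch, not the DMO's internal code): each output channel is a weighted sum of the input channels.

    static float[] ApplyMatrix(float[] inputFrame, float[,] weights, int outputChannels)
    {
        var outputFrame = new float[outputChannels];
        for (int o = 0; o < outputChannels; o++)
            for (int i = 0; i < inputFrame.Length; i++)
                outputFrame[o] += weights[i, o] * inputFrame[i];
        return outputFrame;
    }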
Example #12
        /// <summary>
        /// Creates a new instance of <see cref="AcmFileWriter"/>.
        /// </summary>
        /// <param name="FileName">Path to the file to write.</param>
        /// <param name="Encoding"><see cref="WaveFormatTag"/> for written audio.</param>
        /// <param name="Format"><see cref="WaveFormat"/> of input audio.</param>
        public AcmFileWriter(string FileName, WaveFormatTag Encoding, WaveFormat Format)
        {
            if (FileName == null)
                throw new ArgumentNullException(nameof(FileName));

            _channel = GetDummyChannel(Format);
        
            // Get the Length of the ACMFormat structure
            var suggestedFormatLength = BassEnc.GetACMFormat(0);
            var acmFormat = Marshal.AllocHGlobal(suggestedFormatLength);

            try
            {
                // Retrieve ACMFormat and Init Encoding
                if (BassEnc.GetACMFormat(_channel,
                    acmFormat,
                    suggestedFormatLength,
                    null,
                    // If encoding is Unknown, then let the User choose encoding.
                    Encoding == WaveFormatTag.Unknown ? 0 : ACMFormatFlags.Suggest,
                    Encoding) != 0)
                    _handle = BassEnc.EncodeStartACM(_channel, acmFormat, 0, FileName);
                else throw new Exception(Bass.LastError.ToString());
            }
            finally
            {
                // Free the ACMFormat structure
                Marshal.FreeHGlobal(acmFormat);
            }
        }
Example #13
        public void ini()
        {
            this.Text = this.Tag.ToString();
            file = this.Text;
            mWavFormat = SetWaveFormat();
            try
            {
                f.Hide();
                FileInfo de = new FileInfo(Path.GetDirectoryName(this.Text) + "\\" + Path.GetFileNameWithoutExtension(this.Text) + "T.wav");
                if (de.Exists)
                    de.Delete();
                CreateWaveFile(Path.GetDirectoryName(this.Text) + "\\" + Path.GetFileNameWithoutExtension(this.Text) + "T.wav");
                CreateCaptuerDevice();
                CreateCaptureBuffer();
                CreateNotification();

                if (File.Exists(Path.GetDirectoryName(file) + "\\" + Path.GetFileNameWithoutExtension(file) + ".lrc"))
                {
                    lrc = new Lyrics(file);
                }
                wi = new WaveInfo(file);
                progressBar1.Maximum = (int)wi.Second;
            }
            catch (Exception ex)
            {
                MessageBox.Show("Error");
                f.Show();
                this.Close();
            }
        }
Example #14
        public Sound(string filename, int ID, short type)
            : base(filename, ID)
        {
            // get the file data
            WaveFile wf = FileManager.Instance.Load(filename);

            if(wf.WavFile != null) // we have a wave file with headers
            {
                // set up the buffer properties
                soundDesc = new BufferDescription();
                soundDesc.GlobalFocus = false;
                soundDesc.ControlVolume = true;

                // enable 3D features for 3D sounds
                if(type == Sound.THREED_SOUND)
                {
                    soundDesc.Control3D = true;
                    soundDesc.Mute3DAtMaximumDistance = true;
                }

                // load the wave file from the stream into the buffer
                sound = new SecondaryBuffer(wf.WavFile, soundDesc, ((DirectSoundManager)SoundManager.Instance).Device);

            } else { // we have only raw PCM encoded sound data (usually from a decoder)

                // convert the format settings
                WaveFormat wfo = new WaveFormat();
                wfo.BitsPerSample = wf.Bits;
                wfo.Channels = wf.Channels;
                wfo.SamplesPerSecond = wf.Frequency;
                wfo.BlockAlign = (short)(wf.Bits*wf.Channels / 8);
                wfo.FormatTag = WaveFormatTag.Pcm;
                wfo.AverageBytesPerSecond = wf.Frequency * wfo.BlockAlign;

                // set up buffer properties
                soundDesc = new BufferDescription(wfo);
                soundDesc.GlobalFocus = false;
                soundDesc.ControlVolume = true;
                soundDesc.BufferBytes = (int)wf.Data.Length;

                // enable 3D features for 3D sounds
                if(type == Sound.THREED_SOUND)
                {
                    soundDesc.Control3D = true;
                    soundDesc.Mute3DAtMaximumDistance = true;
                }

                // initialise the buffer and copy the (raw data) stream into it
                sound = new SecondaryBuffer(soundDesc, ((DirectSoundManager)SoundManager.Instance).Device);
                sound.Write(0, wf.Data, (int)wf.Data.Length, LockFlag.EntireBuffer);
            }

            // create a 3D buffer for 3D sounds
            if(type == Sound.THREED_SOUND)
            {
                threeDsound = new Buffer3D(sound);
                threeDsound.Mode = Mode3D.Normal;
                threeDsound.Deferred = true;
            }
        }
Example #15
 public static WaveFormat SuggestFormat(WaveFormat sourceFormat)
 {
     WaveFormat result = new WaveFormat(sourceFormat.SampleRate, 16, sourceFormat.Channels); //todo: 16bits fix
     AcmException.Try(AcmInterop.acmFormatSuggest(IntPtr.Zero, sourceFormat, result,
         Marshal.SizeOf(result), AcmFormatSuggestFlags.FormatTag), "acmFormatSuggest");
     return result;
 }
Example #16
 /// <summary>
 /// Adds a new mixer input
 /// </summary>
 /// <param name="mixerInput">Mixer input</param>
 public void AddMixerInput(ISampleProvider mixerInput)
 {
     // we'll just call the lock around add since we are protecting against an AddMixerInput at
     // the same time as a Read, rather than two AddMixerInput calls at the same time
     lock (sources)
     {
         if (this.sources.Count >= maxInputs)
         {
             throw new InvalidOperationException("Too many mixer inputs");
         }
         this.sources.Add(mixerInput);
     }
     if (this.waveFormat == null)
     {
         this.waveFormat = mixerInput.WaveFormat;
     }
     else
     {
         if (this.WaveFormat.SampleRate != mixerInput.WaveFormat.SampleRate ||
             this.WaveFormat.Channels != mixerInput.WaveFormat.Channels)
         {
             throw new ArgumentException("All mixer inputs must have the same WaveFormat");
         }
     }
 }
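A sketch of the Read-side mix loop that this format guard makes safe (an assumption, not the library's exact implementation): because every input shares one WaveFormat, samples can be summed index by index.

    public int Read(float[] buffer, int offset, int count)
    {
        Array.Clear(buffer, offset, count);
        var temp = new float[count];
        lock (sources)
        {
            foreach (var source in sources)
            {
                int read = source.Read(temp, 0, count);
                for (int i = 0; i < read; i++)
                    buffer[offset + i] += temp[i];
            }
        }
        return count;
    }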
Example #17
        /// <summary>
        /// Initializes a new instance of the <see cref="DirectSoundSecondaryBuffer"/> class.
        /// </summary>
        /// <param name="directSound">A <see cref="DirectSoundBase"/> instance which provides the <see cref="DirectSoundBase.CreateSoundBuffer"/> method.</param>
        /// <param name="waveFormat">The <see cref="WaveFormat"/> of the sound buffer.</param>
        /// <param name="bufferSize">The buffer size. Internally, the <see cref="DSBufferDescription.BufferBytes"/> will be set to <paramref name="bufferSize"/> * 2.</param>
        /// <exception cref="System.ArgumentNullException"><paramref name="directSound"/> or <paramref name="waveFormat"/></exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="bufferSize"/> must be a value between 4 and 0x0FFFFFFF.</exception>
        public DirectSoundSecondaryBuffer(DirectSoundBase directSound, WaveFormat waveFormat, int bufferSize)
        {
            if (directSound == null)
                throw new ArgumentNullException("directSound");
            if (waveFormat == null)
                throw new ArgumentNullException("waveFormat");
            if(bufferSize < 4 || bufferSize > 0x0FFFFFFF)
                throw new ArgumentOutOfRangeException("bufferSize");

            DSBufferDescription secondaryBufferDesc = new DSBufferDescription()
            {
                BufferBytes = bufferSize,
                Flags = DSBufferCapsFlags.ControlFrequency | DSBufferCapsFlags.ControlPan |
                          DSBufferCapsFlags.ControlVolume | DSBufferCapsFlags.ControlPositionNotify |
                          DSBufferCapsFlags.GetCurrentPosition2 | DSBufferCapsFlags.GlobalFocus |
                          DSBufferCapsFlags.StickyFocus,
                Reserved = 0,
                Guid3DAlgorithm = Guid.Empty
            };

            secondaryBufferDesc.Size = Marshal.SizeOf(secondaryBufferDesc);
            GCHandle hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);
            try
            {
                secondaryBufferDesc.PtrFormat = hWaveFormat.AddrOfPinnedObject();
                //Create(directSound, secondaryBufferDesc);
                BasePtr = directSound.CreateSoundBuffer(secondaryBufferDesc, IntPtr.Zero);
            }
            finally
            {
                hWaveFormat.Free();
            }
        }
Example #18
		public Sound(IntPtr handle, DirectSound device)
		{
			if (device != null)
			{
				device.SetCooperativeLevel(handle, CooperativeLevel.Priority);

				var format = new WaveFormat
					{
						SamplesPerSecond = 44100,
						BitsPerSample = 16,
						Channels = 2,
						FormatTag = WaveFormatTag.Pcm,
						BlockAlignment = 4
					};
				format.AverageBytesPerSecond = format.SamplesPerSecond * format.Channels * (format.BitsPerSample / 8);

				var desc = new SoundBufferDescription
					{
						Format = format,
						Flags =
							BufferFlags.GlobalFocus | BufferFlags.Software | BufferFlags.GetCurrentPosition2 | BufferFlags.ControlVolume,
						SizeInBytes = BufferSize
					};
				DSoundBuffer = new SecondarySoundBuffer(device, desc);
				ChangeVolume(Global.Config.SoundVolume);
			}
			SoundBuffer = new byte[BufferSize];

			disposed = false;
		}
Example #19
        protected override MediaObject CreateMediaObject(WaveFormat inputFormat, WaveFormat outputFormat)
        {
            _comObj = new DmoMP3DecoderObject();
            var mediaObject = new MediaObject(Marshal.GetComInterfaceForObject(_comObj, typeof(IMediaObject)));

            return mediaObject;
        }
Example #20
        /// <summary>
        /// Initializes a new instance of the <see cref="SoundEffect"/> class.
        /// </summary>
        /// <param name="audioManager">The associated audio manager instance.</param>
        /// <param name="name">The name of the current instance.</param>
        /// <param name="waveFormat">The format of the current instance.</param>
        /// <param name="buffer">The buffer containing audio data.</param>
        /// <param name="decodedPacketsInfo">The information regaring decoded packets.</param>
        internal SoundEffect(AudioManager audioManager, string name, WaveFormat waveFormat, DataStream buffer, uint[] decodedPacketsInfo)
        {
            AudioManager = audioManager;
            Name = name;
            Format = waveFormat;
            AudioBuffer = new AudioBuffer
            {
                Stream = buffer,
                AudioBytes = (int)buffer.Length,
                Flags = BufferFlags.EndOfStream,
            };
            LoopedAudioBuffer = new AudioBuffer
            {
                Stream = buffer,
                AudioBytes = (int)buffer.Length,
                Flags = BufferFlags.EndOfStream,
                LoopCount = AudioBuffer.LoopInfinite,
            };

            DecodedPacketsInfo = decodedPacketsInfo;

            Duration = Format.SampleRate > 0 ? TimeSpan.FromMilliseconds(GetSamplesDuration() * 1000 / Format.SampleRate) : TimeSpan.Zero;

            children = new List<WeakReference>();
            VoicePool = AudioManager.InstancePool.GetVoicePool(Format);
        }
Example #21
        public FMTChunk(BinaryReader reader)
            : base(reader)
        {
            if (reader == null) throw new ArgumentNullException("reader");

            if (ChunkID == chunkID) //"fmt "
            {
                AudioEncoding encoding = (AudioEncoding)reader.ReadInt16();
                int channels = reader.ReadInt16();
                int sampleRate = reader.ReadInt32();
                int avgBPS = reader.ReadInt32();
                int blockAlign = reader.ReadInt16();
                int bitsPerSample = reader.ReadInt16();

                int extraSize = 0;
                if (ChunkDataSize > 16)
                {
                    extraSize = reader.ReadInt16();
                    if (extraSize != ChunkDataSize - 18)
                        extraSize = ChunkDataSize - 18;

                    for (int i = ChunkDataSize - 16; i > 0; i--)
                    {
                        reader.ReadByte();
                    }

                    reader.BaseStream.Position -= 2;
                }

                _waveFormat = new WaveFormat(sampleRate, (short)bitsPerSample, (short)channels, encoding, extraSize);
            }
        }
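For reference, the on-disk "fmt " layout the constructor walks through, written out as a struct (illustrative; the library reads it field by field):

    using System.Runtime.InteropServices;

    [StructLayout(LayoutKind.Sequential, Pack = 2)]
    struct WaveFormatExHeader
    {
        public short FormatTag;        // AudioEncoding above
        public short Channels;
        public int SampleRate;
        public int AvgBytesPerSecond;
        public short BlockAlign;
        public short BitsPerSample;
        public short ExtraSize;        // cbSize; present only when ChunkDataSize > 16
    }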
Example #22
        public Wa2Input(Stream file)
            : base(null)
        {
            var header = new byte[0x2C];
            if (header.Length != file.Read (header, 0, header.Length))
                throw new EndOfStreamException();
            if (!Binary.AsciiEqual (header, 8, "WAVEfmt "))
                throw new InvalidFormatException();

            var format = new WaveFormat();
            format.FormatTag                = LittleEndian.ToUInt16 (header, 0x14);
            format.Channels                 = LittleEndian.ToUInt16 (header, 0x16);
            format.SamplesPerSecond         = LittleEndian.ToUInt32 (header, 0x18);
            format.AverageBytesPerSecond    = LittleEndian.ToUInt32 (header, 0x1C);
            format.BlockAlign               = LittleEndian.ToUInt16 (header, 0x20);
            format.BitsPerSample            = LittleEndian.ToUInt16 (header, 0x22);
            format.ExtraSize                = 0;
            this.Format = format;

            uint pcm_size = LittleEndian.ToUInt32 (header, 0x28);
            var pcm = new byte[pcm_size];
            Decode (file, pcm);
            Source = new MemoryStream (pcm);
            this.PcmSize = pcm_size;
            file.Dispose();
        }
Example #23
        /// <summary>
        /// Create the writer indicating Metadata information
        /// </summary>
        /// <param name="output"><see cref="System.IO.Stream"/> Where resulting WMA string will be written</param>
        /// <param name="format">PCM format of input data received in <see cref="WmaWriter.Write"/> method</param>
        /// <param name="profile">IWMProfile that describe the resulting compressed stream</param>
        /// <param name="metadataAttributes">Array of <see cref="yeti.wma.structs.WM_Attr"/> structures describing the metadata information that will be in the result stream</param>
        public WmaWriter(Stream output, WaveFormat format, IWMProfile profile, IEnumerable<WM_Attr> metadataAttributes)
            : base(output, format)
        {
            m_Writer = WM.CreateWriter();
            var wa = (IWMWriterAdvanced)m_Writer;
            wa.AddSink((IWMWriterSink)this);
            m_Writer.SetProfile(profile);
            uint inputs;
            m_Writer.GetInputCount(out inputs);
            if (inputs == 1)
            {
                IWMInputMediaProps inpProps;
                Guid type;
                m_Writer.GetInputProps(0, out inpProps);
                inpProps.GetType(out type);
                if (type == MediaTypes.WMMEDIATYPE_Audio)
                {
                    WM_MEDIA_TYPE mt;
                    mt.majortype = MediaTypes.WMMEDIATYPE_Audio;
                    mt.subtype = MediaTypes.WMMEDIASUBTYPE_PCM;
                    mt.bFixedSizeSamples = true;
                    mt.bTemporalCompression = false;
                    mt.lSampleSize = (uint)m_InputDataFormat.nBlockAlign;
                    mt.formattype = MediaTypes.WMFORMAT_WaveFormatEx;
                    mt.pUnk = IntPtr.Zero;
                    mt.cbFormat = (uint)Marshal.SizeOf(m_InputDataFormat);

                    GCHandle h = GCHandle.Alloc(m_InputDataFormat, GCHandleType.Pinned);
                    try
                    {
                        mt.pbFormat = h.AddrOfPinnedObject();
                        inpProps.SetMediaType(ref mt);
                    }
                    finally
                    {
                        h.Free();
                    }
                    m_Writer.SetInputProps(0, inpProps);
                    if (metadataAttributes != null)
                    {
                        var info = new WMHeaderInfo((IWMHeaderInfo)m_Writer);
                        foreach (WM_Attr attr in metadataAttributes)
                        {
                            info.SetAttribute(attr);
                        }
                        info = null;
                    }
                    m_Writer.BeginWriting();
                    m_Profile = profile;
                }
                else
                {
                    throw new ArgumentException("Invalid profile", "profile");
                }
            }
            else
            {
                throw new ArgumentException("Invalid profile", "profile");
            }
        }
Example #24
        /// <summary>
        /// Creates a new ACM stream to convert one format to another. Note that
        /// not all conversions can be done in one step
        /// </summary>
        /// <param name="sourceFormat">The source audio format</param>
        /// <param name="destFormat">The destination audio format</param>
        public AcmStream(WaveFormat sourceFormat, WaveFormat destFormat)
        {
            try
            {
                streamHandle = IntPtr.Zero;
                this.sourceFormat = sourceFormat;
                int sourceBufferSize = Math.Max(16384, sourceFormat.AverageBytesPerSecond);
                sourceBufferSize -= (sourceBufferSize % sourceFormat.BlockAlign);
                MmException.Try(AcmInterop.acmStreamOpen(out streamHandle, IntPtr.Zero, sourceFormat, destFormat, null, 0, 0, AcmStreamOpenFlags.NonRealTime), "acmStreamOpen");
                
                // horrible stuff due to weird Marshalling issues
                /*
                IntPtr sourceFormatPointer = WaveFormat.MarshalToPtr(sourceFormat);
                IntPtr destFormatPointer = WaveFormat.MarshalToPtr(destFormat);
                MmResult result = AcmInterop.acmStreamOpen2(out streamHandle, IntPtr.Zero, sourceFormatPointer, destFormatPointer, null, 0, 0, AcmStreamOpenFlags.NonRealTime);
                Marshal.FreeHGlobal(sourceFormatPointer);
                Marshal.FreeHGlobal(destFormatPointer);
                MmException.Try(result, "acmStreamOpen");*/

                streamHeader = new AcmStreamHeader(streamHandle, sourceBufferSize, SourceToDest(sourceBufferSize));
                driverHandle = IntPtr.Zero;
            }
            catch
            {
                // suppress the finalise and clean up resources
                Dispose();
                throw;
            }
        }
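The buffer sizing above rounds down to a whole number of blocks, since buffers handed to ACM must contain complete blocks. Isolated as a helper:

    static int AlignDown(int size, int blockAlign) => size - size % blockAlign;
    // e.g. AlignDown(16384, 6) == 16380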
Example #25
        public SoundPlayer(Control owner, PullAudio pullAudio, string sample, short channels)
        {
            if (sample == null || File.Exists(sample) == false)
                return;
            this.channels = channels;
            this.pullAudio = pullAudio;
            this.samplefile = sample;
            this._owner = owner;

            this.soundDevice = new Device();
            this.soundDevice.SetCooperativeLevel(_owner, CooperativeLevel.Priority);

            // Set up our wave format to 44,100Hz, with 16 bit resolution
            WaveFormat wf = new WaveFormat();
            wf.FormatTag = WaveFormatTag.Pcm;
            wf.SamplesPerSecond = 44100;
            wf.BitsPerSample = 16;
            wf.Channels = channels;
            wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8);
            wf.AverageBytesPerSecond = wf.SamplesPerSecond * wf.BlockAlign;

            this.samplesPerUpdate = 512;

            // Create a buffer with 2 seconds of sample data
            BufferDescription bufferDesc = new BufferDescription();
            bufferDesc.BufferBytes = this.samplesPerUpdate * wf.BlockAlign * 2;
            bufferDesc.ControlPositionNotify = true;
            bufferDesc.GlobalFocus = true;
            bufferDesc.ControlFrequency = true;
            bufferDesc.ControlEffects = true;
            bufferDesc.ControlVolume = true;

            this.soundBuffer = new SecondaryBuffer(samplefile, bufferDesc, this.soundDevice);
            this.soundBuffer.Volume = 0;

            Notify notify = new Notify(this.soundBuffer);
            fillEvent[0] = new AutoResetEvent(false);
            fillEvent[1] = new AutoResetEvent(false);

            // Set up two notification events, one at halfway, and one at the end of the buffer
            BufferPositionNotify[] posNotify = new BufferPositionNotify[2];
            posNotify[0] = new BufferPositionNotify();
            posNotify[0].Offset = bufferDesc.BufferBytes / 2 - 1;
            posNotify[0].EventNotifyHandle = fillEvent[0].Handle;
            posNotify[1] = new BufferPositionNotify();
            posNotify[1].Offset = bufferDesc.BufferBytes - 1;
            posNotify[1].EventNotifyHandle = fillEvent[1].Handle;

            notify.SetNotificationPositions(posNotify);

            this.thread = new Thread(new ThreadStart(SoundPlayback));
            this.thread.Priority = ThreadPriority.Lowest;
            this.thread.IsBackground = true;

            this.Pause();
            this.running = true;

            this.thread.Start();
        }
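A sketch of the playback loop those two notifications drive (an assumption; SoundPlayback itself is not shown here): whichever half of the buffer just finished playing is the half that gets refilled next.

    private void SoundPlayback()
    {
        while (running)
        {
            int half = WaitHandle.WaitAny(fillEvent); // 0: first half played, 1: second half
            // pullAudio would produce the next block of samples here, and the
            // result would be written into the half that was just consumed.
        }
    }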
Example #26
		private void ReadExtendedFormatFmtData(BinaryReader reader)
		{
			extensionSamplesPerBlock = reader.ReadInt16();
			reader.ReadInt32();
			WaveFormat extFormat = (WaveFormat)reader.ReadInt16();
			waveFormat = waveFormat < 0 ? extFormat : waveFormat;
			reader.ReadBytes(14);
		}
Example #27
 private static void Decode2Pcm(ref IntPtr format, ref byte[] data, ref WaveFormat wf)
 {
     IntPtr newFormat = AudioCompressionManager.GetCompatibleFormat(format, AudioCompressionManager.PcmFormatTag);
     byte[] buffer = AudioCompressionManager.Convert(format, newFormat, data, false);
     wf = AudioCompressionManager.GetWaveFormat(newFormat);
     format = newFormat;
     data = buffer;
 }
Example #28
 public WaveInWindow(IntPtr windowHandle, WaveFormat waveFormat)
     : this(waveFormat)
 {
     if (windowHandle == IntPtr.Zero)
         throw new ArgumentException("windowHandle is zero", "windowHandle");
     _window = new WaveWindow(new MMInterops.WaveCallback(Callback));
     ((WaveWindow)_window).AssignHandle(windowHandle);
 }
Example #29
 private static void Resample(ref IntPtr format, ref byte[] data, ref WaveFormat wf, int samplesPerSec)
 {
     IntPtr newFormat = AudioCompressionManager.GetPcmFormat(wf.nChannels, wf.wBitsPerSample, samplesPerSec);
     byte[] buffer = AudioCompressionManager.Convert(format, newFormat, data, false);
     format = newFormat;
     wf = AudioCompressionManager.GetWaveFormat(newFormat);
     data = buffer;
 }
        /// <summary>
        /// Selects the sample convertor based on the input WaveFormat and the output ASIOSampleTtype.
        /// </summary>
        /// <param name="waveFormat">The wave format.</param>
        /// <param name="asioType">The type.</param>
        /// <returns></returns>
        public static SampleConvertor SelectSampleConvertor(WaveFormat waveFormat, ASIOSampleType asioType)
        {
            SampleConvertor convertor = null;
            bool is2Channels = waveFormat.Channels == 2;

            // TODO : IMPLEMENTS OTHER CONVERTOR TYPES
            switch (asioType)
            {
                case ASIOSampleType.ASIOSTInt32LSB:
                    switch (waveFormat.BitsPerSample)
                    {
                        case 16:
                            convertor = (is2Channels) ? (SampleConvertor)ConvertorShortToInt2Channels : (SampleConvertor)ConvertorShortToIntGeneric;
                            break;
                        case 32:
                            convertor = (is2Channels) ? (SampleConvertor)ConvertorFloatToInt2Channels : (SampleConvertor)ConvertorFloatToIntGeneric;
                            break;
                    }
                    break;
                case ASIOSampleType.ASIOSTInt16LSB:
                    switch (waveFormat.BitsPerSample)
                    {
                        case 16:
                            convertor = (is2Channels) ? (SampleConvertor)ConvertorShortToShort2Channels : (SampleConvertor)ConvertorShortToShortGeneric;
                            break;
                        case 32:
                            convertor = (is2Channels) ? (SampleConvertor)ConvertorFloatToShort2Channels : (SampleConvertor)ConvertorFloatToShortGeneric;
                            break;
                    }
                    break;
                case ASIOSampleType.ASIOSTInt24LSB:                    
                    switch (waveFormat.BitsPerSample)
                    {
                        case 16:
                            throw new ArgumentException("Not a supported conversion");
                        case 32:
                            convertor = ConverterFloatTo24LSBGeneric;
                            break;
                    }
                    break;
                case ASIOSampleType.ASIOSTFloat32LSB:
                    switch (waveFormat.BitsPerSample)
                    {
                        case 16:
                            throw new ArgumentException("Not a supported conversion");
                        case 32:
                            convertor = ConverterFloatToFloatGeneric;
                            break;
                    }
                    break;

                default:
                    throw new ArgumentException(
                        String.Format("ASIO Buffer Type {0} is not yet supported.",
                                      Enum.GetName(typeof(ASIOSampleType), asioType)));
            }
            return convertor;
        }
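The core of one of the convertors selected above, for illustration (an assumption; NAudio's real convertors use unsafe pointer loops): a 16-bit PCM sample becomes a 32-bit ASIOSTInt32LSB sample by shifting into the high word.

    static int ShortToInt32Lsb(short sample) => sample << 16;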
Example #31
 private void CreateVoice(WaveFormat waveFormat)
 {
     AudioVoice = new AudioVoice(AudioEngine, this, waveFormat);
 }
Example #32
 public SilenceDecoder(FrameFormat frameFormat)
 {
     _frameSize = (int)frameFormat.FrameSize;
     _format    = frameFormat.WaveFormat;
 }
Example #33
        public void Start()
        {
            if (_sensor != null)
            {
                Stop();
            }

            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected && _uniqueKinectId == potentialSensor.UniqueKinectId)
                {
                    _sensor = potentialSensor;
                    break;
                }
            }
            if (_sensor == null)
            {
                Logger.LogMessage("Sensor not found: " + _uniqueKinectId, "KinectStream");
                return;
            }


            if (_skeleton)
            {
                _sensor.SkeletonStream.Enable();
                _sensor.SkeletonFrameReady += SensorSkeletonFrameReady;
            }

            switch (StreamMode)
            {
            case 0:    //color
                _sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                _sensor.ColorFrameReady += SensorColorFrameReady;
                break;

            case 1:    //depth
                _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
                _sensor.DepthFrameReady += SensorDepthFrameReady;
                // Allocate space to put the depth pixels we'll receive
                _depthPixels = new short[_sensor.DepthStream.FramePixelDataLength];
                // Allocate space to put the color pixels we'll create
                _colorPixels = new byte[_sensor.DepthStream.FramePixelDataLength * sizeof(int)];
                break;

            case 2:    //infrared
                _sensor.ColorStream.Enable(ColorImageFormat.InfraredResolution640x480Fps30);
                _sensor.ColorFrameReady += SensorColorFrameReady;
                break;
            }


            // Start the sensor
            try
            {
                _sensor.Start();
                _audioStream = _sensor.AudioSource.Start();

                RecordingFormat = new WaveFormat(16000, 16, 1);

                _waveProvider = new BufferedWaveProvider(RecordingFormat)
                {
                    DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500)
                };


                _sampleChannel = new SampleChannel(_waveProvider);
                _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

                if (HasAudioStream != null)
                {
                    HasAudioStream(this, EventArgs.Empty);
                    HasAudioStream = null;
                }

                _res = ReasonToFinishPlaying.DeviceLost;

                // create and start new thread
                _thread = new Thread(AudioThread)
                {
                    Name = "kinect audio", IsBackground = true
                };
                _thread.Start();
            }
            catch (Exception ex) // IOException
            {
                Logger.LogException(ex, "KinectStream");
                _sensor = null;
            }
        }
Example #34
 // ncrunch: no coverage start
 // Justification: we don't want to load the webrtc preprocessing DLL into tests, so we're faking a preprocessor in a derived test class
 [NotNull] protected virtual IPreprocessingPipeline CreatePreprocessor([NotNull] WaveFormat format)
 {
     return new WebRtcPreprocessingPipeline(format, _isMobilePlatform);
 }
Example #35
 /// <summary>
 ///     Initializes a new instance of the <see cref="WaveWriter" /> class.
 /// </summary>
 /// <param name="fileName">Filename of the destination file. This filename should typically end with the .wav extension.</param>
 /// <param name="waveFormat">
 ///     Format of the waveform-audio data. Note that the <see cref="WaveWriter" /> won't convert any
 ///     data.
 /// </param>
 public WaveWriter(string fileName, WaveFormat waveFormat)
     : this(File.OpenWrite(fileName), waveFormat)
 {
     _disposeStream = true; // the writer owns the stream it opened itself
 }
Example #36
        /// <summary>
        /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
        /// </summary>
        /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
        /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
        /// <param name="latency">Latency of the capture specified in milliseconds.</param>
        /// <param name="captureThreadPriority">ThreadPriority of the capturethread which runs in background and provides the audiocapture itself.</param>
        /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
        /// <param name="synchronizationContext">The <see cref="SynchronizationContext"/> to use to fire events on.</param>
        /// <exception cref="PlatformNotSupportedException">The current platform does not support Wasapi. For more details see: <see cref="IsSupportedOnCurrentPlatform"/>.</exception>
        /// <exception cref="ArgumentException">The <paramref name="eventSync"/> parameter is set to true while the <paramref name="shareMode"/> is set to <see cref="AudioClientShareMode.Exclusive"/>.</exception>
        public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat,
                             ThreadPriority captureThreadPriority, SynchronizationContext synchronizationContext)
        {
            if (!IsSupportedOnCurrentPlatform)
            {
                throw new PlatformNotSupportedException("Wasapi is only supported on Windows Vista and above.");
            }
            if (eventSync && shareMode == AudioClientShareMode.Exclusive)
            {
                throw new ArgumentException("Don't use eventSync in combination with exclusive mode.");
            }

            _eventSync  = eventSync;
            _shareMode  = shareMode;
            _waveFormat = defaultFormat;

            _latency = latency;
            _captureThreadPriority  = captureThreadPriority;
            _synchronizationContext = synchronizationContext;

            _recordingState = RecordingState.Stopped;
        }
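A hypothetical call site matching the parameters documented above (shared mode, event-driven, 100 ms latency, format chosen automatically):

    var capture = new WasapiCapture(
        eventSync: true,
        shareMode: AudioClientShareMode.Shared,
        latency: 100,
        defaultFormat: null,
        captureThreadPriority: ThreadPriority.AboveNormal,
        synchronizationContext: null);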
Example #37
 public NullWaveStream(WaveFormat format, long length)
 {
     WaveFormat  = format;
     this.length = length;
 }
Example #38
 public MasterAdapter(ISignalSource source)
 {
     waveFormat  = WaveFormat.CreateIeeeFloatWaveFormat(44100, 1);
     this.source = source;
 }
Example #39
 public AcmChatCodec(WaveFormat recordFormat, WaveFormat encodeFormat)
 {
     this.RecordFormat = recordFormat;
     this.encodeFormat = encodeFormat;
 }
Example #40
        /// <summary>
        ///   Worker thread.
        /// </summary>
        ///
        private void WorkerThread()
        {
            // Get the selected capture device
            DirectSoundCapture captureDevice = new DirectSoundCapture(device);


            // Set the capture format
            var        bitsPerSample = Signal.GetSampleSize(sampleFormat);
            WaveFormat format        = WaveFormat.CreateCustomFormat(sampleFormat.ToWaveFormat(), sampleRate, 1,
                                                                     sampleRate * bitsPerSample / 8, bitsPerSample / 8, bitsPerSample);

            // Setup the capture buffer
            CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();

            captureBufferDescription.Format      = format;
            captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlign;
            captureBufferDescription.Flags      |= CaptureBufferCapabilitiesFlags.WaveMapped;
            captureBufferDescription.Flags      &= ~CaptureBufferCapabilitiesFlags.ControlEffects;

            CaptureBuffer captureBuffer = null;

            NotificationPosition[] notifications = new NotificationPosition[2];

            try
            {
                captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription);

                // Setup the notification positions
                int bufferPortionSize = captureBuffer.Capabilities.BufferBytes / 2;
                notifications[0]            = new NotificationPosition();
                notifications[0].Offset     = bufferPortionSize - 1;
                notifications[0].WaitHandle = new AutoResetEvent(false);
                notifications[1]            = new NotificationPosition();
                notifications[1].Offset     = bufferPortionSize - 1 + bufferPortionSize;
                notifications[1].WaitHandle = new AutoResetEvent(false);
                captureBuffer.SetNotificationPositions(notifications);

                // Make a copy of the wait handles
                WaitHandle[] waitHandles = new WaitHandle[notifications.Length];
                for (int i = 0; i < notifications.Length; i++)
                {
                    waitHandles[i] = notifications[i].WaitHandle;
                }



                // Start capturing
                captureBuffer.Start(true);


                if (sampleFormat == SampleFormat.Format32BitIeeeFloat)
                {
                    float[] currentSample = new float[desiredCaptureSize];

                    while (!stopEvent.WaitOne(0, true))
                    {
                        int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                        captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex, LockFlags.None);
                        OnNewFrame(currentSample);
                    }
                }
                else if (sampleFormat == SampleFormat.Format16Bit)
                {
                    short[] currentSample = new short[desiredCaptureSize];

                    while (!stopEvent.WaitOne(0, true))
                    {
                        int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                        captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex, LockFlags.None);
                        OnNewFrame(currentSample);
                    }
                }
            }
            catch (Exception ex)
            {
                if (AudioSourceError != null)
                {
                    AudioSourceError(this, new AudioSourceErrorEventArgs(ex.Message));
                }
                else
                {
                    throw;
                }
            }
            finally
            {
                if (captureBuffer != null)
                {
                    captureBuffer.Stop();
                    captureBuffer.Dispose();
                }

                if (captureDevice != null)
                {
                    captureDevice.Dispose();
                }

#if !NETSTANDARD1_4
                for (int i = 0; i < notifications.Length; i++)
                {
                    if (notifications[i].WaitHandle != null)
                    {
                        notifications[i].WaitHandle.Close();
                    }
                }
#endif
            }
        }
Example #41
        /// <summary>
        /// Selects the sample convertor based on the input WaveFormat and the output AsioSampleType.
        /// </summary>
        /// <param name="waveFormat">The wave format.</param>
        /// <param name="asioType">The type.</param>
        /// <returns></returns>
        public static SampleConvertor SelectSampleConvertor(WaveFormat waveFormat, AsioSampleType asioType)
        {
            SampleConvertor convertor   = null;
            bool            is2Channels = waveFormat.Channels == 2;

            switch (asioType)
            {
            case AsioSampleType.Int32LSB:
                switch (waveFormat.BitsPerSample)
                {
                case 16:
                    convertor = (is2Channels) ? (SampleConvertor)ConvertorShortToInt2Channels : (SampleConvertor)ConvertorShortToIntGeneric;
                    break;

                case 32:
                    convertor = (is2Channels) ? (SampleConvertor)ConvertorFloatToInt2Channels : (SampleConvertor)ConvertorFloatToIntGeneric;
                    break;
                }
                break;

            case AsioSampleType.Int16LSB:
                switch (waveFormat.BitsPerSample)
                {
                case 16:
                    convertor = (is2Channels) ? (SampleConvertor)ConvertorShortToShort2Channels : (SampleConvertor)ConvertorShortToShortGeneric;
                    break;

                case 32:
                    convertor = (is2Channels) ? (SampleConvertor)ConvertorFloatToShort2Channels : (SampleConvertor)ConvertorFloatToShortGeneric;
                    break;
                }
                break;

            case AsioSampleType.Int24LSB:
                switch (waveFormat.BitsPerSample)
                {
                case 16:
                    throw new ArgumentException("Not a supported conversion");

                case 32:
                    convertor = ConverterFloatTo24LSBGeneric;
                    break;
                }
                break;

            case AsioSampleType.Float32LSB:
                switch (waveFormat.BitsPerSample)
                {
                case 16:
                    throw new ArgumentException("Not a supported conversion");

                case 32:
                    convertor = ConverterFloatToFloatGeneric;
                    break;
                }
                break;

            default:
                throw new ArgumentException(
                          String.Format("ASIO Buffer Type {0} is not yet supported.",
                                        Enum.GetName(typeof(AsioSampleType), asioType)));
            }
            return convertor;
        }
Example #42
 public void CanRequestIfFormatIsSupportedIeee()
 {
     GetAudioClient().IsFormatSupported(AudioClientShareMode.Shared, WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));
 }
Example #43
        private void InitializeInternal()
        {
            var defaultFormat = _waveFormat;

            _audioClient = AudioClient.FromMMDevice(Device);

            /*if (_shareMode == AudioClientShareMode.Exclusive)
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }
             * else
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }*/
            _waveFormat = _waveFormat ?? _audioClient.MixFormat;

            _waveFormat = SetupWaveFormat(_waveFormat, _audioClient);

            if (!_eventSync)
            {
                _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None | GetStreamFlags(), _latency * ReftimesPerMillisecond, 0, _waveFormat, Guid.Empty);
            }
            else
            {
                if (_shareMode == AudioClientShareMode.Exclusive)
                {
                    try
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), _latency * ReftimesPerMillisecond, _latency * ReftimesPerMillisecond, _waveFormat, Guid.Empty);
                    }
                    catch (CoreAudioAPIException e)
                    {
                        if (e.ErrorCode == unchecked ((int)0x88890019)) //AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                        {
                            int bufferSize = _audioClient.BufferSize;
                            _audioClient.Dispose();
                            long hnsRequestedDuration = (long)(((double)ReftimesPerMillisecond * 1000 / _waveFormat.SampleRate * bufferSize) + 0.5);
                            _audioClient = AudioClient.FromMMDevice(Device);
                            if (defaultFormat == null)
                            {
                                _waveFormat = _audioClient.MixFormat;
                            }
                            _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), hnsRequestedDuration, hnsRequestedDuration, _waveFormat, Guid.Empty);
                        }
                    }
                }
                else
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), 0, 0, _waveFormat, Guid.Empty);
                    if (_audioClient.StreamLatency > 0)
                    {
                        _latency = (int)(_audioClient.StreamLatency / ReftimesPerMillisecond);
                    }
                }

                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _audioCaptureClient = AudioCaptureClient.FromAudioClient(_audioClient);
        }
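The buffer-size-to-duration conversion used in the alignment fallback above, isolated for clarity: REFERENCE_TIME counts 100-ns units, so one millisecond is 10,000 of them.

    const long ReftimesPerMillisecond = 10000;
    static long AlignedDuration(int bufferFrames, int sampleRate)
        => (long)((double)ReftimesPerMillisecond * 1000 / sampleRate * bufferFrames + 0.5);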
Example #44
        public void TestWriteSamplesToWaveFile()
        {
            using (var stream = new MemoryStream())
            {
                const int mono   = 1;
                var       writer = new Mock <WaveFileWriter>(MockBehavior.Loose, stream, WaveFormat.CreateIeeeFloatWaveFormat(5512, mono));
                naudioFactory.Setup(factory => factory.GetWriter("path-to-audio-file", 5512, mono))
                .Returns(writer.Object);
                const int songLengthInFloats = 16;
                float[]   samples            = GenerateRandomFloatArray(songLengthInFloats);
                writer.Setup(w => w.Close());

                waveFileUtility.WriteSamplesToFile(samples, 5512, "path-to-audio-file");

                var readSamples = GetWrittenSamplesInStream(stream, songLengthInFloats);
                CollectionAssert.AreEqual(samples, readSamples);
            }
        }
Example #45
 /// <summary>
 /// Initializes a new instance of the <see cref="WasapiCapture"/> class, using a null <see cref="SynchronizationContext"/>.
 /// </summary>
 /// <param name="eventSync">True to use event synchronization instead of a simple loop-and-sleep behavior. Don't use this in combination with exclusive mode.</param>
 /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
 /// <param name="latency">Latency of the capture specified in milliseconds.</param>
 /// <param name="captureThreadPriority">ThreadPriority of the capture thread, which runs in the background and performs the audio capture itself.</param>
 /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
 public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat, ThreadPriority captureThreadPriority)
     : this(eventSync, shareMode, latency, defaultFormat, captureThreadPriority, null)
 {
 }
Ejemplo n.º 46
0
 /// <summary>
 /// Initialises a new instance of the WASAPI capture class
 /// </summary>
 /// <param name="captureDevice">Capture device to use</param>
 public WasapiCapture(MMDevice captureDevice)
 {
     this.audioClient = captureDevice.AudioClient;
     this.waveFormat  = audioClient.MixFormat;
 }
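A minimal usage sketch for the event-driven CSCore variant above (Initialize, Start, Stop and DataAvailable are CSCore's WasapiCapture members; the handler body is illustrative):

        using (var capture = new WasapiCapture(true, AudioClientShareMode.Shared, 100, null, ThreadPriority.AboveNormal))
        {
            capture.Initialize();
            capture.DataAvailable += (s, e) =>
            {
                // e.Data holds e.ByteCount bytes of audio in the capture's WaveFormat.
                Console.WriteLine("Captured {0} bytes", e.ByteCount);
            };
            capture.Start();
            Console.ReadKey(); // capture until a key is pressed
            capture.Stop();
        }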
Ejemplo n.º 47
0
 /// <summary>
 /// Creates a new <see cref="MFMediaType"/> based on a specified <paramref name="waveFormat"/>.
 /// </summary>
 /// <param name="waveFormat"><see cref="WaveFormat"/> which should be "converted" to a <see cref="MFMediaType"/>.</param>
 /// <returns>Returns a new <see cref="MFMediaType"/>.</returns>
 public static MFMediaType FromWaveFormat(WaveFormat waveFormat)
 {
     MediaFoundationCore.Startup();
     return MediaFoundationCore.MediaTypeFromWaveFormat(waveFormat);
 }
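Usage is a one-liner; this sketch assumes plain 16-bit stereo PCM as the input format:

        WaveFormat pcm = new WaveFormat(44100, 16, 2);
        MFMediaType mediaType = MFMediaType.FromWaveFormat(pcm); // Startup() is handled internally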
Ejemplo n.º 48
0
 public bool Supports(WaveFormat waveFormat)
 {
     return waveFormat.Encoding == WaveFormatEncoding.Pcm && waveFormat.BitsPerSample == 16 && waveFormat.Channels == 2;
 }
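A hedged caller-side guard; the codec and waveIn names are hypothetical:

        // Hypothetical names for illustration.
        if (!codec.Supports(waveIn.WaveFormat))
            throw new NotSupportedException("This codec only accepts 16-bit stereo PCM.");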
Ejemplo n.º 49
0
 protected AcmChatCodec(WaveFormat recordFormat, WaveFormat encodeFormat)
 {
     RecordFormat      = recordFormat;
     this.encodeFormat = encodeFormat;
 }
Ejemplo n.º 50
0
        private void Init(WaveFormat OutputFormat)
        {
            m_OutputNumber = GetAudioOutputNumber(m_Reader);
            if (m_OutputNumber == InvalidOuput)
            {
                throw new ArgumentException("An audio stream was not found");
            }

            int[] FormatIndexes = GetPCMOutputNumbers(m_Reader, (uint)m_OutputNumber);
            if (FormatIndexes.Length == 0)
            {
                throw new ArgumentException("An audio stream was not found");
            }

            if (OutputFormat != null)
            {
                // Look for a PCM output format that exactly matches the requested one.
                m_OutputFormatNumber = -1;
                for (int i = 0; i < FormatIndexes.Length; i++)
                {
                    WaveFormat fmt = GetOutputFormat(m_Reader, (uint)m_OutputNumber, (uint)FormatIndexes[i]);
                    if ((fmt.wFormatTag == OutputFormat.wFormatTag) &&
                        (fmt.nAvgBytesPerSec == OutputFormat.nAvgBytesPerSec) &&
                        (fmt.nBlockAlign == OutputFormat.nBlockAlign) &&
                        (fmt.nChannels == OutputFormat.nChannels) &&
                        (fmt.nSamplesPerSec == OutputFormat.nSamplesPerSec) &&
                        (fmt.wBitsPerSample == OutputFormat.wBitsPerSample))
                    {
                        m_OutputFormatNumber = FormatIndexes[i];
                        m_OutputFormat = fmt;
                        break;
                    }
                }

                if (m_OutputFormatNumber < 0)
                {
                    throw new ArgumentException("No PCM output found");
                }
            }
            else
            {
                // No format requested; take the first available PCM format.
                m_OutputFormatNumber = FormatIndexes[0];
                m_OutputFormat = GetOutputFormat(m_Reader, (uint)m_OutputNumber, (uint)FormatIndexes[0]);
            }

            // Enable only the selected audio stream; switch every other stream off.
            uint OutputCtns = 0;
            m_Reader.GetOutputCount(out OutputCtns);
            ushort[] StreamNumbers = new ushort[OutputCtns];
            WMT_STREAM_SELECTION[] StreamSelections = new WMT_STREAM_SELECTION[OutputCtns];
            for (uint i = 0; i < OutputCtns; i++)
            {
                m_Reader.GetStreamNumberForOutput(i, out StreamNumbers[i]);
                if (i == m_OutputNumber)
                {
                    StreamSelections[i] = WMT_STREAM_SELECTION.WMT_ON;
                    m_OuputStream = StreamNumbers[i];
                    m_Reader.SetReadStreamSamples(m_OuputStream, false);
                }
                else
                {
                    StreamSelections[i] = WMT_STREAM_SELECTION.WMT_OFF;
                }
            }
            m_Reader.SetStreamsSelected((ushort)OutputCtns, StreamNumbers, StreamSelections);

            IWMOutputMediaProps Props = null;
            m_Reader.GetOutputFormat((uint)m_OutputNumber, (uint)m_OutputFormatNumber, out Props);
            m_Reader.SetOutputProps((uint)m_OutputNumber, Props);

            // Query the media type to learn the sample size: the first call returns
            // the required buffer size, the second call fills the buffer.
            uint Size = 0;
            Props.GetMediaType(IntPtr.Zero, ref Size);
            IntPtr buffer = Marshal.AllocCoTaskMem((int)Size);
            try
            {
                WM_MEDIA_TYPE mt;
                Props.GetMediaType(buffer, ref Size);
                mt = (WM_MEDIA_TYPE)Marshal.PtrToStructure(buffer, typeof(WM_MEDIA_TYPE));
                m_SampleSize = mt.lSampleSize;
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
                Props = null;
            }

            // Read seekability and duration from the header; ASF_E_NOTFOUND just means
            // the attribute is absent, so the defaults are kept in that case.
            m_Seekable = false;
            m_Length = -1;
            WMHeaderInfo head = new WMHeaderInfo(HeaderInfo);
            try
            {
                m_Seekable = (bool)head[WM.g_wszWMSeekable];
                m_Length = SampleTime2BytePosition((ulong)head[WM.g_wszWMDuration]);
            }
            catch (COMException e)
            {
                if (e.ErrorCode != WM.ASF_E_NOTFOUND)
                {
                    throw; // rethrow unexpected errors without resetting the stack trace
                }
            }
        }
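Callers that want a specific output format typically build the desired PCM WaveFormat up front; a sketch, assuming the Win32 WAVEFORMATEX field names used in the comparison above (illustrative, not the project's actual setup code):

        // Request 44.1 kHz, 16-bit stereo PCM (hypothetical caller-side setup).
        WaveFormat desired = new WaveFormat();
        desired.wFormatTag = 1;              // WAVE_FORMAT_PCM
        desired.nChannels = 2;
        desired.wBitsPerSample = 16;
        desired.nSamplesPerSec = 44100;
        desired.nBlockAlign = 4;             // channels * bytes per sample
        desired.nAvgBytesPerSec = 44100 * 4; // sample rate * block align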
Ejemplo n.º 51
0
        public AudioWriter(Stream Output, WaveFormat InputDataFormat)
            : base(Output, Encoding.ASCII)
        {
            m_InputDataFormat = InputDataFormat;
        }
Ejemplo n.º 52
0
        public PropertiesForm(WaveFormat waveFormat)
        {
            InitializeComponent();

            propertyGrid_.SelectedObject = waveFormat;
        }
Ejemplo n.º 53
0
 /// <summary>
 /// Allows you to specify the sample rate and channels for this WaveProvider
 /// (should be initialised before you pass it to a wave player)
 /// </summary>
 public void SetWaveFormat(int sampleRate, int channels)
 {
     this.waveFormat = new WaveFormat(sampleRate, 16, channels);
 }
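The usual pattern is to configure the format before handing the provider to a player; a sketch, where SineWaveProvider stands in for any IWaveProvider implementation exposing this method (the class name is hypothetical):

        var provider = new SineWaveProvider(); // hypothetical provider using SetWaveFormat
        provider.SetWaveFormat(16000, 1);      // 16 kHz mono, set before Init
        using (var waveOut = new WaveOut())
        {
            waveOut.Init(provider);
            waveOut.Play();
            Thread.Sleep(2000); // play for two seconds
        }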
Ejemplo n.º 54
0
        /// ------------------------------------------------------------------------------------
        public static WaveFileReader ConvertToStandardPcmStream(string inputMediaFile,
                                                                string outputAudioFile, WaveFormat preferredOutputFormat, out Exception error)
        {
            try
            {
                error = null;
                string errorMsg;

                if (CheckConversionIsPossible(outputAudioFile, false, out errorMsg))
                {
                    if (DoPcmConversion(inputMediaFile, outputAudioFile, preferredOutputFormat))
                    {
                        return new WaveFileReader(outputAudioFile);
                    }

                    errorMsg = LocalizationManager.GetString("SoundFileUtils.FileMayNotBeValidAudioError",
                                                             "No audio track could be found in the specified file. Verify that the file is a valid audio file.");
                }

                var msg = LocalizationManager.GetString("SoundFileUtils.ExtractingAudioError",
                                                        "There was an error extracting audio from the media file '{0}'\r\n\r\n{1}",
                                                        "Second parameter is the error message.");

                error = new Exception(string.Format(msg, inputMediaFile, errorMsg));
            }
            catch (Exception e)
            {
                error = e;
            }

            return null;
        }
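A caller-side sketch (the declaring class name SoundFileUtils is inferred from the localization keys and may differ; the file paths are placeholders):

        Exception error;
        var preferred = new WaveFormat(44100, 16, 1);
        using (var reader = SoundFileUtils.ConvertToStandardPcmStream("input.mp4", "output.wav", preferred, out error))
        {
            if (reader == null)
                Console.WriteLine(error.Message);  // conversion failed; error says why
            else
                Console.WriteLine("Converted {0} samples", reader.SampleCount);
        }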
Ejemplo n.º 55
0
 /// <summary>
 /// Create a Mp3Writer with the default MP3 format
 /// </summary>
 /// <param name="output">Stream that will hold the MP3 resulting data</param>
 /// <param name="inputDataFormat">PCM format of input data</param>
 public Mp3Writer(Stream output, WaveFormat inputDataFormat)
     : this(output, inputDataFormat, new BE_CONFIG(inputDataFormat))
 {
 }
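A minimal usage sketch; because Mp3Writer ultimately derives from BinaryWriter (see the AudioWriter constructor in example 51), raw PCM can be pushed through the inherited Write overloads (buffer contents and sizes are illustrative):

        var pcmFormat = new WaveFormat(44100, 16, 2);
        using (var mp3Stream = File.Create("out.mp3"))
        using (var writer = new Mp3Writer(mp3Stream, pcmFormat))
        {
            byte[] pcm = new byte[44100 * 4]; // one second of 16-bit stereo silence
            writer.Write(pcm, 0, pcm.Length);
        }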
Ejemplo n.º 56
0
 private bool CheckForSupportedFormat(AudioClient audioClient, IEnumerable<WaveFormatExtensible> waveFormats, out WaveFormat foundMatch)
 {
     foundMatch = null;
     foreach (var format in waveFormats)
     {
         if (audioClient.IsFormatSupported(_shareMode, format))
         {
             foundMatch = format;
              return true;
         }
     }
      return false;
 }
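A sketch of driving this probe with candidates ordered from most to least preferred; the WaveFormatExtensible constructor shape and the AudioSubTypes names follow CSCore and are an assumption here:

        // Candidate formats, ordered by preference (assumed CSCore-style ctor:
        // WaveFormatExtensible(sampleRate, bits, channels, subFormat)).
        var candidates = new[]
        {
            new WaveFormatExtensible(48000, 32, 2, AudioSubTypes.IeeeFloat),
            new WaveFormatExtensible(44100, 16, 2, AudioSubTypes.Pcm)
        };

        WaveFormat match;
        if (!CheckForSupportedFormat(audioClient, candidates, out match))
            throw new NotSupportedException("None of the candidate formats is supported.");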
Ejemplo n.º 57
0
            public SilenceSource(WaveFormat format)
            {
                WaveFormat = format;

                _memoryStream = new MemoryStream(new byte[format.MillisecondsToBytes(10)], false);
            }
Ejemplo n.º 58
0
 public SampleProcessor(WaveFormat waveFormat)
 {
     _waveFormat = waveFormat;
 }
Ejemplo n.º 59
0
            private void StreamMP3_New(object state)
            {
                Thread.CurrentThread.Name = state.ToString();
                string url = (string)state;

                byte[] buffer = new byte[16384 * 4];

                Dictionary<int, IMp3FrameDecompressor> Decompressors = new Dictionary<int, IMp3FrameDecompressor>();
                WaveFormat outputFormat = new WaveFormat(44100, 16, 2);

                this.bufferedWaveProvider = new BufferedWaveProvider(outputFormat);
                this.bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(2);

//                WaveToSampleProvider wav2sample = new WaveToSampleProvider(this.bufferedWaveProvider);

                ISampleProvider sampleProvider = new Pcm16BitToSampleProvider(this.bufferedWaveProvider);

                SampleAggregator sa = new SampleAggregator(128);

                sa.NotificationCount = 882;
                sa.PerformFFT        = true;
                sa.FftCalculated    += sa_FftCalculated;
                NotifyingSampleProvider notifyProvider = new NotifyingSampleProvider(sampleProvider);

                notifyProvider.Sample += (a, b) => sa.Add(b.Left);

                volumeProvider = new VolumeSampleProvider(notifyProvider);
                //volumeProvider = new SampleChannel(this.bufferedWaveProvider, true);
                volumeProvider.Volume = 0.0f;
                //volumeProvider.PreVolumeMeter += waveChannel_PreVolumeMeter;

                for (int j = 0; j < 5; ++j)
                {
                    try {
                        using (IWavePlayer waveOut = new WaveOut()) {
                            waveOut.PlaybackStopped += waveOut_PlaybackStopped;
                            waveOut.Init(volumeProvider);

                            using (var readFullyStream = new ShoutcastStream(url)) {
                                waveOut.Play();
                                if (OnStartPlay != null)
                                {
                                    OnStartPlay(this);
                                }

                                do
                                {
                                    if (bufferedWaveProvider != null && bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes < bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
                                    {
                                        int x = 0;
                                        while (playbackState != StreamingPlaybackState.Stopped && x < 5)
                                        {
                                            x++;
                                            Thread.Sleep(50);
                                        }
                                    }
                                    else
                                    {
                                        Mp3Frame frame = Mp3Frame.LoadFromStream(readFullyStream, true);

                                        if (currentTrack != readFullyStream.StreamTitle)
                                        {
                                            currentTrack = readFullyStream.StreamTitle;
                                            if (!string.IsNullOrEmpty(currentTrack))
                                            {
                                                ThreadPool.QueueUserWorkItem(Search, currentTrack);
                                            }
                                            else
                                            {
                                                CurrentTrack = null;
                                                if (OnNewTrack != null)
                                                {
                                                    OnNewTrack(this, null);
                                                }
                                            }
                                        }

                                        IMp3FrameDecompressor dec;
                                        if (!Decompressors.TryGetValue(frame.SampleRate, out dec))
                                        {
                                            WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate,
                                                                                      frame.ChannelMode == ChannelMode.Mono ? 1 : 2,
                                                                                      frame.FrameLength, frame.BitRate);

                                            var suggFormat = AcmStream.SuggestPcmFormat(waveFormat); // suggested PCM format is computed but not used

                                            dec = new VbrAcmMp3FrameDecompressor(waveFormat, outputFormat);
                                            Decompressors[frame.SampleRate] = dec;
                                        }


                                        int decompressed = dec.DecompressFrame(frame, buffer, 0);
                                        bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                                    }
                                } while (playbackState != StreamingPlaybackState.Stopped);

                                waveOut.Stop();
                            }
                        }

                        return;
                    } catch (Exception exe) {
                        int x = 0;
                        while (playbackState != StreamingPlaybackState.Stopped && x < 20)
                        {
                            x++;
                            Thread.Sleep(50);
                        }
                        if (playbackState == StreamingPlaybackState.Stopped)
                        {
                            return;
                        }
                    } finally {
                        foreach (var dc in Decompressors)
                        {
                            dc.Value.Dispose();
                        }

                        Decompressors.Clear();
                    }
                }

                if (OnError != null)
                {
                    OnError(this);
                }
            }
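Two magic numbers above deserve a note: the buffering check pauses decoding whenever less than a quarter second of free space remains, and NotificationCount = 882 paces the FFT at 44.1 kHz. A small sketch of both calculations:

        // Throttle: stop decoding while free space is under 1/4 second of audio.
        int freeBytes = bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes;
        bool bufferNearlyFull = freeBytes < bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4;

        // FFT pacing: 882 samples / 44100 samples-per-second = 0.02 s,
        // i.e. one FFT notification roughly every 20 ms (50 per second).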
Ejemplo n.º 60
0
 private void InitMixers()
 {
     _clientAudioMixer           = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(OUTPUT_SAMPLE_RATE, 2));
     _clientAudioMixer.ReadFully = true;
 }
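A hedged sketch of feeding this mixer, assuming NAudio's MixingSampleProvider API and a hypothetical OUTPUT_SAMPLE_RATE of 48000 (the constant itself is not shown in the example):

        const int OUTPUT_SAMPLE_RATE = 48000; // assumed value for illustration

        var mixer = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(OUTPUT_SAMPLE_RATE, 2));
        mixer.ReadFully = true; // emit silence instead of ending when no inputs are queued

        // Each client gets its own buffer; the mixer sums whatever is available.
        var clientBuffer = new BufferedWaveProvider(new WaveFormat(OUTPUT_SAMPLE_RATE, 16, 2));
        mixer.AddMixerInput(clientBuffer.ToSampleProvider());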