Example No. 1
		/// <summary>
		/// Creates a new Wave input stream
		/// </summary>
		/// <param name="deviceNumber">The device to open - 0 is default</param>
		/// <param name="desiredFormat">The PCM format to record in</param>
		/// <param name="callbackWindow">If this parameter is non-null, the Wave In Messages
		/// will be sent to the message loop of the supplied control. This is considered a
		/// safer way to use the waveIn functionality</param>
		public WaveInStream(int deviceNumber, WaveFormat desiredFormat, Control callbackWindow)
		{
			waveFormat = desiredFormat;
			callback = Callback;
			if (callbackWindow == null)
			{
				MmException.Try(
					WaveInterop.waveInOpen(out waveInHandle, (IntPtr) deviceNumber, desiredFormat, callback, IntPtr.Zero,
					                       WaveInterop.CallbackFunction), "waveInOpen");
			}
			else
			{
				waveInWindow = new WaveWindowNative(callback);
				MmException.Try(
					WaveInterop.waveInOpenWindow(out waveInHandle, (IntPtr) deviceNumber, desiredFormat, callbackWindow.Handle,
					                             IntPtr.Zero, WaveInterop.CallbackWindow), "waveInOpen");
				waveInWindow.AssignHandle(callbackWindow.Handle);
			}

			// Default to three buffers of 100ms each
			int bufferSize = desiredFormat.AverageBytesPerSecond/10;
			numBuffers = 3;

			buffers = new WaveInBuffer[numBuffers];
			for (int n = 0; n < numBuffers; n++)
			{
				buffers[n] = new WaveInBuffer(waveInHandle, bufferSize);
			}
		}
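A minimal usage sketch for the function-callback path above (passing null as the callback window). The StartRecording/StopRecording members and DataAvailable event are assumptions based on NAudio's other recording classes of this era; the format and two-second duration are arbitrary.

		var format = new WaveFormat(44100, 16, 2);
		using (var waveIn = new WaveInStream(0, format, null)) // null window => function callback
		{
			// DataAvailable/StartRecording/StopRecording assumed, as on NAudio's WaveIn
			waveIn.DataAvailable += (s, e) => Console.WriteLine("{0} bytes recorded", e.BytesRecorded);
			waveIn.StartRecording();
			System.Threading.Thread.Sleep(2000); // record for two seconds
			waveIn.StopRecording();
		}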
Example No. 2
		/// <summary>
		/// WaveStream to resample using the DMO Resampler
		/// </summary>
		/// <param name="inputProvider">Input Stream</param>
		/// <param name="outputFormat">Desired Output Format</param>
		public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
		{
			this.inputProvider = inputProvider;
			inputStream = inputProvider as WaveStream;
			this.outputFormat = outputFormat;
			resampler = new Resampler();
			// use inputProvider.WaveFormat throughout: inputStream is null when the
			// provider is not a WaveStream, and the null check below treats that as legal
			if (!resampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
			{
				throw new ArgumentException("Unsupported Input Stream format", "inputProvider");
			}

			resampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
			if (!resampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
			{
				throw new ArgumentException("Unsupported Output Stream format", "outputFormat");
			}

			resampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
			if (inputStream != null)
			{
				position = InputToOutputPosition(inputStream.Position);
			}
			inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
			outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
		}
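For context, a hedged sketch of how this stream is typically constructed and read ("input.wav" is a placeholder path; the Windows resampler DMO must be available on the machine):

		using (var reader = new WaveFileReader("input.wav"))
		using (var resampled = new ResamplerDmoStream(reader,
			WaveFormat.CreateIeeeFloatWaveFormat(48000, reader.WaveFormat.Channels)))
		{
			var buffer = new byte[resampled.WaveFormat.AverageBytesPerSecond];
			int bytesRead = resampled.Read(buffer, 0, buffer.Length); // pulls from reader and resamples
		}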
Example No. 3
		/// <summary>
		/// Creates a new 32 bit WaveMixerStream
		/// </summary>
		public WaveMixerStream32()
		{
			autoStop = true;
			waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
			bytesPerSample = 4;
			inputStreams = new List<WaveStream>();
		}
Example No. 4
		/// <summary>
		/// Prepares a Wave input device for recording
		/// </summary>
		public WaveIn(WaveCallbackInfo callbackInfo)
		{
			DeviceNumber = 0;
			WaveFormat = new WaveFormat(8000, 16, 1);
			BufferMilliseconds = 100;
			NumberOfBuffers = 3;
			callback = Callback;
			this.callbackInfo = callbackInfo;
			callbackInfo.Connect(callback);
		}
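A usage sketch: WaveCallbackInfo.FunctionCallback() is one of the factory methods for this constructor's parameter, and the Process handler is a hypothetical placeholder for your own code.

		var waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
		waveIn.WaveFormat = new WaveFormat(44100, 16, 1); // override the 8 kHz mono default
		waveIn.DataAvailable += (s, e) => Process(e.Buffer, e.BytesRecorded); // Process: your handler
		waveIn.StartRecording();
		// later: waveIn.StopRecording(); waveIn.Dispose();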
Example No. 5
		/// <summary>
		/// Create a new WaveFormat conversion stream
		/// </summary>
		/// <param name="targetFormat">Desired output format</param>
		/// <param name="sourceStream">Source stream</param>
		public WaveFormatConversionStream(WaveFormat targetFormat, WaveStream sourceStream)
		{
			this.sourceStream = sourceStream;
			this.targetFormat = targetFormat;

			conversionStream = new AcmStream(sourceStream.WaveFormat, targetFormat);
			// work out how many bytes the entire input stream will convert to
			length = SourceToDest((int) sourceStream.Length);
			blockAlign = SourceToDest(sourceStream.BlockAlign);
			position = 0;
		}
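A sketch of a typical use: resampling a PCM file via ACM and saving the result with the CreateWaveFile helper ("source.wav" and "resampled.wav" are placeholder paths). As Example 18 notes, not every conversion can be done in one step, so only the sample rate changes here:

		using (var reader = new WaveFileReader("source.wav"))
		using (var converted = new WaveFormatConversionStream(
			new WaveFormat(16000, 16, reader.WaveFormat.Channels), reader))
		{
			WaveFileWriter.CreateWaveFile("resampled.wav", converted);
		}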
Example No. 6
		/// <summary>
		/// Creates a new ACM stream to convert one format to another, using a
		/// specified driver identifier and wave filter
		/// </summary>
		/// <param name="driverId">the driver identifier</param>
		/// <param name="sourceFormat">the source format</param>
		/// <param name="waveFilter">the wave filter</param>
		public AcmStream(IntPtr driverId, WaveFormat sourceFormat, WaveFilter waveFilter)
		{
			int sourceBufferSize = Math.Max(16384, sourceFormat.AverageBytesPerSecond);
			this.sourceFormat = sourceFormat;
			sourceBufferSize -= (sourceBufferSize%sourceFormat.BlockAlign);
			MmException.Try(AcmInterop.acmDriverOpen(out driverHandle, driverId, 0), "acmDriverOpen");
			MmException.Try(AcmInterop.acmStreamOpen(out streamHandle, driverHandle,
			                                         sourceFormat, sourceFormat, waveFilter, IntPtr.Zero, IntPtr.Zero,
			                                         AcmStreamOpenFlags.NonRealTime), "acmStreamOpen");
			streamHeader = new AcmStreamHeader(streamHandle, sourceBufferSize, SourceToDest(sourceBufferSize));
		}
Example No. 7
		/// <summary>
		/// Creates a new WaveFloatTo16Provider
		/// </summary>
		/// <param name="sourceProvider">the source provider</param>
		public WaveFloatTo16Provider(IWaveProvider sourceProvider)
		{
			if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
				throw new ApplicationException("Only IEEE float supported");
			if (sourceProvider.WaveFormat.BitsPerSample != 32)
				throw new ApplicationException("Only 32 bit audio supported");

			waveFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 16, sourceProvider.WaveFormat.Channels);

			this.sourceProvider = sourceProvider;
			volume = 1.0f;
		}
Example No. 8
		/// <summary>
		/// Creates a new Wave32To16Stream
		/// </summary>
		/// <param name="sourceStream">the source stream</param>
		public Wave32To16Stream(WaveStream sourceStream)
		{
			if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
				throw new ApplicationException("Only 32 bit Floating point supported");
			if (sourceStream.WaveFormat.BitsPerSample != 32)
				throw new ApplicationException("Only 32 bit Floating point supported");

			waveFormat = new WaveFormat(sourceStream.WaveFormat.SampleRate, 16, sourceStream.WaveFormat.Channels);
			volume = 1.0f;
			this.sourceStream = sourceStream;
			length = sourceStream.Length/2;
			position = sourceStream.Position/2;
		}
Example No. 9
		/// <summary>
		/// Creates a new Wave16toFloatProvider
		/// </summary>
		/// <param name="sourceProvider">the source provider</param>
		public Wave16ToFloatProvider(IWaveProvider sourceProvider)
		{
			if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
				throw new ApplicationException("Only PCM supported");
			if (sourceProvider.WaveFormat.BitsPerSample != 16)
				throw new ApplicationException("Only 16 bit audio supported");

			waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceProvider.WaveFormat.SampleRate,
			                                                  sourceProvider.WaveFormat.Channels);

			this.sourceProvider = sourceProvider;
			volume = 1.0f;
		}
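Examples 7 and 9 are mirror images, which suggests the usual pattern: convert 16-bit PCM to IEEE float for processing, then back to 16-bit for output. A sketch, assuming the Volume properties wrap the volume fields initialised above ("input.wav" is a placeholder):

		IWaveProvider pcmSource = new WaveFileReader("input.wav");
		var asFloat = new Wave16ToFloatProvider(pcmSource) { Volume = 0.5f }; // attenuate by 6 dB
		IWaveProvider backTo16 = new WaveFloatTo16Provider(asFloat);
		// backTo16 can now be handed to a 16-bit PCM consumer, e.g. WaveOut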
Example No. 10
		/// <summary>
		/// Creates a WaveFormat with custom members
		/// </summary>
		/// <param name="tag">The encoding</param>
		/// <param name="sampleRate">Sample Rate</param>
		/// <param name="channels">Number of channels</param>
		/// <param name="averageBytesPerSecond">Average Bytes Per Second</param>
		/// <param name="blockAlign">Block Align</param>
		/// <param name="bitsPerSample">Bits Per Sample</param>
		/// <returns>A WaveFormat with the specified members</returns>
		public static WaveFormat CreateCustomFormat(WaveFormatEncoding tag, int sampleRate, int channels,
		                                            int averageBytesPerSecond, int blockAlign, int bitsPerSample)
		{
			var waveFormat = new WaveFormat();
			waveFormat.waveFormatTag = tag;
			waveFormat.channels = (short) channels;
			waveFormat.sampleRate = sampleRate;
			waveFormat.averageBytesPerSecond = averageBytesPerSecond;
			waveFormat.blockAlign = (short) blockAlign;
			waveFormat.bitsPerSample = (short) bitsPerSample;
			waveFormat.extraSize = 0;
			return waveFormat;
		}
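Note that nothing here validates the derived members, so the caller must keep them consistent. For PCM, blockAlign = channels × bitsPerSample / 8 and averageBytesPerSecond = sampleRate × blockAlign, as in this sketch of a custom 24-bit format:

		int sampleRate = 48000, channels = 2, bits = 24;
		int blockAlign = channels * bits / 8; // 6 bytes per sample frame
		var format = WaveFormat.CreateCustomFormat(
			WaveFormatEncoding.Pcm, sampleRate, channels,
			sampleRate * blockAlign, // 288000 average bytes per second
			blockAlign, bits);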
Example No. 11
		/// <summary>
		/// WaveFileWriter that actually writes to a stream
		/// </summary>
		/// <param name="outStream">Stream to be written to</param>
		/// <param name="format">Wave format to use</param>
		public WaveFileWriter(Stream outStream, WaveFormat format)
		{
			this.outStream = outStream;
			writer = new BinaryWriter(outStream, Encoding.ASCII);
			writer.Write(Encoding.ASCII.GetBytes("RIFF"));
			writer.Write(0); // placeholder
			writer.Write(Encoding.ASCII.GetBytes("WAVEfmt "));
			this.format = format;

			format.Serialize(writer);

			CreateFactChunk();

			WriteDataChunkHeader();
		}
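The length placeholder written here is fixed up when the writer is closed. A sketch writing one second of a 440 Hz sine wave (WriteData is the write method of this era's API; later builds rename it to Write; "sine.wav" is a placeholder path):

		var format = new WaveFormat(44100, 16, 1);
		using (var writer = new WaveFileWriter(File.Create("sine.wav"), format))
		{
			var buffer = new byte[format.AverageBytesPerSecond]; // one second of audio
			for (int n = 0; n < buffer.Length / 2; n++)
			{
				var sample = (short) (Math.Sin(2*Math.PI*440*n/44100.0)*short.MaxValue);
				buffer[2*n] = (byte) (sample & 0xFF); // little-endian low byte
				buffer[2*n + 1] = (byte) (sample >> 8); // high byte
			}
			writer.WriteData(buffer, 0, buffer.Length);
		}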
Example No. 12
		internal MmResult WaveOutOpen(out IntPtr waveOutHandle, int deviceNumber, WaveFormat waveFormat,
		                              WaveInterop.WaveCallback callback)
		{
			MmResult result;
			if (Strategy == WaveCallbackStrategy.FunctionCallback)
			{
				result = WaveInterop.waveOutOpen(out waveOutHandle, (IntPtr) deviceNumber, waveFormat, callback, IntPtr.Zero,
				                                 WaveInterop.CallbackFunction);
			}
			else
			{
				result = WaveInterop.waveOutOpenWindow(out waveOutHandle, (IntPtr) deviceNumber, waveFormat, Handle, IntPtr.Zero,
				                                       WaveInterop.CallbackWindow);
			}
			return result;
		}
Example No. 13
		/// <summary>
		/// Creates a new mono waveprovider based on a stereo input
		/// </summary>
		/// <param name="sourceProvider">Stereo 16 bit PCM input</param>
		public StereoToMonoProvider16(IWaveProvider sourceProvider)
		{
			if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
			{
				throw new ArgumentException("Source must be PCM");
			}
			if (sourceProvider.WaveFormat.Channels != 2)
			{
				throw new ArgumentException("Source must be stereo");
			}
			if (sourceProvider.WaveFormat.BitsPerSample != 16)
			{
				throw new ArgumentException("Source must be 16 bit");
			}
			this.sourceProvider = sourceProvider;
			outputFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 1);
		}
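A sketch of use: equal weighting of both channels averages them into the mono output. LeftVolume and RightVolume are assumed to be the per-channel weights this class exposes; stereoSource is a placeholder for any stereo 16-bit PCM IWaveProvider.

		var mono = new StereoToMonoProvider16(stereoSource)
		{
			LeftVolume = 0.5f, // each input channel contributes half
			RightVolume = 0.5f
		};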
Example No. 14
		/// <summary>
		/// Selects the sample convertor based on the input WaveFormat and the output ASIOSampleType.
		/// </summary>
		/// <param name="waveFormat">The wave format.</param>
		/// <param name="asioType">The type.</param>
		/// <returns>A SampleConvertor delegate matching the given formats</returns>
		public static SampleConvertor SelectSampleConvertor(WaveFormat waveFormat, ASIOSampleType asioType)
		{
			SampleConvertor convertor = null;
			bool is2Channels = waveFormat.Channels == 2;

			if (waveFormat.BitsPerSample != 16 && waveFormat.BitsPerSample != 32)
			{
				throw new ArgumentException(String.Format("WaveFormat BitsPerSample {0} is not yet supported",
				                                          waveFormat.BitsPerSample));
			}

			// TODO : IMPLEMENTS OTHER CONVERTOR TYPES
			switch (asioType)
			{
				case ASIOSampleType.ASIOSTInt32LSB:
					switch (waveFormat.BitsPerSample)
					{
						case 16:
							convertor = (is2Channels) ? ConvertorShortToInt2Channels : (SampleConvertor) ConvertorShortToIntGeneric;
							break;
						case 32:
							convertor = (is2Channels) ? ConvertorFloatToInt2Channels : (SampleConvertor) ConvertorFloatToIntGeneric;
							break;
					}
					break;
				case ASIOSampleType.ASIOSTInt16LSB:
					switch (waveFormat.BitsPerSample)
					{
						case 16:
							convertor = (is2Channels) ? ConvertorShortToShort2Channels : (SampleConvertor) ConvertorShortToShortGeneric;
							break;
						case 32:
							convertor = (is2Channels) ? ConvertorFloatToShort2Channels : (SampleConvertor) ConvertorFloatToShortGeneric;
							break;
					}
					break;
				default:
					throw new ArgumentException(
						String.Format("ASIO buffer type {0} is not yet supported. Only ASIO Int32 and Int16 buffers are supported.",
						              Enum.GetName(typeof (ASIOSampleType), asioType)));
			}
			return convertor;
		}
Example No. 15
		/// <summary>
		/// Initializes a new instance of the DMO MP3 Frame decompressor
		/// </summary>
		/// <param name="sourceFormat">The MP3 wave format to decode</param>
		public DmoMp3FrameDecompressor(WaveFormat sourceFormat)
		{
			mp3Decoder = new WindowsMediaMp3Decoder();
			if (!mp3Decoder.MediaObject.SupportsInputWaveFormat(0, sourceFormat))
			{
				throw new ArgumentException("Unsupported input format");
			}
			mp3Decoder.MediaObject.SetInputWaveFormat(0, sourceFormat);
			pcmFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.Channels); // 16 bit
			if (!mp3Decoder.MediaObject.SupportsOutputWaveFormat(0, pcmFormat))
			{
				throw new ArgumentException(String.Format("Unsupported output format {0}", pcmFormat));
			}
			mp3Decoder.MediaObject.SetOutputWaveFormat(0, pcmFormat);

			// a second is more than enough to decompress a frame at a time
			inputMediaBuffer = new MediaBuffer(sourceFormat.AverageBytesPerSecond);
			outputBuffer = new DmoOutputDataBuffer(pcmFormat.AverageBytesPerSecond);
		}
Example No. 16
		/// <summary>
		/// Creates a new stereo waveprovider based on a mono input
		/// </summary>
		/// <param name="sourceProvider">Mono 16 bit PCM input</param>
		public MonoToStereoProvider16(IWaveProvider sourceProvider)
		{
			if (sourceProvider.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
			{
				throw new ArgumentException("Source must be PCM");
			}
			if (sourceProvider.WaveFormat.Channels != 1)
			{
				throw new ArgumentException("Source must be Mono");
			}
			if (sourceProvider.WaveFormat.BitsPerSample != 16)
			{
				throw new ArgumentException("Source must be 16 bit");
			}
			this.sourceProvider = sourceProvider;
			outputFormat = new WaveFormat(sourceProvider.WaveFormat.SampleRate, 2);
			RightVolume = 1.0f;
			LeftVolume = 1.0f;
		}
Example No. 17
		/// <summary>
		/// Creates a new WaveChannel32
		/// </summary>
		/// <param name="sourceStream">the source stream</param>
		/// <param name="volume">stream volume (1 is 0dB)</param>
		/// <param name="pan">pan control (-1 to 1)</param>
		public WaveChannel32(WaveStream sourceStream, float volume, float pan)
		{
			PadWithZeroes = true;
			if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
				throw new ApplicationException("Only PCM supported");
			if (sourceStream.WaveFormat.BitsPerSample != 16)
				throw new ApplicationException("Only 16 bit audio supported");

			// always outputs stereo 32 bit
			waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceStream.WaveFormat.SampleRate, 2);
			destBytesPerSample = 8; // includes stereo factoring

			this.sourceStream = sourceStream;
			this.volume = volume;
			this.pan = pan;
			sourceBytesPerSample = sourceStream.WaveFormat.Channels*sourceStream.WaveFormat.BitsPerSample/8;

			length = SourceToDest(sourceStream.Length);
			position = 0;
		}
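A sketch of the three-argument constructor in use ("track.wav" is a placeholder path). The output is always 32-bit IEEE float stereo regardless of the source layout, which is what makes this class a convenient mixer input (see Example 26):

		var channel = new WaveChannel32(new WaveFileReader("track.wav"), 0.5f, -0.2f);
		// channel.WaveFormat is now 32-bit IEEE float, stereo, at the source sample rate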
Example No. 18
		/// <summary>
		/// Creates a new ACM stream to convert one format to another. Note that
		/// not all conversions can be done in one step
		/// </summary>
		/// <param name="sourceFormat">The source audio format</param>
		/// <param name="destFormat">The destination audio format</param>
		public AcmStream(WaveFormat sourceFormat, WaveFormat destFormat)
		{
			try
			{
				streamHandle = IntPtr.Zero;
				this.sourceFormat = sourceFormat;
				int sourceBufferSize = Math.Max(65536, sourceFormat.AverageBytesPerSecond);
				sourceBufferSize -= (sourceBufferSize%sourceFormat.BlockAlign);
				MmException.Try(
					AcmInterop.acmStreamOpen(out streamHandle, IntPtr.Zero, sourceFormat, destFormat, null, IntPtr.Zero, IntPtr.Zero,
					                         AcmStreamOpenFlags.NonRealTime), "acmStreamOpen");

				int destBufferSize = SourceToDest(sourceBufferSize);
				streamHeader = new AcmStreamHeader(streamHandle, sourceBufferSize, destBufferSize);
				driverHandle = IntPtr.Zero;
			}
			catch
			{
				// clean up resources; Dispose also suppresses finalisation
				Dispose();
				throw;
			}
		}
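A sketch of the buffer-level conversion loop this class supports, assuming the SourceBuffer/DestBuffer properties and Convert method of NAudio's AcmStream (FillSourceBuffer is a hypothetical helper that copies input into the source buffer and returns the byte count):

		var acm = new AcmStream(sourceFormat, destFormat);
		int bytesRead = FillSourceBuffer(acm.SourceBuffer); // hypothetical input step
		int sourceBytesConverted;
		int converted = acm.Convert(bytesRead, out sourceBytesConverted);
		// acm.DestBuffer now holds 'converted' bytes of destFormat audio; any
		// unconverted input (bytesRead - sourceBytesConverted) must be carried over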
Example No. 19
		/// <summary>
		/// Initialize for playing the specified wave stream
		/// </summary>
		/// <param name="waveStream">IWaveProvider to play</param>
		public void Init(IWaveProvider waveStream)
		{
			long latencyRefTimes = latencyMilliseconds*10000;
			outputFormat = waveStream.WaveFormat;
			// first attempt uses the WaveFormat from the WaveStream
			WaveFormatExtensible closestSampleRateFormat;
			if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
			{
				// Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
				// See the documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
				// "In shared mode, the audio engine always supports the mix format"
				// The MixFormat is more likely to be a WaveFormatExtensible.
				if (closestSampleRateFormat == null)
				{
					WaveFormat correctSampleRateFormat = audioClient.MixFormat;
					/*WaveFormat.CreateIeeeFloatWaveFormat(
                        audioClient.MixFormat.SampleRate,
                        audioClient.MixFormat.Channels);*/

					if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
					{
						// Candidate formats, ordered from best to worst (Float32, Int24, Int16)
						WaveFormatExtensible[] bestToWorstFormats = {
						                                            	new WaveFormatExtensible(
						                                            		outputFormat.SampleRate, 32,
						                                            		outputFormat.Channels),
						                                            	new WaveFormatExtensible(
						                                            		outputFormat.SampleRate, 24,
						                                            		outputFormat.Channels),
						                                            	new WaveFormatExtensible(
						                                            		outputFormat.SampleRate, 16,
						                                            		outputFormat.Channels),
						                                            };

						// Take the first format the device supports
						for (int i = 0; i < bestToWorstFormats.Length; i++)
						{
							correctSampleRateFormat = bestToWorstFormats[i];
							if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
							{
								break;
							}
							correctSampleRateFormat = null;
						}

						// If still null, fall back to 16-bit stereo PCM
						if (correctSampleRateFormat == null)
						{
							// Last Last Last Chance (Thanks WASAPI)
							correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
							if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
							{
								throw new NotSupportedException("Can't find a supported format to use");
							}
						}
					}
					outputFormat = correctSampleRateFormat;
				}
				else
				{
					outputFormat = closestSampleRateFormat;
				}

				// just check that we can make it.
				using (new ResamplerDmoStream(waveStream, outputFormat))
				{
				}
				dmoResamplerNeeded = true;
			}
			else
			{
				dmoResamplerNeeded = false;
			}
			sourceStream = waveStream;

			// If using EventSync, setup is specific with shareMode
			if (isUsingEventSync)
			{
				// Init Shared or Exclusive
				if (shareMode == AudioClientShareMode.Shared)
				{
					// With EventCallBack and Shared, both latencies must be set to 0
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0,
					                       outputFormat, Guid.Empty);

					// Get back the effective latency from AudioClient
					latencyMilliseconds = (int) (audioClient.StreamLatency/10000);
				}
				else
				{
					// With EventCallback and Exclusive, both latencies must be equal
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
					                       outputFormat, Guid.Empty);
				}

				// Create the Wait Event Handle
				frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
				audioClient.SetEventHandle(frameEventWaitHandle);
			}
			else
			{
				// Normal setup for either share mode
				audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
				                       outputFormat, Guid.Empty);
			}

			// Get the RenderClient
			renderClient = audioClient.AudioRenderClient;
		}
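For context, a sketch of this Init in normal use via NAudio's WasapiOut (shared mode with a 200 ms requested latency; audioSource is a placeholder for any IWaveProvider):

		var wasapiOut = new WasapiOut(AudioClientShareMode.Shared, 200);
		wasapiOut.Init(audioSource); // triggers the format negotiation above
		wasapiOut.Play();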
Example No. 20
		/// <summary>
		/// Writes a wave file, including a cues chunk
		/// </summary>
		public CueWaveFileWriter(string fileName, WaveFormat waveFormat)
			: base(fileName, waveFormat)
		{
		}
Example No. 21
		/// <summary>
		/// Opens MP3 from a stream rather than a file
		/// Will not dispose of this stream itself
		/// </summary>
		/// <param name="inputStream">Stream containing MP3 data</param>
		public Mp3FileReader(Stream inputStream)
		{
			// Calculated as a double to minimize rounding errors
			double bitRate;

			mp3Stream = inputStream;
			id3v2Tag = Id3v2Tag.ReadTag(mp3Stream);

			dataStartPosition = mp3Stream.Position;
			var mp3Frame = new Mp3Frame(mp3Stream);
			sampleRate = mp3Frame.SampleRate;
			frameLengthInBytes = mp3Frame.FrameLength;
			bitRate = mp3Frame.BitRate;
			xingHeader = XingHeader.LoadXingHeader(mp3Frame);
			// If the header exists, we can skip over it when decoding the rest of the file
			if (xingHeader != null) dataStartPosition = mp3Stream.Position;

			mp3DataLength = mp3Stream.Length - dataStartPosition;

			// try for an ID3v1 tag as well
			mp3Stream.Position = mp3Stream.Length - 128;
			var tag = new byte[128];
			mp3Stream.Read(tag, 0, 128); // read the whole 128-byte tag, not just the "TAG" marker
			if (tag[0] == 'T' && tag[1] == 'A' && tag[2] == 'G')
			{
				id3v1Tag = tag;
				mp3DataLength -= 128;
			}

			mp3Stream.Position = dataStartPosition;

			CreateTableOfContents();
			tocIndex = 0;

			// [Average bit rate in bits/sec] = [data length in bits] / [duration in seconds]
			bitRate = (mp3DataLength*8.0/TotalSeconds());

			mp3Stream.Position = dataStartPosition;

			Mp3WaveFormat = new Mp3WaveFormat(sampleRate, mp3Frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frameLengthInBytes,
			                                  (int) bitRate);
			decompressor = new AcmMp3FrameDecompressor(Mp3WaveFormat); // new DmoMp3FrameDecompressor(this.Mp3WaveFormat); 
			waveFormat = decompressor.OutputFormat;
			bytesPerSample = (decompressor.OutputFormat.BitsPerSample)/8*decompressor.OutputFormat.Channels;
			// no MP3 frame has more than 1152 samples, but some files seem to
			// decompress to double that, so allow a 2x safety margin
			decompressBuffer = new byte[1152*bytesPerSample*2];
		}
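A usage sketch ("song.mp3" is a placeholder path). Because the reader does not dispose the stream, the caller owns both using blocks; Read returns decompressed PCM in the negotiated waveFormat:

		using (var fileStream = File.OpenRead("song.mp3"))
		using (var reader = new Mp3FileReader(fileStream))
		{
			var buffer = new byte[reader.WaveFormat.AverageBytesPerSecond];
			int bytesRead;
			while ((bytesRead = reader.Read(buffer, 0, buffer.Length)) > 0)
			{
				// buffer[0..bytesRead) is decompressed PCM in reader.WaveFormat
			}
		}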
Example No. 22
		/// <summary>
		/// Creates a new buffered WaveProvider
		/// </summary>
		/// <param name="waveFormat">WaveFormat</param>
		public BufferedWaveProvider(WaveFormat waveFormat)
		{
			this.waveFormat = waveFormat;
			queue = new Queue<AudioBuffer>();
			MaxQueuedBuffers = 100;
		}
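The typical producer/consumer wiring, sketched below: recorded buffers are queued as they arrive and drained by playback. The AddSamples method is assumed to accompany this queue-based provider; waveIn is the recorder from Example 4.

		var buffered = new BufferedWaveProvider(waveIn.WaveFormat);
		waveIn.DataAvailable += (s, e) => buffered.AddSamples(e.Buffer, 0, e.BytesRecorded);
		var waveOut = new WaveOut();
		waveOut.Init(buffered); // playback drains the queue as data arrives
		waveOut.Play();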
Example No. 23
		/// <summary>
		/// Initialises the ASIO driver to play the specified wave provider
		/// </summary>
		/// <param name="waveProvider">The wave provider to play</param>
		public void Init(IWaveProvider waveProvider)
		{
			sourceStream = waveProvider;
			waveFormat = waveProvider.WaveFormat;

			// Select the correct sample convertor from WaveFormat -> ASIOFormat
			convertor = ASIOSampleConvertor.SelectSampleConvertor(waveFormat, driver.Capabilities.OutputChannelInfos[0].type);

			if (!driver.IsSampleRateSupported(waveFormat.SampleRate))
			{
				throw new ArgumentException("SampleRate is not supported. TODO, implement Resampler");
			}
			if (driver.Capabilities.SampleRate != waveFormat.SampleRate)
			{
				driver.SetSampleRate(waveFormat.SampleRate);
			}

			// Hook up the buffer-fill callback
			driver.FillBufferCalback = driver_BufferUpdate;

			// Use the preferred ASIO buffer size
			nbSamples = driver.CreateBuffers(waveFormat.Channels, false);

			// make a buffer big enough to read enough from the sourceStream to fill the ASIO buffers
			waveBuffer = new byte[nbSamples*waveFormat.Channels*waveFormat.BitsPerSample/8];
		}
Example No. 24
		/// <summary>
		/// Set output type to the specified wave format
		/// n.b. may need to set input type first
		/// </summary>
		/// <param name="outputStreamIndex">Output stream index</param>
		/// <param name="waveFormat">Wave format</param>
		public void SetOutputWaveFormat(int outputStreamIndex, WaveFormat waveFormat)
		{
			DmoMediaType mediaType = CreateDmoMediaTypeForWaveFormat(waveFormat);
			bool succeeded = SetOutputType(outputStreamIndex, mediaType, DmoSetTypeFlags.None);
			DmoInterop.MoFreeMediaType(ref mediaType);
			if (!succeeded)
			{
				throw new ArgumentException("Media Type not supported");
			}
		}
Example No. 25
		/// <summary>
		/// Tests if the specified Wave Format is supported for output
		/// n.b. may need to set the input type first
		/// </summary>
		/// <param name="outputStreamIndex">Output stream index</param>
		/// <param name="waveFormat">Wave format</param>
		/// <returns>True if supported</returns>
		public bool SupportsOutputWaveFormat(int outputStreamIndex, WaveFormat waveFormat)
		{
			DmoMediaType mediaType = CreateDmoMediaTypeForWaveFormat(waveFormat);
			bool supported = SetOutputType(outputStreamIndex, mediaType, DmoSetTypeFlags.DMO_SET_TYPEF_TEST_ONLY);
			DmoInterop.MoFreeMediaType(ref mediaType);
			return supported;
		}
Example No. 26
		/// <summary>
		/// Add a new input to the mixer
		/// </summary>
		/// <param name="waveStream">The wave input to add</param>
		public void AddInputStream(WaveStream waveStream)
		{
			if (waveStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
				throw new ArgumentException("Must be IEEE floating point", "waveStream");
			if (waveStream.WaveFormat.BitsPerSample != 32)
				throw new ArgumentException("Only 32 bit audio currently supported", "waveStream");

			if (inputStreams.Count == 0)
			{
				// first one - set the format
				int sampleRate = waveStream.WaveFormat.SampleRate;
				int channels = waveStream.WaveFormat.Channels;
				waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
			}
			else
			{
				if (!waveStream.WaveFormat.Equals(waveFormat))
					throw new ArgumentException("All incoming channels must have the same format", "waveStream");
			}

			lock (this)
			{
				inputStreams.Add(waveStream);
				length = Math.Max(length, waveStream.Length);
				// get to the right point in this input file
				Position = Position;
			}
		}
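A sketch tying this together with Examples 3 and 17: WaveChannel32 converts each 16-bit source into the 32-bit IEEE float format this mixer insists on ("a.wav" and "b.wav" are placeholder paths; all inputs must share a sample rate and channel count):

		var mixer = new WaveMixerStream32();
		mixer.AddInputStream(new WaveChannel32(new WaveFileReader("a.wav"), 1.0f, 0.0f));
		mixer.AddInputStream(new WaveChannel32(new WaveFileReader("b.wav"), 1.0f, 0.0f));
		// mixer can now be played or written out as a single IEEE float stream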
Example No. 27
		public AcmMp3FrameDecompressor(WaveFormat sourceFormat)
		{
			pcmFormat = AcmStream.SuggestPcmFormat(sourceFormat);
			conversionStream = new AcmStream(sourceFormat, pcmFormat);
		}
Example No. 28
		// waveOutOpenWindow is an alias for winmm's waveOutOpen, taking a window handle
		// in place of the callback delegate
		[DllImport("winmm.dll", EntryPoint = "waveOutOpen")]
		public static extern MmResult waveOutOpenWindow(out IntPtr hWaveOut, IntPtr uDeviceID, WaveFormat lpFormat,
		                                                IntPtr callbackWindowHandle, IntPtr dwInstance, int dwFlags);
Example No. 29
		[DllImport("winmm.dll")]
		public static extern MmResult waveOutOpen(out IntPtr hWaveOut, IntPtr uDeviceID, WaveFormat lpFormat,
		                                          WaveCallback dwCallback, IntPtr dwInstance, int dwFlags);
Example No. 30
		/// <summary>
		/// Helper function to make a DMO Media Type to represent a particular WaveFormat
		/// </summary>
		private DmoMediaType CreateDmoMediaTypeForWaveFormat(WaveFormat waveFormat)
		{
			var mediaType = new DmoMediaType();
			int waveFormatExSize = Marshal.SizeOf(waveFormat); // 18 + waveFormat.ExtraSize;
			DmoInterop.MoInitMediaType(ref mediaType, waveFormatExSize);
			mediaType.SetWaveFormat(waveFormat);
			return mediaType;
		}