/// <summary>
/// Converts a <c>SoundFormat</c> to its display string
/// (tag, bit rate, sample rate, bits per sample, channel mode, block align);
/// defers to the base converter for any other destination type.
/// </summary>
public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
{
    if (destinationType != typeof(string))
    {
        return base.ConvertTo(context, culture, value, destinationType);
    }

    SoundFormat fmt = (SoundFormat)value;

    // Scale raw values for display: bytes/s -> kilobits/s, samples/s -> kilohertz.
    float kiloBitsPerSecond = (float)(fmt.AverageBytesPerSecond * 8) / bytesPow;
    float kiloHertz = (float)fmt.SamplesPerSecond / hertzPow;
    string channelMode = fmt.Channels == 1 ? mono : stereo;

    return string.Format(
        culture,
        "{0}, {1:.000} {2}{3}, {4:.000} {5}{6}, {7} {8}, {9}, {10} {11}",
        fmt.Tag,
        kiloBitsPerSecond, kilo, bps,
        kiloHertz, kilo, hz,
        fmt.BitsPerSample, bit,
        channelMode,
        fmt.BlockAlign, block);
}
/// <summary>
/// Native device-format enumeration callback: wraps the format referenced by
/// <paramref name="pwfx"/> and collects it into <c>callbackDeviceFormats</c>.
/// Always returns true (presumably "continue enumeration" — per the native
/// callback contract).
/// </summary>
private bool EnumDeviceFormatsCallback(IntPtr lpws, string lpDevice, IntPtr pwfx)
{
    callbackDeviceFormats.Add(new SoundFormat(pwfx));
    return true;
}
/// <summary>
/// Returns a copy of this format: a member-wise clone whose optional
/// <c>extra</c> byte buffer is duplicated so the copy is independent.
/// </summary>
public SoundFormat Clone()
{
    SoundFormat clone = (SoundFormat)MemberwiseClone();
    if (extra != null)
    {
        // Deep-copy the extra bytes; MemberwiseClone only copied the reference.
        clone.extra = (byte[])extra.Clone();
    }
    return clone;
}
/// <summary>
/// Suggests the device format closest to the requested tag / sample rate /
/// channel count. Unspecified (null) criteria fall back to the corresponding
/// values of <c>preferredFormat</c>.
/// Scoring: matching tag +4, sample-rate closeness up to +2, matching channels +1.
/// </summary>
/// <param name="deviceId">Identifier of the device whose formats are queried.</param>
/// <param name="tag">Desired format tag, or null to use the preferred default.</param>
/// <param name="samplesPerSecond">Desired sample rate, or null to use the preferred default.</param>
/// <param name="channels">Desired channel count, or null to use the preferred default.</param>
/// <returns>The best-matching format, or null when the device exposes no formats.</returns>
/// <exception cref="ArgumentNullException"><paramref name="deviceId"/> is null or empty.</exception>
public static SoundFormat SuggestFormat(string deviceId, SoundFormatTag? tag, int? samplesPerSecond, int? channels)
{
    if (string.IsNullOrEmpty(deviceId))
    {
        throw new ArgumentNullException(nameof(deviceId));
    }

    // Fill unspecified criteria from the preferred format. After this point
    // all three nullable parameters are guaranteed to have a value, so the
    // original per-iteration null checks were dead code and are removed.
    if (tag == null)
    {
        tag = preferredFormat.Tag;
    }
    if (samplesPerSecond == null)
    {
        samplesPerSecond = preferredFormat.SamplesPerSecond;
    }
    if (channels == null)
    {
        channels = preferredFormat.Channels;
    }

    SoundFormat[] availableFormats = GetFormats(deviceId, true);

    // Find the closest format to the specified parameters.
    // Priorities: 1) Tag 2) Sample Rate 3) Channels
    SoundFormat suggestedFormat = null;
    float suggestedMatchValue = int.MinValue;
    foreach (SoundFormat format in availableFormats)
    {
        float matchValue = 0;
        if (format.Tag == tag.Value)
        {
            matchValue += 4.0f;
        }

        // Approaches 2.0 as the rates converge; the +1 in the denominator
        // guards against division by zero.
        float diffRatio = 2.0f - (Math.Abs(format.SamplesPerSecond - samplesPerSecond.Value)
            / (samplesPerSecond.Value + format.SamplesPerSecond + 1.0f));
        matchValue += diffRatio;

        if (channels.Value == format.Channels)
        {
            matchValue += 1.0f;
        }

        if (matchValue > suggestedMatchValue)
        {
            suggestedFormat = format;
            suggestedMatchValue = matchValue;
        }
    }
    return suggestedFormat;
}
/// <summary>
/// Value equality: two formats are equal when their average bytes/second,
/// bits per sample, block alignment, channel count, and sample rate all
/// match. The format tag and extra bytes do not participate in equality.
/// </summary>
public override bool Equals(object obj)
{
    SoundFormat other = obj as SoundFormat;
    return other != null
        && other.avgBytesPerSecond.Equals(this.avgBytesPerSecond)
        && other.bitsPerSample.Equals(this.bitsPerSample)
        && other.blockAlign.Equals(this.blockAlign)
        && other.channels.Equals(this.channels)
        && other.samplesPerSecond.Equals(this.samplesPerSecond);
}
/// <summary>
/// Builds an ACM encoder that converts from the selected device input format
/// to this object's target format. Returns null when no conversion is needed
/// (the device records the target format directly).
/// </summary>
public AcmEncoder GetEncoder()
{
    bool encoderNeeded;
    SoundFormat inputFormat = GetInputFormat(out encoderNeeded);
    if (!encoderNeeded)
    {
        return null;
    }
    return new AcmEncoder
    {
        InputFormat = inputFormat,
        OutputFormat = this.format,
    };
}
/// <summary>
/// Expands a combined <c>WaveFormat</c> flags value into the individual
/// <c>SoundFormat</c> entries it encodes, skipping the zero member
/// (WAVE_INVALID_FORMAT).
/// </summary>
public static SoundFormat[] WaveFormatToSoundFormats(MMInterop.WaveFormat formats)
{
    List<SoundFormat> result = new List<SoundFormat>();
    foreach (MMInterop.WaveFormat flag in Enum.GetValues(typeof(MMInterop.WaveFormat)))
    {
        if ((int)flag == 0) // WAVE_INVALID_FORMAT — carries no format information
        {
            continue;
        }
        if ((formats & flag) == flag)
        {
            result.Add(WaveFormatToSoundFormat(flag));
        }
    }
    return result.ToArray();
}
/// <summary>
/// Selects the format the capture device should record in and reports whether
/// an ACM conversion step is required to reach the target <c>this.format</c>.
/// Selection order: (1) no target format set — pick the device format with the
/// highest average byte rate; (2) device natively supports the target — use it
/// as-is; (3) otherwise pick a device-supported input from the conversion map,
/// rebuilding the map once if it is empty.
/// </summary>
/// <param name="encoderNeeded">
/// False when the device records the returned format directly; true when the
/// returned format must be converted to <c>this.format</c> by an encoder.
/// </param>
/// <returns>The input format the device will record with.</returns>
/// <exception cref="InvalidOperationException">
/// The device exposes no formats, or no conversion path to the target exists.
/// </exception>
private SoundFormat GetInputFormat(out bool encoderNeeded)
{
    int nMaxAvgBytesPerSec = 0;
    // Get device formats
    SoundFormat[] deviceFormats = this.wrapper.GetDeviceFormats(deviceId);
    List<SoundFormat> deviceFormatList = new List<SoundFormat>(deviceFormats);
    SoundFormat inputFormat = null;
    if (this.format == null)
    {
        // If format is not specified, find the format with maximum average bytes per second
        foreach (SoundFormat deviceFormat in deviceFormatList)
        {
            if (inputFormat == null || nMaxAvgBytesPerSec < deviceFormat.AverageBytesPerSecond)
            {
                inputFormat = deviceFormat;
                nMaxAvgBytesPerSec = deviceFormat.AverageBytesPerSecond;
            }
        }
        if (inputFormat == null)
        {
            // This happens only if the device has no formats at all
            throw new InvalidOperationException("Cannot find an appropriate input format.");
        }
        encoderNeeded = false;
        return inputFormat;
    }
    // Check if device supports the format directly (no encoder needed)
    if (deviceFormatList.Contains(this.format))
    {
        encoderNeeded = false;
        return this.format;
    }
    // Get available input formats for conversion
    SoundFormat[] availableInputs = convertionMap.GetInputs(this.format);
    if (availableInputs.Length == 0)
    {
        // Conversion map has no entry for the target yet: rebuild it from the
        // device formats. (We currently use a PCM format for output.)
        convertionMap.Add(AcmEncoder.GetConvertionMap(deviceFormatList.ToArray(), preferredFormat.Tag));
        // Retry with the rebuilt map
        availableInputs = convertionMap.GetInputs(this.format);
        if (availableInputs.Length == 0)
        {
            throw new InvalidOperationException("Cannot find an appropriate input format.");
        }
    }
    // Find the input format that device supports and has
    // maximum average bytes per second
    foreach (SoundFormat input in availableInputs)
    {
        if (deviceFormatList.Contains(input))
        {
            // NOTE(review): the second clause lets the first candidate through
            // unconditionally, but only allows *replacing* a chosen format when
            // the candidate's byte rate equals the uncompressed PCM rate
            // (bits/8 * channels * rate) — presumably to prefer plain PCM
            // inputs. Confirm this asymmetry is intentional.
            if (nMaxAvgBytesPerSec < input.AverageBytesPerSecond && (inputFormat == null || input.AverageBytesPerSecond == (input.BitsPerSample / 8) * input.Channels * input.SamplesPerSecond))
            {
                inputFormat = input;
                nMaxAvgBytesPerSec = (int)input.AverageBytesPerSecond;
            }
        }
    }
    if (inputFormat == null)
    {
        throw new InvalidOperationException("Cannot find an appropriate input format.");
    }
    encoderNeeded = true;
    return inputFormat;
}
/// <summary>
/// Comparison delegate ordering formats by ascending average bytes per second.
/// </summary>
private static int SoundFormatComparer(SoundFormat a, SoundFormat b)
{
    var rateA = a.AverageBytesPerSecond;
    var rateB = b.AverageBytesPerSecond;
    return rateA.CompareTo(rateB);
}
/// <summary>
/// Custom-marshaler hook: converts a managed <c>SoundFormat</c> into its
/// native pointer representation via <c>SoundFormat.ToPtr()</c>.
/// </summary>
public IntPtr MarshalManagedToNative(object ManagedObj)
{
    SoundFormat format = (SoundFormat)ManagedObj;
    return format.ToPtr();
}
/// <summary>
/// Native device-format enumeration callback: records the format referenced
/// by <paramref name="pwfx"/> into <c>callbackDeviceFormats</c>. Always
/// returns true (presumably "continue enumeration" — per the native contract).
/// </summary>
private bool EnumDeviceFormatsCallback(IntPtr lpws, string lpDevice, IntPtr pwfx)
{
    SoundFormat format = new SoundFormat(pwfx);
    callbackDeviceFormats.Add(format);
    return true;
}
/// <summary>
/// Creates the AVI audio stream for the given format, applies the stream
/// format, and opens the optional ACM encoder. The unmanaged format buffer
/// allocated by <c>ToPtr()</c> is always released, even on failure.
/// </summary>
/// <param name="audioFormat">Format describing the audio stream.</param>
/// <param name="audioEncoder">Encoder to open alongside the stream; may be null.</param>
/// <exception cref="AviException">A native AVIFile call failed.</exception>
private void SetupAudio(SoundFormat audioFormat, AcmEncoder audioEncoder)
{
    IntPtr pwfx = audioFormat.ToPtr();
    try
    {
        Avi32Interop.AVISTREAMINFO asi = new Avi32Interop.AVISTREAMINFO();
        asi.fccType = Avi32Interop.streamtypeAUDIO;
        asi.dwScale = audioFormat.BlockAlign;
        asi.dwRate = audioFormat.AverageBytesPerSecond;
        asi.dwStart = 0;
        asi.dwLength = -1;
        asi.dwInitialFrames = 0;
        asi.dwSuggestedBufferSize = 0;
        asi.dwQuality = -1;
        asi.dwSampleSize = audioFormat.BlockAlign;
        int hr = Avi32Interop.AVIFileCreateStream(this.pAviFile, out this.pAudioStream, ref asi);
        if (hr != 0)
        {
            // BUGFIX: the original reported "AVIStreamSetFormat" here, masking
            // which native call actually failed.
            throw new AviException("AVIFileCreateStream", hr);
        }
        hr = Avi32Interop.AVIStreamSetFormat(this.pAudioStream, 0, pwfx, audioFormat.ToalSize);
        if (hr != 0)
        {
            throw new AviException("AVIStreamSetFormat", hr);
        }
        if (audioEncoder != null)
        {
            audioEncoder.Open();
        }
        this.audioFormat = audioFormat;
        this.audioEncoder = audioEncoder;
    }
    finally
    {
        // Release the unmanaged WAVEFORMATEX buffer regardless of outcome.
        Marshal.FreeHGlobal(pwfx);
    }
}
/// <summary>
/// Creates the target AVI file and sets up the requested video and/or audio
/// streams. At least one of <paramref name="videoFormat"/> /
/// <paramref name="audioFormat"/> must be non-null. If any setup step throws,
/// the partially opened file is closed before the exception propagates.
/// </summary>
/// <param name="fileName">Path of the AVI file to create.</param>
/// <param name="videoFormat">Video stream format; null to record no video.</param>
/// <param name="fps">Frames per second for the video stream.</param>
/// <param name="compressor">Video compressor to use for the video stream.</param>
/// <param name="audioFormat">Audio stream format; null to record no audio.</param>
/// <param name="audioEncoder">Optional audio encoder; may be null.</param>
/// <exception cref="InvalidOperationException">Already open, or neither stream requested.</exception>
/// <exception cref="ArgumentNullException"><paramref name="fileName"/> is null or empty.</exception>
/// <exception cref="AviException">The native AVIFileOpen call failed.</exception>
public void Open(string fileName, DisplayFormat videoFormat, int fps, VideoCompressor compressor, SoundFormat audioFormat, AcmEncoder audioEncoder)
{
    if (this.opened)
    {
        // Previously threw a message-less exception; the message aids diagnosis.
        throw new InvalidOperationException("The AVI file is already open.");
    }
    if (string.IsNullOrEmpty(fileName))
    {
        throw new ArgumentNullException(nameof(fileName));
    }
    this.video = videoFormat != null;
    this.audio = audioFormat != null;
    if (!this.audio && !this.video)
    {
        // There is nothing to do!
        throw new InvalidOperationException("At least one of videoFormat or audioFormat must be specified.");
    }
    // Open AVI File
    int hr = Avi32Interop.AVIFileOpen(out this.pAviFile, fileName, Avi32Interop.OF_CREATE, IntPtr.Zero);
    if (hr != 0)
    {
        throw new AviException("AVIFileOpen", hr);
    }
    try
    {
        if (this.video)
        {
            this.SetupVideo(videoFormat, compressor, fps);
        }
        if (this.audio)
        {
            this.SetupAudio(audioFormat, audioEncoder);
        }
        this.opened = true;
    }
    finally
    {
        // Roll back the native file handle if any setup step threw.
        if (!this.opened)
        {
            this.Close();
        }
    }
}