/// <summary>
/// Builds the output socket for raw AAC encoding.
/// File and Stream are deliberately left null: the caller pulls encoded output itself.
/// </summary>
static MediaSocket CreateOutputSocket(Options opt)
{
    var asi = new AudioStreamInfo
    {
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
    };
    // You can change the sampling rate and the number of the channels
    //asi.Channels = 1;
    //asi.SampleRate = 44100;

    var socket = new MediaSocket
    {
        File = null,
        Stream = null,
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
    };
    socket.Pins.Add(new MediaPin { StreamInfo = asi });

    return socket;
}
/// <summary>
/// Constructor. The decoder is always created for a particular stream, so info about
/// that stream must be provided here.
/// </summary>
/// <param name="streamInfo">Format description of the PCM stream to decode. Must not be null.</param>
/// <exception cref="ArgumentException">
/// Thrown when streamInfo is null, the format is not a supported PCM variant, or the
/// per-channel sample size is not 8/16-bit integer or 32-bit float.
/// </exception>
public AudioDecoderPCM(AudioStreamInfo streamInfo)
{
    // Validate before storing or dereferencing (the original assigned the field first,
    // which left a null check that could never protect the accesses below on reuse).
    if (streamInfo == null)
    {
        throw new ArgumentException("Can't initialize stream decoder without proper AudioStreamInfo");
    }

    this.streamInfo = streamInfo;

    // can we decode this stream?
    if (streamInfo.audioFormat != FORMAT_UNCOMPRESSED &&
        streamInfo.audioFormat != FORMAT_UNCOMPRESSED_FLOAT &&
        streamInfo.audioFormat != FORMAT_ALAW &&
        streamInfo.audioFormat != FORMAT_ULAW)
    {
        throw new ArgumentException("Unsupported PCM format=0x" + streamInfo.audioFormat.ToString("X"));
    }

    // sampleSize covers all channels; divide to get bytes per single-channel sample.
    int bytesPerChannelSample = streamInfo.sampleSize / streamInfo.channels;
    if ((streamInfo.audioFormat == FORMAT_UNCOMPRESSED_FLOAT && bytesPerChannelSample != 4) ||
        (streamInfo.audioFormat != FORMAT_UNCOMPRESSED_FLOAT && bytesPerChannelSample > 2))
    {
        throw new ArgumentException("Only 8bit and 16bit_le int, and 32bit float audio is supported. " +
                                    (bytesPerChannelSample * 8) + "bits given");
    }
}
/// <summary>
/// Returns the final container file extension for the given video/audio stream pair.
/// To avoid conflicting file names, the codec extension must be different than the final extension.
/// </summary>
/// <param name="video">Video stream info, or null when there is no video.</param>
/// <param name="audio">Audio stream info, or null when there is no audio.</param>
/// <returns>The file extension (including the dot), or an empty string.</returns>
public static string GetFinalExtension(MediaStreamInfo video, AudioStreamInfo audio)
{
    bool videoFitsWebM = video == null || video.Container == Container.WebM;
    bool audioFitsWebM = audio == null || audio.Container == Container.WebM;
    if (videoFitsWebM && audioFitsWebM)
    {
        return ".webm";
    }

    bool videoFitsMp4 = video == null || video.Container == Container.Mp4;
    bool audioFitsMp4 = audio == null || audio.Container == Container.Mp4 || audio.Container == Container.M4A;
    if (videoFitsMp4 && audioFitsMp4)
    {
        return ".mp4";
    }

    if (video != null)
    {
        // Mismatched modern containers fall back to Matroska; anything else keeps
        // the video codec's own extension.
        return (video.Container == Container.Mp4 || video.Container == Container.WebM)
            ? ".mkv"
            : GetCodecExtension(video.Container);
    }

    return audio != null ? GetCodecExtension(audio.Container) : "";
}
/// <summary>
/// Converts this ffprobe record into the matching MediaStreamInfo subtype and copies
/// the shared codec/timing fields onto it.
/// </summary>
public MediaStreamInfo ToStreamInfo()
{
    MediaStreamInfo stream;
    switch (CodecType)
    {
        case "audio":
            stream = new AudioStreamInfo
            {
                ChannelLayout = this.ChannelLayout,
                ChannelCount = this.ChannelCount,
                SampleFormat = FfmpegHelper.ParseSampleFormat(SampleFormat),
                SampleRate = SampleRate,
            };
            break;

        case "video":
            var videoStream = new VideoStreamInfo
            {
                PixelFormat = PixelFormatHelper.Parse(PixelFormat),
                Width = this.Width,
                Height = this.Height
            };
            // Rotation is carried as a stream tag rather than a first-class field.
            if (Tags != null && Tags.TryGetValue("rotate", out var rotate))
            {
                videoStream.Rotate = (int)rotate;
            }
            stream = videoStream;
            break;

        case "subtitle":
            stream = new SubtitleStreamInfo { };
            break;

        case "data":
            stream = new DataStreamInfo { };
            break;

        default:
            stream = new MediaStreamInfo();
            break;
    }

    // When a profile is present the codec name is refined via the codec registry.
    stream.Codec = Profile != null
        ? CodecInfo.Create(CodecIdHelper.Parse(CodecName), Profile).Name
        : CodecName;

    if (TimeBase != null && Rational.TryParse(TimeBase, out var timeBase))
    {
        stream.TimeBase = timeBase;
    }

    stream.Duration = Duration ?? TimeSpan.Zero;
    stream.StartTime = StartTime ?? TimeSpan.Zero;
    stream.FrameCount = FrameCount;

    return stream;
}
/// <summary>
/// Parses the comma-separated, URL-encoded adaptive stream descriptors and adds the
/// resulting audio/video stream infos to the supplied collections.
/// </summary>
/// <param name="context">Player context; its SourceUrl locates the player source used for deciphering.</param>
/// <param name="encodedData">Comma-separated list of URL-encoded stream descriptors.</param>
/// <param name="audioStreamInfos">Receives parsed audio streams.</param>
/// <param name="videoStreamInfos">Receives parsed video streams.</param>
private async Task ResolveAdaptiveStreamInfosAsync(PlayerContext context, string encodedData,
    ICollection<AudioStreamInfo> audioStreamInfos, ICollection<VideoStreamInfo> videoStreamInfos)
{
    foreach (var streamEncoded in encodedData.Split(","))
    {
        var streamInfoDic = UrlHelper.GetDictionaryFromUrlQuery(streamEncoded);

        var itag = streamInfoDic.Get("itag").ParseInt();
        var url = streamInfoDic.Get("url");
        var sig = streamInfoDic.GetOrDefault("s"); // cipher signature; blank when not ciphered
        var contentLength = streamInfoDic.Get("clen").ParseLong();
        var bitrate = streamInfoDic.Get("bitrate").ParseLong();

#if RELEASE
        // In release builds, silently skip itags this library doesn't recognize.
        if (!MediaStreamInfo.IsKnown(itag))
        {
            continue;
        }
#endif

        // Decipher signature if needed
        if (sig.IsNotBlank())
        {
            var playerSource = await GetPlayerSourceAsync(context.SourceUrl).ConfigureAwait(false);
            sig = playerSource.Decipher(sig);
            url = UrlHelper.SetUrlQueryParameter(url, "signature", sig);
        }

        // Set rate bypass so downloads aren't throttled to playback speed
        url = UrlHelper.SetUrlQueryParameter(url, "ratebypass", "yes");

        // Check if audio
        var isAudio = streamInfoDic.Get("type").Contains("audio/");

        // If audio stream
        if (isAudio)
        {
            var streamInfo = new AudioStreamInfo(itag, url, contentLength, bitrate);
            audioStreamInfos.Add(streamInfo);
        }
        // If video stream
        else
        {
            // Parse additional data; "size" has the form "<width>x<height>"
            var size = streamInfoDic.Get("size");
            var width = size.SubstringUntil("x").ParseInt();
            var height = size.SubstringAfter("x").ParseInt();
            var resolution = new VideoResolution(width, height);
            var framerate = streamInfoDic.Get("fps").ParseInt();

            var streamInfo = new VideoStreamInfo(itag, url, contentLength, bitrate, resolution, framerate);
            videoStreamInfos.Add(streamInfo);
        }
    }
}
/// <summary>
/// Copy constructor: clones all format fields from another AudioStreamInfo.
/// </summary>
/// <param name="ai">Instance to copy from. Must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="ai"/> is null.</exception>
public AudioStreamInfo(AudioStreamInfo ai)
{
    // Fail with a clear exception instead of the NullReferenceException the
    // original would have thrown on the first field access.
    if (ai == null)
    {
        throw new ArgumentNullException(nameof(ai));
    }

    codecFourCC = ai.codecFourCC;
    audioFormat = ai.audioFormat;
    sampleCount = ai.sampleCount;
    sampleSize = ai.sampleSize;
    channels = ai.channels;
    sampleRate = ai.sampleRate;
    lengthBytes = ai.lengthBytes;
}
// Control showing/editing the details of a single audio stream.
// NOTE(review): the shared Combobox_OnMouseWheel handler presumably suppresses
// accidental value changes on scroll — confirm in Utils.Helpers.
public ucAudioStreamDetail(AudioStreamInfo audioStreamInfo)
{
    InitializeComponent();
    InitControls();
    // Bind the supplied stream data before wiring extra event handlers.
    RefreshControls(audioStreamInfo);
    cbLanguage.MouseWheel += Utils.Helpers.Combobox_OnMouseWheel;
    cbSource.MouseWheel += Utils.Helpers.Combobox_OnMouseWheel;
}
/// <summary>
/// Starts writing an OpenDML AVI: emits the RIFF/hdrl headers for the video stream and,
/// when present, the audio stream, then opens the movi list. Chunk offsets are remembered
/// in <c>offsets</c> so the headers can be patched when the file is finalized.
/// </summary>
/// <exception cref="ArgumentException">dstStream or videoStreamInfo is null.</exception>
public override void Init(Stream dstStream, VideoStreamInfo videoStreamInfo, AudioStreamInfo audioStreamInfo)
{
    if (dstStream == null || videoStreamInfo == null)
    {
        throw new ArgumentException("At least destination stream and video stream info is needed");
    }
    base.Init(dstStream, videoStreamInfo, audioStreamInfo);

    // Reset per-file state so a writer instance can be reused.
    usingMultipleRiffs = false;
    totalFramesOld = 0;
    totalFrames = 0;
    totalSamples = 0;

    writer = new RiffWriter(dstStream);
    writer.BeginRiff(AviDemux.ID_AVI_);
    writer.BeginList(AviDemux.ID_hdrl);

    // main header; offset kept so frame counts can be rewritten on close
    // NOTE(review): hasAudioStream is presumably derived from audioStreamInfo in base.Init — confirm.
    offsets.avih = WriteMainHeader(writer, videoStreamInfo, hasAudioStream);

    // video stream header
    writer.BeginList(AviDemux.ID_strl);
    offsets.videoStrh = WriteVideoStreamHeader(writer, videoStreamInfo);
    WriteVideoFormatHeader(writer, videoStreamInfo);
    // placeholder super index; entries are filled in as chunks are written
    offsets.videoIndx = WriteDummySuperIndex(writer, AviDemux.ID_00dc, maxSuperindexEntries);
    videoSuperIndexEntryCount = 0;
    writer.EndList(); // end of strl

    videoIndex = new AviStreamIndex();
    videoIndex.streamId = AviDemux.ID_00dc;

    if (hasAudioStream)
    {
        // audio stream header (mirrors the video strl above)
        writer.BeginList(AviDemux.ID_strl);
        offsets.audioStrh = WriteAudioStreamHeader(writer, audioStreamInfo);
        WriteAudioFormatHeader(writer, audioStreamInfo);
        offsets.audioIndx = WriteDummySuperIndex(writer, AviDemux.ID_01wb, maxSuperindexEntries);
        audioSuperIndexEntryCount = 0;
        writer.EndList(); // end of strl

        audioIndex = new AviStreamIndex();
        audioIndex.streamId = AviDemux.ID_01wb;
    }

    // odml header (OpenDML extended total-frame count)
    writer.BeginList(AviDemux.ID_odml);
    offsets.dmlh = WriteDmlhHeader(writer, videoStreamInfo.frameCount);
    writer.EndList();

    writer.EndList(); // end of hdrl

    writer.BeginList(AviDemux.ID_movi);
    // index entries are written relative to this position
    offsets.indexBase = writer.binaryWriter.Seek(0, SeekOrigin.Current);
}
/// <summary>
/// Resolves a playable URL for the current music item: the stored source URL when
/// present, otherwise the URL of the highest-bitrate YouTube audio stream.
/// </summary>
/// <returns>The stream URL, or null when no audio stream is available.</returns>
private async Task<String> RetrieveMusicInfo()
{
    // A pre-resolved source URL short-circuits the YouTube lookup.
    if (!string.IsNullOrEmpty(Music.SourceUrl))
    {
        return Music.SourceUrl;
    }

    MediaStreamInfoSet infoSet = await _youtubeClient.GetVideoMediaStreamInfosAsync(Music.Id);
    return infoSet.Audio.WithHighestBitrate()?.Url;
}
/// <summary>
/// Resolves the highest-bitrate audio stream info for the given YouTube video id.
/// (The commented-out youtube-dl based GetAudioUrl prototype that used to live here
/// was dead code and has been removed.)
/// </summary>
/// <param name="id">YouTube video id.</param>
/// <returns>The audio stream with the highest bitrate.</returns>
public async Task<AudioStreamInfo> GetStreamInfo(string id)
{
    // Await the client directly: the previous version wrapped this naturally-async
    // I/O call in Task.Run, which only burned a thread-pool thread for no benefit.
    var client = new YoutubeClient();
    var streamInfoSet = await client.GetVideoMediaStreamInfosAsync(id);
    return streamInfoSet.Audio.OrderByDescending(x => x.Bitrate).First();
}
/// <summary>
/// Begins writing an OpenDML AVI file: writes the RIFF/hdrl headers for the video
/// stream and, when present, the audio stream, then opens the movi list. Header
/// offsets are recorded in <c>offsets</c> for later patching on finalize.
/// </summary>
/// <exception cref="ArgumentException">dstStream or videoStreamInfo is null.</exception>
public override void Init(Stream dstStream, VideoStreamInfo videoStreamInfo, AudioStreamInfo audioStreamInfo)
{
    if (dstStream == null || videoStreamInfo == null)
    {
        throw new ArgumentException("At least destination stream and video stream info is needed");
    }
    base.Init(dstStream, videoStreamInfo, audioStreamInfo);

    // Reset per-file counters so this writer can be reused for a new file.
    usingMultipleRiffs = false;
    totalFramesOld = 0;
    totalFrames = 0;
    totalSamples = 0;

    writer = new RiffWriter(dstStream);
    writer.BeginRiff(AviDemux.ID_AVI_);
    writer.BeginList(AviDemux.ID_hdrl);

    // main header; offset saved so frame counts can be rewritten later
    // NOTE(review): hasAudioStream presumably set by base.Init from audioStreamInfo — confirm.
    offsets.avih = WriteMainHeader(writer, videoStreamInfo, hasAudioStream);

    // video stream header
    writer.BeginList(AviDemux.ID_strl);
    offsets.videoStrh = WriteVideoStreamHeader(writer, videoStreamInfo);
    WriteVideoFormatHeader(writer, videoStreamInfo);
    // dummy super index, filled in while chunks are written
    offsets.videoIndx = WriteDummySuperIndex(writer, AviDemux.ID_00dc, maxSuperindexEntries);
    videoSuperIndexEntryCount = 0;
    writer.EndList(); // end of strl

    videoIndex = new AviStreamIndex();
    videoIndex.streamId = AviDemux.ID_00dc;

    if (hasAudioStream)
    {
        // audio stream header (same shape as the video strl above)
        writer.BeginList(AviDemux.ID_strl);
        offsets.audioStrh = WriteAudioStreamHeader(writer, audioStreamInfo);
        WriteAudioFormatHeader(writer, audioStreamInfo);
        offsets.audioIndx = WriteDummySuperIndex(writer, AviDemux.ID_01wb, maxSuperindexEntries);
        audioSuperIndexEntryCount = 0;
        writer.EndList(); // end of strl

        audioIndex = new AviStreamIndex();
        audioIndex.streamId = AviDemux.ID_01wb;
    }

    // odml header (OpenDML extended total-frames field)
    writer.BeginList(AviDemux.ID_odml);
    offsets.dmlh = WriteDmlhHeader(writer, videoStreamInfo.frameCount);
    writer.EndList();

    writer.EndList(); // end of hdrl

    writer.BeginList(AviDemux.ID_movi);
    // stream index entries are relative to this file position
    offsets.indexBase = writer.binaryWriter.Seek(0, SeekOrigin.Current);
}
/// <summary>
/// Dumps the audio stream's properties to standard output, one per line.
/// </summary>
static void PrintAudio(AudioStreamInfo asi)
{
    var output = Console.Out;
    output.WriteLine("bitrate: {0} mode: {1}", asi.Bitrate, asi.BitrateMode);
    output.WriteLine("bits per sample: {0}", asi.BitsPerSample);
    output.WriteLine("bytes per frame: {0}", asi.BytesPerFrame);
    output.WriteLine("channel layout: {0:X}", asi.ChannelLayout);
    output.WriteLine("channels: {0}", asi.Channels);
    output.WriteLine("flags: {0:X}", asi.PcmFlags);
    output.WriteLine("sample rate: {0}", asi.SampleRate);
}
/// <summary>
/// Writes the 'strf' chunk for the audio stream: a WAVEFORMATEX structure laid out
/// field-by-field. Write order and widths must match the on-disk format exactly.
/// </summary>
private static void WriteAudioFormatHeader(RiffWriter rw, AudioStreamInfo asi)
{
    rw.BeginChunk(AviDemux.ID_strf);
    var bw = rw.binaryWriter;
    bw.Write((ushort)asi.audioFormat); // wFormatTag
    bw.Write((short)asi.channels); // nChannels
    bw.Write(asi.sampleRate); // nSamplesPerSec
    bw.Write(asi.sampleRate * asi.sampleSize * asi.channels); // nAvgBytesPerSec @xxx true for PCM audio only, but this is a "soft" property
    bw.Write((short)asi.sampleSize); // nBlockAlign
    bw.Write((short)(8 * asi.sampleSize / asi.channels)); // wBitsPerSample
    bw.Write((short)0); // cbSize. no extra codec info follows
    // NOTE(review): given the wBitsPerSample computation, sampleSize appears to mean
    // bytes per frame across all channels — confirm against AudioStreamInfo's definition.
    rw.EndChunk();
}
/// <summary>
/// Demo entry point: resolves an audio stream for a fixed YouTube video, plays it
/// through NAudio while tee-ing it to ./audio.&lt;ext&gt;, then waits for Enter.
/// Removed: six unused locals (buffer, mem, memoryStream, len, lastPosition, present)
/// and the commented-out download/playback experiments.
/// </summary>
static async Task Main(string[] args)
{
    var client = new YoutubeClient();
    var videoInfo = await client.GetVideoMediaStreamInfosAsync("jOxzAsnx9-0");
    AudioStreamInfo info = videoInfo.Audio[0];

    // SavingWaveProvider writes the decoded stream to disk as it is played.
    using (var mf = new SavingWaveProvider(new MediaFoundationReader(info.Url), $@"./audio.{info.Container.GetFileExtension()}"))
    using (var wo = new WaveOutEvent() { DesiredLatency = 30000 })
    {
        wo.Init(mf);
        wo.Play();

        // Poll until playback finishes; WaveOutEvent exposes no awaitable completion.
        while (wo.PlaybackState == PlaybackState.Playing)
        {
            await Task.Delay(1000);
        }
    }

    Console.WriteLine("Hello World!");
    Console.ReadLine();
}
/// <summary>
/// Renders the audio configuration as a human-readable multi-line string for logging.
/// </summary>
internal static string DumpConfig(this AudioStreamInfo audioConf)
{
    var builder = new StringBuilder();
    builder.AppendLine("AudioStreamInfo:")
           .Append("\tmimeType = ").AppendLine(audioConf.mimeType.ToString())
           .Append("\tsampleRate = ").AppendLine(audioConf.sampleRate.ToString())
           .Append("\tchannels = ").AppendLine(audioConf.channels.ToString());
    return builder.ToString();
}
/// <summary>
/// Maps an ffprobe stream record onto the corresponding StreamInfo subtype, then
/// copies the fields common to every stream kind.
/// </summary>
StreamInfo Map(Stream stream)
{
    StreamInfo info;

    switch (stream.CodecType)
    {
        case "audio":
            info = new AudioStreamInfo()
            {
                Format = GetAudioFormat(stream.CodecName, stream.Profile),
                ChannelCount = GetInt(stream.Channels),
                ProfileName = stream.Profile
            };
            break;

        case "subtitle":
            info = new SubtitleStreamInfo()
            {
                SubtitleType = GetSubtitleType(stream.CodecName),
                Format = GetSubtitleFormat(stream.CodecName)
            };
            break;

        case "video":
            var storage = new Dimensions(GetInt(stream.Width), GetInt(stream.Height));
            info = new VideoStreamInfo()
            {
                BitDepth = GetBitDepth(stream.PixFmt),
                Dimensions = GetActualDimensions(storage, stream.SampleAspectRatio),
                StorageDimensions = storage,
                // smpte2084 is the PQ transfer function, i.e. HDR content.
                DynamicRange = stream.ColorTransfer == "smpte2084" ? DynamicRange.High : DynamicRange.Standard
            };
            break;

        default:
            info = new StreamInfo();
            break;
    }

    info.Index = (int)stream.Index;
    info.FormatName = stream.CodecName;

    if (stream.Tags != null)
    {
        info.Language = stream.Tags.TryGetValue("language", out var language) ? language : null;
    }

    return info;
}
/// <summary>
/// Probes the file with MediaInfo and captures the parameters of the first video and
/// first audio stream into _videoStreamInfo / _audioStreamInfo.
/// </summary>
/// <param name="filePath">Path of the media file to probe.</param>
/// <returns>false when the file cannot be opened; true otherwise.</returns>
bool ConfigureStreams(string filePath)
{
    using (MediaInfo mediaInfo = new MediaInfo())
    {
        mediaInfo.Inputs[0].File = filePath;
        if (!(mediaInfo.Open()))
        {
            return (false);
        }

        // configure streams: take the first video and first audio pin encountered
        foreach (var socket in mediaInfo.Outputs)
        {
            foreach (var pin in socket.Pins)
            {
                StreamInfo si = pin.StreamInfo;
                MediaType mediaType = si.MediaType;

                if ((MediaType.Video == mediaType) && (null == _videoStreamInfo))
                {
                    // Copy only the needed fields into a fresh object rather than
                    // keeping the MediaInfo-owned instance alive past the using block.
                    VideoStreamInfo vsi = (VideoStreamInfo)si;
                    _videoStreamInfo = new VideoStreamInfo();
                    _videoStreamInfo.FrameWidth = vsi.FrameWidth;
                    _videoStreamInfo.FrameHeight = vsi.FrameHeight;
                    _videoStreamInfo.DisplayRatioWidth = vsi.DisplayRatioWidth;
                    _videoStreamInfo.DisplayRatioHeight = vsi.DisplayRatioHeight;
                    _videoStreamInfo.FrameRate = vsi.FrameRate;
                }

                if ((MediaType.Audio == mediaType) && (null == _audioStreamInfo))
                {
                    AudioStreamInfo asi = (AudioStreamInfo)si;
                    _audioStreamInfo = new AudioStreamInfo();
                    _audioStreamInfo.BitsPerSample = asi.BitsPerSample;
                    _audioStreamInfo.Channels = asi.Channels;
                    _audioStreamInfo.SampleRate = asi.SampleRate;
                }
            }
        }
    }
    return (true);
}
/// <summary>
/// Configures the uncompressed input frame buffer for an LPCM audio stream:
/// derives the bytes-per-frame when absent and allocates one frame's worth of buffer.
/// </summary>
/// <param name="ainfo">Audio stream description; must be LPCM.</param>
/// <exception cref="NotSupportedException">The input stream is not LPCM (i.e. compressed).</exception>
private void Init(AudioStreamInfo ainfo)
{
    // Guard clause first; the original threw the overly generic System.Exception here.
    // NotSupportedException is more specific and still caught by any catch (Exception).
    if (ainfo.StreamType != StreamType.LPCM)
    {
        throw new NotSupportedException("Unsupported: audio input is compressed");
    }

    // Derive bytes-per-frame from sample width and channel count when not provided.
    if (0 == ainfo.BytesPerFrame)
    {
        ainfo.BytesPerFrame = ainfo.BitsPerSample / 8 * ainfo.Channels;
    }

    frameRate = 10;
    // One "frame" of input = 1/frameRate seconds of audio.
    uncompressedFrameSize = ainfo.BytesPerFrame * ainfo.SampleRate / (int)frameRate;
    inframe.Buffer = new MediaBuffer(new byte[uncompressedFrameSize]);
    inframe.Buffer.SetData(0, 0); // needed because setting the buffer actually sets the data as well
}
/// <summary>
/// Disposes the transcoder and resets all per-session state so the instance can be reused.
/// </summary>
public void Close()
{
    _transcoder?.Dispose();
    _transcoder = null;

    _decoderThread = null;
    _videoStreamInfo = null;
    _audioStreamInfo = null;
    _videoStreamIndex = -1;
    _audioStreamIndex = -1;
    _decoderEOS = false;
    _cancellationPending = false;
}
/// <summary>
/// Rebinds the control to the given stream info and refreshes the title tooltip
/// and language selection.
/// </summary>
/// <param name="audioStreamInfo">The stream whose details are displayed.</param>
public void RefreshControls(AudioStreamInfo audioStreamInfo)
{
    _bsControlsData.DataSource = audioStreamInfo;
    _bsControlsData.ResetBindings(false);

    // Clear any tooltip left over from the previously bound stream. (The original
    // called RemoveAll() a second, redundant time inside the else branch.)
    ttTitleContent.RemoveAll();
    if (audioStreamInfo.HasTitle && !string.IsNullOrEmpty(audioStreamInfo.Title))
    {
        ttTitleContent.SetToolTip(chbTitle, audioStreamInfo.Title);
        chbTitle.Cursor = Cursors.Help;
    }
    else
    {
        chbTitle.Cursor = Cursors.Default;
    }

    cbLanguage.SelectedItem = Languages.GetLanguageFromIdentifier(audioStreamInfo.Language);
}
/// <summary>
/// Builds the output socket for a raw AAC stream written to opt.OutputFile.
/// </summary>
static MediaSocket CreateOutputSocket(Options opt)
{
    var asi = new AudioStreamInfo
    {
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
    };

    var socket = new MediaSocket
    {
        File = opt.OutputFile,
        StreamType = StreamType.Aac,
        StreamSubType = StreamSubType.AacRaw
    };
    socket.Pins.Add(new MediaPin { StreamInfo = asi });

    return socket;
}
// Serializing an AudioStreamInfo should produce exactly the expected JSON document
// (type discriminator first, then codec/duration/sample format/rate/channel layout).
public void A()
{
    var stream = new AudioStreamInfo
    {
        Codec = CodecInfo.Create(CodecId.Aac, "LC").Name, // resolves to "mp4a.40.2"
        ChannelLayout = "Stereo",
        Duration = TimeSpan.FromMinutes(1),
        SampleRate = 44100,
        StartTime = TimeSpan.Zero,
        SampleFormat = SampleFormat.F32
    };

    // NOTE(review): StartTime is set but absent from the expected JSON — presumably
    // zero values are omitted by the serializer; confirm in JsonObject.FromObject.
    Assert.Equal(@"{ ""type"": ""Audio"", ""codec"": ""mp4a.40.2"", ""duration"": ""00:01:00"", ""sampleFormat"": ""F32"", ""sampleRate"": 44100, ""channelLayout"": ""Stereo"" }", JsonObject.FromObject(stream).ToString());
}
// Click handler: resolves the pasted YouTube link, picks an audio stream, and
// populates the thumbnail/title UI plus the tag picture.
// NOTE(review): async void is acceptable only because this is an event handler —
// exceptions thrown here are unobservable. WebClient.DownloadData below is
// synchronous and will block the UI thread; consider DownloadDataTaskAsync.
private async void Button_Click(object sender, RoutedEventArgs e)
{
    string link = linkBox.Text;
    var id = YoutubeClient.ParseVideoId(link);
    client = new YoutubeClient();
    video = await client.GetVideoAsync(id);
    streamInfoSet = await client.GetVideoMediaStreamInfosAsync(video.Id);

    streamInfo = streamInfoSet.Audio.WithHighestBitrate();
    ext = streamInfo.Container.GetFileExtension();

    // Preference order: WebM container first, then highest bitrate.
    MediaStreamInfo m = audioStreamInfo = streamInfoSet.Audio
        .OrderByDescending(s => s.Container == Container.WebM)
        .ThenByDescending(s => s.Bitrate)
        .First();

    var pic = new BitmapImage(new Uri(video.Thumbnails.HighResUrl));
    thumbnail.Source = pic;
    //pic.CreateOptions = BitmapCreateOptions.None;
    //WriteableBitmap wb = new WriteableBitmap(pic);
    //ImageConverter converter = new ImageConverter();
    //byte[] b = (byte[])converter.ConvertTo(m, typeof(byte[]));
    //MemoryStream ms = new MemoryStream(b);

    // Fetch the thumbnail bytes again for embedding as the tag picture.
    var webClient = new WebClient();
    byte[] imageBytes = webClient.DownloadData(video.Thumbnails.HighResUrl);
    picture = new Picture(imageBytes);

    titleInfo.Text = video.Title;
}
/// <summary>
/// Builds a human-readable description (codec, quality, size) for a stream option.
/// Returns an empty string for unrecognized stream types.
/// </summary>
private string GetStreamDescription(MediaStreamInfo stream)
{
    // Pattern matching replaces the original is-check + as-cast pairs;
    // check order is preserved (video, audio, muxed).
    if (stream is VideoStreamInfo videoStream)
    {
        return string.Format("{0} {1}p ({2}mb)", videoStream.VideoEncoding, DownloadManager.GetVideoHeight(videoStream), videoStream.Size / 1024 / 1024);
    }

    if (stream is AudioStreamInfo audioStream)
    {
        return string.Format("{0} {1}kbps ({2}mb)", audioStream.AudioEncoding, audioStream.Bitrate / 1024, audioStream.Size / 1024 / 1024);
    }

    if (stream is MuxedStreamInfo muxedStream)
    {
        return string.Format("{0} {1}p ({2}mb) (with audio)", muxedStream.VideoEncoding, DownloadManager.GetVideoHeight(muxedStream), muxedStream.Size / 1024 / 1024);
    }

    return "";
}
/// <summary>
/// Reads the negotiated WaveEx media type from the DirectShow audio grabber and
/// registers a matching LPCM input socket with the transcoder.
/// </summary>
/// <exception cref="COMException">The grabber's media type is not WaveEx audio.</exception>
static void ConfigureAudioInput(DSGraph graph, Transcoder transcoder)
{
    AMMediaType mt = new AMMediaType();
    int hr;
    try
    {
        hr = graph.audioGrabber.GetConnectedMediaType(mt);
        DsError.ThrowExceptionForHR(hr);

        if ((mt.majorType != DirectShowLib.MediaType.Audio) ||
            (mt.formatType != DirectShowLib.FormatType.WaveEx))
        {
            throw new COMException("Unexpected format type");
        }

        // Marshal the native WAVEFORMATEX the graph negotiated.
        WaveFormatEx wfx = (WaveFormatEx)Marshal.PtrToStructure(mt.formatPtr, typeof(WaveFormatEx));

        AudioStreamInfo audioInfo = new AudioStreamInfo();
        audioInfo.BitsPerSample = wfx.wBitsPerSample;
        audioInfo.Channels = wfx.nChannels;
        audioInfo.SampleRate = wfx.nSamplesPerSec;
        audioInfo.StreamType = StreamType.LPCM;

        MediaSocket inputSocket = new MediaSocket();
        MediaPin inputPin = new MediaPin();
        inputPin.StreamInfo = audioInfo;
        inputSocket.Pins.Add(inputPin);
        inputSocket.StreamType = StreamType.LPCM;

        // The grabber callback will feed samples into this transcoder input slot.
        graph.audioGrabberCB.Init(transcoder, transcoder.Inputs.Count, graph.mediaControl);
        transcoder.Inputs.Add(inputSocket);
    }
    finally
    {
        // Always release the media type's native allocations.
        DsUtils.FreeAMMediaType(mt);
    }
}
/// <summary>
/// Builds the list of downloadable media types (MP3 from the first mp4-container audio
/// stream, MP4 from each mp4-container muxed stream) for a YouTube media id.
/// The cancel-operation token is disposed in all cases.
/// </summary>
/// <param name="mediaId">YouTube media id; blank input yields an empty list.</param>
private List<MediaType> GetMediaTypeInfo(string mediaId)
{
    List<MediaType> result = new List<MediaType>();
    // The original wrapped this in catch (Exception ex) { throw ex; }, which reset the
    // stack trace and added nothing; try/finally preserves the cleanup without that bug.
    try
    {
        if (!String.IsNullOrWhiteSpace(mediaId))
        {
            MediaStreamInfoSet streamInfoSet = this.GetMediaStreamInfo(mediaId);

            if (streamInfoSet.Audio != null)
            {
                // The first mp4-container audio stream maps onto the MP3 option.
                AudioStreamInfo asiItem = streamInfoSet.Audio.FirstOrDefault(s => s.Container.GetFileExtension().ToLower().Equals("mp4"));
                if (asiItem != null)
                {
                    result.Add(new MediaType(MediaType.ExtensionType.MP3, null, asiItem.Size));
                }
            }

            // We don't use the Video streams: they carry picture only, and combining them
            // with audio would additionally require Youtube.Converter + FFMPEG.
            if (streamInfoSet.Muxed != null)
            {
                foreach (MuxedStreamInfo vsiItem in streamInfoSet.Muxed.Where(s => s.Container.GetFileExtension().ToLower().Equals("mp4")))
                {
                    result.Add(new MediaType(MediaType.ExtensionType.MP4, vsiItem.VideoQualityLabel, vsiItem.Size));
                }
            }
        }
    }
    finally
    {
        this.CancelOperation?.Dispose();
        this.CancelOperation = null;
    }

    return result;
}
/// <summary>
/// Translates a source audio stream plus its requested output settings into the
/// ffmpeg stream-mapping arguments.
/// </summary>
protected virtual MappedAudioStream MapAudioStream(FFmpegConfig config, AudioStreamInfo sourceStream, AudioOutputStream outputStream)
{
    var mapped = new MappedAudioStream()
    {
        Input = GetStreamInput(sourceStream),
        Codec = new Codec(GetAudioCodecName(config, outputStream.Format))
    };

    // Optional mixdown: translate the layout into an explicit channel count.
    if (outputStream.Mixdown.HasValue)
    {
        mapped.ChannelCount = AudioUtility.GetChannelCount(outputStream.Mixdown.Value);
    }

    // Optional quality: interpreted as a bitrate in kbit/s.
    if (outputStream.Quality.HasValue)
    {
        mapped.Bitrate = $"{outputStream.Quality:0}k";
    }

    return mapped;
}
/// <summary>
/// Downloads the audio stream of a YouTube link into the music storage folder and
/// returns the queue entry describing it. File name is a fresh GUID plus the
/// container's extension.
/// </summary>
/// <param name="link">YouTube video URL (anything ParseVideoId accepts).</param>
public SongInQueue DownloadSong(string link)
{
    string guid = Guid.NewGuid().ToString();
    SongInQueue result = new SongInQueue();
    string fullFilePath = _musicStorage + guid;
    YoutubeClient client = new YoutubeClient();
    try
    {
        _log.Debug($"Started processing { link }");
        string parsedYoutubeId = YoutubeClient.ParseVideoId(link);

        // NOTE(review): .Result/.Wait() block on async calls — acceptable on a worker
        // thread, but a deadlock risk under a synchronization context; consider making
        // this method async end-to-end.
        MediaStreamInfoSet streamInfoSet = client.GetVideoMediaStreamInfosAsync(parsedYoutubeId).Result;
        YoutubeExplode.Models.Video video = client.GetVideoAsync(parsedYoutubeId).Result;
        result.Name = video.Title;

        AudioStreamInfo streamInfo = streamInfoSet.Audio.FirstOrDefault();
        string ext = streamInfo.Container.GetFileExtension();
        fullFilePath += $".{ ext }";

        IProgress<double> progress = new YoutubeExtractorClientProgress($"{result.Name} - { guid }");
        client.DownloadMediaStreamAsync(streamInfo, fullFilePath, progress).Wait();

        // NOTE(review): inputFile is never used — confirm the MediaFile constructor has
        // no required side effect before deleting this line.
        var inputFile = new MediaFile(fullFilePath);

        result.FilePath = fullFilePath;
        _log.Debug("Finished processing file " + link);
        return result;
    }
    catch (Exception ex)
    {
        _log.Error($"Error while downloading youtube song", ex);
        throw; // was `throw ex;`, which reset the original stack trace
    }
}
/// <summary>
/// Writes the 'strh' (AVISTREAMHEADER) chunk for the audio stream. Returns the file
/// offset of the chunk body so fields (e.g. dwLength) can be patched on finalize.
/// Field order and widths must match the on-disk structure exactly.
/// </summary>
private static int WriteAudioStreamHeader(RiffWriter rw, AudioStreamInfo asi)
{
    rw.BeginChunk(AviDemux.ID_strh);
    // Remember where the body starts for later in-place rewrites.
    int offset = (int)rw.binaryWriter.Seek(0, SeekOrigin.Current);
    var bw = rw.binaryWriter;
    bw.Write(AviDemux.FCC_auds); // fccType: audio stream
    bw.Write(asi.codecFourCC); // fccHandler
    bw.Write((int)0); // dwFlags
    bw.Write((short)0); // wPriority
    bw.Write((short)0); // wLanguage
    bw.Write((int)0); // dwInitialFrames
    bw.Write((int)1); // dwScale
    bw.Write(asi.sampleRate); // dwRate @xxx This is true for PCM audio only!
    bw.Write((int)0); // dwStart
    bw.Write(asi.sampleCount); // dwLength. will be written over later
    bw.Write((int)0); // dwSuggestedBufferSize, not suggesting it
    bw.Write((int)-1); // dwQuality = -1 meaning "default quality"
    bw.Write(asi.sampleSize); // dwSampleSize
    bw.Write((long)0); // rcFrame
    rw.EndChunk();
    return (offset);
}
/// <summary>
/// Resolves a SongInfo (metadata plus best audio stream URL) for a search term or URL.
/// Returns null when no video or no audio stream can be found.
/// </summary>
private static async Task<SongInfo> GetVideoInfo(EduardoContext context, string input)
{
    List<Video> videos = await VideoHelper.GetOrSearchVideoAsync(input);

    // Null list, empty list, and null first element all collapse to "not found".
    Video video = videos?.FirstOrDefault();
    if (video == null)
    {
        return null;
    }

    MediaStreamInfoSet streamInfoSet = await VideoHelper.GetMediaStreamInfoAsync(video);
    AudioStreamInfo bestAudio = streamInfoSet.Audio.OrderByDescending(a => a.Bitrate).FirstOrDefault();
    if (bestAudio == null)
    {
        return null;
    }

    return new SongInfo
    {
        Name = video.Title,
        Duration = video.Duration,
        VideoId = video.Id,
        Url = video.GetShortUrl(),
        StreamUrl = bestAudio.Url,
        RequestedBy = context.User as IGuildUser,
        Description = video.Description,
        ThumbnailUrl = video.Thumbnails.StandardResUrl
    };
}
/// <summary>
/// Prints every stream of every output socket of the media info to the console,
/// including type-specific detail for video and audio streams.
/// </summary>
static void PrintStreams(MediaInfo mediaInfo)
{
    foreach (var socket in mediaInfo.Outputs)
    {
        Console.WriteLine("container: {0}", socket.StreamType);
        Console.WriteLine("streams: {0}", socket.Pins.Count);

        for (int i = 0; i < socket.Pins.Count; i++)
        {
            StreamInfo si = socket.Pins[i].StreamInfo;

            Console.WriteLine();
            Console.WriteLine("stream #{0} {1}", i, si.MediaType);
            Console.WriteLine("type: {0}", si.StreamType);
            Console.WriteLine("subtype: {0}", si.StreamSubType);
            Console.WriteLine("id: {0}", si.ID);
            Console.WriteLine("duration: {0:f3}", si.Duration);

            if (MediaType.Video == si.MediaType)
            {
                PrintVideo(si as VideoStreamInfo);
            }
            else if (MediaType.Audio == si.MediaType)
            {
                PrintAudio(si as AudioStreamInfo);
            }
            else
            {
                Console.WriteLine();
            }
        }

        Console.WriteLine();
    }
}
/// <summary>
/// Opens the given file with the shared MediaInfo instance and populates the
/// General info plus the Video, Audio, Image, Text and Chapters collections
/// from the probed streams, closing the MediaInfo handle when done.
/// </summary>
/// <param name="fileName">Path of the media file to inspect.</param>
public void GetMediaInfo(string fileName)
{
    const NumberStyles numStyle = NumberStyles.Number;

    // Lazily create the shared MediaInfo instance; "Internet: No" disables the
    // library's online lookups.
    if (Processing.mediaInfo == null)
    {
        Processing.mediaInfo = new MediaInfo();
        Processing.mediaInfo.Option("Internet", "No");
    }
    Processing.mediaInfo.Open(fileName);

    // Per-kind stream counts drive the loops below.
    _videoStreams = Processing.mediaInfo.Count_Get(StreamKind.Video);
    _audioStreams = Processing.mediaInfo.Count_Get(StreamKind.Audio);
    _imageStreams = Processing.mediaInfo.Count_Get(StreamKind.Image);
    _textStreams = Processing.mediaInfo.Count_Get(StreamKind.Text);
    _menuCount = Processing.mediaInfo.Count_Get(StreamKind.Menu);

    #region Get General Info
    General.CompleteName = Processing.mediaInfo.Get(StreamKind.General, 0, "CompleteName");
    General.FileName = Processing.mediaInfo.Get(StreamKind.General, 0, "FileName");
    General.FileExtension = Processing.mediaInfo.Get(StreamKind.General, 0, "FileExtension");
    General.Format = Processing.mediaInfo.Get(StreamKind.General, 0, "Format");
    General.FormatExtensions = Processing.mediaInfo.Get(StreamKind.General, 0, "Format/Extensions");
    General.InternetMediaType = Processing.mediaInfo.Get(StreamKind.General, 0, "InternetMediaType");
    // "Duration/String3" is MediaInfo's HH:mm:ss.fff form, parsed as a time of day.
    DateTime.TryParse(Processing.mediaInfo.Get(StreamKind.General, 0, "Duration/String3"), AppSettings.CInfo,
                      DateTimeStyles.AssumeLocal, out General.DurationTime);
    General.Title = Processing.mediaInfo.Get(StreamKind.General, 0, "Title");
    General.EncodedApplication = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Application");
    General.EncodedApplicationUrl = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Application/Url");
    General.EncodedLibrary = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library");
    General.EncodedLibraryName = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library/Name");
    General.EncodedLibraryVersion = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library/Version");
    General.EncodedLibraryDate = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library/Date");
    General.EncodedLibrarySettings = Processing.mediaInfo.Get(StreamKind.General, 0, "Encoded_Library_Settings");
    #endregion

    #region Get Video Info
    for (int i = 0; i < _videoStreams; i++)
    {
        VideoStreamInfo videoStream = new VideoStreamInfo();

        // TryParse straight into the info-object fields; failed parses leave 0/default.
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "StreamKindID"), numStyle,
                       AppSettings.CInfo, out videoStream.StreamKindID);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "StreamKindPos"), numStyle,
                       AppSettings.CInfo, out videoStream.StreamKindPos);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "ID"), numStyle,
                       AppSettings.CInfo, out videoStream.ID);
        videoStream.Format = Processing.mediaInfo.Get(StreamKind.Video, i, "Format");
        videoStream.FormatInfo = Processing.mediaInfo.Get(StreamKind.Video, i, "Format/Info");
        videoStream.FormatVersion = Processing.mediaInfo.Get(StreamKind.Video, i, "Format_Version");
        videoStream.FormatProfile = Processing.mediaInfo.Get(StreamKind.Video, i, "Format_Profile");
        videoStream.MultiViewBaseProfile = Processing.mediaInfo.Get(StreamKind.Video, i, "MultiView_BaseProfile");
        videoStream.MultiViewCount = Processing.mediaInfo.Get(StreamKind.Video, i, "MultiView_Count");
        videoStream.InternetMediaType = Processing.mediaInfo.Get(StreamKind.Video, i, "InternetMediaType");
        videoStream.CodecID = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID");
        videoStream.CodecIDInfo = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID/Info");
        videoStream.CodecIDUrl = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID/Url");
        videoStream.CodecIDDescription = Processing.mediaInfo.Get(StreamKind.Video, i, "CodecID_Description");
        DateTime.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "Duration/String3"), AppSettings.CInfo,
                          DateTimeStyles.AssumeLocal, out videoStream.DurationTime);
        videoStream.BitRateMode = Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Mode");
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate"), numStyle,
                       AppSettings.CInfo, out videoStream.BitRate);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Minimum"), numStyle,
                       AppSettings.CInfo, out videoStream.BitRateMin);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Nominal"), numStyle,
                       AppSettings.CInfo, out videoStream.BitRateNom);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitRate_Maximum"), numStyle,
                       AppSettings.CInfo, out videoStream.BitRateMax);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "Width"), numStyle,
                       AppSettings.CInfo, out videoStream.Width);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "Height"), numStyle,
                       AppSettings.CInfo, out videoStream.Height);
        videoStream.PixelAspectRatio = Processing.mediaInfo.Get(StreamKind.Video, i, "PixelAspectRatio");
        videoStream.DisplayAspectRatio = Processing.mediaInfo.Get(StreamKind.Video, i, "DisplayAspectRatio");
        videoStream.FrameRateMode = Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Mode");
        Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate"), numStyle,
                        AppSettings.CInfo, out videoStream.FrameRate);
        // Derive a rational (numerator/denominator) pair from the float frame rate.
        Processing.GetFPSNumDenom(videoStream.FrameRate, out videoStream.FrameRateEnumerator,
                                  out videoStream.FrameRateDenominator);
        Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Minimum"), numStyle,
                        AppSettings.CInfo, out videoStream.FrameRateMin);
        Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Nominal"), numStyle,
                        AppSettings.CInfo, out videoStream.FrameRateNom);
        Single.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameRate_Maximum"), numStyle,
                        AppSettings.CInfo, out videoStream.FrameRateMax);
        Int64.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "FrameCount"), numStyle,
                       AppSettings.CInfo, out videoStream.FrameCount);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "BitDepth"), numStyle,
                       AppSettings.CInfo, out videoStream.BitDepth);
        videoStream.ScanType = Processing.mediaInfo.Get(StreamKind.Video, i, "ScanType");
        videoStream.ScanOrder = Processing.mediaInfo.Get(StreamKind.Video, i, "ScanOrder");
        UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Video, i, "StreamSize"), numStyle,
                        AppSettings.CInfo, out videoStream.StreamSize);
        videoStream.EncodedApplication = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Application");
        videoStream.EncodedApplicationUrl = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Application/Url");
        videoStream.EncodedLibrary = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library");
        videoStream.EncodedLibraryName = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library/Name");
        videoStream.EncodedLibraryVersion = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library/Version");
        videoStream.EncodedLibraryDate = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library/Date");
        videoStream.EncodedLibrarySettings = Processing.mediaInfo.Get(StreamKind.Video, i, "Encoded_Library_Settings");

        // Classify the picture size from width/height; an empty ScanType is
        // treated as progressive. NOTE(review): width > 1280 is labelled 1080,
        // width > 720 labelled 720p — boundaries are exclusive; confirm intended.
        if (videoStream.Width > 1280)
        {
            if ((videoStream.ScanType == "Progressive") || (videoStream.ScanType == ""))
                videoStream.VideoSize = VideoFormat.Videoformat1080P;
            else
                videoStream.VideoSize = VideoFormat.Videoformat1080I;
        }
        else if (videoStream.Width > 720)
        {
            videoStream.VideoSize = VideoFormat.Videoformat720P;
        }
        else if ((videoStream.Height > 480) && (videoStream.Height <= 576) && (videoStream.Width <= 720))
        {
            if ((videoStream.ScanType == "Progressive") || (videoStream.ScanType == ""))
                videoStream.VideoSize = VideoFormat.Videoformat576P;
            else
                videoStream.VideoSize = VideoFormat.Videoformat576I;
        }
        else
        {
            if ((videoStream.ScanType == "Progressive") || (videoStream.ScanType == ""))
                videoStream.VideoSize = VideoFormat.Videoformat480P;
            else
                videoStream.VideoSize = VideoFormat.Videoformat480I;
        }

        Video.Add(videoStream);
    }
    #endregion

    #region Get Audio Info
    for (int i = 0; i < _audioStreams; i++)
    {
        AudioStreamInfo audioStream = new AudioStreamInfo();

        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "StreamKindID"), numStyle,
                       AppSettings.CInfo, out audioStream.StreamKindID);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "StreamKindPos"), numStyle,
                       AppSettings.CInfo, out audioStream.StreamKindPos);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "ID"), numStyle,
                       AppSettings.CInfo, out audioStream.ID);
        audioStream.Format = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format");
        audioStream.FormatInfo = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format/Info");
        audioStream.FormatVersion = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format_Version");
        audioStream.FormatProfile = Processing.mediaInfo.Get(StreamKind.Audio, i, "Format_Profile");
        audioStream.CodecID = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID");
        audioStream.CodecIDInfo = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID/Info");
        audioStream.CodecIDUrl = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID/Url");
        audioStream.CodecIDDescription = Processing.mediaInfo.Get(StreamKind.Audio, i, "CodecID_Description");
        // Unlike video, audio duration is taken as a raw numeric value.
        Int64.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "Duration"), numStyle,
                       AppSettings.CInfo, out audioStream.Duration);
        audioStream.BitRateMode = Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Mode");
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate"), numStyle,
                       AppSettings.CInfo, out audioStream.BitRate);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Minimum"), numStyle,
                       AppSettings.CInfo, out audioStream.BitRateMin);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Nominal"), numStyle,
                       AppSettings.CInfo, out audioStream.BitRateNom);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitRate_Maximum"), numStyle,
                       AppSettings.CInfo, out audioStream.BitRateMax);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "Channel(s)"), numStyle,
                       AppSettings.CInfo, out audioStream.Channels);
        audioStream.ChannelsString = Processing.mediaInfo.Get(StreamKind.Audio, i, "Channel(s)/String");
        audioStream.ChannelPositions = Processing.mediaInfo.Get(StreamKind.Audio, i, "ChannelPositions");
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "SamplingRate"), numStyle,
                       AppSettings.CInfo, out audioStream.SamplingRate);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "BitDepth"), numStyle,
                       AppSettings.CInfo, out audioStream.BitDepth);
        audioStream.CompressionMode = Processing.mediaInfo.Get(StreamKind.Audio, i, "Compression_Mode");
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "Delay"), numStyle,
                       AppSettings.CInfo, out audioStream.Delay);
        UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Audio, i, "StreamSize"), numStyle,
                        AppSettings.CInfo, out audioStream.StreamSize);
        audioStream.EncodedLibrary = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library");
        audioStream.EncodedLibraryName = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library/Name");
        audioStream.EncodedLibraryVersion = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library/Version");
        audioStream.EncodedLibraryDate = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library/Date");
        audioStream.EncodedLibrarySettings = Processing.mediaInfo.Get(StreamKind.Audio, i, "Encoded_Library_Settings");
        // Language/String1..3: full name, ISO 639-1, ISO 639-2.
        audioStream.LanguageFull = Processing.mediaInfo.Get(StreamKind.Audio, i, "Language/String1");
        audioStream.LanguageIso6391 = Processing.mediaInfo.Get(StreamKind.Audio, i, "Language/String2");
        audioStream.LanguageIso6392 = Processing.mediaInfo.Get(StreamKind.Audio, i, "Language/String3");

        Audio.Add(audioStream);
    }
    #endregion

    #region Get Image Info
    for (int i = 0; i < _imageStreams; i++)
    {
        ImageStreamInfo imageStream = new ImageStreamInfo();

        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Image, i, "StreamKindID"), numStyle,
                       AppSettings.CInfo, out imageStream.StreamKindID);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Image, i, "ID"), numStyle,
                       AppSettings.CInfo, out imageStream.ID);
        imageStream.Format = Processing.mediaInfo.Get(StreamKind.Image, i, "Format");
        imageStream.CodecIDInfo = Processing.mediaInfo.Get(StreamKind.Image, i, "CodecID/Info");
        UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Image, i, "StreamSize"), numStyle,
                        AppSettings.CInfo, out imageStream.StreamSize);
        imageStream.LanguageFull = Processing.mediaInfo.Get(StreamKind.Image, i, "Language/String1");
        imageStream.LanguageIso6392 = Processing.mediaInfo.Get(StreamKind.Image, i, "Language/String3");

        Image.Add(imageStream);
    }
    #endregion

    #region Get Text Info
    for (int i = 0; i < _textStreams; i++)
    {
        TextStreamInfo textStream = new TextStreamInfo();

        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "StreamKindID"), numStyle,
                       AppSettings.CInfo, out textStream.StreamKindID);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "ID"), numStyle,
                       AppSettings.CInfo, out textStream.ID);
        textStream.Format = Processing.mediaInfo.Get(StreamKind.Text, i, "Format");
        textStream.CodecIDInfo = Processing.mediaInfo.Get(StreamKind.Text, i, "CodecID/Info");
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "Delay"), numStyle,
                       AppSettings.CInfo, out textStream.Delay);
        UInt64.TryParse(Processing.mediaInfo.Get(StreamKind.Text, i, "StreamSize"), numStyle,
                        AppSettings.CInfo, out textStream.StreamSize);
        textStream.LanguageFull = Processing.mediaInfo.Get(StreamKind.Text, i, "Language/String1");
        textStream.LanguageIso6392 = Processing.mediaInfo.Get(StreamKind.Text, i, "Language/String3");

        Text.Add(textStream);
    }
    #endregion

    #region Get Menu Info
    for (int i = 0; i < _menuCount; i++)
    {
        MenuStreamInfo menuStream = new MenuStreamInfo();

        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Menu, i, "Chapters_Pos_Begin"), numStyle,
                       AppSettings.CInfo, out menuStream.ChaptersPosBegin);
        Int32.TryParse(Processing.mediaInfo.Get(StreamKind.Menu, i, "Chapters_Pos_End"), numStyle,
                       AppSettings.CInfo, out menuStream.ChaptersPosEnd);

        // Each entry between the begin/end positions names a chapter timestamp;
        // the time of day of the parsed value is stored as the chapter offset.
        for (int j = menuStream.ChaptersPosBegin; j < menuStream.ChaptersPosEnd; j++)
        {
            DateTime tempTime;
            DateTime.TryParse(Processing.mediaInfo.Get(StreamKind.Menu, i, j, InfoKind.Name), AppSettings.CInfo,
                              DateTimeStyles.AssumeLocal, out tempTime);
            Chapters.Add(tempTime.TimeOfDay);
        }
    }
    #endregion

    // NOTE(review): "Complete" is passed with no value here — presumably resets
    // the option before closing; confirm against the MediaInfo API.
    Processing.mediaInfo.Option("Complete");
    Processing.mediaInfo.Close();
}
/// <summary>
/// Emits the AVI 'strh' stream-header chunk for an audio stream and returns the
/// offset of its payload so dwLength can be rewritten once the sample count is known.
/// </summary>
/// <param name="rw">RIFF writer to emit the chunk into.</param>
/// <param name="asi">Audio stream whose codec, rate and size fields are serialized.</param>
/// <returns>Byte offset of the chunk payload reported by the underlying writer.</returns>
private static int WriteAudioStreamHeader(RiffWriter rw, AudioStreamInfo asi)
{
    rw.BeginChunk (AviDemux.ID_strh);

    var header = rw.binaryWriter;
    int offset = (int)header.Seek (0, SeekOrigin.Current);

    header.Write (AviDemux.FCC_auds);  // fccType: audio
    header.Write (asi.codecFourCC);    // fccHandler
    header.Write ((int)0);             // dwFlags
    header.Write ((short)0);           // wPriority
    header.Write ((short)0);           // wLanguage
    header.Write ((int)0);             // dwInitialFrames
    header.Write ((int)1);             // dwScale
    header.Write (asi.sampleRate);     // dwRate @xxx This is true for PCM audio only!
    header.Write ((int)0);             // dwStart
    header.Write (asi.sampleCount);    // dwLength. will be written over later
    header.Write ((int)0);             // dwSuggestedBufferSize, not suggesting it
    header.Write ((int)-1);            // dwQuality = -1 meaning "default quality"
    header.Write (asi.sampleSize);     // dwSampleSize
    header.Write ((long)0);            // rcFrame

    rw.EndChunk ();
    return offset;
}
/// <summary>
/// Emits the AVI 'strf' stream-format chunk (a WAVEFORMATEX-shaped record) for
/// the audio stream, with no codec-specific extra bytes appended.
/// </summary>
/// <param name="rw">RIFF writer to emit the chunk into.</param>
/// <param name="asi">Audio stream supplying format tag, channel count, rate and sample size.</param>
private static void WriteAudioFormatHeader(RiffWriter rw, AudioStreamInfo asi)
{
    rw.BeginChunk (AviDemux.ID_strf);

    var fmt = rw.binaryWriter;
    fmt.Write ((ushort)asi.audioFormat);                         // wFormatTag
    fmt.Write ((short)asi.channels);                             // nChannels
    fmt.Write (asi.sampleRate);                                  // nSamplesPerSec
    fmt.Write (asi.sampleRate * asi.sampleSize * asi.channels);  // nAvgBytesPerSec @xxx true for PCM audio only, but this is a "soft" property
    fmt.Write ((short)asi.sampleSize);                           // nBlockAlign
    fmt.Write ((short)(8 * asi.sampleSize / asi.channels));      // wBitsPerSample
    fmt.Write ((short)0);                                        // cbSize. no extra coded info

    rw.EndChunk ();
}
/// <summary>
/// Constructor. It's always created for a stream, so you need to provide info about it here.
/// </summary>
/// <param name="streamInfo">Describes the PCM stream to decode; must not be null.</param>
/// <exception cref="ArgumentException">
/// Thrown when streamInfo is null, when the format is not integer PCM, float PCM,
/// A-law or u-law, or when the per-channel sample width is unsupported.
/// </exception>
public AudioDecoderPCM(AudioStreamInfo streamInfo)
{
    this.streamInfo = streamInfo;

    // can we decode this stream?
    if (streamInfo == null) {
        throw new ArgumentException ("Can't initialize stream decoder without proper AudioStreamInfo");
    }
    // Accept float PCM too, matching the other AudioDecoderPCM constructor in
    // this file which already supports FORMAT_UNCOMPRESSED_FLOAT.
    if (streamInfo.audioFormat != FORMAT_UNCOMPRESSED &&
        streamInfo.audioFormat != FORMAT_UNCOMPRESSED_FLOAT &&
        streamInfo.audioFormat != FORMAT_ALAW &&
        streamInfo.audioFormat != FORMAT_ULAW) {
        throw new ArgumentException ("Unsupported PCM format=0x" + streamInfo.audioFormat.ToString ("X"));
    }

    // Integer formats must be 8 or 16 bit little-endian per channel; float PCM must be 32 bit.
    int bytesPerChannelSample = streamInfo.sampleSize / streamInfo.channels;
    if ((streamInfo.audioFormat == FORMAT_UNCOMPRESSED_FLOAT && bytesPerChannelSample != 4) ||
        (streamInfo.audioFormat != FORMAT_UNCOMPRESSED_FLOAT && bytesPerChannelSample > 2)) {
        throw new ArgumentException ("Only 8bit and 16bit_le int, and 32bit float audio is supported. " + (bytesPerChannelSample * 8) + "bits given");
    }
}