/// <summary>
/// Starts recording audio from <paramref name="stream"/> into <paramref name="fileStream"/>.
/// </summary>
/// <param name="stream">The audio stream to record from; ignored when a recording is already active.</param>
/// <param name="fileStream">The destination stream the recorded bytes are written to.</param>
/// <param name="sampleRate">The sample rate the stream is started with.</param>
/// <returns>true when the stream started; false when already recording, the stream is null, or the writer could not be created.</returns>
public async Task<bool> StartRecorder(IAudioStream stream, Stream fileStream, int sampleRate)
{
    // Refuse a second concurrent recording or a missing source.
    if (_stream != null || stream == null)
    {
        return false;
    }

    _stream = stream;

    try
    {
        _writer = new BinaryWriter(fileStream, Encoding.UTF8);
    }
    catch (Exception)
    {
        return false;
    }

    _byteCount = 0;
    _stream.OnBroadcast += OnStreamBroadcast;

    var started = await _stream.Start(sampleRate);
    if (started)
    {
        // Cache the format so the header can be written when recording stops.
        _sampleRate = sampleRate;
        _bitsPerSample = stream.BitsPerSample;
        _channelCount = stream.ChannelCount;
    }

    return started;
}
/// <summary>
/// Creates a color generator that maps frequency-bin buckets to colors: one fixed
/// low-frequency bucket plus (numberOfColors - 1) evenly sized buckets over the rest.
/// </summary>
/// <param name="audioStream">The audio stream whose spectrum drives the colors.</param>
/// <param name="numberOfColors">Total number of color buckets; must be at least 1.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when numberOfColors is below 1 or there are not enough frequency bins
/// to give every remaining color at least one bin.
/// </exception>
public ColorGenerator(IAudioStream audioStream, int numberOfColors = 3) : base(audioStream, numberOfColors)
{
    if (numberOfColors < 1)
    {
        // BUGFIX: supply the parameter name (and a message) instead of a bare exception.
        throw new ArgumentOutOfRangeException(nameof(numberOfColors), "At least one color is required");
    }

    // The low-frequency band always gets its own bucket with no hue shift.
    var lowFreqMaxBucket = this.GetFrequencyBinIndex(LowFrequencyMax);
    var lowFreqBucket = new ColorGeneratorBucket(this.GetFrequencyBinIndex(LowFrequencyMin), lowFreqMaxBucket, this.IterationsPerSecond, hueShift: 0);
    this.generatorBuckets.Add(lowFreqBucket);

    var freqsPerBucket = (this.MaxFrequencyBinIndex - lowFreqMaxBucket) / (numberOfColors - 1);
    if (freqsPerBucket < 1)
    {
        // BUGFIX: the original passed this message as the paramName argument of the
        // (string) constructor; use the (paramName, message) overload instead.
        throw new ArgumentOutOfRangeException(nameof(numberOfColors), "Not enough frequency bins for the remaining colors");
    }

    // Spread the remaining colors over the rest of the spectrum, shifting the hue per bucket.
    for (int freqIndex = lowFreqMaxBucket + 1; freqIndex <= this.MaxFrequencyBinIndex - 1; freqIndex += freqsPerBucket)
    {
        ushort hueShift = (ushort)(ushort.MaxValue / numberOfColors * this.generatorBuckets.Count);
        this.generatorBuckets.Add(
            new ColorGeneratorBucket(freqIndex, Math.Min(freqIndex + freqsPerBucket - 1, this.MaxFrequencyBinIndex), this.IterationsPerSecond, hueShift: hueShift));
    }
}
/// <summary>
/// Starts recording audio from <paramref name="stream"/> into the file <paramref name="fileName"/>.
/// </summary>
/// <param name="stream">The audio stream to record from; ignored when a recording is already active.</param>
/// <param name="fileName">Path of the output file; it is overwritten.</param>
/// <returns>true when the stream's Start command could be executed; otherwise false.</returns>
public bool StartRecorder(IAudioStream stream, string fileName)
{
    if (this.stream != null || stream == null)
    {
        return false;
    }

    this.stream = stream;

    try
    {
        // BUGFIX: this line was commented out, so this.streamWriter was null and the
        // next line dereferenced it. The writer must be created before its BaseStream is used.
        this.streamWriter = new StreamWriter(fileName, false);
        this.writer = new BinaryWriter(this.streamWriter.BaseStream, Encoding.UTF8);
    }
    catch (Exception)
    {
        return false;
    }

    this.byteCount = 0;
    this.stream.OnBroadcast += OnStreamBroadcast;

    if (this.stream.Start.CanExecute(this))
    {
        this.stream.Start.Execute(this);
        return true;
    }

    return false;
}
/// <summary>
/// Starts recording audio from <paramref name="stream"/> into the file <paramref name="fileName"/>.
/// </summary>
/// <param name="stream">The audio stream to record from; ignored when a recording is already active.</param>
/// <param name="fileName">Path of the output file; it is overwritten.</param>
/// <returns>true when recording was set up; false when already recording, the stream is null, or the file could not be opened.</returns>
public bool StartRecorder(IAudioStream stream, string fileName)
{
    // Only one recording at a time, and the source must exist.
    if (this.stream != null || stream == null)
    {
        return false;
    }

    this.stream = stream;

    try
    {
        this.streamWriter = new StreamWriter(fileName, false);
        this.writer = new BinaryWriter(this.streamWriter.BaseStream, Encoding.UTF8);
    }
    catch (Exception)
    {
        return false;
    }

    this.byteCount = 0;
    this.stream.OnBroadcast += OnStreamBroadcast;
    this.stream.OnActiveChanged += StreamActiveChanged;

    // Kick the stream off only if it is not already producing data.
    if (!this.stream.Active)
    {
        this.stream.Start();
    }

    return true;
}
/// <summary>
/// Sets up the scene: clear color, cube mesh, diffuse color shader, the two audio assets,
/// and the initial test-runner state.
/// </summary>
public override void Init()
{
    RC.ClearColor = new float4(0, 0, 0, 1);
    Mesh = new Cube();

    var shader = MoreShaders.GetDiffuseColorShader(RC);
    RC.SetShader(shader);
    _vColor = RC.GetShaderParam(shader, "color");
    RC.SetShaderParam(_vColor, new float4(0.8f, 0.1f, 0.1f, 1));

    // sound by http://www.soundjay.com
    _audio1 = Audio.Instance.LoadFile("Assets/beep.ogg");
    // excerpt from "the final rewind" by tryad (http://www.tryad.org) - cc-by-sa
    _audio2 = Audio.Instance.LoadFile("Assets/music.ogg");

    _state = 0;
    _testID = 1;
    _timeStep = 1.0f;
    _curTime = 2.0f;
    _tests = new Tests();
}
/// <summary>
/// Attaches to the given audio stream and prepares the linear-PCM stream description.
/// </summary>
/// <param name="stream">The audio stream to attach to.</param>
/// <returns>true when the stream was attached; false when <paramref name="stream"/> is null.</returns>
public bool Start(IAudioStream stream)
{
    // BUGFIX: guard against null (consistent with the StartRecorder methods) instead of
    // throwing NullReferenceException on the property reads below.
    if (stream == null)
    {
        return false;
    }

    this.stream = stream;
    this.stream.OnBroadcast += HandleOnBroadcast;
    this.description = new AudioStreamBasicDescription(AudioFormatType.LinearPCM)
    {
        // NOTE(review): BitsPerSample / ChannelCount assumes BitsPerSample is the total
        // across channels — confirm against the IAudioStream contract.
        BitsPerChannel = stream.BitsPerSample / stream.ChannelCount,
    };

    // BUGFIX: the method is declared bool but had no return statement.
    return true;
}
/// <summary>
/// Creates a DirectSound buffer sized for the default buffer span of the target stream.
/// </summary>
/// <param name="target">The audio stream the buffer will play; supplies format, channels, frequency and bit depth.</param>
/// <returns>A new wAudioBuffer wired to <paramref name="target"/> and owned by this provider.</returns>
public override AudioBuffer CreateBuffer(IAudioStream target)
{
    // Buffer size in bytes: span (seconds-ish units per AudioBuffer.DefaultBufferSpan) * rate * channels * bytes-per-sample.
    int size = AudioBuffer.DefaultBufferSpan * target.Frequency * target.Channels * target.BitsPerSample / 8;
    WaveFormatEx fmt = new WaveFormatEx(target.Format, target.Channels, target.Frequency, target.BitsPerSample);
    DS.DSBufferCapsFlags flags = DS.DSBufferCapsFlags.CtrlVolume | DS.DSBufferCapsFlags.LocDefer | DS.DSBufferCapsFlags.GlobalFocus | DS.DSBufferCapsFlags.GetCurrentPosition2;
    // &fmt is safe here: the descriptor is consumed by the wAudioBuffer constructor before fmt leaves scope.
    DS.DSBufferDesc desc = new DS.DSBufferDesc((uint)size, flags, &fmt, Guid.Empty);
    return new wAudioBuffer(this, ref desc) { _source = target, _owner = this };
}
/// <summary>
/// Linear-interpolating rate converter: pulls samples from <paramref name="input"/>, resamples
/// them by the fixed-point step oposInc, applies per-channel volume and mixes (clamped add)
/// into <paramref name="obuf"/>.
/// </summary>
/// <param name="input">The source stream read via ReadBuffer.</param>
/// <param name="obuf">Interleaved stereo output buffer that is mixed into (not overwritten).</param>
/// <param name="count">Number of output positions to fill (advanced two shorts at a time).</param>
/// <param name="volLeft">Left-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <param name="volRight">Right-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <returns>Number of output sample pairs produced (buffer index / 2).</returns>
public int Flow(IAudioStream input, short[] obuf, int count, int volLeft, int volRight)
{
    var obufPos = 0;
    var inPos = 0;
    var oend = count;
    while (obufPos < oend)
    {
        // read enough input samples so that opos < 0
        while (FRAC_ONE_LOW <= opos)
        {
            // Check if we have to refill the buffer
            if (inLen == 0)
            {
                inPos = 0;
                inLen = input.ReadBuffer(inBuf, RateHelper.IntermediateBufferSize);
                if (inLen <= 0)
                    return obufPos / 2; // source exhausted: report what was produced so far
            }
            // Keep the previous and current sample of each channel for interpolation.
            inLen -= (stereo ? 2 : 1);
            ilast0 = icur0;
            icur0 = inBuf[inPos++];
            if (stereo)
            {
                ilast1 = icur1;
                icur1 = inBuf[inPos++];
            }
            opos -= FRAC_ONE_LOW;
        }

        // Loop as long as the outpos trails behind, and as long as there is
        // still space in the output buffer.
        while (opos < FRAC_ONE_LOW && obufPos < oend)
        {
            // interpolate between ilast and icur at fractional position opos
            int out0, out1;
            out0 = (short)(ilast0 + (((icur0 - ilast0) * opos + FRAC_HALF_LOW) >> FRAC_BITS_LOW));
            out1 = stereo ? (short)(ilast1 + (((icur1 - ilast1) * opos + FRAC_HALF_LOW) >> FRAC_BITS_LOW)) : out0;

            // output left channel (channels swapped when reverseStereo is set)
            RateHelper.ClampedAdd(ref obuf[obufPos + (reverseStereo ? 1 : 0)], (out0 * volLeft) / Mixer.MaxMixerVolume);

            // output right channel
            RateHelper.ClampedAdd(ref obuf[obufPos + (reverseStereo ? 0 : 1)], (out1 * volRight) / Mixer.MaxMixerVolume);

            obufPos += 2;

            // Increment output position
            opos += oposInc;
        }
    }
    return obufPos / 2;
}
/// <summary>
/// Test #1 step function: at specific state ticks it plays, pauses, stops, and globally
/// stops the audio, logging each step.
/// </summary>
/// <param name="audio">The audio stream under test.</param>
/// <param name="state">Current tick of the test driver.</param>
/// <returns>true once the test has finished (state 28); otherwise false.</returns>
public bool Test1(IAudioStream audio, int state)
{
    switch (state)
    {
        case 0:
            Debug.WriteLine("Test #1: play, pause, stop and global stop");
            break;
        case 2:
            Debug.WriteLine("----> play for five seconds");
            audio.Play();
            break;
        case 7:
            Debug.WriteLine("----> pause music for two seconds");
            audio.Pause();
            break;
        case 9:
            Debug.WriteLine("----> play music for five seconds");
            audio.Play();
            break;
        case 14:
            Debug.WriteLine("----> stop music for two seconds");
            audio.Stop();
            break;
        case 16:
            Debug.WriteLine("----> play music for five seconds");
            audio.Play();
            break;
        case 21:
            Debug.WriteLine("----> global stop for two seconds");
            Audio.Instance.Stop();
            break;
        case 23:
            Debug.WriteLine("----> play music for five seconds");
            audio.Play();
            break;
    }

    if (state != 28)
    {
        return false;
    }

    audio.Stop();
    return true;
}
/// <summary>
/// Test #2 step function: lowers the global volume in steps of 20 from 100 down to 0
/// while playing the stream.
/// </summary>
/// <param name="audio">The audio stream under test.</param>
/// <param name="state">Current tick of the test driver.</param>
/// <returns>true once the test has finished (state 7); otherwise false.</returns>
public bool Test2(IAudioStream audio, int state)
{
    if (state == 0)
    {
        Debug.WriteLine("Test #2: lower global volume from 100 to 0");
    }

    if (state >= 2)
    {
        var volume = 100 - ((state - 2) * 20);
        Audio.Instance.SetVolume(volume);
        Debug.WriteLine("----> global volume set to: " + Audio.Instance.GetVolume());
        audio.Play();
    }

    return state == 7;
}
/// <summary>
/// Stops the current recording: detaches from the stream, awaits its stop, writes the
/// header, and releases the writer. Safe to call when nothing is recording.
/// </summary>
public async Task StopRecorder()
{
    var activeStream = this.stream;
    if (activeStream != null)
    {
        activeStream.OnBroadcast -= OnStreamBroadcast;
        await activeStream.Stop();
    }

    var activeWriter = this.writer;
    if (activeWriter != null && activeWriter.BaseStream.CanWrite)
    {
        // Finalize the file before letting go of the writer.
        this.WriteHeader();
        activeWriter.Dispose();
        this.writer = null;
    }

    this.stream = null;
}
/// <summary>
/// Stops the current recording: unhooks both stream events, writes the header, and
/// releases the stream writer. Safe to call when nothing is recording.
/// </summary>
public void StopRecorder()
{
    var activeStream = this.stream;
    if (activeStream != null)
    {
        activeStream.OnBroadcast -= OnStreamBroadcast;
        activeStream.OnActiveChanged -= StreamActiveChanged;
    }

    var activeWriter = this.streamWriter;
    if (activeWriter != null && activeWriter.BaseStream.CanWrite)
    {
        // Finalize the file before letting go of the writer.
        this.WriteHeader();
        activeWriter.Dispose();
        this.streamWriter = null;
    }

    this.stream = null;
}
/// <summary>
/// Non-interpolating (nearest-sample) rate converter: skips or repeats input samples per the
/// fixed-point step oposInc, applies per-channel volume and mixes (clamped add) into
/// <paramref name="obuf"/>.
/// </summary>
/// <param name="input">The source stream read via ReadBuffer.</param>
/// <param name="obuf">Interleaved stereo output buffer that is mixed into (not overwritten).</param>
/// <param name="count">Number of output sample pairs requested.</param>
/// <param name="volLeft">Left-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <param name="volRight">Right-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <returns>Number of output sample pairs produced (buffer index / 2).</returns>
public int Flow(IAudioStream input, short[] obuf, int count, int volLeft, int volRight)
{
    int pos = 0;
    int oend = count * 2;
    while (pos < oend)
    {
        // read enough input samples so that opos >= 0
        do
        {
            // Check if we have to refill the buffer
            if (inLen == 0)
            {
                inPtr = 0;
                inLen = input.ReadBuffer(inBuf, RateHelper.IntermediateBufferSize);
                if (inLen <= 0)
                    return pos / 2; // source exhausted: report what was produced so far
            }
            inLen -= (stereo ? 2 : 1);
            opos--;
            if (opos >= 0)
            {
                inPtr += (stereo ? 2 : 1);
            }
        } while (opos >= 0);

        short out0, out1;
        out0 = inBuf[inPtr++];
        out1 = (stereo ? inBuf[inPtr++] : out0);

        // Increment output position
        opos += oposInc;

        // BUGFIX: the output index must advance with pos; the original indexed obuf[0]/obuf[1]
        // on every iteration, overwriting the first frame instead of filling the buffer
        // (compare the interpolating converter, which offsets by its output position).
        // output left channel
        RateHelper.ClampedAdd(ref obuf[pos + (reverseStereo ? 1 : 0)], (out0 * (int)volLeft) / Mixer.MaxMixerVolume);

        // output right channel
        RateHelper.ClampedAdd(ref obuf[pos + ((reverseStereo ? 1 : 0) ^ 1)], (out1 * (int)volRight) / Mixer.MaxMixerVolume);

        pos += 2;
    }
    return pos / 2;
}
/// <summary>
/// Converting an MKV with audio to MP4 without specifying a format should produce one
/// h264 video stream and one aac audio stream of the expected duration.
/// </summary>
public async Task ConversionWithoutSpecificFormat()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);

    // Act
    IConversionResult result = await Conversion.Convert(Resources.MkvWithAudio, outputPath).Start();
    IMediaInfo info = await FFmpeg.GetMediaInfo(outputPath);

    // Assert
    Assert.Equal(9, info.Duration.Seconds);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("h264", video.Codec);
    Assert.Equal("aac", audio.Codec);
}
/// <summary>
/// Reading media info for an MP3 file should expose the file metadata, a single mp3
/// audio stream of 13 seconds, no video streams, and the exact file size.
/// </summary>
public async Task AudioPopertiesTest()
{
    // Act
    IMediaInfo mediaInfo = await MediaInfo.Get(Resources.Mp3).ConfigureAwait(false);

    // Assert — file metadata
    Assert.True(File.Exists(mediaInfo.FileInfo.FullName));
    Assert.Equal(FileExtensions.Mp3, mediaInfo.FileInfo.Extension);
    Assert.Equal("audio.mp3", mediaInfo.FileInfo.Name);

    // Assert — stream layout
    Assert.Single(mediaInfo.AudioStreams);
    Assert.Empty(mediaInfo.VideoStreams);

    IAudioStream audio = mediaInfo.AudioStreams.First();
    Assert.NotNull(audio);
    Assert.Equal("mp3", audio.Format);
    Assert.Equal(TimeSpan.FromSeconds(13), audio.Duration);

    // Assert — container totals
    Assert.Equal(TimeSpan.FromSeconds(13), mediaInfo.Duration);
    Assert.Equal(216916, mediaInfo.Size);
}
/// <summary>
/// Converting an MKV with audio to MP4 without specifying a format should succeed and
/// produce one h264 video stream and one aac audio stream of the expected duration.
/// </summary>
public async Task ConversionWithoutSpecificFormat()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);

    // Act
    IConversionResult result = await Conversion.Convert(Resources.MkvWithAudio, outputPath).Start().ConfigureAwait(false);

    // Assert
    Assert.True(result.Success);
    IMediaInfo info = await MediaInfo.Get(outputPath).ConfigureAwait(false);
    Assert.Equal(TimeSpan.FromSeconds(9), info.Duration);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("h264", video.Format);
    Assert.Equal("aac", audio.Format);
}
/// <summary>
/// Opens a BASS stream for the given URI: a file stream for local files (null when the
/// file does not exist), otherwise a web stream.
/// </summary>
/// <param name="uri">The audio source location.</param>
/// <returns>The opened stream, or null when a local file is missing.</returns>
public IAudioStream Play([NotNull] Uri uri)
{
    if (!uri.IsFile)
    {
        _stream = new BassWebStream(uri);
        return _stream;
    }

    var filePath = Path.GetFullPath(uri.LocalPath);
    if (!File.Exists(filePath))
    {
        return null;
    }

    _stream = new BassFileStream(filePath);
    return _stream;
}
/// <summary>
/// Test #3 step function: resets global volume to 100, then raises the stream's own
/// volume in steps of 20 from 0 to 100 while playing.
/// </summary>
/// <param name="audio">The audio stream under test.</param>
/// <param name="state">Current tick of the test driver.</param>
/// <returns>true once the test has finished (state 7); otherwise false.</returns>
public bool Test3(IAudioStream audio, int state)
{
    if (state == 0)
    {
        Audio.Instance.SetVolume(100);
        audio.Volume = 0;
        Debug.WriteLine("Test #3: raise individual volume from 0 to 100");
    }

    if (state >= 2)
    {
        var volume = (state - 2) * 20;
        audio.Volume = volume;
        Debug.WriteLine("----> individual volume set to: " + audio.Volume);
        audio.Play();
    }

    return state == 7;
}
/// <summary>
/// Test #3 step function: resets global volume to 100, then raises the stream's own
/// volume in steps of 20 from 0 to 100 while playing.
/// </summary>
/// <param name="audio">The audio stream under test.</param>
/// <param name="state">Current tick of the test driver.</param>
/// <returns>true once the test has finished (state 7); otherwise false.</returns>
public bool Test3(IAudioStream audio, int state)
{
    if (state == 0)
    {
        Audio.Instance.SetVolume(100);
        audio.Volume = 0;
        Debug.WriteLine("Test #3: raise individual volume from 0 to 100");
    }

    if (state < 2)
    {
        return false;
    }

    audio.Volume = (state - 2) * 20;
    Debug.WriteLine("----> individual volume set to: " + audio.Volume);
    audio.Play();

    return state == 7;
}
/// <summary>
/// A conversion to an existing output file should use the no-overwrite flag ("-n") and
/// a second conversion to the same path should throw a ConversionException.
/// </summary>
public async Task OverwriteFilesExceptionTest()
{
    // Arrange
    string outputPath = _storageFixture.GetTempFileName(FileExtensions.Mp4);
    IMediaInfo info = await FFmpeg.GetMediaInfo(Resources.MkvWithAudio);
    IAudioStream audio = info.AudioStreams.First()?.SetCodec(AudioCodec.ac3);

    // Act — first conversion creates the file
    IConversionResult conversionResult = await FFmpeg.Conversions.New()
                                                     .AddStream(audio)
                                                     .SetOutput(outputPath)
                                                     .Start();

    // Assert
    Assert.Contains("-n ", conversionResult.Arguments);
    await Assert.ThrowsAsync <ConversionException>(() => FFmpeg.Conversions.New()
                                                               .AddStream(audio)
                                                               .SetOutput(outputPath)
                                                               .Start());
}
/// <summary>
/// Wraps a 32-bit IEEE-float source stream with a resampler of the given quality; the
/// target sample rate starts out equal to the source rate.
/// </summary>
/// <param name="sourceStream">The source; must be 32-bit IEEE float.</param>
/// <param name="quality">The resampling quality to configure.</param>
/// <exception cref="ArgumentException">Thrown when the source is not 32-bit IEEE float.</exception>
public ResamplingStream(IAudioStream sourceStream, ResamplingQuality quality) : base(sourceStream)
{
    var sourceProps = sourceStream.Properties;
    bool isFloat32 = sourceProps.Format == AudioFormat.IEEE && sourceProps.BitDepth == 32;
    if (!isFloat32)
    {
        throw new ArgumentException("unsupported source format: " + sourceStream.Properties);
    }

    properties = new AudioProperties(sourceProps.Channels, sourceProps.SampleRate, sourceProps.BitDepth, sourceProps.Format);
    this.quality = quality;
    SetupResampler();
    sourceBuffer = new ByteBuffer();
    // Identity resampling by default until a different target rate is set.
    TargetSampleRate = properties.SampleRate;
    position = 0;
}
/// <summary>
/// Test #5 step function: plays the stream with looping requested via the Play
/// parameter, then clears the loop flag when the test ends.
/// </summary>
/// <param name="audio">The audio stream under test.</param>
/// <param name="state">Current tick of the test driver.</param>
/// <returns>true once the test has finished (state 4); otherwise false.</returns>
public bool Test5(IAudioStream audio, int state)
{
    if (state == 0)
    {
        Debug.WriteLine("Test #5: looping for 2 seconds with loop as parameter");
    }

    if (state == 2)
    {
        audio.Play(true);
    }

    if (state != 4)
    {
        return false;
    }

    audio.Loop = false;
    return true;
}
/// <summary>
/// A successful conversion should use the no-overwrite flag ("-n") and a second
/// conversion to the same output path should throw a ConversionException.
/// </summary>
public async Task OverwriteFilesExceptionTest()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);
    IMediaInfo info = await MediaInfo.Get(Resources.MkvWithAudio);
    IAudioStream audio = info.AudioStreams.First()?.SetCodec(AudioCodec.Ac3);

    // Act — first conversion creates the file
    IConversionResult conversionResult = await Conversion.New()
                                                         .AddStream(audio)
                                                         .SetOutput(outputPath)
                                                         .Start();

    // Assert
    Assert.True(conversionResult.Success);
    Assert.Contains("-n ", conversionResult.Arguments);
    await Assert.ThrowsAsync <ConversionException>(() => Conversion.New()
                                                                   .AddStream(audio)
                                                                   .SetOutput(outputPath)
                                                                   .Start());
}
/// <summary>
/// Stops the current recording: detaches from the stream, executes its Stop command when
/// available, writes the header, and releases the writer. Safe to call when idle.
/// </summary>
public void StopRecorder()
{
    var activeStream = this.stream;
    if (activeStream != null)
    {
        activeStream.OnBroadcast -= OnStreamBroadcast;
        if (activeStream.Stop.CanExecute(this))
        {
            activeStream.Stop.Execute(this);
        }
    }

    var activeWriter = this.streamWriter;
    if (activeWriter != null && activeWriter.BaseStream.CanWrite)
    {
        // Finalize the file before letting go of the writer.
        this.WriteHeader();
        activeWriter.Dispose();
        this.streamWriter = null;
    }

    this.stream = null;
}
/// <summary>
/// Reading media info for an MP3 file should expose the path metadata, a single mp3
/// audio stream of 13 seconds, no video streams, and the exact file size.
/// </summary>
public async Task AudioPopertiesTest()
{
    // Act
    IMediaInfo mediaInfo = await FFmpeg.GetMediaInfo(Resources.Mp3);

    // Assert — path metadata
    Assert.True(File.Exists(mediaInfo.Path));
    Assert.Equal(FileExtensions.Mp3, Path.GetExtension(mediaInfo.Path));
    Assert.EndsWith("audio.mp3", mediaInfo.Path);

    // Assert — stream layout
    Assert.Single(mediaInfo.AudioStreams);
    Assert.Empty(mediaInfo.VideoStreams);

    IAudioStream audio = mediaInfo.AudioStreams.First();
    Assert.NotNull(audio);
    Assert.Equal("mp3", audio.Codec);
    Assert.Equal(13, audio.Duration.Seconds);

    // Assert — container totals
    Assert.Equal(13, mediaInfo.Duration.Seconds);
    Assert.Equal(216916, mediaInfo.Size);
}
/// <summary>
/// Converting with an explicit 128 kbit/s audio bitrate should produce a file whose
/// audio stream bitrate lands within 5% of the requested value.
/// </summary>
public async Task SetAudioBitrateTest()
{
    // Arrange
    const int requestedBitrate = 128000;
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);
    IMediaInfo info = await FFmpeg.GetMediaInfo(Resources.MkvWithAudio);
    IAudioStream audio = info.AudioStreams.First()?.SetCodec(AudioCodec.ac3);

    // Act
    IConversionResult conversionResult = await FFmpeg.Conversions.New()
                                                     .AddStream(audio)
                                                     .SetAudioBitrate(requestedBitrate)
                                                     .SetOutput(outputPath)
                                                     .Start();

    // Assert — allow a 5% encoder tolerance around the requested bitrate
    double lowerBound = requestedBitrate * 0.95;
    double upperBound = requestedBitrate * 1.05;
    IMediaInfo resultFile = await FFmpeg.GetMediaInfo(outputPath);
    Assert.InRange <double>(resultFile.AudioStreams.First().Bitrate, lowerBound, upperBound);
}
/// <summary>
/// Reacts to a change of the playback target: closes the old state, opens a stream for the
/// new source, (lazily) creates the audio provider and buffer, and resets the UI controls.
/// </summary>
/// <param name="newTarget">The newly selected audio source; null clears the player.</param>
private void TargetChanged(IAudioSource newTarget)
{
    // Nothing to do when the same non-null target is selected again.
    if (_targetSource == newTarget && _targetSource != null)
    {
        return;
    }

    // Tear down whatever was playing before switching targets.
    Close();

    if ((_targetSource = newTarget) == null)
    {
        return;
    }

    if ((_targetStream = _targetSource.CreateStream()) == null)
    {
        return;
    }

    //Create provider
    if (_provider == null)
    {
        _provider = AudioProvider.Create(null);
        _provider.Attach(this);
    }

    // Loop checkbox is only meaningful when the stream itself supports looping.
    chkLoop.Checked = false;
    chkLoop.Enabled = _targetStream.IsLooping;

    //Create buffer for stream
    _buffer = _provider.CreateBuffer(_targetStream);

    // Total play time in ticks (100 ns units): samples / frequency seconds * 10^7.
    _sampleTime = new DateTime((long)_targetStream.Samples * 10000000 / _targetStream.Frequency);

    // Reset the seek bar to span the whole stream; ticks are toggled off while the
    // maximum changes to avoid redrawing ticks against a stale range.
    trackBar1.Value = 0;
    trackBar1.TickStyle = TickStyle.None;
    trackBar1.Maximum = _targetStream.Samples;
    trackBar1.TickFrequency = _targetStream.Samples / 8;
    trackBar1.TickStyle = TickStyle.BottomRight;

    UpdateTimeDisplay();

    btnPlay.Enabled = true;
}
/// <summary>
/// Resamples every input file matched by the pattern-to-factor mapping and writes the result
/// to the output directory (same file name), processing files in parallel. Files whose output
/// already exists are skipped; per-file errors are logged and do not stop the batch.
/// </summary>
/// <param name="mapping">File name pattern (as accepted by DirectoryInfo.EnumerateFiles) to resampling factor.</param>
/// <param name="indir">Directory to read input files from.</param>
/// <param name="outdir">Directory the processed files are written to.</param>
static void Process(Dictionary <string, double> mapping, DirectoryInfo indir, DirectoryInfo outdir)
{
    // Expand the pattern mapping into a concrete file-to-factor mapping first,
    // so the parallel loop below works on a fixed set of files.
    Dictionary<FileInfo, double> fileMapping = new Dictionary<FileInfo, double>();
    foreach (string fileNamePattern in mapping.Keys)
    {
        double factor = mapping[fileNamePattern];
        foreach (FileInfo fileInfo in indir.EnumerateFiles(fileNamePattern))
        {
            fileMapping.Add(fileInfo, factor);
        }
    }

    Parallel.ForEach <FileInfo>(fileMapping.Keys, (fileInfo) =>
    {
        double factor = fileMapping[fileInfo];
        FileInfo outputFileInfo = new FileInfo(Path.Combine(outdir.FullName, fileInfo.Name));
        if (outputFileInfo.Exists)
        {
            Console.WriteLine(fileInfo.Name + " SKIP (file already existing)");
            return;
        }
        Console.WriteLine(fileInfo.Name);
        try
        {
            // Pipeline: decode as 32-bit float -> resample by factor -> mix back to the
            // original sample rate label so the output declares the source rate.
            IAudioStream inputStream = AudioStreamFactory.FromFileInfoIeee32(fileInfo);
            IAudioStream resamplingStream = new ResamplingStream(inputStream, ResamplingQuality.VeryHigh, factor);
            MixerStream sampleRateResetStream = new MixerStream(resamplingStream.Properties.Channels, inputStream.Properties.SampleRate);
            sampleRateResetStream.Add(resamplingStream);
            IAudioStream outputStream = sampleRateResetStream;
            AudioStreamFactory.WriteToFile(outputStream, outputFileInfo.FullName);
        }
        catch (Exception e)
        {
            // Best-effort batch: report and continue with the remaining files.
            Console.WriteLine("Error processing " + fileInfo.Name + ": " + e.Message);
        }
    });
}
/// <summary>
/// Pass-through (no rate conversion) mixer: reads up to the requested number of samples from
/// <paramref name="input"/>, applies per-channel volume and mixes (clamped add) into
/// <paramref name="obuf"/>.
/// </summary>
/// <param name="input">The source stream; its stereo flag must match this converter's.</param>
/// <param name="obuf">Interleaved stereo output buffer that is mixed into (not overwritten).</param>
/// <param name="count">Number of output positions requested.</param>
/// <param name="volLeft">Left-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <param name="volRight">Right-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <returns>Number of output sample pairs produced (buffer index / 2).</returns>
public int Flow(IAudioStream input, short[] obuf, int count, int volLeft, int volRight)
{
    Debug.Assert(input.IsStereo == stereo);

    var osamp = count / 2;
    if (stereo)
    {
        osamp *= 2;
    }

    // Reallocate temp buffer, if necessary
    if (osamp > _bufferSize)
    {
        _buffer = new short[osamp];
        _bufferSize = osamp;
    }

    // Read up to 'osamp' samples into our temporary buffer
    var len = input.ReadBuffer(_buffer, _bufferSize);

    int iPos = 0;
    var oPos = 0;
    var inc = stereo ? 2 : 1;

    // Mix the data into the output buffer
    for (; iPos < len; iPos += inc)
    {
        var out0 = _buffer[iPos];
        var out1 = stereo ? _buffer[iPos + 1] : out0;

        // output left channel (channels swapped when reverseStereo is set)
        RateHelper.ClampedAdd(ref obuf[oPos + (reverseStereo ? 1 : 0)], (out0 * volLeft) / Mixer.MaxMixerVolume);

        // output right channel
        RateHelper.ClampedAdd(ref obuf[oPos + (reverseStereo ? 0 : 1)], (out1 * volRight) / Mixer.MaxMixerVolume);

        oPos += 2;
    }
    return(oPos / 2);
}
/// <summary>
/// Converting an MKV with audio to OGV should produce one theora video stream and one
/// vorbis audio stream of the expected duration.
/// </summary>
public async Task ToOgvTest()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Ogv);

    // Act
    IConversionResult result = await(await FFmpeg.Conversions.FromSnippet.ToOgv(Resources.MkvWithAudio, outputPath))
                               .Start();
    IMediaInfo info = await FFmpeg.GetMediaInfo(outputPath);

    // Assert
    Assert.Equal(9, info.Duration.Seconds);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("theora", video.Codec);
    Assert.Equal("vorbis", audio.Codec);
}
/// <summary>
/// Converting an MKV that carries subtitles should keep the h264/aac streams but drop
/// every subtitle stream from the output.
/// </summary>
public async Task BasicConversion_InputFileWithSubtitles_SkipSubtitles()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);

    // Act
    IConversionResult result = await(await FFmpeg.Conversions.FromSnippet.Convert(Resources.MkvWithSubtitles, outputPath)).Start();
    IMediaInfo info = await FFmpeg.GetMediaInfo(outputPath);

    // Assert
    Assert.Equal(TimeSpan.FromSeconds(9), info.Duration);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("h264", video.Codec);
    Assert.Equal("aac", audio.Codec);
    Assert.Empty(info.SubtitleStreams);
}
/// <summary>
/// Releases the audio source, the stream, and the stream buffer (in that order), then
/// defers to the base class. Safe to call when members are already null.
/// </summary>
public override void Dispose()
{
    // Close the source first so nothing keeps feeding the stream/buffer being torn down.
    if (_audioSource != null)
    {
        _audioSource.Close();
    }

    if (_stream != null)
    {
        _stream.Dispose();
        _stream = null;
    }

    if (_streamBuffer != null)
    {
        _streamBuffer.Dispose();
        _streamBuffer = null;
    }

    base.Dispose();
}
/// <summary>
/// Applying a centered PNG watermark should succeed, use an overlay filter, and leave the
/// output with one h264 video stream and one aac audio stream.
/// </summary>
public async Task WatermarkTest()
{
    // Arrange
    string outputPath = Path.ChangeExtension(Path.GetTempFileName(), FileExtensions.Mp4);

    // Act
    IConversionResult result = await Conversion.SetWatermark(Resources.Mp4WithAudio, outputPath, Resources.PngSample, Position.Center)
                               .Start().ConfigureAwait(false);

    // Assert — conversion arguments
    Assert.True(result.Success);
    Assert.Contains("overlay=", result.Arguments);
    Assert.Contains(Resources.Mp4WithAudio, result.Arguments);

    // Assert — output streams
    IMediaInfo info = await MediaInfo.Get(outputPath).ConfigureAwait(false);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("aac", audio.Format);
    Assert.Equal("h264", video.Format);
}
/// <summary>
/// Converting an MP4 with audio to TS should succeed and produce one mpeg2video stream
/// and one mp2 audio stream of the expected duration.
/// </summary>
public async Task ToTsTest()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Ts);

    // Act
    IConversionResult result = await Conversion.ToTs(Resources.Mp4WithAudio, outputPath)
                               .Start();

    // Assert
    Assert.True(result.Success);
    IMediaInfo info = await MediaInfo.Get(outputPath);
    Assert.Equal(TimeSpan.FromSeconds(13), info.Duration);
    Assert.Equal(1, info.VideoStreams.Count());
    Assert.Equal(1, info.AudioStreams.Count());
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("mpeg2video", video.Format);
    Assert.Equal("mp2", audio.Format);
}
/// <summary>
/// Test #4 step function: plays the stream with looping enabled via the Loop property,
/// then clears the loop flag when the test ends.
/// </summary>
/// <param name="audio">The audio stream under test.</param>
/// <param name="state">Current tick of the test driver.</param>
/// <returns>true once the test has finished (state 4); otherwise false.</returns>
public bool Test4(IAudioStream audio, int state)
{
    if (state == 0)
    {
        Debug.WriteLine("Test #4: looping for 2 seconds with loop as attribute");
    }

    if (state == 2)
    {
        audio.Loop = true;
        audio.Play();
    }

    if (state != 4)
    {
        return false;
    }

    audio.Loop = false;
    return true;
}
/// <summary>
/// Creates and opens a stream for the given medium, registers it with the player, and
/// returns its id.
/// </summary>
/// <param name="medium">The media source to open.</param>
/// <param name="loop">Whether the stream loops.</param>
/// <param name="prescan">Whether the stream is prescanned on open.</param>
/// <param name="effect">Optional audio effect applied to the stream.</param>
/// <returns>The stream id, or -1 when the player is uninitialized or the stream failed to open.</returns>
public int Load(string medium, bool loop = false, bool prescan = false, EAudioEffect effect = EAudioEffect.None)
{
    if (!_Initialized)
    {
        return -1;
    }

    IAudioStream stream = _CreateStream(_NextID++, medium, loop, effect);
    if (!stream.Open(prescan))
    {
        return -1;
    }

    // Register under the lock so concurrent loads/closes see a consistent list.
    lock (_Streams)
    {
        stream.Volume = 1f;
        stream.VolumeMax = _GlobalVolume;
        stream.SetOnCloseListener(this);
        _Streams.Add(stream);
        return stream.ID;
    }
}
/// <summary>
/// Converting an MKV that carries subtitles should keep the h264/aac streams but drop
/// every subtitle stream from the output.
/// </summary>
public async Task BasicConversion_InputFileWithSubtitles_SkipSubtitles()
{
    // Arrange
    string outputPath = _storageFixture.GetTempFileName(FileExtensions.Mp4);

    // Act
    IConversionResult result = await(await FFmpeg.Conversions.FromSnippet.Convert(Resources.MkvWithSubtitles, outputPath)).Start();
    IMediaInfo info = await FFmpeg.GetMediaInfo(outputPath);

    // Assert
    Assert.Equal(9, info.Duration.Seconds);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("h264", video.Codec);
    Assert.Equal("aac", audio.Codec);
    Assert.Empty(info.SubtitleStreams);
}
/// <summary>
/// Converting an MP4 with audio to TS should produce one mpeg2video stream and one mp2
/// audio stream of the expected duration.
/// </summary>
public async Task ToTsTest()
{
    // Arrange
    string outputPath = _storageFixture.GetTempFileName(FileExtensions.Ts);

    // Act
    IConversionResult result = await(await FFmpeg.Conversions.FromSnippet.ToTs(Resources.Mp4WithAudio, outputPath))
                               .Start();
    IMediaInfo info = await FFmpeg.GetMediaInfo(outputPath);

    // Assert
    Assert.Equal(13, info.Duration.Seconds);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("mpeg2video", video.Codec);
    Assert.Equal("mp2", audio.Codec);
}
/// <summary>
/// Creates an audio track over the given files, optionally probing the first file for its
/// audio properties and initializing the track length.
/// </summary>
/// <param name="fileInfos">The file(s) that make up this track.</param>
/// <param name="initialize">When true, open the file to read properties and length up front.</param>
public AudioTrack(FileInfo[] fileInfos, bool initialize) : base(fileInfos)
{
    this.TimeWarps = new TimeWarpCollection();
    if (initialize)
    {
        using (IAudioStream stream = AudioStreamFactory.FromFileInfo(FileInfo))
        {
            sourceProperties = stream.Properties;
            if (MultiFile)
            {
                // For multi-file tracks, we need to get a concatenated stream of all files for the length
                InitializeLength();
            }
            else
            {
                // Single-file tracks can just reuse this stream to get the length
                InitializeLength(stream);
            }
        }
    }
}
/// <summary>
/// Converting with an explicit "128K" audio bitrate should succeed and produce a file
/// whose audio stream bitrate lands within 5% of 128000.
/// </summary>
public async Task SetAudioBitrateTest()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);
    IMediaInfo info = await MediaInfo.Get(Resources.MkvWithAudio).ConfigureAwait(false);
    IAudioStream audio = info.AudioStreams.First()?.SetCodec(AudioCodec.Ac3);

    // Act
    IConversionResult conversionResult = await Conversion.New()
                                                         .AddStream(audio)
                                                         .SetAudioBitrate("128K")
                                                         .SetOutput(outputPath)
                                                         .Start().ConfigureAwait(false);

    // Assert — allow a 5% encoder tolerance around the requested bitrate
    double lowerBound = 128000 * 0.95;
    double upperBound = 128000 * 1.05;
    Assert.True(conversionResult.Success);
    IMediaInfo resultFile = conversionResult.MediaInfo.Value;
    Assert.InRange <double>(resultFile.AudioStreams.First().Bitrate, lowerBound, upperBound);
}
/// <summary>
/// Searches for a stream of a given type in a hierarchy of nested streams.
/// </summary>
/// <typeparam name="T">the type of the stream to search for</typeparam>
/// <param name="stream">a stream that may envelop a hierarchy of streams</param>
/// <returns>the stream of the given type if found, else null</returns>
public static T FindStream <T>(this IAudioStream stream)
{
    // Every wrapper keeps its inner stream in the private field "sourceStream".
    FieldInfo inner = typeof(AbstractAudioStreamWrapper)
        .GetField("sourceStream", BindingFlags.Instance | BindingFlags.NonPublic);

    // Unwrap until the requested type is found or a non-wrapper leaf is reached.
    for (;;)
    {
        if (stream is T)
        {
            return (T)stream;
        }

        if (!(stream is AbstractAudioStreamWrapper))
        {
            return default(T);
        }

        stream = (IAudioStream)inner.GetValue(stream);
    }
}
/// <summary>
/// Limiting the output time to 5 seconds should truncate both the audio and video
/// streams to exactly 5 seconds.
/// </summary>
public async Task SetOutputTimeTest()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);
    IMediaInfo info = await MediaInfo.Get(Resources.MkvWithAudio).ConfigureAwait(false);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();

    // Act
    IConversionResult conversionResult = await Conversion.New()
                                                         .AddStream(video)
                                                         .AddStream(audio)
                                                         .SetOutputTime(TimeSpan.FromSeconds(5))
                                                         .SetOutput(outputPath)
                                                         .Start().ConfigureAwait(false);

    // Assert
    Assert.True(conversionResult.Success);
    IMediaInfo resultFile = conversionResult.MediaInfo.Value;
    Assert.Equal(TimeSpan.FromSeconds(5), resultFile.AudioStreams.First().Duration);
    Assert.Equal(TimeSpan.FromSeconds(5), resultFile.VideoStreams.First().Duration);
}
/// <summary>
/// Limiting the input read time to 5 seconds should truncate both the audio and video
/// streams of the output to 5 seconds.
/// </summary>
public async Task SetInputTimeTest()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Mp4);
    IMediaInfo info = await FFmpeg.GetMediaInfo(Resources.MkvWithAudio);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();

    // Act
    IConversionResult conversionResult = await FFmpeg.Conversions.New()
                                                     .AddStream(video)
                                                     .AddStream(audio)
                                                     .SetInputTime(TimeSpan.FromSeconds(5))
                                                     .SetOutput(outputPath)
                                                     .Start();

    // Assert
    IMediaInfo resultFile = await FFmpeg.GetMediaInfo(outputPath);
    Assert.Equal(5, resultFile.AudioStreams.First().Duration.Seconds);
    Assert.Equal(5, resultFile.VideoStreams.First().Duration.Seconds);
}
/// <summary>
/// Writes a slice of the source stream to a 16-bit PCM WAV file via a memory-mapped view:
/// a RIFF header followed by the raw samples.
/// </summary>
/// <param name="source">The audio stream to read samples from.</param>
/// <param name="path">Destination file path; created or overwritten in place.</param>
/// <param name="samplePosition">First sample to write (default: start of stream).</param>
/// <param name="maxSampleCount">Upper bound on the number of samples written (default: all remaining).</param>
public static void ToFile(IAudioStream source, string path, int samplePosition = 0, int maxSampleCount = int.MaxValue)
{
    // Never write more samples than remain after the start position.
    int sampleCount = Math.Min(maxSampleCount, (source.Samples - samplePosition));
    using (FileStream stream = new FileStream(path, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None, 8, FileOptions.SequentialScan))
    {
        //Estimate size: 44-byte RIFF/WAV header + 2 bytes per sample per channel (16-bit PCM)
        int outLen = 44 + (sampleCount * source.Channels * 2);
        //Create file map sized to the final file so header and samples are written in place
        stream.SetLength(outLen);
        using (FileMap map = FileMap.FromStreamInternal(stream, FileMapProtect.ReadWrite, 0, outLen))
        {
            // Write the RIFF header (PCM format 1, 16 bits) directly at the start of the mapping.
            RIFFHeader * riff = (RIFFHeader *)map.Address;
            *riff = new RIFFHeader(1, source.Channels, 16, source.Frequency, sampleCount);
            // Then stream the samples into the data section right after the header.
            source.SamplePosition = samplePosition;
            source.ReadSamples(map.Address + 44, sampleCount);
        }
    }
}
/// <summary>
/// Converting an MP4 with audio to TS should produce one mpeg2video stream and one mp2
/// audio stream with a total duration of 13 seconds.
/// </summary>
public async Task ToTsTest()
{
    // Arrange
    string outputPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid() + FileExtensions.Ts);

    // Act
    IConversionResult result = await(await FFmpeg.Conversions.FromSnippet.ToTs(Resources.Mp4WithAudio, outputPath))
                               .Start();
    IMediaInfo info = await FFmpeg.GetMediaInfo(outputPath);

    // Assert
    Assert.Equal(TimeSpan.FromSeconds(13), info.Duration);
    Assert.Single(info.VideoStreams);
    Assert.Single(info.AudioStreams);
    IVideoStream video = info.VideoStreams.First();
    IAudioStream audio = info.AudioStreams.First();
    Assert.NotNull(video);
    Assert.NotNull(audio);
    Assert.Equal("mpeg2video", video.Codec);
    Assert.Equal("mp2", audio.Codec);
}
/// <summary>
/// Creates a mixer channel for the given stream and starts playing it. When an id is given,
/// a stream with the same id that is already playing is not duplicated.
/// </summary>
/// <param name="type">Sound category the channel belongs to.</param>
/// <param name="stream">The stream to play; null returns an empty handle.</param>
/// <param name="id">Optional dedup id; -1 disables duplicate detection.</param>
/// <param name="volume">Channel volume.</param>
/// <param name="balance">Channel stereo balance.</param>
/// <param name="autofreeStream">Whether the stream is disposed when the channel is done with it.</param>
/// <param name="permanent">Whether the channel survives global stops.</param>
/// <param name="reverseStereo">Whether left/right are swapped.</param>
/// <returns>A handle to the new channel, or an empty handle when no channel was created.</returns>
public SoundHandle PlayStream(SoundType type, IAudioStream stream, int id = -1, int volume = 255, int balance = 0, bool autofreeStream = true, bool permanent = false, bool reverseStereo = false)
{
    // All channel bookkeeping happens under the mixer lock.
    lock (_gate)
    {
        if (stream == null)
        {
            // Console.Error.WriteLine("stream is null");
            return new SoundHandle();
        }

        Debug.Assert(IsReady);

        // Prevent duplicate sounds
        if (id != -1)
        {
            for (var i = 0; i != NumChannels; i++)
                if (_channels[i] != null && _channels[i].Id == id)
                {
                    // Delete the stream if were asked to auto-dispose it.
                    // Note: This could cause trouble if the client code does not
                    // yet expect the stream to be gone. The primary example to
                    // keep in mind here is QueuingAudioStream.
                    // Thus, as a quick rule of thumb, you should never, ever,
                    // try to play QueuingAudioStreams with a sound id.
                    if (autofreeStream)
                        stream.Dispose();
                    return new SoundHandle();
                }
        }

        // Create the channel
        var chan = new Channel(this, type, stream, autofreeStream, reverseStereo, id, permanent)
        {
            Volume = volume,
            Balance = balance
        };
        return InsertChannel(chan);
    }
}
/// <summary>
/// Pass-through (no rate conversion) mixer: reads up to the requested number of samples from
/// <paramref name="input"/>, applies per-channel volume and mixes (clamped add) into
/// <paramref name="obuf"/>.
/// </summary>
/// <param name="input">The source stream; its stereo flag must match this converter's.</param>
/// <param name="obuf">Interleaved stereo output buffer that is mixed into (not overwritten).</param>
/// <param name="count">Number of output positions requested.</param>
/// <param name="volLeft">Left-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <param name="volRight">Right-channel volume, scaled by Mixer.MaxMixerVolume.</param>
/// <returns>Number of output sample pairs produced (buffer index / 2).</returns>
public int Flow(IAudioStream input, short[] obuf, int count, int volLeft, int volRight)
{
    Debug.Assert(input.IsStereo == stereo);

    var osamp = count / 2;
    if (stereo)
        osamp *= 2;

    // Reallocate temp buffer, if necessary
    if (osamp > _bufferSize)
    {
        _buffer = new short[osamp];
        _bufferSize = osamp;
    }

    // Read up to 'osamp' samples into our temporary buffer
    var len = input.ReadBuffer(_buffer, _bufferSize);

    int iPos = 0;
    var oPos = 0;
    var inc = stereo ? 2 : 1;

    // Mix the data into the output buffer
    for (; iPos < len; iPos += inc)
    {
        var out0 = _buffer[iPos];
        var out1 = stereo ? _buffer[iPos + 1] : out0;

        // output left channel (channels swapped when reverseStereo is set)
        RateHelper.ClampedAdd(ref obuf[oPos + (reverseStereo ? 1 : 0)], (out0 * volLeft) / Mixer.MaxMixerVolume);

        // output right channel
        RateHelper.ClampedAdd(ref obuf[oPos + (reverseStereo ? 0 : 1)], (out1 * volRight) / Mixer.MaxMixerVolume);

        oPos += 2;
    }
    return oPos / 2;
}
/// <summary>
/// Starts recording audio from <paramref name="stream"/> into <paramref name="fileStream"/>.
/// </summary>
/// <param name="stream">The audio stream to record from; ignored when a recording is already active.</param>
/// <param name="fileStream">The destination stream the recorded bytes are written to.</param>
/// <param name="sampleRate">The sample rate the stream is started with.</param>
/// <returns>true when the stream started; false when already recording, the stream is null, or the writer could not be created.</returns>
public async Task<bool> StartRecorder(IAudioStream stream, Stream fileStream, int sampleRate)
{
    // Refuse a second concurrent recording or a missing source.
    if (this.stream != null || stream == null)
    {
        return false;
    }

    this.stream = stream;

    try
    {
        this.writer = new BinaryWriter(fileStream, Encoding.UTF8);
    }
    catch (Exception)
    {
        return false;
    }

    this.byteCount = 0;
    this.stream.OnBroadcast += OnStreamBroadcast;

    var started = await this.stream.Start(sampleRate);
    return started;
}
/// <summary>
/// Creates a rolling cube bound to the given level: loads the move sound, initializes
/// position/rotation/animation state, and resets the cube to the origin.
/// </summary>
/// <param name="curLevel">The level this cube lives in; supplies the shared cube mesh.</param>
public RollingCube(Level curLevel)
{
    _curLevel = curLevel;
    _cubeMesh = _curLevel.GlobalCubeMesh;

    _cubeMoveSound = Audio.Instance.LoadFile("Assets/cube.ogg");
    _cubeMoveSound.Volume = 5;

    _cubeColor = new float3(1, 0.1f, 0.1f);

    // Grid position (current/last) and per-axis rotation/direction state.
    PosCurXY = new int[2];
    PosLastXY = new int[2];
    _rotateYX = new float[2];
    _curDirXY = new int[2];

    _orientQuat = Quaternion.Identity;

    _posZ = 2;
    _veloZ = 0.0f;
    _curBright = 0.0f;

    ResetCube(0, 0);
}
/// <summary>
/// Stops playback: resets the play-state flags, stops and releases the audio stream, then
/// waits (up to ~1 second) for the background stream thread to finish.
/// </summary>
/// <exception cref="TimeoutException">Thrown when the background thread is still busy after 1 second.</exception>
private void Stop()
{
    isStopped = true;
    isPlaying = false;
    isPaused = false;

    if (audio == null)
        return;

    audio.Stop();
    audio = null;

    // Poll the worker in 10 ms steps; give up after 100 polls (~1 second).
    int attempts = 0;
    while ((audioStreamThread != null) && (audioStreamThread.IsBusy))
    {
        Thread.Sleep(10);
        attempts++;
        if (attempts > 100)
            // BUGFIX: throw the specific TimeoutException instead of a bare Exception
            // (callers catching Exception are unaffected).
            throw new TimeoutException("Track did not stop after 1 second.");
    }
}
/// <summary>
/// Plays the "bad sound" notification on a background worker: pauses current playback if
/// needed, opens badsound.mp3 from the application base directory, and kicks off a worker
/// that plays it and restores state when done. Does nothing when the file is missing.
/// </summary>
private void PlayUnsuccessfulSound()
{
    string badSoundFileName = Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "badsound.mp3");
    if (!fileSystem.FileExists(badSoundFileName))
        return;

    // Remember whether we were paused so playback state can be restored afterwards.
    if (isPaused)
        wasPausedBeforeBadSound = true;
    if (isPlaying)
        PlayPause();

    badSoundAudio = audioStreamFactory.NewAudioStream();
    badSoundAudio.Open(badSoundFileName);

    IBackgroundWorkerWrapper worker = backgroundWorkerFactory.NewBackgroundWorker();
    worker.DoWork += PlayUnsuccessfulSound;
    worker.RunWorkerCompleted += UnsuccessfulSoundHasStopped;
    worker.RunWorkerAsync();
}
/// <summary>
/// Starts playback of the given file on a background worker, stopping any previous
/// playback first, and updates the play-state flags.
/// </summary>
/// <param name="path">Path of the audio file to play.</param>
private void Play(string path)
{
    // Make sure any previous track is fully torn down first.
    if (!isStopped)
        Stop();

    audio = audioStreamFactory.NewAudioStream();
    // Subscribe before Open so no updates are missed.
    audio.ConstantUpdateEvent += audio_ConstantUpdateEvent;
    audio.Open(path);

    audioStreamThread = backgroundWorkerFactory.NewBackgroundWorker();
    audioStreamThread.DoWork += PlayInNewThread;
    audioStreamThread.RunWorkerCompleted += AudioStreamHasStopped;
    audioStreamThread.RunWorkerAsync();

    isStopped = false;
    isPaused = false;
    isPlaying = true;
}
/// <summary>
/// Picks a dual-mono channel for a stream whose 6-character language tag packs
/// two 3-letter codes (left channel first, right channel second). Whichever
/// half has the better (lower) preferred-language index than the current
/// <paramref name="priority"/> wins; the right half is checked second and can
/// therefore override the left.
/// </summary>
private static eAudioDualMonoMode GetDualMonoMode(IAudioStream[] streams, int currentIndex, ref int priority, ref int idxStreamIndexmpeg, ref string mpegBasedOnLang)
{
    var chosenMode = eAudioDualMonoMode.UNSUPPORTED;

    // Characters 0-2: left channel language; 3-5: right channel language.
    string leftLang = streams[currentIndex].Language.Substring(0, 3);
    string rightLang = streams[currentIndex].Language.Substring(3, 3);

    int leftRank = _preferredLanguages.IndexOf(leftLang);
    if (leftRank >= 0 && leftRank < priority)
    {
        chosenMode = eAudioDualMonoMode.LEFT_MONO;
        mpegBasedOnLang = leftLang;
        idxStreamIndexmpeg = currentIndex;
        priority = leftRank;
    }

    int rightRank = _preferredLanguages.IndexOf(rightLang);
    if (rightRank >= 0 && rightRank < priority)
    {
        chosenMode = eAudioDualMonoMode.RIGHT_MONO;
        mpegBasedOnLang = rightLang;
        idxStreamIndexmpeg = currentIndex;
        priority = rightRank;
    }

    return chosenMode;
}
/// <summary>
/// Returns the index of the first non-AC3 (i.e. MPEG) stream in
/// <paramref name="streams"/>, or -1 when every stream is (E)AC-3.
/// </summary>
private static int GetFirstMpegIndex(IAudioStream[] streams)
{
    for (int index = 0; index < streams.Length; index++)
    {
        if (!IsStreamAC3(streams[index]))
        {
            return index;
        }
    }

    // No MPEG stream present.
    return -1;
}
/// <summary>
/// Determines whether the given stream carries AC-3 or E-AC-3 audio.
/// </summary>
private static bool IsStreamAC3(IAudioStream stream)
{
    switch (stream.StreamType)
    {
        case AudioStreamType.AC3:
        case AudioStreamType.EAC3:
            return true;
        default:
            return false;
    }
}
/// <summary>
/// If stream <paramref name="i"/>'s language is one of the preferred
/// languages, records it as the best AC3 or best MPEG candidate — whichever
/// family it belongs to — when it beats (or is the first holder of) that
/// family's current language priority. Lower priority index = more preferred.
/// </summary>
private static void UpdateAudioStreamIndexesBasedOnLang(IAudioStream[] streams, int i, ref int idxStreamIndexmpeg, ref int idxStreamIndexAc3, ref string mpegBasedOnLang, ref int idxLangPriAc3, ref int idxLangPrimpeg, ref string ac3BasedOnLang)
{
    int langRank = _preferredLanguages.IndexOf(streams[i].Language);
    string selectedLang = streams[i].Language;
    Log.Debug("Stream {0} lang {1}, lang priority index {2}", i, selectedLang, langRank);

    // Not a preferred language: nothing to record.
    if (langRank < 0)
    {
        return;
    }

    if (IsStreamAC3(streams[i]))
    {
        // First AC3 candidate (-1 sentinel) or strictly better rank than the
        // current AC3 pick.
        if (idxLangPriAc3 == -1 || langRank < idxLangPriAc3)
        {
            Log.Debug("Setting AC3 pref");
            idxStreamIndexAc3 = i;
            idxLangPriAc3 = langRank;
            ac3BasedOnLang = selectedLang;
        }
    }
    else if (idxLangPrimpeg == -1 || langRank < idxLangPrimpeg)
    {
        // Same rule for the MPEG family.
        Log.Debug("Setting mpeg pref");
        idxStreamIndexmpeg = i;
        idxLangPrimpeg = langRank;
        mpegBasedOnLang = selectedLang;
    }
}
/// <summary>
/// Walks all streams and, when a preferred audio language is configured,
/// updates the best MPEG / AC3 candidate indexes by language. Dual-mono
/// streams are resolved via <see cref="GetDualMonoMode"/>; the scan stops at
/// the first stream that yields a supported dual-mono mode.
/// </summary>
private static void UpdateAudioStreamIndexesAndPrioritiesBasedOnLanguage(IAudioStream[] streams, int priority, ref int idxStreamIndexmpeg, ref string mpegBasedOnLang, ref int idxStreamIndexAc3, int idxLangPriAc3, int idxLangPrimpeg, ref string ac3BasedOnLang, out eAudioDualMonoMode dualMonoMode)
{
    // Default for the out parameter, also used when no preference is set.
    dualMonoMode = eAudioDualMonoMode.UNSUPPORTED;

    if (!IsPreferredAudioLanguageAvailable())
    {
        return;
    }

    for (int streamIdx = 0; streamIdx < streams.Length; streamIdx++)
    {
        if (ShouldApplyDualMonoMode(streams[streamIdx].Language))
        {
            dualMonoMode = GetDualMonoMode(streams, streamIdx, ref priority, ref idxStreamIndexmpeg, ref mpegBasedOnLang);
            if (dualMonoMode != eAudioDualMonoMode.UNSUPPORTED)
            {
                // A usable dual-mono channel was found; stop scanning.
                break;
            }
        }
        else
        {
            // Lower priority value means a more preferred language.
            UpdateAudioStreamIndexesBasedOnLang(streams, streamIdx, ref idxStreamIndexmpeg, ref idxStreamIndexAc3, ref mpegBasedOnLang, ref idxLangPriAc3, ref idxLangPrimpeg, ref ac3BasedOnLang);
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioStreamParsedEventArgs"/> class.
/// </summary>
/// <param name="audioStream">The parsed audio stream to expose to event subscribers.</param>
public AudioStreamParsedEventArgs(IAudioStream audioStream)
{
    this.AudioStream = audioStream;
}
// Loads the WAV file at `path` into the player: creates the playback buffer
// and initializes every UI control (track bar, loop bounds, labels).
// Always returns true; failures surface as exceptions from WAV.FromFile /
// CreateBuffer rather than a false return.
private bool LoadAudio(string path)
{
    // Release any previously loaded source before replacing it.
    DisposeSource();

    //Get audio stream
    _sourceStream = WAV.FromFile(path);
    _audioSource = path;

    //Create buffer for stream
    _buffer = _provider.CreateBuffer(_sourceStream);
    _buffer.Loop = chkLoop.Checked;

    //Set controls
    // Duration as DateTime ticks: samples / frequency = seconds; ×10,000,000
    // converts seconds to 100 ns ticks. The (long) cast guards the multiply
    // against int overflow for long files.
    _sampleTime = new DateTime((long)_sourceStream.Samples * 10000000 / _sourceStream.Frequency);
    txtPath.Text = path;
    lblFrequency.Text = String.Format("{0} Hz", _sourceStream.Frequency);
    lblSamples.Text = String.Format("{0}", _sourceStream.Samples);
    customTrackBar1.Value = 0;
    // TickStyle is switched to None while Maximum/TickFrequency change —
    // presumably to avoid redrawing every tick during the update; restored to
    // BottomRight afterwards (NOTE(review): confirm this is the intent).
    customTrackBar1.TickStyle = TickStyle.None;
    customTrackBar1.Maximum = _sourceStream.Samples;
    customTrackBar1.TickFrequency = _sourceStream.Samples / 8;
    customTrackBar1.TickStyle = TickStyle.BottomRight;
    // Loop range defaults to the whole track.
    numLoopStart.Value = 0;
    numLoopStart.Maximum = numLoopEnd.Maximum = _sourceStream.Samples;
    numLoopEnd.Value = _sourceStream.Samples;
    pnlLoopStart.Width = 0;
    pnlLoopEnd.Width = 0;
    btnOkay.Enabled = true;
    // _type == 0 auto-enables looping; the meaning of _type is defined
    // elsewhere in this class — TODO confirm which mode 0 denotes.
    if (_type == 0)
        chkLoopEnable.Checked = true;

    UpdateTimeDisplay();

    return true;
}
/// <summary>
/// Hands a new audio stream to the underlying player, resetting the
/// current-track submission state. No-op when no player is attached.
/// </summary>
/// <param name="audioStream">The stream to load.</param>
public void LoadAudioStream(IAudioStream audioStream)
{
    if (audioPlayer == null)
    {
        return;
    }

    // A new track has not been submitted (scrobbled) yet.
    currentTrackSubmitted = false;
    currentTrackStartTime = DateTime.MinValue;

    audioPlayer.LoadAudioStream(audioStream);
}