// Plays a PCM wave file synchronously on the given XAudio2 device until it
// finishes playing or the user presses Escape.
static void PlayPCM(XAudio2 device, string fileName)
{
    //WaveStream stream = new WaveStream(fileName);
    var s = System.IO.File.OpenRead(fileName);
    WaveStream stream = new WaveStream(s);
    // NOTE(review): the file stream is closed immediately after constructing
    // the WaveStream, which assumes WaveStream buffered the data it needs —
    // confirm against the WaveStream implementation in use.
    s.Close();

    // Describe a single buffer covering the whole file.
    AudioBuffer buffer = new AudioBuffer();
    buffer.AudioData = stream;
    buffer.AudioBytes = (int)stream.Length;
    buffer.Flags = BufferFlags.EndOfStream;

    SourceVoice sourceVoice = new SourceVoice(device, stream.Format);
    sourceVoice.SubmitSourceBuffer(buffer);
    sourceVoice.Start();

    // loop until the sound is done playing
    while (sourceVoice.State.BuffersQueued > 0)
    {
        // Escape aborts playback early.
        if (GetAsyncKeyState(VK_ESCAPE) != 0)
            break;
        Thread.Sleep(10);
    }

    // wait until the escape key is released
    while (GetAsyncKeyState(VK_ESCAPE) != 0)
        Thread.Sleep(10);

    // cleanup the voice
    buffer.Dispose();
    sourceVoice.Dispose();
    stream.Dispose();
}
/// <summary>
/// Add a new input to the mixer
/// </summary>
/// <param name="waveStream">The wave input to add</param>
public void AddInputStream(WaveStream waveStream)
{
    // The mixer works on 32-bit IEEE float samples only.
    if (waveStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Must be IEEE floating point", "waveStream");
    if (waveStream.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Only 32 bit audio currently supported", "waveStream");

    if (inputStreams.Count == 0)
    {
        // first one - set the format
        int sampleRate = waveStream.WaveFormat.SampleRate;
        int channels = waveStream.WaveFormat.Channels;
        this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
    }
    else
    {
        // Every subsequent input must match the established mixer format exactly.
        if (!waveStream.WaveFormat.Equals(waveFormat))
            throw new ArgumentException("All incoming channels must have the same format", "waveStream");
    }

    lock (inputsLock)
    {
        this.inputStreams.Add(waveStream);
        // The mixer output is as long as its longest input.
        this.length = Math.Max(this.length, waveStream.Length);
        // get to the right point in this input file
        waveStream.Position = Position;
    }
}
// Advances the stream by the given number of seconds; no-op for non-positive offsets.
private void SeekToSecondInCaseIfRequired(double startAtSecond, WaveStream stream)
{
    // Nothing to do when no offset was requested.
    if (startAtSecond <= 0)
    {
        return;
    }

    // Shift the stream's current time forward by the requested amount.
    stream.CurrentTime += TimeSpan.FromSeconds(startAtSecond);
}
/// <summary>
/// WaveStream to resample using the DMO Resampler
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <param name="outputFormat">Desired Output Format</param>
/// <exception cref="ArgumentException">
/// Thrown when the resampler does not support the input or output format.
/// </exception>
public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
{
    this.inputProvider = inputProvider;
    // Keep a WaveStream view when available so we can track/mirror its position.
    this.inputStream = inputProvider as WaveStream;
    this.outputFormat = outputFormat;
    this.dmoResampler = new DmoResampler();

    // FIX: the ArgumentException parameter names previously referenced
    // non-existent parameters ("inputStream"/"outputStream"); use nameof so
    // they always match the real signature.
    if (!dmoResampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
    {
        throw new ArgumentException("Unsupported Input Stream format", nameof(inputProvider));
    }
    dmoResampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);

    if (!dmoResampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
    {
        throw new ArgumentException("Unsupported Output Stream format", nameof(outputFormat));
    }
    dmoResampler.MediaObject.SetOutputWaveFormat(0, outputFormat);

    if (inputStream != null)
    {
        // Start at the output-space equivalent of the input's current position.
        position = InputToOutputPosition(inputStream.Position);
    }

    // Size working buffers for about one second of audio each.
    this.inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
    this.outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
}
// Opens the given file and starts playback on a fresh WaveOut device.
public int PlayFile(string filename)
{
    // Build the decoding chain first, then hand it to a new output device.
    mainOutputStream = CreateInputStream(filename);
    waveOutDevice = new WaveOut();
    waveOutDevice.Init(mainOutputStream);
    waveOutDevice.Play();

    // Always reports success.
    return 0;
}
// Seeks the stream to the given start offset in seconds; no-op for
// non-positive offsets.
private void SeekToSecondInCaseIfRequired(int startAtSecond, WaveStream stream)
{
    if (startAtSecond <= 0)
    {
        return;
    }

    // FIX: the previous computation (sampleRate * bitsPerSample / 8 * seconds)
    // ignored the channel count, seeking multi-channel streams to half (or
    // less) of the intended position. AverageBytesPerSecond accounts for both
    // sample size and channels.
    long bytePosition = (long)stream.WaveFormat.AverageBytesPerSecond * startAtSecond;

    // Align to a block boundary so the seek never lands mid sample-frame.
    bytePosition -= bytePosition % stream.WaveFormat.BlockAlign;

    stream.Seek(bytePosition, System.IO.SeekOrigin.Begin);
}
/// <summary>
/// Create a new simple compressor stream
/// </summary>
/// <param name="sourceStream">Source stream</param>
public SimpleCompressorStream(WaveStream sourceStream)
{
    this.sourceStream = sourceStream;

    var format = sourceStream.WaveFormat;
    this.channels = format.Channels;
    this.bytesPerSample = format.BitsPerSample / 8;

    // 5ms attack, 10ms release, at the source sample rate; fixed default
    // threshold/ratio/make-up gain.
    simpleCompressor = new SimpleCompressor(5.0, 10.0, format.SampleRate)
    {
        Threshold = 16,
        Ratio = 6,
        MakeUpGain = 16
    };
}
/// <summary>
/// Initializes a new instance of the <see cref="Wave"/> class, loading a wave file and creating an audio buffer from it.
/// </summary>
/// <param name="path">The path to the wave file.</param>
public Wave(string path)
{
    // Code-contract precondition: fail fast with an ArgumentException when the file is missing.
    Contract.Requires<ArgumentException>(System.IO.File.Exists(path),"Parameter path must match an existing file");

    // The buffer wraps the stream's entire contents as one end-of-stream buffer.
    Data = new WaveStream(path);
    Buffer = new AudioBuffer()
    {
        AudioData = Data,
        AudioBytes = (int)Data.Length,
        Flags = BufferFlags.EndOfStream
    };
}
/// <summary>
/// Creates a new Wave32To16Stream
/// </summary>
/// <param name="sourceStream">the source stream</param>
public Wave32To16Stream(WaveStream sourceStream)
{
    // Only 32-bit IEEE float input can be narrowed to 16-bit PCM here.
    if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
        throw new ArgumentException("Only 32 bit Floating point supported");
    if (sourceStream.WaveFormat.BitsPerSample != 32)
        throw new ArgumentException("Only 32 bit Floating point supported");

    // Output: 16-bit PCM at the same rate and channel count.
    waveFormat = new WaveFormat(sourceStream.WaveFormat.SampleRate, 16, sourceStream.WaveFormat.Channels);

    this.volume = 1.0f;
    this.sourceStream = sourceStream;

    // 32-bit samples shrink to 16-bit, so byte counts halve.
    length = sourceStream.Length / 2;
    position = sourceStream.Position / 2;
}
/// <summary>
/// Creates a new WaveOffsetStream
/// </summary>
/// <param name="sourceStream">the source stream</param>
/// <param name="startTime">the time at which we should start reading from the source stream</param>
/// <param name="sourceOffset">amount to trim off the front of the source stream</param>
/// <param name="sourceLength">length of time to play from source stream</param>
public WaveOffsetStream(WaveStream sourceStream, TimeSpan startTime, TimeSpan sourceOffset, TimeSpan sourceLength)
{
    // TODO: add support for IEEE float + perhaps some others -
    // anything with a fixed bytes per sample
    if (sourceStream.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
        throw new ArgumentException("Only PCM supported");

    this.sourceStream = sourceStream;
    this.StartTime = startTime;
    this.SourceOffset = sourceOffset;
    this.SourceLength = sourceLength;

    // Size of one full sample frame across all channels.
    bytesPerSample = sourceStream.WaveFormat.Channels * (sourceStream.WaveFormat.BitsPerSample / 8);
    position = 0;
}
/// <summary>
/// Initializes a new WaveOutAudioSource class.
/// </summary>
/// <param name="waveStream">The WaveStream; fully read into memory and closed.</param>
internal WaveOutAudioSource(WaveStream waveStream)
{
    // Pull the entire stream into memory from the beginning.
    waveStream.Seek(0, SeekOrigin.Begin);
    WaveData = new byte[waveStream.Length];
    waveStream.Read(WaveData, 0, WaveData.Length);

    // FIX: capture the format BEFORE closing the stream. The original read
    // Format after Close(), relying on the stream remaining usable after
    // disposal.
    WaveFormat = waveStream.Format;
    waveStream.Close();

    if (WaveFormat.Channels == 1) //try to convert to stereo for full audiomixer support
    {
        try
        {
            new MonoToStereoConverter().ConvertAudioData(WaveData, ref WaveFormat);
        }
        catch (NotSupportedException)
        {
            // Conversion unsupported: keep the mono data as-is.
        }
    }
}
/// <summary>
/// Creates a stream that can convert to PCM
/// </summary>
/// <param name="sourceStream">The source stream</param>
/// <returns>A PCM stream</returns>
public static WaveStream CreatePcmStream(WaveStream sourceStream)
{
    // Already PCM: nothing to convert.
    if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
    {
        return sourceStream;
    }

    // Ask the ACM to suggest a PCM target format for this codec.
    WaveFormat pcmFormat = AcmStream.SuggestPcmFormat(sourceStream.WaveFormat);
    if (pcmFormat.SampleRate < 8000)
    {
        // NOTE(review): sub-8kHz suggestions are treated as invalid; G.723 gets
        // a hard-coded 8kHz/16-bit/mono fallback — confirm this matches the
        // codecs actually in use.
        if (sourceStream.WaveFormat.Encoding == WaveFormatEncoding.G723)
        {
            pcmFormat = new WaveFormat(8000, 16, 1);
        }
        else
        {
            throw new InvalidOperationException("Invalid suggested output format, please explicitly provide a target format");
        }
    }
    return new WaveFormatConversionStream(pcmFormat, sourceStream);
}
/// <summary>
/// Creates a new WaveChannel32
/// </summary>
/// <param name="sourceStream">the source stream</param>
/// <param name="volume">stream volume (1 is 0dB)</param>
/// <param name="pan">pan control (-1 to 1)</param>
public WaveChannel32(WaveStream sourceStream, float volume, float pan)
{
    PadWithZeroes = true;

    // One candidate converter per supported source sample format.
    var converters = new ISampleChunkConverter[]
    {
        new Mono8SampleChunkConverter(),
        new Stereo8SampleChunkConverter(),
        new Mono16SampleChunkConverter(),
        new Stereo16SampleChunkConverter(),
        new Mono24SampleChunkConverter(),
        new Stereo24SampleChunkConverter(),
        new MonoFloatSampleChunkConverter(),
        new StereoFloatSampleChunkConverter(),
    };

    // Pick the first converter that understands the source format.
    for (int i = 0; this.sampleProvider == null && i < converters.Length; i++)
    {
        if (converters[i].Supports(sourceStream.WaveFormat))
        {
            this.sampleProvider = converters[i];
        }
    }

    if (this.sampleProvider == null)
    {
        throw new ArgumentException("Unsupported sourceStream format");
    }

    // Output is always stereo IEEE float at the source sample rate.
    waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sourceStream.WaveFormat.SampleRate, 2);
    destBytesPerSample = 8; // 4 bytes per float sample x 2 channels

    this.sourceStream = sourceStream;
    this.volume = volume;
    this.pan = pan;

    // Bytes per full sample frame in the source.
    sourceBytesPerSample = sourceStream.WaveFormat.Channels * sourceStream.WaveFormat.BitsPerSample / 8;
    length = SourceToDest(sourceStream.Length);
    position = 0;
}
/// <summary>
/// Create a new WaveFormat conversion stream
/// </summary>
/// <param name="targetFormat">Desired output format</param>
/// <param name="sourceStream">Source stream</param>
public WaveFormatConversionStream(WaveFormat targetFormat, WaveStream sourceStream)
{
    this.sourceStream = sourceStream;
    this.targetFormat = targetFormat;

    conversionStream = new AcmStream(sourceStream.WaveFormat, targetFormat);

    // Estimate the converted length rather than asking the ACM directly
    // (SourceToDest can fail for some codec combinations).
    length = EstimateSourceToDest((int)sourceStream.Length);
    position = 0;

    // Read roughly a second at a time, capped by the ACM source buffer and
    // trimmed down to a whole number of blocks.
    preferredSourceReadSize = Math.Min(sourceStream.WaveFormat.AverageBytesPerSecond, conversionStream.SourceBuffer.Length);
    preferredSourceReadSize -= preferredSourceReadSize % sourceStream.WaveFormat.BlockAlign;
}
// Plays the Windows system sound registered under this object's Name via
// XAudio2, choosing per-display channel balances for the screen the form
// currently overlaps most.
public void Play( Form on )
{
    var screens = Screen.AllScreens;
    var screens_left = screens.Min( screen => screen.Bounds.Left );
    var screens_right = screens.Max( screen => screen.Bounds.Right );
    // NOTE(review): screens_width is computed but not used in this method.
    var screens_width = screens_right-screens_left;

    // Pick the screen whose bounds overlap the form the most.
    var bestScreen = screens.OrderByDescending( screen => {
        var area = screen.Bounds;
        area.Intersect( on.Bounds );
        return area.Width*area.Height;
    }).First();

    // Default stereo balance; overridden per display name on triple-monitor setups.
    var balances = new[]{1.5f,1.5f};
    if ( screens.Length==3 && DisplayBalances.ContainsKey(bestScreen.DeviceName) ) balances = DisplayBalances[bestScreen.DeviceName];

    // Resolve the wave file path registered for this system sound event.
    var path = Registry.CurrentUser.OpenSubKey(@"AppEvents\Schemes\Apps\.Default\"+Name+@"\.Current").GetValue(null) as string;
    var stream = new WaveStream(path);
    var buffer = new AudioBuffer() { AudioBytes=(int)stream.Length, AudioData=stream, Flags=BufferFlags.EndOfStream };

    var voice = new SourceVoice( XAudio2, stream.Format );
    voice.SubmitSourceBuffer( buffer );
    voice.SetChannelVolumes( balances.Length, balances );
    voice.BufferEnd += (sender,ctx) => {
        // Dispose everything on the UI thread once playback completes.
        try {
            on.BeginInvoke(new Action(()=>{
                voice.Dispose();
                buffer.Dispose();
                stream.Dispose();
            }));
        } catch ( InvalidOperationException ) {
            // herp derp on must be disposed/gone
        }
    };
    voice.Start();
}
/// <summary>
/// Disposes this WaveStream
/// </summary>
protected override void Dispose(bool disposing)
{
    // Release the wrapped stream exactly once when disposing deterministically.
    if (disposing && sourceStream != null)
    {
        sourceStream.Dispose();
        sourceStream = null;
    }
    base.Dispose(disposing);
}
// Loads a wave file into a DirectSound secondary buffer and plays it on a
// worker thread. Returns the generated buffer name, or "" on failure.
private static string _PlayWavFile(string fileName, double duration)
{
    try
    {
        Initialise();

        WaveStream waveFile = new WaveStream(fileName);

        // Describe a pan/volume-controllable buffer sized to the whole file.
        SoundBufferDescription soundBufferDescription = new SoundBufferDescription();
        soundBufferDescription.Format = waveFile.Format;
        soundBufferDescription.Flags = BufferFlags.Defer | BufferFlags.ControlVolume | BufferFlags.ControlPan;
        soundBufferDescription.SizeInBytes = (int)waveFile.Length;

        SecondarySoundBuffer secondarySoundBuffer = new SecondarySoundBuffer(directSound, soundBufferDescription);
        secondarySoundBuffer.Pan = pan;
        secondarySoundBuffer.Volume = volume;

        // Copy the raw samples out of the file and into the DirectSound buffer.
        byte[] rawsamples = new byte[soundBufferDescription.SizeInBytes];
        waveFile.Read(rawsamples, 0, soundBufferDescription.SizeInBytes);
        waveFile.Close();
        secondarySoundBuffer.Write(rawsamples, 0, LockFlags.EntireBuffer);

        // Track the buffer and play it on a background thread; block until done
        // when synchronous playback was requested.
        string name = NextName();
        Buffer buffer = new Buffer(name, secondarySoundBuffer, 0, duration);
        buffers.Add(buffer);
        Thread thread = new Thread(new ParameterizedThreadStart(DoPlay));
        thread.Start(buffer);
        if (!bAsync) thread.Join();
        return name;
    }
    catch (Exception ex)
    {
        // Best-effort: report the error and return an empty name.
        Utilities.OnError(Utilities.GetCurrentMethod(), ex);
        return "";
    }
}
// Stops playback and tears down the output chain: stop the device, close the
// stream, then dispose the device. The stream id parameter is unused — there
// is a single output chain.
public void StopPlayingFile(int stream)
{
    // Halt playback first so the device is no longer reading from the stream.
    var device = waveOutDevice;
    if (device != null)
    {
        device.Stop();
    }

    var output = mainOutputStream;
    if (output != null)
    {
        mainOutputStream = null;
        output.Close();
    }

    if (device != null)
    {
        waveOutDevice = null;
        device.Dispose();
    }
}
// Adapts an NAudio WaveStream into this project's audio-source abstraction,
// translating the wave format on construction.
public NAudioWaveStreamToStreamAudioSourceAdapter(WaveStream stream)
{
    SourceStream = stream;
    Format = NAudioUtilities.FromNAudioWaveFormat(stream.WaveFormat);
}
// Simple value holder pairing a player with the stream it is playing.
public PlayFileAttributes(IWavePlayer wavePlayer, WaveStream waveStream)
{
    this.WavePlayer = wavePlayer;
    this.WaveStream = waveStream;
}
// Wraps the source stream, defaulting the trim window's end to the full
// stream duration (i.e. no trimming until EndPosition is changed).
public TrimWaveStream(WaveStream source)
{
    this.source = source;
    EndPosition = source.TotalTime;
}
/// <summary>
/// Creates a MeteringStream that reports levels ten times per second
/// (every SampleRate / 10 samples).
/// </summary>
/// <param name="sourceStream">The source stream to meter.</param>
public MeteringStream(WaveStream sourceStream) :
    this(sourceStream, sourceStream.WaveFormat.SampleRate / 10)
{
}
// Plays this sound on the manager's XAudio2 device, caching the decoded
// WaveStream in the manager's dictionary so each file loads only once.
public void Play()
{
    WaveStream stream;
    if (!soundManager.SoundDictionary.ContainsKey(filename))
    {
        // Add our sound to the sound library
        var s = System.IO.File.OpenRead(Path.Combine("Assets", filename));
        stream = new WaveStream(s);
        // NOTE(review): closing the file right away assumes WaveStream buffered
        // its contents — confirm against the WaveStream implementation in use.
        s.Close();
        soundManager.SoundDictionary[filename] = stream;
    }
    else
    {
        // Reuse the previously decoded stream.
        stream = soundManager.SoundDictionary[filename];
    }

    WaveFormat format = stream.Format;
    buffer = new AudioBuffer();
    buffer.AudioData = stream;
    buffer.AudioBytes = (int)stream.Length;
    buffer.Flags = BufferFlags.EndOfStream;
    // Rewind so replays of a cached stream start from the beginning.
    buffer.AudioData.Position = 0;

    if (Looping == true)
    {
        buffer.LoopCount = XAudio2.LoopInfinite;
        buffer.LoopLength = 0;
    }

    currentlyPlaying = new SourceVoice(soundManager.device, format);
    currentlyPlaying.Volume = this.Volume;
    // Clear the playing flag once the buffer has fully played.
    currentlyPlaying.BufferEnd += (s, e) => playing = false;
    currentlyPlaying.Start();
    currentlyPlaying.SubmitSourceBuffer(buffer);
    playing = true;
}
// Loads a PCM wave file into an XAudio2 AudioBuffer and returns it wrapped
// in a Sound, logging how long the load took.
public Sound LoadPCM(string fileName)
{
    // FIX: use a monotonic Stopwatch for timing; wall-clock DateTime.Now can
    // jump (DST, NTP adjustments) and skew the measurement.
    var timer = System.Diagnostics.Stopwatch.StartNew();
    Console.WriteLine("LoadPCM() start");

    WaveStream stream;
    // FIX: 'using' guarantees the file handle is released even if the
    // WaveStream constructor throws (the original leaked it on failure).
    // Closing immediately assumes WaveStream buffers the data it needs — the
    // original code made the same assumption by calling s.Close() here.
    using (var file = System.IO.File.OpenRead(fileName))
    {
        stream = new WaveStream(file);
    }

    // One buffer covering the entire stream.
    var buffer = new AudioBuffer
    {
        AudioData = stream,
        AudioBytes = (int)stream.Length,
        Flags = BufferFlags.EndOfStream
    };

    timer.Stop();
    Console.WriteLine("LoadPCM() end (" + timer.Elapsed.TotalMilliseconds + " ms)");

    return new Sound { Buffer = buffer, Stream = stream, Program = this };
}
// Loads an externally supplied stream, wrapping it in a TrimWaveStream unless
// the caller already provided one.
public void LoadStream(WaveStream stream)
{
    var trimmed = stream as TrimWaveStream ?? new TrimWaveStream(stream);
    InternalLoad(trimmed);

    // The stream came from outside, so this instance does not own its creation.
    _wasStreamCreatedLocally = false;
}
// Creates a looping wrapper around the source stream; loop gives the repeat
// count (default 1).
public LoopStream(Logger log, WaveStream sourceStream, int loop = 1)
{
    this.log = log;
    this.sourceStream = sourceStream;
    this.Loop = loop;
}
/// <summary>
/// Creates an input WaveChannel
/// (Audio file reader for MP3/WAV/OGG/FLAC/WMA/AIFF/Other formats in the future)
/// </summary>
/// <param name="filename">Path to the audio file; format chosen by extension.</param>
/// <returns>A WaveChannel32 wrapping the decoded stream.</returns>
/// <exception cref="ApplicationException">Thrown for unknown file extensions.</exception>
public static WaveStream CreateInputWaveChannel(string filename)
{
    // NOTE: culture-sensitive ToLower kept for backward compatibility with the
    // extension constants used elsewhere in this class.
    string fileExt = Path.GetExtension(filename.ToLower());

    WaveStream reader;
    if (fileExt == MP3Extension)
    {
        // MP3 decoding can produce odd block sizes; always realign.
        reader = new BlockAlignReductionStream(new Mp3FileReader(filename));
    }
    else if (fileExt == WAVExtension)
    {
        reader = EnsurePcm16(new WaveFileReader(filename));
    }
    else if (fileExt == OGGVExtension)
    {
        reader = EnsurePcm16(new OggFileReader(filename));
    }
    else if (fileExt == FLACExtension)
    {
        reader = EnsurePcm16(new FLACFileReader(filename));
    }
    else if (fileExt == WMAExtension)
    {
        reader = EnsurePcm16(new WMAFileReader(filename));
    }
    else if (fileExt == AIFFExtension)
    {
        // AIFF reader output is fed to WaveChannel32 without conversion,
        // matching the original behavior.
        reader = new AiffFileReader(filename);
    }
    else
    {
        throw new ApplicationException("Cannot create Input WaveChannel - Unknown file type: " + fileExt);
    }

    // Wave channel - reads from file and returns raw wave blocks
    return new WaveChannel32(reader);
}

/// <summary>
/// Converts a reader to 16-bit PCM when necessary: non-PCM input goes through
/// an ACM PCM conversion plus block realignment, then any non-16-bit PCM is
/// converted to 16-bit at the same rate and channel count.
/// (Extracted from the four identical copies the original method contained.)
/// </summary>
private static WaveStream EnsurePcm16(WaveStream reader)
{
    if (reader.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
    {
        reader = WaveFormatConversionStream.CreatePcmStream(reader);
        reader = new BlockAlignReductionStream(reader);
    }
    if (reader.WaveFormat.BitsPerSample != 16)
    {
        var format = new WaveFormat(reader.WaveFormat.SampleRate, 16, reader.WaveFormat.Channels);
        reader = new WaveFormatConversionStream(format, reader);
    }
    return reader;
}
/*********
** Private methods
*********/
/// <summary>Load in the music files from the pack's respective Directory/Songs folder. Typically Content/Music/Wav/FolderName/Songs</summary>
private void LoadMusicFiles()
{
    DateTime startTime = DateTime.Now;

    DirectoryInfo songFolder = new DirectoryInfo(Path.Combine(this.ContentPack.DirectoryPath, this.MusicFolderName));
    foreach (FileInfo file in songFolder.GetFiles())
    {
        // Skip files whose name (without extension) is already loaded.
        string name = Path.GetFileNameWithoutExtension(file.Name);
        if (this.Sounds.ContainsKey(name))
        {
            continue;
        }

        // Load the audio data; non-WAV formats are converted via a temporary
        // WAV file which is deleted afterwards.
        SoundEffect effect = null;
        using (Stream waveFileStream = File.OpenRead(file.FullName))
        {
            switch (file.Extension)
            {
                case ".wav":
                    effect = SoundEffect.FromStream(waveFileStream);
                    break;

                case ".mp3":
                    using (Mp3FileReader reader = new Mp3FileReader(waveFileStream))
                    using (WaveStream pcmStream = WaveFormatConversionStream.CreatePcmStream(reader))
                    {
                        string tempPath = Path.Combine(songFolder.FullName, $"{name}.wav");
                        ModCore.ModMonitor.Log($"Converting: {tempPath}");
                        WaveFileWriter.CreateWaveFile(tempPath, pcmStream);
                        using (Stream tempStream = File.OpenRead(tempPath))
                            effect = SoundEffect.FromStream(tempStream);
                        File.Delete(tempPath);
                    }
                    break;

                case ".ogg":
                    // Credits: https://social.msdn.microsoft.com/Forums/vstudio/en-US/100a97af-2a1c-4b28-b464-d43611b9b5d6/converting-multichannel-ogg-to-stereo-wav-file?forum=csharpgeneral
                    using (VorbisWaveReader vorbisStream = new VorbisWaveReader(file.FullName))
                    {
                        string tempPath = Path.Combine(songFolder.FullName, $"{name}.wav");
                        ModCore.DebugLog($"Converting: {tempPath}");
                        WaveFileWriter.CreateWaveFile(tempPath, vorbisStream.ToWaveProvider16());
                        using (Stream tempStream = File.OpenRead(tempPath))
                            effect = SoundEffect.FromStream(tempStream);
                        File.Delete(tempPath);
                    }
                    break;

                default:
                    ModCore.ModMonitor.Log($"Unsupported file extension {file.Extension}.", LogLevel.Warn);
                    break;
            }
        }
        if (effect == null)
        {
            continue;
        }

        // Register a playable instance of the loaded sound.
        SoundEffectInstance instance = effect.CreateInstance();
        this.Sounds.Add(name, instance);
    }

    // Log loading time.
    // FIX: the original logged startTime.Subtract(DateTime.Now), which is a
    // negative duration; subtract in the correct order.
    if (ModCore.Config.EnableDebugLog)
    {
        ModCore.ModMonitor.Log($"Time to load WAV music pack {this.Name}: {DateTime.Now.Subtract(startTime)}");
    }
}
/// <summary>
/// Read mono from file
/// </summary>
/// <param name = "filename">Name of the file</param>
/// <param name = "samplerate">Sample rate</param>
/// <param name = "milliseconds">milliseconds to read (non-positive means read everything)</param>
/// <param name = "startmillisecond">Start millisecond</param>
/// <param name = "channelToUse">1-based channel to keep for stereo input (default: left)</param>
/// <returns>Array of samples, or null when the file is shorter than the requested window</returns>
public static float[] ReadMonoFromFileOld(string filename, int samplerate, int milliseconds, int startmillisecond, int channelToUse = 1)
{
    // milliseconds <= 0 means "no limit".
    int totalmilliseconds = milliseconds <= 0 ? Int32.MaxValue : milliseconds + startmillisecond;
    float[] data = null;

    // read as stereo file - do the mono thing later
    List<float> floatList = new List<float>();
    WaveFormat waveFormat = new WaveFormat(samplerate, 2);

    int sampleCount = 0;

    // FIX: dispose the resampled stream when done (the original leaked it).
    using (WaveStream wave32 = ResampleToWaveStream(filename, waveFormat))
    {
        // check the input file number of channels
        if (wave32.WaveFormat.Channels == 1)
        {
            // Already mono: copy samples until the requested window is covered.
            float sampleValue = 0;
            while (TryReadFloat(wave32, out sampleValue) == true
                   && (float)(sampleCount) / samplerate * 1000 < totalmilliseconds)
            {
                floatList.Add(sampleValue);
                sampleCount++;
            }
            data = floatList.ToArray();
        }
        else if (wave32.WaveFormat.Channels == 2)
        {
            // Stereo: keep one channel (channelToUse). The averaged mono value
            // is computed but currently unused.
            float sampleValueLeft = 0;
            float sampleValueRight = 0;
            float sampleValueMono = 0;
            while (TryReadFloat(wave32, out sampleValueLeft, out sampleValueRight) == true
                   && (float)(sampleCount) / samplerate * 1000 < totalmilliseconds)
            {
                // TODO: Use the summed mono signal to anything?
                sampleValueMono = (sampleValueLeft + sampleValueRight) / 2;

                // return channel 1 as default
                if (channelToUse == 2)
                {
                    floatList.Add(sampleValueRight);
                }
                else
                {
                    floatList.Add(sampleValueLeft);
                }
                sampleCount++;
            }
            data = floatList.ToArray();
        }
    }

    if ((float)(sampleCount) / samplerate * 1000 < (milliseconds + startmillisecond))
    {
        // not enough samples to return the requested data
        return null;
    }

    // Select specific part of the song
    int start = (int)((float)startmillisecond * samplerate / 1000);
    int end = (milliseconds <= 0) ? sampleCount : (int)((float)(startmillisecond + milliseconds) * samplerate / 1000);
    if (start != 0 || end != sampleCount)
    {
        float[] temp = new float[end - start];
        Array.Copy(data, start, temp, 0, end - start);
        data = temp;
    }

    return data;
}
// Builds the playback chain for the configured MP3: decode to PCM, realign
// blocks, and initialize a callback-driven WaveOut device.
private void LoadMusic()
{
    var mp3Reader = new Mp3FileReader(musicPath);
    var pcmStream = WaveFormatConversionStream.CreatePcmStream(mp3Reader);
    blockAlignedStream = new BlockAlignReductionStream(pcmStream);

    waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback());
    waveOut.Init(blockAlignedStream);
}
// Cycles through the queued data buffers while playback is active.
// NOTE(review): this looks unfinished — a WaveStream is built per buffer but
// never submitted to the source voice (the AudioBuffer code is commented out),
// and bytesPerSample is computed but unused.
public void Playback(SourceVoice sourceVoice)
{
    playing = true;
    int currentByteArrayIndex = 0;
    //WaveStream waveStream = new WaveStream(null, 1234)
    while (playing)
    {
        // Block until the producer signals the next buffer is filled.
        bufferReady.WaitOne();
        DataStream ds = dataBuffers[currentByteArrayIndex];
        WaveStream stream = new WaveStream(ds);
        int bytesPerSample = stream.Format.Channels * stream.Format.BitsPerSample / 8;
        //AudioBuffer buffer = new AudioBuffer
        //{
        //    AudioData = stream,
        //    AudioBytes = (int)stream.Length,
        //    PlayBegin = 0,
        //    PlayLength = readSamples
        //};
        // Advance to the next slot in the ring of buffers.
        currentByteArrayIndex = (currentByteArrayIndex + 1) % MAX_BUFFER_COUNT;
    }
}
/// <summary>
/// Plays the given sequence of Morse symbols using the internal WaveGenerator module. This "queues"
/// audio to play, rather than relying on Thread.Sleep to gap the DITs/DAHs (symbols) from each other.
/// </summary>
/// <param name="morseStream">The sequence of MorseSymbol items to be played.</param>
/// <param name="cancelToken">Tracks whether or not this operation should be cancelled.</param>
/// <see cref="MorseSymbol"/>
/// <see cref="WaveGenerator"/>
public void PlayWaveStream(List <MorseSymbol> morseStream, CancellationToken cancelToken)
{
    // First, get the audio constructed.
    var wavFormat = new WaveAudioFormat(16000, 16, 1); //doesn't need to be the most high-def sound
    var wavType = WaveGenerator.WaveType.SINE; //always a SINE wave (for now; configurable later)
    var streamSequence = new List <WaveSample>(); //container for the resulting stream of samples
    streamSequence.Add(new WaveSample(wavFormat, 0)); //initialize the stream with at least ONE empty sample
    ////Loading icon??
    foreach (MorseSymbol sym in morseStream)
    {
        // Bail out early if the caller cancels while the audio is being built.
        if (cancelToken != null && cancelToken.IsCancellationRequested)
        {
            return;
        }

        if (sym.hasSound())
        {
            // Audible symbol: synthesize a tone for the symbol's duration.
            WaveAudioTools.AppendSamples(
                ref streamSequence,
                (100.0, WaveGenerator.CreateNewWave(wavFormat, wavType, this.Gain * 100.0, sym.getDuration() / 1000.0, this.Frequency))
            );
        }
        else
        {
            // Silent symbol: append empty space of the same duration.
            WaveAudioTools.AppendSamples(
                ref streamSequence,
                (100.0, WaveGenerator.CreateEmptySpace(wavFormat, sym.getDuration() / 1000.0))
            );
        }
    }

    // Construct the output stream in WAV format.
    using var finalStream = new WaveStream(streamSequence);
    ////Release loading icon??

    // Then, play the stream, while at the same time updating the text stream to sync as
    // best as possible to the currently-playing audio.

    // Define a lambda of sorts for updating the live text view for transmissions.
    Action <MorseSymbol> updateTextStream = s => {
        // Marshal the append onto the UI thread.
        Program.mainForm.Invoke((System.Windows.Forms.MethodInvoker) delegate {
            Program.txLiveView.AppendText(s.getRepresentation());
        });
    };

    // Construct the player.
    using var queuedStream = new System.IO.MemoryStream(finalStream.GetRawWaveStream());
    using var mediaPlayer = new System.Media.SoundPlayer(queuedStream);

    //...and...... GO!
    mediaPlayer.Play();

    int infiniteLoopPrevention = 0;
    // Give the player 2s to load the stream.
    while (!mediaPlayer.IsLoadCompleted && infiniteLoopPrevention < 100)
    {
        if (cancelToken != null && cancelToken.IsCancellationRequested)
        {
            mediaPlayer.Stop();
            mediaPlayer.Dispose();
            break;
        }
        Thread.Sleep(20);
        infiniteLoopPrevention++;
    }

    // During stream playback, send out the symbols.
    for (int index = 0; index < morseStream.Count; index++)
    {
        var sym = morseStream[index];
        if (cancelToken != null && cancelToken.IsCancellationRequested)
        {
            mediaPlayer.Stop();
            mediaPlayer.Dispose();
            break;
        }

        // Pace the live text so each symbol appears roughly when its audio plays.
        //Thread.Sleep((int)sym.getDuration());
        Task.Delay((int)sym.getDuration()).Wait();
        updateTextStream(sym);

        // Handle progress bar updates.
        Program.mainForm.Invoke((System.Windows.Forms.MethodInvoker) delegate {
            try
            {
                Program.pbTXProgress.Value = (((index * 100) / morseStream.Count) + 1);
            }
            catch { }
        });
    }
}
// Applies the volume (percent) entered in Volumetb to the loaded 16-bit
// samples, rebuilds the varispeed playback chain from the scaled data, and
// resumes playback if it was running.
private void Editbtn_Click(object sender, EventArgs e)
{
    if (reader == null)
    {
        return;
    }

    try
    {
        float volume;
        if (!float.TryParse(Volumetb.Text, out volume))
        {
            throw new Exception("volume value not valid");
        }

        // Scale every sample, clamping to the short range to avoid
        // overflow wrap-around.
        var data = GetDataOf16Bit2ChannelFile(reader);
        volume /= 100;
        for (int i = 0; i < data.Length; i++)
        {
            data[i] = (short)Math.Max(Math.Min(data[i] * volume, (int)short.MaxValue), (int)short.MinValue);
        }

        // Re-serialize the scaled samples into an in-memory raw stream.
        MemoryStream mstream = new MemoryStream();
        BinaryWriter writer = new BinaryWriter(mstream);
        foreach (var item in data)
        {
            writer.Write(item);
        }

        // FIX: rewind before wrapping. RawSourceWaveStream reads from the
        // stream's current position, and after writing the position is at the
        // end — the original produced a reader that appeared empty.
        mstream.Position = 0;

        var useTempo = radioButton1.Checked;
        reader = new RawSourceWaveStream(mstream, reader.WaveFormat);
        speedControl = new VarispeedSampleProvider(reader.ToSampleProvider(), 100, new SoundTouchProfile(useTempo, false));

        // Restart playback on a fresh device, preserving the playing state.
        // (Large commented-out experiments removed from the original.)
        var state = wout?.PlaybackState == PlaybackState.Playing;
        wout?.Stop();
        hScrollBar11.Value = 0;
        progressBar1.Value = 0;
        wout = new WaveOut();
        wout.Init(speedControl);
        if (state)
        {
            wout.Play();
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Error",
            MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
// Wraps the source stream with looping enabled by default.
public LoopStream(WaveStream sourceStream)
{
    this.sourceStream = sourceStream;
    EnableLooping = true;
}
// A list entry pairing a display title with the stream that backs it.
public ListItem(string title, WaveStream provider)
{
    this.Title = title;
    this.Provider = provider;
}
// Replaces the stream stored at the given slot, disposing whatever
// previously occupied it first.
public void UpdateStreamAtPosition(WaveStream stream, int position)
{
    DisposeOldStreamAtKey(position);
    streams[position] = stream;
}
// Loads a sound either from the form's embedded resource archive or from a
// file on disk into a DirectSound secondary buffer, returning results through
// the ref parameters.
void load_sound_file(ref MemoryStream sound_stream, ref WaveStream wave_stream, ref byte[] data_array, ref SoundBufferDescription buf_desc, ref SecondarySoundBuffer buf, string file, ref bool executed)
{
    try
    {
        if ((((Form1)(form1_reference)).retrieve_resources != null) && (((Form1)(form1_reference)).retrieve_resources.EntryFileNames.Contains(file)))
        {
            // Extract the packed sound file from the resource archive into memory.
            sound_stream = new MemoryStream();
            ((Form1)(form1_reference)).retrieve_resources.Extract(file, sound_stream);
            data_array = new byte[Convert.ToInt32(sound_stream.Length)];
            data_array = sound_stream.ToArray();

            // Parse the PCM format directly out of fixed RIFF/WAVE header byte
            // offsets (22=channels, 24=sample rate, 32=block align, 34=bits per
            // sample), little-endian. NOTE(review): this assumes a canonical
            // 44-byte header with no extra chunks — confirm for all packed files.
            wave = new WaveFormat();
            wave.FormatTag = WaveFormatTag.Pcm;
            wave.BitsPerSample = (short)((data_array[35] << 8) | data_array[34]);
            wave.BlockAlignment = (short)((data_array[33] << 8) | data_array[32]);
            wave.Channels = (short)((data_array[23] << 8) | data_array[22]);
            wave.SamplesPerSecond = (int)((data_array[27] << 24) | (data_array[26] << 16) | (data_array[25] << 8) | (data_array[24]));
            // NOTE(review): AverageBytesPerSecond is set to the same value as
            // SamplesPerSecond; for PCM it should normally be
            // SamplesPerSecond * BlockAlignment — verify whether downstream
            // code depends on this value before changing it.
            wave.AverageBytesPerSecond = (int)((data_array[27] << 24) | (data_array[26] << 16) | (data_array[25] << 8) | (data_array[24]));

            sound_stream = new MemoryStream(data_array);
            wave_stream = new WaveStream(sound_stream);

            buf_desc = new SoundBufferDescription();
            buf_desc.Flags = BufferFlags.GlobalFocus;
            buf_desc.SizeInBytes = (int)sound_stream.Length;
            buf_desc.Format = wave;

            if (sound_stream.Length > 0)
            {
                // Copy the decoded bytes into the DirectSound buffer.
                buf = new SecondarySoundBuffer(device, buf_desc);
                wave_stream.Read(data_array, 0, buf_desc.SizeInBytes);
                buf.Write(data_array, 0, LockFlags.EntireBuffer);
            }
            executed = false;
            sound_stream.Close();
        }
        else
        {
            // Fall back to loading the wave file straight from disk.
            buf_desc = new SoundBufferDescription();
            buf_desc.Flags = BufferFlags.GlobalFocus;
            if (File.Exists(file))
            {
                wave_stream = new WaveStream(file);
                buf_desc.Format = wave_stream.Format;
                buf_desc.SizeInBytes = (int)wave_stream.Length;
                data_array = new byte[wave_stream.Length];
                buf = new SecondarySoundBuffer(device, buf_desc);
                wave_stream.Read(data_array, 0, buf_desc.SizeInBytes);
                buf.Write(data_array, 0, LockFlags.EntireBuffer);
            }
            executed = false;
        }
    }
    catch (DirectSoundException e)
    {
        // Surface DirectSound failures to the user; other exceptions propagate.
        MessageBox.Show(e.ToString(), "Error!", MessageBoxButtons.OK);
    }
}
// Plays an audio file (local path or http/https URL) either through the
// Mix It Up overlay or, fire-and-forget, on a named local output device.
public async Task Play(string filePath, int volume, string deviceName)
{
    if (!string.IsNullOrEmpty(filePath))
    {
        // Fall back to the configured default output device.
        if (string.IsNullOrEmpty(deviceName))
        {
            deviceName = ChannelSession.Settings.DefaultAudioOutput;
        }

        if (this.MixItUpOverlay.Equals(deviceName))
        {
            // Route playback through the default overlay endpoint instead of
            // a local audio device.
            IOverlayEndpointService overlay = ChannelSession.Services.Overlay.GetOverlay(ChannelSession.Services.Overlay.DefaultOverlayName);
            if (overlay != null)
            {
                var overlayItem = new OverlaySoundItemModel(filePath, volume);
                await overlay.ShowItem(overlayItem, new CommandParametersModel());
            }
        }
        else
        {
#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
            // Fire-and-forget: playback runs on a background task so Play
            // returns immediately.
            Task.Run(async() =>
            {
                // -1 means "default device" for WaveOutEvent.
                int deviceNumber = -1;
                if (!string.IsNullOrEmpty(deviceName))
                {
                    deviceNumber = this.GetOutputDeviceID(deviceName);
                }

                // Volume arrives as 0-100; NAudio expects 0.0-1.0.
                float floatVolume = MathHelper.Clamp(volume, 0, 100) / 100.0f;

                using (WaveOutEvent outputDevice = (deviceNumber < 0) ? new WaveOutEvent() : new WaveOutEvent() { DeviceNumber = deviceNumber })
                {
                    WaveStream waveStream = null;
                    if (File.Exists(filePath))
                    {
                        // Local file: apply volume per-stream.
                        AudioFileReader audioFile = new AudioFileReader(filePath);
                        audioFile.Volume = floatVolume;
                        waveStream = audioFile;
                    }
                    else if (filePath.StartsWith("http", StringComparison.InvariantCultureIgnoreCase))
                    {
                        // Remote URL: stream it and apply volume on the device.
                        waveStream = new MediaFoundationReader(filePath);
                        outputDevice.Volume = floatVolume;
                    }

                    if (waveStream != null)
                    {
                        outputDevice.Init(waveStream);
                        outputDevice.Play();
                        // Poll until playback finishes, then release the stream.
                        while (outputDevice.PlaybackState == PlaybackState.Playing)
                        {
                            await Task.Delay(500);
                        }
                        waveStream.Dispose();
                    }
                }
            });
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
        }
    }
}
/// <summary>
/// Renders a bar-style waveform of <paramref name="reader"/> into a Texture2D and
/// hands it to <paramref name="callback"/>. Pass one: accumulate RMS peak magnitudes
/// per bar and find the maximum. Pass two: draw each bar with rounded ends.
/// </summary>
/// <param name="reader">Audio source; only read through its sample provider.</param>
/// <param name="width">Number of horizontal pixels / peaks to walk.</param>
/// <param name="height">Unused here; the texture uses the waveFormWidth/waveFormHeight fields.</param>
/// <param name="peakCount">Capacity of the per-bar magnitude array.</param>
/// <param name="callback">Receives the finished texture (skipped when null).</param>
private void GetWaveform(WaveStream reader, int width, int height, int peakCount, Action<Texture2D> callback)
{
    int bytesPerSample = reader.WaveFormat.BitsPerSample / 8;
    var samples = reader.Length / bytesPerSample;
    var samplesPerPixel = (int)(samples / width);

    IPeakProvider peakProvider = new RmsPeakProvider(1);
    peakProvider.Init(reader.ToSampleProvider(), samplesPerPixel);

    // Start from a fully transparent texture.
    // NOTE(review): drawing below indexes columns by p < width — assumes
    // width == waveFormWidth; confirm at the call site.
    Texture2D texture = new Texture2D(waveFormWidth, waveFormHeight);
    for (int x = 0; x < waveFormWidth; x++)
    {
        for (int y = 0; y < waveFormHeight; y++)
        {
            texture.SetPixel(x, y, Color.clear);
        }
    }

    // Pass one: sum |max| + |min| over each bar's pixel span.
    float[] peakSizes = new float[peakCount];
    float peakSize = 0f;
    float maxPeakSize = 0f;
    int peakNum = 0;
    int p = 0;
    var currentPeak = peakProvider.GetNextPeak();
    while (p < width)
    {
        peakSize += Mathf.Abs(currentPeak.Max) + Mathf.Abs(currentPeak.Min);
        if (p % (peakWidth + spaceWidth) == 0)
        {
            // FIX: guard against overrunning peakSizes when width yields more
            // bars than peakCount (previously threw IndexOutOfRangeException).
            if (peakNum < peakSizes.Length)
            {
                peakSizes[peakNum] = peakSize;
            }
            maxPeakSize = maxPeakSize < peakSize ? peakSize : maxPeakSize;
            peakSize = 0;
            peakNum++;
        }
        currentPeak = peakProvider.GetNextPeak();
        p++;
    }

    // Pass two: scale bars so the tallest fits the texture height, then draw.
    peakNum = 0;
    p = 0;
    float k = waveFormHeight / (2 * maxPeakSize + peakWidth);
    while (p < width)
    {
        peakSize = peakSizes.Length <= peakNum ? 0f : peakSizes[peakNum];
        peakSize = peakSize * k;
        var pos = p % (peakWidth + spaceWidth);
        if (pos > spaceWidth)
        {
            int pX = pos - spaceWidth;
            pX = pX <= peakWidth / 2 ? pX : peakWidth - pX;
            // FIX: "3 / 4" was integer division (== 0), silently dropping the
            // peakWidth term from the end-cap rounding curve; use float division.
            var pS = peakSize + Mathf.Sqrt(3f / 4f * peakWidth - 2 * pX * pX + 2 * pX * peakWidth);
            for (int y = 0; y < waveFormHeight; y++)
            {
                // Alpha fades with distance from the vertical center line.
                if (Mathf.Abs(y - waveFormHeight / 2) < pS)
                {
                    texture.SetPixel(p, y, new Color(1, 1, 1, pS - Mathf.Abs(y - waveFormHeight / 2)));
                }
            }
        }
        if ((p + spaceWidth) % (spaceWidth + peakWidth) == 0)
        {
            peakNum++;
        }
        p++;
    }

    texture.Apply();
    texture.filterMode = FilterMode.Bilinear;
    callback?.Invoke(texture);
}
// Seeks a WaveStream to an absolute playback time expressed in seconds,
// converting via the format's average byte rate.
public static void SetPosition(this WaveStream strm, double seconds)
{
    long byteOffset = (long)(seconds * strm.WaveFormat.AverageBytesPerSecond);
    strm.SetPosition(byteOffset);
}
/// <summary>
/// Creates a WaveChannel32 with default settings — delegates to the main
/// constructor with 1.0f and 0.0f (presumably full volume and centered pan;
/// confirm against the three-argument constructor).
/// </summary>
/// <param name="sourceStream">The source stream</param>
public WaveChannel32(WaveStream sourceStream) : this(sourceStream, 1.0f, 0.0f) { }
// Seeks a WaveStream to an absolute playback time given as a TimeSpan,
// delegating to the seconds-based overload.
public static void SetPosition(this WaveStream strm, TimeSpan time)
{
    double seconds = time.TotalSeconds;
    strm.SetPosition(seconds);
}
/// <summary>
/// Releases the wrapped source stream. When reached via the finalizer instead
/// of an explicit Dispose call, asserts in debug builds to flag the leak.
/// </summary>
/// <param name="disposing">True when called from Dispose, false when called from the finalizer.</param>
protected override void Dispose(bool disposing)
{
    if (!disposing)
    {
        // Finalizer path: the owner forgot to dispose this stream.
        System.Diagnostics.Debug.Assert(false, "WaveOffsetStream was not Disposed");
    }
    else if (sourceStream != null)
    {
        sourceStream.Dispose();
        sourceStream = null;
    }
    base.Dispose(disposing);
}
// Moves the playback position of a WaveStream by a relative offset in seconds
// (negative values rewind), converting via the format's average byte rate.
public static void Seek(this WaveStream strm, double offset)
{
    long deltaBytes = (long)(offset * strm.WaveFormat.AverageBytesPerSecond);
    strm.SetPosition(strm.Position + deltaBytes);
}
/// <summary>
/// Creates a WaveOffsetStream with default settings (no offset or pre-delay,
/// and whole length of source stream) — delegates to the main constructor.
/// </summary>
/// <param name="sourceStream">The source stream</param>
public WaveOffsetStream(WaveStream sourceStream) : this(sourceStream, TimeSpan.Zero, TimeSpan.Zero, sourceStream.TotalTime) { }
// Entry point. With no arguments: decodes an embedded 11025 Hz mono 16-bit WAV
// resource, validates its RIFF header field-by-field, and prints its EchoPrint code
// (at most 330750 samples = 30 seconds). With arguments: fingerprints each .wav/.mp3
// path given on the command line via HandleFile, converting to PCM with NAudio.
// NOTE(review): the inner BinaryReader wrapped around 'pcm' in the .wav branch is
// never read from — presumably leftover; confirm before removing.
public static void Main(string[] args) { if (args.Length == 0) { var assembly = Assembly.GetExecutingAssembly(); var resourceName = "TestEchoPrintSharp.Resources.NIN-999999-11025.wav"; using (Stream stream = assembly.GetManifestResourceStream(resourceName)) using (BinaryReader reader = new BinaryReader(stream, System.Text.Encoding.ASCII)) { string chunkId = new string(reader.ReadChars(4)); UInt32 chunkSize = reader.ReadUInt32(); string riffType = new string(reader.ReadChars(4)); string fmtId = new string(reader.ReadChars(4)); UInt32 fmtSize = reader.ReadUInt32(); UInt16 formatTag = reader.ReadUInt16(); UInt16 channels = reader.ReadUInt16(); UInt32 samplesPerSec = reader.ReadUInt32(); UInt32 avgBytesPerSec = reader.ReadUInt32(); UInt16 blockAlign = reader.ReadUInt16(); UInt16 bitsPerSample = reader.ReadUInt16(); string dataID = new string(reader.ReadChars(4)); UInt32 dataSize = reader.ReadUInt32(); if (chunkId != "RIFF" || riffType != "WAVE" || fmtId != "fmt " || dataID != "data" || fmtSize != 16) { Console.WriteLine("Malformed WAV header"); return; } if (channels != 1 || samplesPerSec != 11025 || avgBytesPerSec != 22050 || blockAlign != 2 || bitsPerSample != 16 || formatTag != 1 || chunkSize < 48) { Console.WriteLine("Unexpected WAV format, need 11025 Hz mono 16 bit (little endian integers)"); return; } uint numberOfsamples = Math.Min(dataSize / 2, 330750); // max 30 seconds var pcmData = new Int16[numberOfsamples]; for (int i = 0; i < numberOfsamples; i++) { pcmData[i] = reader.ReadInt16(); } var echoPrint = new CodeGen(); Console.WriteLine(echoPrint.Generate(pcmData)); Console.WriteLine(""); Console.WriteLine("The above is the EchoPrint code for the song '999,999' by Nine Inch Nails."); Console.WriteLine("To generate codes for your own mp3s or wavs add them as parameters to "); Console.WriteLine("TestEchoPrintSharp.exe in a command line or in the debug settings"); Console.WriteLine("or execution setting of the TestEchoPrintSharp project."); 
Console.WriteLine("(mp3s only work on Windows, because NAudio depends on Msacm32.dll.)"); } } else { foreach (string audioFile in args) { if (audioFile.Length >= 4 && audioFile.Substring(audioFile.Length - 4).Equals(".wav", StringComparison.InvariantCultureIgnoreCase)) { using (WaveFileReader wav = new WaveFileReader(audioFile)) { using (WaveStream pcm = WaveFormatConversionStream.CreatePcmStream(wav)) { using (BinaryReader reader = new BinaryReader(pcm, System.Text.Encoding.ASCII)) { HandleFile(pcm, audioFile); } } } } else if (audioFile.Length >= 4 && audioFile.Substring(audioFile.Length - 4).Equals(".mp3", StringComparison.InvariantCultureIgnoreCase)) { try { using (Mp3FileReader mp3 = new Mp3FileReader(audioFile)) { using (WaveStream pcm = WaveFormatConversionStream.CreatePcmStream(mp3)) { HandleFile(pcm, audioFile); } } } catch (DllNotFoundException e) { Console.WriteLine("Sorry, a necessary dll is not found on your system. Here come the details:\r\n\r\n{0}", e); } }
// Looping-only constructor: remembers the source stream and turns looping on.
public Sound(WaveStream Source)
{
    EnableLooping = true;
    this.loopSource = Source;
}
// NOTE(review): this LoopStream variant is a stub — constructing it always
// throws NotImplementedException; no instance can ever be created.
public LoopStream(WaveStream source) { throw new NotImplementedException(); }
/// <summary>
/// Opens the specified stream and caches its format characteristics.
/// </summary>
///
/// <param name="stream">Stream to open.</param>
///
/// <returns>Returns number of frames found in the specified stream.</returns>
///
public int Open(WaveStream stream)
{
    waveStream = stream;

    // Copy the per-format fields first.
    var format = stream.Format;
    channels = format.Channels;
    blockAlign = format.BlockAlignment;
    bitsPerSample = format.BitsPerSample;
    sampleRate = format.SamplesPerSecond;
    averageBitsPerSecond = format.AverageBytesPerSecond;

    // Then the derived quantities: frame count, total samples, duration in ms.
    numberOfFrames = (int)stream.Length / blockAlign;
    numberOfSamples = numberOfFrames * Channels;
    duration = (int)(numberOfFrames / (double)sampleRate * 1000.0);

    return numberOfFrames;
}
/// <summary>
/// Sets up the tray application: refuses to run on a machine without a battery,
/// loads saved settings (falling back to defaults when the save file is missing,
/// and creating the settings directory plus greeting on first launch), initializes
/// the skin (retrying with the default on failure), the context menu, the
/// low-battery sound, and the battery-polling timer.
/// NOTE(review): _waveStream/_soundPlayer are never disposed in this method —
/// presumably released on application shutdown; confirm.
/// </summary>
public MainController() { if (_pow.BatteryChargeStatus == BatteryChargeStatus.NoSystemBattery) { // If a user tries to run program from computer with no battery to track... this is stupid. And sad. ShowError("You're trying to run Battery Bud from desktop PC. What were you thinking? :|", "wut"); Application.ExitThread(); Environment.Exit(1); } _trayIcon.Visible = true; // Loading save info. try { Load(); if (_autostartEnabled) { SetAutostart(null, null); } else { ResetAutostart(null, null); } } catch (FileNotFoundException) // Happens when some idiot deletes save file. { SetAutostart(null, null); _skinName = GetDefaultSkin(); } catch (DirectoryNotFoundException) // Happens on first launch. { _skinName = GetDefaultSkin(); Directory.CreateDirectory(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\Battery Bud"); SetAutostart(null, null); ShowGreeting(); } // Loading save info. if (!InitSkin(_skinName, true)) // If something failed, abort. { _skinName = GetDefaultSkin(); ShowError("Failed to load custom skin. Resetting to default and trying again.", ":c"); if (!InitSkin(_skinName, false)) { Application.ExitThread(); Environment.Exit(1); } else { Save(); } } InitContextMenu(); _waveStream = new WaveFileReader(AppDomain.CurrentDomain.BaseDirectory + "\\" + _resourceDir + "low_battery.wav"); _soundPlayer = new WaveOutEvent(); _soundPlayer.Init(_waveStream); UpdateBattery(null, null); // Timer. _timer.Interval = _updateInterval; _timer.Tick += UpdateBattery; _timer.Enabled = true; }
// Wraps the given source stream; looping is enabled by default.
public LoopStream(WaveStream sourceStream)
{
    EnableLooping = true;
    _sourceStream = sourceStream;
}
// Wraps a source stream for trimming; until changed, the end position
// spans the source's full duration.
public TrimWaveStream(WaveStream source)
{
    _source = source;
    EndPosition = _source.TotalTime;
}
// Streams the wave file to an XAudio2 source voice in STREAMING_BUFFER_SIZE chunks:
// each iteration submits one AudioBuffer window (PlayBegin/PlayLength in samples),
// waiting on bufferPlaybackEndEvent once MAX_BUFFER_COUNT-1 buffers are queued, then
// blocks until the queue drains and raises StreamEnd.
// NOTE(review): the FileStream 's' and 'stream' are never disposed, and
// FrequencyRatio is hard-coded to 2f (plays back at double rate) — flagged, left
// unchanged here.
public void Run() { Stopwatch sw = new Stopwatch(); var s = System.IO.File.OpenRead(fileName); //Console.WriteLine(String.Format("OpenRead: {0} ms", sw.ElapsedMilliseconds)); sw.Reset(); sw.Start(); WaveStream stream = new WaveStream(s); //Console.WriteLine(String.Format("new WaveStream: {0} ms", sw.ElapsedMilliseconds)); sw.Reset(); sw.Start(); int lengthInBytes = (int)stream.Length; int bytesPerSample = stream.Format.Channels * stream.Format.BitsPerSample / 8; int nSamples = lengthInBytes / bytesPerSample; int samplesPerBuffer = STREAMING_BUFFER_SIZE / bytesPerSample; int currentBytePosition = 0; int currentSamplePosition = 0; sourceVoice = new SourceVoice(audioDevice, stream.Format); sourceVoice.BufferEnd += new EventHandler<ContextEventArgs>(sourceVoice_BufferEnd); sourceVoice.FrequencyRatio = 2f; DateTime startTime = DateTime.Now; while (currentBytePosition < lengthInBytes) { int readBytes = System.Math.Min(STREAMING_BUFFER_SIZE, lengthInBytes - currentBytePosition); int readSamples = readBytes / bytesPerSample; //if (readBytes < STREAMING_BUFFER_SIZE) //Console.WriteLine(String.Format("Read bytes: {0}, Read samples: {1}, Read samples (float): {2}", readBytes, readSamples, (float)readBytes / bytesPerSample)); Console.WriteLine("---------------------------------- " + (DateTime.Now - startTime).TotalSeconds); Console.WriteLine(String.Format("Read bytes: {0}\tBytes left: {1}\tPosition: {2}", readBytes, lengthInBytes - currentBytePosition, currentBytePosition)); Console.WriteLine(String.Format("Read samples: {0}\tSamples left: {1}\tPosition: {2}", readSamples, nSamples - currentSamplePosition, currentSamplePosition)); //Console.WriteLine(String.Format("To AudioBuffer creation: {0} ms", sw.ElapsedMilliseconds)); sw.Reset(); sw.Start(); var ab = new AudioBuffer { AudioData = stream, AudioBytes = lengthInBytes, PlayBegin = currentSamplePosition, PlayLength = readSamples }; //Console.WriteLine(String.Format("After AudioBuffer creation: {0} ms", 
sw.ElapsedMilliseconds)); sw.Reset(); sw.Start(); //Console.WriteLine("Buffers queued: " + sourceVoice.State.BuffersQueued); if (sourceVoice.State.BuffersQueued >= MAX_BUFFER_COUNT - 1) bufferPlaybackEndEvent.WaitOne(); VoiceDetails voiceDetails = sourceVoice.VoiceDetails; long samplesPlayed = sourceVoice.State.SamplesPlayed; Console.WriteLine("Time: " + samplesPlayed / (float)voiceDetails.InputSampleRate); //Console.WriteLine(String.Format("Pre-submit: {0} ms", sw.ElapsedMilliseconds)); sw.Reset(); sw.Start(); sourceVoice.SubmitSourceBuffer(ab); //Console.WriteLine(String.Format("Post-submit: {0} ms", sw.ElapsedMilliseconds)); sw.Reset(); sw.Start(); bufferReady.Set(); currentBytePosition += readBytes; currentSamplePosition += readSamples; } while (sourceVoice.State.BuffersQueued > 0) bufferPlaybackEndEvent.WaitOne(); if (StreamEnd != null) StreamEnd(this, null); }
// Converts the input file (textBox1) to the target format typed in textBox2, writing
// the result under the save path in textBox3. Video formats (.mp4/.avi/.m4v/.flv/.webm)
// go through NReco FFMpegConverter; ".wave" decodes an mp3 to PCM with NAudio and
// writes a WAV; ".mp3" delegates to cnv(). Shows a message box on success or failure.
// NOTE(review): the validation checks use Any(c => c < 48 || c > 57) ("contains a
// non-digit character") as a stand-in for "is not empty" — fragile; flagged here.
// NOTE(review): the ".flv" check starts a new if-chain instead of continuing the
// preceding else-if chain, so .webm/.wave/.mp3 hang off the .flv else — this only
// works because the conditions are mutually exclusive.
private void button1_Click(object sender, EventArgs e) { if (String.IsNullOrEmpty(textBox2.Text) && String.IsNullOrEmpty(textBox1.Text) && String.IsNullOrEmpty(textBox3.Text)) { MessageBox.Show("Enter the file directory, the format and the save path!", "Error", MessageBoxButtons.OK, MessageBoxIcon.Warning); return; } if (textBox1.Text.Any(c => c < 48 || c > 57) && String.IsNullOrEmpty(textBox2.Text)) { MessageBox.Show("Enter the format first!", "Invalid format", MessageBoxButtons.OK, MessageBoxIcon.Warning); return; } if (textBox2.Text.Any(c => c < 48 || c > 57) && textBox1.Text.Any(c => c < 48 || c > 57) && String.IsNullOrEmpty(textBox3.Text)) { MessageBox.Show("Enter the save path!", "Invalid save path", MessageBoxButtons.OK, MessageBoxIcon.Warning); return; } if (textBox2.Text.Any(c => c < 48 || c > 57) && textBox1.Text.Any(c => c < 48 || c > 57) && textBox3.Text.Any(c => c < 48 || c > 57)) { if (textBox2.Text == ".mp4") { try { this.Cursor = Cursors.WaitCursor; var converter = new NReco.VideoConverter.FFMpegConverter(); converter.ConvertMedia(textBox1.Text, textBox3.Text + "videoconverted.mp4", NReco.VideoConverter.Format.mp4); MessageBox.Show("Conversion done.", "Done", MessageBoxButtons.OK, MessageBoxIcon.Exclamation); this.Cursor = Cursors.Default; } catch (Exception) { MessageBox.Show("Sorry something went wrong", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); this.Cursor = Cursors.Default; } ////MessageBox.Show("Sorry something went wrong. 
Retry.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); } else if (textBox2.Text == ".avi") { try { this.Cursor = Cursors.WaitCursor; var converter = new NReco.VideoConverter.FFMpegConverter(); //timer1.Start(); converter.ConvertMedia(textBox1.Text, textBox3.Text + "videoconverted.avi", NReco.VideoConverter.Format.avi); MessageBox.Show("Conversion done.", "Done", MessageBoxButtons.OK, MessageBoxIcon.Exclamation); this.Cursor = Cursors.Default; } catch (Exception) { MessageBox.Show("Sorry something went wrong", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); this.Cursor = Cursors.Default; } // MessageBox.Show("Sorry something went wrong. Retry.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); } else if (textBox2.Text == ".m4v") { try { this.Cursor = Cursors.WaitCursor; var converter = new NReco.VideoConverter.FFMpegConverter(); //timer1.Start(); converter.ConvertMedia(textBox1.Text, textBox3.Text + "videoconverted.m4v", NReco.VideoConverter.Format.m4v); MessageBox.Show("Conversion done.", "Done", MessageBoxButtons.OK, MessageBoxIcon.Exclamation); this.Cursor = Cursors.Default; } catch (Exception) { MessageBox.Show("Sorry something went wrong", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); this.Cursor = Cursors.Default; } } if (textBox2.Text.Contains(".flv")) { try { this.Cursor = Cursors.WaitCursor; var converter = new NReco.VideoConverter.FFMpegConverter(); //timer1.Start(); converter.ConvertMedia(textBox1.Text, textBox3.Text + "videoconverted.flv", NReco.VideoConverter.Format.flv); MessageBox.Show("Conversion done.", "Done", MessageBoxButtons.OK, MessageBoxIcon.Exclamation); this.Cursor = Cursors.Default; } catch (Exception) { MessageBox.Show("Sorry something went wrong", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); this.Cursor = Cursors.Default; } } else if (textBox2.Text == ".webm") { try { this.Cursor = Cursors.WaitCursor; var converter = new NReco.VideoConverter.FFMpegConverter(); //timer1.Start(); 
converter.ConvertMedia(textBox1.Text, textBox3.Text + "videoconverted.webm", NReco.VideoConverter.Format.webm); MessageBox.Show("Conversion done.", "Done", MessageBoxButtons.OK, MessageBoxIcon.Exclamation); this.Cursor = Cursors.Default; } catch (Exception) { MessageBox.Show("Sorry something went wrong", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); this.Cursor = Cursors.Default; } } else if (textBox2.Text == ".wave") { try { this.Cursor = Cursors.WaitCursor; using (Mp3FileReader f = new Mp3FileReader(textBox1.Text)) { using (WaveStream pcm = WaveFormatConversionStream.CreatePcmStream(f)) { WaveFileWriter.CreateWaveFile(textBox3.Text + "audioconverted.wave", pcm); MessageBox.Show("Conversion done.", "Done", MessageBoxButtons.OK, MessageBoxIcon.Exclamation); this.Cursor = Cursors.Default; } } } catch (Exception) { MessageBox.Show("Sorry something went wrong", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); this.Cursor = Cursors.Default; } } else if (textBox2.Text == ".mp3") { try { this.Cursor = Cursors.WaitCursor; cnv(textBox1.Text, textBox3.Text + "audioconverted.mp3"); MessageBox.Show("Conversion done.", "Done", MessageBoxButtons.OK, MessageBoxIcon.Exclamation); this.Cursor = Cursors.Default; } catch (Exception) { MessageBox.Show("Sorry something went wrong", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error); this.Cursor = Cursors.Default; } } /*DsReader dr = new DsReader(textBox1.Text); * IntPtr formatPcm = dr.ReadFormat(); * byte[] dataPcm = dr.ReadData(); * dr.Close(); * IntPtr formatMp3 = AudioCompressionManager.GetCompatibleFormat(formatPcm, * AudioCompressionManager.MpegLayer3FormatTag); * byte[] dataMp3 = AudioCompressionManager.Convert(formatPcm, formatMp3, dataPcm, false); * Mp3Writer mw = new Mp3Writer(File.Create(textBox3.Text + "audioconverted.mp3")); * * mw.WriteData(dataMp3); * mw.Close(); * MessageBox.Show("Done");*/ } }
// Builds a Media Foundation resampler that converts the given stream to the
// requested sample rate and channel count.
public MediaFoundationTransform GetResampler(WaveStream streamToResample, int sampleRate, int numberOfChannels)
{
    var targetFormat = GetWaveFormat(sampleRate, numberOfChannels);
    return new MediaFoundationResampler(streamToResample, targetFormat);
}
// Loads "dialog.ydkl" from the story archive: autosaves the archive name, plays the
// dialog's voice clip (value 254) from the resource archive, appends and colors the
// dialog text (value 0) in richTextBox1, optionally speaks it via TTS, shows the
// sprite (value 255), and fills listView1 with the choice entries (values 1..249).
// NOTE(review): the 'waveStream' field is nulled but never reassigned before
// outputDevice.Init(stream), so on the next call the previous stream is not the one
// disposed; also casting a ZipArchiveEntry's stream to WaveStream looks wrong —
// flagged, left unchanged here.
private void LoadDialog(ZipArchive storyArchive, string storyArchiveName, ZipArchive resourceArchive) { if (storyArchive != null) { foreach (ZipArchiveEntry file in storyArchive.Entries) { if (file.FullName == "dialog.ydkl") { currentArchive = storyArchive; currentArchiveName = storyArchiveName; Data dialog = Data.FromStream(file.Open()); if (File.Exists(MyPath + "\\Config\\autosave.s")) { File.Delete(MyPath + "\\Config\\autosave.s"); } Tools.Write(MyPath + "\\Config\\autosave.s", storyArchiveName); currentDialog = dialog; string wav = "\\wav\\" + dialog.GetValue(254); if (resourceArchive.GetEntry(wav) != null) { if (outputDevice != null) { outputDevice.Stop(); outputDevice.Dispose(); outputDevice = null; waveStream.Dispose(); waveStream = null; } if (outputDevice == null) { outputDevice = new WaveOutEvent(); } if (waveStream == null) { WaveStream stream = (WaveStream)resourceArchive.GetEntry(wav).Open(); outputDevice.Init(stream); } outputDevice.Play(); } richTextBox1.AppendText(dialog.GetValue(0) + "\n"); try { richTextBox1.Select( richTextBox1.Text.Length - dialog.GetValue(0).Length - 1, dialog.GetValue(0).Length ); } catch { // UHHHH } richTextBox1.SelectionColor = otherText; richTextBox1.DeselectAll(); if (storyTTSToolStripMenuItem.Checked) { SpeechSynthesizer.SpeakAsync(dialog.GetValue(0)); } if (resourceArchive.GetEntry("\\sprites\\" + dialog.GetValue(255)) != null) { pictureBox1.Image = Bitmap.FromStream(resourceArchive.GetEntry("\\sprites\\" + dialog.GetValue(255)).Open()); } listView1.Items.Clear(); for (int i = 1; i < dialog.values.Length; i++) { if (dialog.GetValue(i) != null && i < 250) { string text = dialog.GetValue(i); listView1.Items.Add(text); } } } else if (file.Name == "image.png") { //pictureBox1.Image = Bitmap.FromFile(file.FullName); } } } }
// Releases the current file stream, if any, and clears the reference so a
// subsequent call is a no-op.
private void CloseFile()
{
    if (fileStream != null)
    {
        fileStream.Dispose();
        fileStream = null;
    }
}
// Plays the PS4 UI sound matching the requested Sound value. Notification/Error/
// Info/Navigation each spawn a background thread that creates its own WaveOut
// device, plays the clip, sleeps for its duration, then stops it; Shutdown/Options
// reuse the shared 'waveOutDevice' field and PS4_Music reuses 'PS4BGMDevice'.
// NOTE(review): the Notification and Error cases call Dispose() before Stop()
// (order inverted), and the shared-device cases never dispose their streams —
// flagged, left unchanged here.
public static void PlayPS4Sound(Sound Soundtoplay) { switch (Soundtoplay) { case Sound.Notification: { new Thread(() => { //set the thread as a background worker Thread.CurrentThread.IsBackground = true; IWavePlayer waveOutDevice = new WaveOut(); WaveStream mp3file = CreateInputStream(Properties.Resources.PS4_Notification); TimeSpan ts = mp3file.TotalTime; waveOutDevice.Init(mp3file); waveOutDevice.Volume = 0.7f; waveOutDevice.Play(); /* run your code here */ Thread.Sleep(ts); waveOutDevice.Dispose(); waveOutDevice.Stop(); }).Start(); } break; case Sound.Error: { new Thread(() => { //set the thread as a background worker Thread.CurrentThread.IsBackground = true; IWavePlayer waveOutDevice = new WaveOut(); WaveStream mp3file = CreateInputStream(Properties.Resources.Ps4_Error_Sound); TimeSpan ts = mp3file.TotalTime; waveOutDevice.Init(mp3file); waveOutDevice.Volume = 0.5f; waveOutDevice.Play(); /* run your code here */ Thread.Sleep(ts); waveOutDevice.Dispose(); waveOutDevice.Stop(); }).Start(); } break; case Sound.Shutdown: { WaveStream mp3file = CreateInputStream(Properties.Resources.PS4_Shutdown); TimeSpan ts = mp3file.TotalTime; waveOutDevice.Init(mp3file); waveOutDevice.Volume = 0.5f; waveOutDevice.Play(); new Thread(() => { Thread.CurrentThread.IsBackground = true; /* run your code here */ Thread.Sleep(ts); waveOutDevice.Stop(); //waveOutDevice.Dispose(); }).Start(); } break; case Sound.PS4_Info_Pannel_Sound: { new Thread(() => { //set the thread as a background worker Thread.CurrentThread.IsBackground = true; IWavePlayer waveOutDevice = new WaveOut(); WaveStream mp3file = CreateInputStream(Properties.Resources.PS4_Notification); TimeSpan ts = mp3file.TotalTime; waveOutDevice.Init(mp3file); waveOutDevice.Volume = 0.5f; waveOutDevice.Play(); Thread.Sleep(ts); waveOutDevice.Stop(); }).Start(); break; } case Sound.Options: { WaveStream mp3file = CreateInputStream(Properties.Resources.PS4_Options_Pannel); TimeSpan ts = mp3file.TotalTime; waveOutDevice.Init(mp3file); 
waveOutDevice.Volume = 0.5f; waveOutDevice.Play(); new Thread(() => { Thread.CurrentThread.IsBackground = true; /* run your code here */ Thread.Sleep(ts); waveOutDevice.Stop(); //waveOutDevice.Dispose(); }).Start(); break; } case Sound.Navigation: { new Thread(() => { Thread.CurrentThread.IsBackground = true; IWavePlayer waveOutDevice = new WaveOut(); WaveStream mp3file = CreateInputStream(Properties.Resources.PS4_Navigation_Sound); TimeSpan ts = mp3file.TotalTime; waveOutDevice.Init(mp3file); waveOutDevice.Volume = 0.5f; waveOutDevice.Play(); /* run your code here */ Thread.Sleep(ts); waveOutDevice.Stop(); //waveOutDevice.Dispose(); }).Start(); break; } case Sound.PS4_Music: { WaveStream mp3file = CreateInputStream(Properties.Resources.ps4BGM); ////PS4BGMDevice = new AsioOut("ASIO4ALL v2"); ////PS4BGMDevice.Init(mp3file); ////PS4BGMDevice.Play(); TimeSpan ts = mp3file.TotalTime; PS4BGMDevice.Init(mp3file); PS4BGMDevice.Volume = 0.5f; PS4BGMDevice.Play(); } break; default: break; } }
/// <summary>
/// Remove a WaveStream from the mixer
/// </summary>
/// <param name="waveStream">waveStream to remove</param>
public void RemoveInputStream(WaveStream waveStream)
{
    lock (inputsLock)
    {
        if (!inputStreams.Remove(waveStream))
        {
            return; // not one of ours — nothing to recalculate
        }

        // The mixer length is the length of its longest remaining input
        // (zero when no inputs are left).
        long longest = 0;
        foreach (var stream in inputStreams)
        {
            if (stream.Length > longest)
            {
                longest = stream.Length;
            }
        }
        length = longest;
    }
}
// Remembers the stream that will be looped during playback.
public LoopWAV(WaveStream source)
{
    sourceStream = source;
}