/// <summary>
/// Persists the microphone input level when the user moves the slider.
/// Maps the track bar range linearly onto the [-3000, 0] level range
/// (the same mapping is inverted in VoiceDialog_Load).
/// </summary>
private void microphoneInputLevelTrackBar_Scroll(object sender, EventArgs e) {
	int range = microphoneInputLevelTrackBar.Maximum - microphoneInputLevelTrackBar.Minimum;
	int offsetFromMinimum = microphoneInputLevelTrackBar.Value - microphoneInputLevelTrackBar.Minimum;
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.MicrophoneInputLevel = (offsetFromMinimum * 3000) / range - 3000;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// Simulates a realtime audio stream by generating a sine wave in realtime and ingesting it into the FIFO buffer.
/// </summary>
/// <param name="audioProperties">supplies the sample rate of the generated sine wave</param>
/// <param name="targetStream">the FIFO stream into which the generated audio is written</param>
private static void StartSineWaveRealtimeGenerator(AudioProperties audioProperties, IAudioWriterStream targetStream) {
	// Create a stream that generates 1 second of a 440 Hz sine wave
	var sineWaveStream = new SineGeneratorStream(audioProperties.SampleRate, 440, new TimeSpan(0, 0, 1));

	// Store the sine wave in a buffer
	// We can concatenate this buffer over and over again to create an infinitely long sine wave
	var sineWaveBuffer = new byte[sineWaveStream.Length];
	var bytesRead = sineWaveStream.Read(sineWaveBuffer, 0, sineWaveBuffer.Length);

	if (bytesRead < sineWaveBuffer.Length) {
		throw new Exception("incomplete buffer read");
	}

	// LongRunning makes the scheduler use a dedicated thread instead of
	// permanently occupying a thread-pool worker with this endless loop.
	Task.Factory.StartNew(() => {
		// Each realtime second, write the 1-second sine wave to the target stream to
		// simulate an infinitely long realtime sine wave stream.
		//
		// For low-latency processing use-cases, writes would ideally be shorter and happen
		// more frequently to keep the delay between input and output of the FIFO stream
		// as low as possible.
		while (true) {
			Thread.Sleep(1000);
			Console.WriteLine("Writing 1 second into buffer");
			targetStream.Write(sineWaveBuffer, 0, sineWaveBuffer.Length);
			_dataGenerated += sineWaveBuffer.Length;
		}
	}, TaskCreationOptions.LongRunning);
}
/// <summary>
/// Seeks into a warped stream after an initial read and verifies that draining
/// the remainder leaves the position exactly at the stream length.
/// </summary>
public void SetPosition03() {
	var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);
	var stream = new TimeWarpStream(
		new NullStream(audioProperties, TimeUtil.TimeSpanToBytes(new TimeSpan(0, 1, 0), audioProperties)));
	var length = TimeUtil.BytesToTimeSpan(stream.Length, stream.Properties);

	// Warp the first half of the stream into the first quarter...
	stream.Mappings.Add(new TimeWarp {
		From = new TimeSpan(length.Ticks / 2),
		To = new TimeSpan(length.Ticks / 4)
	});
	// ...and the second half into the second quarter.
	stream.Mappings.Add(new TimeWarp {
		From = length,
		To = new TimeSpan(length.Ticks / 4 * 2)
	});

	var readBuffer = new byte[5000];
	Assert.AreEqual(0, stream.Position);
	stream.Read(readBuffer, 0, readBuffer.Length);

	// Seek forward and drain the rest of the stream.
	stream.Position = 44440;
	while (stream.Read(readBuffer, 0, readBuffer.Length) > 0) {
	}

	Assert.AreEqual(stream.Length, stream.Position);
}
/// <summary>
/// Persists the sound-effects mute flag when its check box changes.
/// NOTE(review): the handler is named mutePlaybackCheckBox_CheckedChanged but reads
/// muteSoundEffectsCheckBox — presumably a stale name from a renamed control;
/// verify the designer event wiring.
/// </summary>
private void mutePlaybackCheckBox_CheckedChanged(object sender, EventArgs e) {
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.MuteSoundEffects = muteSoundEffectsCheckBox.Checked;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// Persists the recording mute flag when its check box changes.
/// </summary>
private void muteRecordingCheckBox_CheckedChanged(object sender, EventArgs e) {
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.MuteRecording = muteRecordingCheckBox.Checked;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// A rectangular window (samples unchanged) with no overlap must reconstruct
/// the original mono stream exactly through the windower/OLA round trip.
/// </summary>
public void MonoOverlapAddRectangle() {
	var properties = new AudioProperties(1, 44100, 32, AudioFormat.IEEE);
	var sourceStream = new SineGeneratorStream(44100, 440, TimeSpan.FromSeconds(1));
	var targetStream = new MemoryWriterStream(new System.IO.MemoryStream(), properties);

	// hop == window size means frames do not overlap at all
	const int windowSize = 100;
	const int hopSize = 100;
	var windower = new StreamWindower(sourceStream, windowSize, hopSize, WindowType.Rectangle);
	var overlapAdder = new OLA(targetStream, windowSize, hopSize);

	// Pump every frame from the windower into the overlap-adder.
	var frame = new float[windowSize];
	while (windower.HasNext()) {
		windower.ReadFrame(frame);
		overlapAdder.WriteFrame(frame);
	}
	overlapAdder.Flush();

	Assert.AreEqual(sourceStream.Length, targetStream.Length);

	// Validate ola target stream content sample by sample
	sourceStream.Position = targetStream.Position = 0;
	long similarFloats = StreamUtil.CompareFloats(sourceStream, targetStream);
	Assert.AreEqual(sourceStream.Length / sourceStream.SampleBlockSize, similarFloats);
}
/// <summary>
/// Persists the "disable audio mixer autoconfiguration" flag when its check box changes.
/// </summary>
private void disableAutoconfigurationCheckBox_CheckedChanged(object sender, EventArgs e) {
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.DisableAutoconfiguration = disableAutoconfigurationCheckBox.Checked;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// Splits an interleaved byte buffer of 32-bit float samples into one float array per channel.
/// When <paramref name="downmix"/> is set, an additional array at index 0 receives the
/// per-frame average of all channels and the source channels shift up by one index.
/// </summary>
/// <param name="audioProperties">format of the interleaved data; must be 32 bits per sample</param>
/// <param name="buffer">the interleaved source buffer</param>
/// <param name="offset">byte offset into <paramref name="buffer"/> where the samples start</param>
/// <param name="count">number of bytes to uninterleave</param>
/// <param name="downmix">true to prepend a mono downmix channel</param>
/// <returns>an array of per-channel sample arrays</returns>
public static float[][] Uninterleave(AudioProperties audioProperties, byte[] buffer, int offset, int count, bool downmix) {
	// The float* cast and the "count / 4" loop bound below hard-assume 32-bit samples.
	// Reject other bit depths explicitly instead of silently producing garbage
	// (the array sizing used BitDepth while the loop assumed 4 bytes — a latent mismatch).
	if (audioProperties.BitDepth != 32) {
		throw new ArgumentException("only 32-bit samples are supported", "audioProperties");
	}
	int channels = audioProperties.Channels;
	int downmixChannel = downmix ? 1 : 0;
	float[][] uninterleavedSamples = CreateArray<float>(channels + downmixChannel,
		count / (audioProperties.BitDepth / 8) / audioProperties.Channels);
	unsafe {
		fixed (byte* sampleBuffer = &buffer[offset]) {
			float* samples = (float*)sampleBuffer;
			int sampleCount = 0;
			// Walk the interleaved samples one frame (sample block) at a time.
			for (int x = 0; x < count / 4; x += channels) {
				float sum = 0;
				for (int channel = 0; channel < channels; channel++) {
					sum += samples[x + channel];
					uninterleavedSamples[channel + downmixChannel][sampleCount] = samples[x + channel];
				}
				if (downmix) {
					// Downmix = average of all channels for this frame.
					uninterleavedSamples[0][sampleCount] = sum / channels;
				}
				sampleCount++;
			}
		}
	}
	return (uninterleavedSamples);
}
/// <summary>
/// Constructs an OLA whose hop size (1001) exceeds its window size (1000);
/// presumably expects the constructor to reject this (any expected-exception
/// attribute is outside this view — TODO confirm).
/// </summary>
public void CreateOverlapTooLarge() {
	var audioProperties = new AudioProperties(1, 44100, 16, AudioFormat.IEEE);
	var writerStream = new MemoryWriterStream(new System.IO.MemoryStream(), audioProperties);
	new OLA(writerStream, 1000, 1001);
}
/// <summary>
/// Initializes all dialog controls from the persisted audio properties and settings.
/// </summary>
private void VoiceDialog_Load(object sender, EventArgs e) {
	AudioProperties properties = controller.Model.AudioManager.AudioProperties;

	// mute flags; the individual mute boxes are greyed out while everything is muted
	muteAllCheckBox.Checked = properties.MuteAll;
	muteSoundEffectsCheckBox.Checked = properties.MuteSoundEffects;
	muteRecordingCheckBox.Checked = properties.MuteRecording;
	muteSoundEffectsCheckBox.Enabled = !muteAllCheckBox.Checked;
	muteRecordingCheckBox.Enabled = !muteAllCheckBox.Checked;

	// recording control: map MicrophoneInputLevel from [-3000, 0] onto the track bar range
	microphoneInputLevelTrackBar.Value = microphoneInputLevelTrackBar.Minimum + ((properties.MicrophoneInputLevel + 3000) * (microphoneInputLevelTrackBar.Maximum - microphoneInputLevelTrackBar.Minimum)) / 3000;
	// map ActivationThreshold onto the track bar range (the /99 suggests a 0..99 domain — TODO confirm)
	voiceActivationThresholdTrackBar.Value = voiceActivationThresholdTrackBar.Minimum + (properties.ActivationThreshold * (voiceActivationThresholdTrackBar.Maximum - voiceActivationThresholdTrackBar.Minimum)) / 99;
	activateEchoSuppressionCheckBox.Checked = properties.ActivateEchoSuppression;

	// jitter control: the manual slider is only enabled when automatic control is off
	automaticJitterControlCheckBox.Checked = properties.UseAutomaticJitterControl;
	jitterControlTrackBar.Value = jitterControlTrackBar.Minimum + (properties.JitterControl * (jitterControlTrackBar.Maximum - jitterControlTrackBar.Minimum)) / 99;
	jitterControlTrackBar.Enabled = !automaticJitterControlCheckBox.Checked;

	// audio mixer configuration
	disableAutoconfigurationCheckBox.Checked = properties.DisableAutoconfiguration;

	// server mode: restore the slider, then invoke its scroll handler to refresh dependent UI
	serverModeTrackBar.Value = Settings.Default.VoiceServerMode;
	serverModeTrackBar_Scroll(null, null);
}
/// <summary>
/// Persists the echo-suppression flag when its check box changes.
/// </summary>
private void activateEchoSuppressionCheckBox_CheckedChanged(object sender, EventArgs e) {
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.ActivateEchoSuppression = activateEchoSuppressionCheckBox.Checked;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// Persists the manual jitter control value when the user moves the slider.
/// Maps the track bar range linearly onto [0, 99].
/// </summary>
private void jitterControlTrackBar_Scroll(object sender, EventArgs e) {
	int range = jitterControlTrackBar.Maximum - jitterControlTrackBar.Minimum;
	int offsetFromMinimum = jitterControlTrackBar.Value - jitterControlTrackBar.Minimum;
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.JitterControl = (offsetFromMinimum * 99) / range;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// Seeks forward before reading and verifies that the bytes read from the seek
/// point onward add up exactly to the remaining warped stream length.
/// </summary>
public void SetPosition02() {
	var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);
	var stream = new TimeWarpStream(
		new NullStream(audioProperties, TimeUtil.TimeSpanToBytes(new TimeSpan(0, 1, 0), audioProperties)));
	var length = TimeUtil.BytesToTimeSpan(stream.Length, stream.Properties);

	// Single mapping: warp the whole stream into its first half.
	stream.Mappings.Add(new TimeWarp {
		From = length,
		To = new TimeSpan(length.Ticks / 4 * 2)
	});

	var readBuffer = new byte[5000];
	Assert.AreEqual(0, stream.Position);

	// Seek, then drain the stream while summing up the bytes read.
	stream.Position = 11104;
	Assert.AreEqual(11104, stream.Position);
	long totalBytesRead = 0;
	int bytesRead;
	while ((bytesRead = stream.Read(readBuffer, 0, readBuffer.Length)) > 0) {
		totalBytesRead += bytesRead;
	}

	Assert.AreEqual(totalBytesRead, stream.Position - 11104);
	Assert.AreEqual(stream.Length, stream.Position);
}
/// <summary>
/// Persists the voice activation threshold when the user moves the slider.
/// Maps the track bar range linearly onto [0, 99].
/// </summary>
private void voiceActivationThresholdTrackBar_Scroll(object sender, EventArgs e) {
	int range = voiceActivationThresholdTrackBar.Maximum - voiceActivationThresholdTrackBar.Minimum;
	int offsetFromMinimum = voiceActivationThresholdTrackBar.Value - voiceActivationThresholdTrackBar.Minimum;
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.ActivationThreshold = (offsetFromMinimum * 99) / range;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// Constructs an OLA over a mono IEEE writer stream with 50% overlap
/// (window 1000, hop 500); any expected-exception attribute is outside this view.
/// </summary>
public void CreateMonoIEEE() {
	var audioProperties = new AudioProperties(1, 44100, 16, AudioFormat.IEEE);
	var writerStream = new MemoryWriterStream(new System.IO.MemoryStream(), audioProperties);
	new OLA(writerStream, 1000, 500);
}
/// <summary>
/// Stops playback and releases this media player: managed property wrappers,
/// the native VLC player instance, and the pinned video callback handles.
/// All teardown runs asynchronously on the dispatcher thread.
/// NOTE(review): the sequence looks order-sensitive (events freed and playback
/// stopped before releasing the native instance; GC handles freed last) — confirm
/// against the VLC interop contract before reordering.
/// </summary>
public void Dispose() {
	// Prevent any further event callbacks while tearing down.
	EventsHelper.CanRaiseEvent = false;
	CompositionTarget.Rendering -= CompositionTargetRendering;
	Dispatcher.BeginInvoke(
		new Action(
			delegate
			{
				FreeEvents();
				if (IsPlaying)
				{
					Stop();
				}
				AudioProperties.Dispose();
				VideoProperties.Dispose();
				LogProperties.Dispose();
				AudioOutputDevices.Dispose();
				// Release the native player through the interop layer and drop
				// its handle from the handle manager.
				VlcContext.InteropManager.MediaPlayerInterops.ReleaseInstance.Invoke(VlcContext.HandleManager.MediaPlayerHandles[this]);
				VlcContext.HandleManager.MediaPlayerHandles.Remove(this);
				// Free the GC handles pinning the native video callbacks only
				// after the native player can no longer invoke them.
				myVideoLockCallbackHandle.Free();
				myVideoSetFormatHandle.Free();
				myVideoCleanupHandle.Free();
			}));
}
/// <summary>
/// Adjusts the beginning and the end of a time-interval to the sample interval length so that the
/// adjusted input interval includes the preceding and following sample.
/// Since audio samples can be between two integer ticks, the outputInterval.From is less or equal its
/// matching sample's time, and outputInterval.To is greater or equal its matching sample's time. Recursive
/// usage may therefore enlarge the output interval with every execution.
///
/// Example:
/// audio stream samples: X-----X-----X-----X-----X-----X-----X-----X-----X-----X-----
/// input interval:             [---------------]
/// output interval:         [-----------------------)
/// </summary>
/// <param name="intervalToAlign">the interval that should be sample-aligned</param>
/// <param name="audioProperties">the audio properties containing the sample rate</param>
/// <returns>the sample aligned interval</returns>
public static Interval AlignToSamples(Interval intervalToAlign, AudioProperties audioProperties) {
	double sampleLength = CalculateSampleTicks(audioProperties);
	// Snap the start down to the preceding sample boundary.
	long alignedFrom = (long)(intervalToAlign.From - ((double)intervalToAlign.From % sampleLength));
	// Snap the end up to the following sample boundary; a full sample length is
	// added when the end is already aligned, which is why repeated alignment
	// can enlarge the interval.
	long alignedTo = (long)(intervalToAlign.To + (sampleLength - ((double)intervalToAlign.To % sampleLength)));
	return new Interval(alignedFrom, alignedTo);
}
/// <summary>
/// A sample at 44.1 kHz must span exactly SECS_TO_TICKS / 44100 ticks.
/// </summary>
public void CalculateSampleTicksTest() {
	var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);
	double expectedTicks = TimeUtil.SECS_TO_TICKS / (double)audioProperties.SampleRate;
	double actualTicks = AudioUtil.CalculateSampleTicks(audioProperties);
	Assert.AreEqual(expectedTicks, actualTicks);
}
/// <summary>
/// Sets up file-backed persistency for the audio and text settings and caches
/// the audio persistency folder path.
/// </summary>
public SettingsSingleton() {
	// persistency stores for the two settings categories
	_fileSourceAudio = new FilePersistency <AudioProperties>("AudioSource");
	_fileSourceText = new FilePersistency <TextChanger>("TextSource");
	// fresh default instances until values are loaded/saved
	_audioProperties = new AudioProperties();
	_textChange = new TextChanger();
	// cache the folder path of the audio persistency store
	_folder = _fileSourceAudio.Folder.Path;
}
/// <summary>
/// Toggles between automatic and manual jitter control and persists the choice.
/// </summary>
private void automaticJitterControlCheckBox_CheckedChanged(object sender, EventArgs e) {
	bool automatic = automaticJitterControlCheckBox.Checked;
	// the manual slider only makes sense when automatic control is off
	jitterControlTrackBar.Enabled = !automatic;
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.UseAutomaticJitterControl = automatic;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>
/// Persists the global mute flag and greys out the individual mute boxes while it is set.
/// </summary>
private void muteAllCheckBox_CheckedChanged(object sender, EventArgs e) {
	bool muteAll = muteAllCheckBox.Checked;
	muteSoundEffectsCheckBox.Enabled = !muteAll;
	muteRecordingCheckBox.Enabled = !muteAll;
	AudioProperties audioProperties = controller.Model.AudioManager.AudioProperties;
	audioProperties.MuteAll = muteAll;
	controller.Model.AudioManager.AudioProperties = audioProperties;
}
/// <summary>Constructor.</summary>
/// <param name="mainForm">the application's main window, passed to the audio and network subsystems</param>
/// <param name="audioProperties">initial audio settings for the audio manager</param>
/// <param name="playerFirstName">first name of the local player</param>
/// <param name="playerLastName">last name of the local player</param>
/// <param name="playerGuid">persistent identifier of the local player</param>
public Model(Form mainForm, AudioProperties audioProperties, string playerFirstName, string playerLastName, Guid playerGuid) {
	// Construction order matters: the network client needs the audio manager,
	// and the local player below needs the network client's player id.
	audioManager = new AudioManager(mainForm, audioProperties);
	networkClient = new DXClient(mainForm, audioManager);
	commandManager = new CommandManager(this);
	animationManager = new AnimationManager(this);
	gameLibrary = new GameLibrary();
	// 0xffffff00 is presumably the local player's color (ARGB) — TODO confirm
	thisPlayer = new Player(networkClient.PlayerId, playerFirstName, playerLastName, playerGuid, 0xffffff00, Point.Empty, true);
	// initially the local player is the only one in the session
	players = new Player[] { thisPlayer };
}
/// <summary>
/// Loads the stored per-frame properties of the given audio into the audio
/// property controls and hides the image property panel.
/// </summary>
/// <param name="audio">the audio item whose properties at the current frame should be shown</param>
public void getAudioProperties(XAudio audio) {
	AudioProperties property = AudioInFrameProps.Find(prop => prop.Frame == currentFrame && prop.Audio == audio);
	// Find returns null when nothing is stored for this frame/audio combination;
	// the original code dereferenced it unconditionally and crashed with a
	// NullReferenceException. Leave the UI unchanged in that case.
	if (property == null) {
		return;
	}
	fadeinTextBox.Text = property.FadeIn.ToString();
	fadeoutTextBox.Text = property.FadeOut.ToString();
	//queueCheckBox.IsChecked = property.Queue;
	loopCheckBox.IsChecked = property.Loop;
	imagePropsPanel.Visibility = Visibility.Hidden;
}
/// <summary>
/// Handles checking an audio item for the current frame: either clears a pending
/// stop marker for it, or registers/selects it in its layer (music, sound, voice)
/// and shows its properties.
/// NOTE(review): several spots below look suspicious — see inline notes.
/// </summary>
private void audio_Checked(object sender, RoutedEventArgs e) {
	currentAudio = (sender as CheckBox).Tag as XAudio;
	bool isLooped = false;
	// NOTE(review): "Intersect(previousFrames) == currentFrame" compares an
	// IEnumerable against a frame value — with reference equality this is
	// presumably always false; likely intended
	// prop.StopFrames.Intersect(previousFrames).Contains(currentFrame). Confirm before changing.
	if (AudioInFrameProps.Any(prop => (prop.StopFrame == currentFrame || (prop.StopFrames != null && prop.StopFrames.Intersect(previousFrames) == currentFrame)) && prop.Audio == currentAudio)) {
		// The audio has a stop marker at the current frame: remove it and reset
		// the check box to its indeterminate state.
		AudioProperties audio = AudioInFrameProps.First(prop => (prop.StopFrame == currentFrame || (prop.StopFrames != null && prop.StopFrames.Intersect(previousFrames) == currentFrame)) && prop.Audio == currentAudio);
		if (audio.StopFrame == currentFrame) {
			audio.StopFrame = null;
		} else {
			audio.StopFrames.Remove(currentFrame);
		}
		(sender as CheckBox).IsChecked = null;
	} else {
		// Dispatch by audio type to the matching layer; only one item per layer
		// may be selected at a time.
		// NOTE(review): the type strings carry trailing spaces ("music ", "sound ") —
		// presumably matching the stored data; verify against where Type is assigned.
		if (currentAudio.Type == "music ") {
			// music is looped by default
			isLooped = true;
			selectOnlyCurrentaudio(lastMusicChecked, currentAudio);
			lastMusicChecked = currentAudio;
			addAudioToLayer(currentAudio.Path, music, panelMusic, labelMusic);
		} else if (currentAudio.Type == "sound ") {
			selectOnlyCurrentaudio(lastSoundChecked, currentAudio);
			lastSoundChecked = currentAudio;
			addAudioToLayer(currentAudio.Path, sound, panelSound, labelSound);
		} else {
			// anything else is treated as voice
			selectOnlyCurrentaudio(lastVoiceChecked, currentAudio);
			lastVoiceChecked = currentAudio;
			addAudioToLayer(currentAudio.Path, voice, panelVoice, labelVoice);
		}
		if (addorselect) {
			// register fresh per-frame properties for this audio
			AudioInFrameProps.Add(new AudioProperties() { Frame = currentFrame, Audio = currentAudio, Loop = isLooped });
		}
		getAudioProperties(currentAudio);
	}
	audioPropsPanel.Visibility = Visibility.Visible;
	show = true;
	waschecked = true;
}
/// <summary>
/// AlignToSamples must snap the interval start down and the interval end up
/// to the nearest sample boundaries.
/// </summary>
public void AlignToSamplesTest() {
	var intervalToAlign = new Interval(1000, 10000);
	var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);
	double sampleTicks = AudioUtil.CalculateSampleTicks(audioProperties);

	// start snapped down by its remainder, end snapped up to the next boundary
	var expected = new Interval(
		(long)(intervalToAlign.From - ((double)intervalToAlign.From % sampleTicks)),
		(long)(intervalToAlign.To + sampleTicks - (intervalToAlign.To % sampleTicks)));

	Interval actual = AudioUtil.AlignToSamples(intervalToAlign, audioProperties);
	Assert.AreEqual(expected, actual);
}
/// <summary>
/// Releases the managed VLC property wrappers and the native media player instance.
/// </summary>
/// <param name="disposing">true when invoked from Dispose(); managed cleanup only happens then</param>
protected void Dispose(bool disposing) {
	if (disposing) {
		AudioProperties.Dispose();
		VideoProperties.Dispose();
		LogProperties.Dispose();
		AudioOutputDevices.Dispose();
		FreeEvents();
		// Release the native player through the interop layer and drop its
		// handle so the handle manager no longer tracks this instance.
		VlcContext.InteropManager.MediaPlayerInterops.ReleaseInstance.Invoke(VlcContext.HandleManager.MediaPlayerHandles[this]);
		VlcContext.HandleManager.MediaPlayerHandles.Remove(this);
	}
}
/// <summary>
/// Writing whole sample blocks to a MemoryWriterStream must grow its length
/// by exactly the number of bytes written.
/// </summary>
public void Write() {
	var backingStream = new MemoryStream();
	var audioProperties = new AudioProperties(2, 44100, 16, AudioFormat.IEEE);
	var writer = new MemoryWriterStream(backingStream, audioProperties);

	Assert.AreEqual(0, writer.Length);

	// write 1000 zeroed sample blocks
	int byteCount = writer.SampleBlockSize * 1000;
	writer.Write(new byte[byteCount], 0, byteCount);

	Assert.AreEqual(byteCount, writer.Length);
}
/// <summary>
/// For every sample index within one minute of audio, the time span built from
/// the (ceiled) tick count of that sample must map back to the same index.
/// </summary>
public void CalculateSamplesTest() {
	var audioProperties = new AudioProperties(2, 44100, 32, AudioFormat.IEEE);
	double sampleTicks = AudioUtil.CalculateSampleTicks(audioProperties);
	for (int sampleIndex = 0; sampleIndex < audioProperties.SampleRate * 60; sampleIndex++) {
		// round the fractional tick position up to the next integer tick
		var timeSpan = new TimeSpan((long)Math.Ceiling(sampleIndex * sampleTicks));
		int actual = AudioUtil.CalculateSamples(audioProperties, timeSpan);
		Assert.AreEqual(sampleIndex, actual);
	}
}
/// <summary>
/// Round-trips a mono sine wave through a Hann-windowed StreamWindower and OLA
/// with 50% overlap and verifies that the target stream has the same length and
/// (in the fully overlap-added middle section) the same sample content.
/// </summary>
public void MonoOverlapAddHann() {
	var properties = new AudioProperties(1, 44100, 32, AudioFormat.IEEE);
	IAudioStream sourceStream = new SineGeneratorStream(44100, 440, TimeSpan.FromSeconds(1));
	IAudioWriterStream targetStream = new MemoryWriterStream(new System.IO.MemoryStream(), properties);
	// 50% overlap with a Hann window is an optimal combination, Hann window is optimally uneven with a 1 as middle point
	int windowSize = 21;
	int hopSize = 11;
	var window = WindowType.Hann;
	// Adjust source length to window/hop size so no samples remain at the end
	// (a potential last incomplete frame is not returned by the stream windower)
	// With remaining samples, the source and target length cannot be compared
	sourceStream = new CropStream(sourceStream, 0, sourceStream.Length - (sourceStream.Length - windowSize * sourceStream.SampleBlockSize) % (hopSize * sourceStream.SampleBlockSize));
	var sw = new StreamWindower(sourceStream, windowSize, hopSize, window);
	var ola = new OLA(targetStream, windowSize, hopSize);
	// Pump every frame from the windower into the overlap-adder.
	var frameBuffer = new float[windowSize];
	while (sw.HasNext()) {
		sw.ReadFrame(frameBuffer);
		ola.WriteFrame(frameBuffer);
	}
	ola.Flush();
	Assert.AreEqual(sourceStream.Length, targetStream.Length);
	// Validate ola target stream content
	// Crop the streams to ignore windowed start and end when no overlap-add is performed and content definitely differs
	// NOTE(review): the target crop deliberately reuses sourceStream's SampleBlockSize
	// and Length — valid because the length equality was just asserted above.
	var sourceStreamCropped = new CropStream(sourceStream, hopSize * sourceStream.SampleBlockSize * 2, sourceStream.Length - hopSize * sourceStream.SampleBlockSize);
	var targetStreamCropped = new CropStream(targetStream, hopSize * sourceStream.SampleBlockSize * 2, sourceStream.Length - hopSize * sourceStream.SampleBlockSize);
	sourceStreamCropped.Position = targetStreamCropped.Position = 0;
	long similarFloats = StreamUtil.CompareFloats(sourceStreamCropped, targetStreamCropped);
	Assert.AreEqual(sourceStreamCropped.Length / sourceStreamCropped.SampleBlockSize, similarFloats);
}
/// <summary>Closes ZunTzu: disconnects, persists all user settings, deletes the autosave file and closes the main window.</summary>
internal void Quit() {
	if (model.NetworkClient.Status != NetworkStatus.Disconnected) {
		model.NetworkClient.Disconnect();
	}

	// save user settings
	var defaults = Settings.Default;

	// display settings
	DisplayProperties displayProperties = view.DisplayProperties;
	defaults.DisplayTextureFormat = (int)displayProperties.TextureQuality;
	defaults.DisplayMapAndCounterDetail = (int)displayProperties.MapsAndCountersDetailLevel;
	defaults.DisplayWaitForVerticalBlank = displayProperties.WaitForVerticalBlank;
	defaults.DisplayPreferedFullscreenMode = displayProperties.PreferredFullscreenMode;
	defaults.DisplayDiceModelComplexity = (int)displayProperties.DiceModelsDetailLevel;
	defaults.DisplayWidescreen = (displayProperties.GameAspectRatio == ZunTzu.Graphics.AspectRatioType.SixteenToTen);

	// audio/voice settings
	AudioProperties audioProperties = model.AudioManager.AudioProperties;
	defaults.VoiceMuteAll = audioProperties.MuteAll;
	defaults.AudioDisableSoundEffects = audioProperties.MuteSoundEffects;
	defaults.VoiceMuteRecording = audioProperties.MuteRecording;
	defaults.VoiceMutePlayback = audioProperties.MutePlayback;
	defaults.VoiceActivationThreshold = audioProperties.ActivationThreshold;
	defaults.VoiceEchoSuppression = audioProperties.ActivateEchoSuppression;
	defaults.VoiceAutomaticJitterControl = audioProperties.UseAutomaticJitterControl;
	defaults.VoiceJitterBuffer = audioProperties.JitterControl;
	defaults.VoiceDisableAudioMixerAutoconfig = audioProperties.DisableAutoconfiguration;
	defaults.VoiceMicrophoneInputLevel = audioProperties.MicrophoneInputLevel;

	// window state and player identity
	defaults.DisplayWindowSize = mainForm.ClientSize;
	defaults.DisplayMaximizeWindow = (mainForm.WindowState == FormWindowState.Maximized);
	defaults.PlayerFirstName = model.ThisPlayer.FirstName;
	defaults.PlayerLastName = model.ThisPlayer.LastName;
	defaults.Save();

	// delete autosave file
	string fileName = AutosaveFileName;
	if (File.Exists(fileName)) {
		File.Delete(fileName);
	}

	// detach the closing handler so Close() does not re-enter shutdown logic
	mainForm.Closing -= new CancelEventHandler(onMainFormClosing);
	mainForm.Close();
}
/// <summary>
/// Reads the FLAC metadata (including Vorbis comments) and, unless property
/// reading is disabled, builds the audio properties from the stream info block.
/// </summary>
/// <param name="properties_style">how thoroughly properties should be read; None skips them</param>
private void Read(ReadStyle properties_style) {
	// Look for FLAC metadata, including vorbis comments
	Scan();

	if (!scanned) {
		SetValid(false);
		return;
	}

	// Fall back to an empty comment block when the file carries none.
	if (has_xiph_comment) {
		comment = new OggXiphComment(XiphCommentData);
	} else {
		comment = new OggXiphComment();
	}

	if (properties_style != ReadStyle.None) {
		properties = new TagLib.FlacProperties(StreamInfoData, StreamLength, properties_style);
	}
}