public ArcPath(bool raw, bool keepFraction, InputResampler inputResampler, Texture texture, Color4 colour, SpriteText output)
{
    InputResampler = inputResampler;

    const int target_raw = 1024;

    RelativeSizeAxes = Axes.Both;
    Texture = texture;
    Colour = colour;

    for (int i = 0; i < target_raw; i++)
    {
        // Trace a quarter circle of radius 200, offset by (50.5, 50.5).
        float x = (float)(Math.Sin(i / (double)target_raw * (Math.PI * 0.5)) * 200) + 50.5f;
        float y = (float)(Math.Cos(i / (double)target_raw * (Math.PI * 0.5)) * 200) + 50.5f;

        // Optionally truncate to whole pixels to simulate low-precision input.
        Vector2 v = keepFraction ? new Vector2(x, y) : new Vector2((int)x, (int)y);

        if (raw)
            AddRawVertex(v);
        else
            AddSmoothedVertex(v);
    }

    output.Text += ": Smoothed=" + NumVertices + ", Raw=" + NumRaw;
}
public void PcmToFloatAndBackTest()
{
    for (int i = 0; i < 200; i++)
    {
        short sample = RandomPCMSample();
        float floatResult = InputResampler.PCMtoFloat(sample);
        int shortResult = InputResampler.FloatToPCM(floatResult);
        int delta = Math.Abs(sample - shortResult);
        Assert.IsTrue(delta < 2, "Conversion to float and back should be within one unit. Delta was {0}", delta);
    }
}
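// NOTE: PCMtoFloat and FloatToPCM are exercised above but not defined in this
// section. The pair below is a minimal sketch of a symmetric conversion that
// would satisfy the round-trip assertion (delta < 2); the real implementation
// may differ in scale factor or clamping.
static float PcmToFloatSketch(short sample) => sample / 32768f;

static int FloatToPcmSketch(float sample)
{
    // Clamp first so out-of-range floats cannot overflow the 16-bit range.
    float clamped = Math.Max(-1f, Math.Min(1f, sample));
    return (int)(clamped * 32768f);
}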
private void HandleAudio(WaveInEventArgs args)
{
    if (IsRecording && !isDisposed)
    {
        byte[] toEncode = args.Buffer;
        int length = args.BytesRecorded;

        if (length > 0)
        {
            // Resample whenever the capture format differs from the codec's
            // record format. WaveFormat does not overload ==, so use Equals.
            if (!waveIn.WaveFormat.Equals(codec.RecordFormat))
            {
                if (waveIn.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
                {
                    var floatSamples = InputResampler.ReadIeeeWav(toEncode, args.BytesRecorded, waveIn.WaveFormat);
                    foreach (var sample in floatSamples)
                        aggregator.Add(sample);
                    toEncode = InputResampler.Resample(floatSamples, floatSamples.Length, waveIn.WaveFormat, codec.RecordFormat, out length);
                }
                else
                {
                    // 16-bit PCM: two bytes per sample, so step through the
                    // recorded bytes in pairs and stop at the buffer's end.
                    for (int i = 0; i < args.BytesRecorded; i += 2)
                        aggregator.Add(InputResampler.PCMtoFloat(toEncode, i / 2));
                    toEncode = InputResampler.Resample(toEncode, args.BytesRecorded, waveIn.WaveFormat, codec.RecordFormat, out length);
                }
            }

            if (toEncode == null)
            {
                Console.WriteLine("Encode Error: Disabling input. Please choose another record format and report this bug.");
                StopRecording();
            }
            else
            {
                // Attenuate before encoding when a sub-unity volume scalar is set.
                if (CoughScalar < 1.0f)
                    InputResampler.ScalePCM16VolumeDb(ref toEncode, length, CoughScalar);

                byte[] encoded = codec.Encode(toEncode, length);
                if (encoded.Length > 0 && NetworkWPF.Client != null)
                    NetworkWPF.Client.SendAudio(codec.CodecID, encoded, encoded.Length);
            }
        }
    }
}
protected bool AddSmoothedVertex(Vector2 pos)
{
    NumRaw++;
    bool foundOne = false;

    // The resampler may emit zero, one, or several smoothed positions per raw input.
    foreach (Vector2 relevant in InputResampler.AddPosition(pos))
    {
        AddVertex(relevant);
        NumVertices++;
        foundOne = true;
    }

    return foundOne;
}
public void MonoRateResampleTest()
{
    int samples = 1000;
    byte[] stream = CreateTestStream(samples, 0);

    // Mono 44.1 kHz down to mono 8 kHz.
    WaveFormat sourceFormat = new WaveFormat(44100, 1);
    WaveFormat destFormat = new WaveFormat(8000, 1);

    int sourceLength = stream.Length;
    int expectedResultLen = GetExpectedConversionLength(sourceLength, sourceFormat, destFormat);

    int resultLength;
    byte[] resultStream = InputResampler.Resample(stream, sourceLength, sourceFormat, destFormat, out resultLength);

    Assert.AreEqual(expectedResultLen, resultLength);
}
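// NOTE: GetExpectedConversionLength is used by the tests above and below but
// is not defined in this section. A plausible sketch, assuming 16-bit PCM and
// simple rate/channel scaling rounded down to whole output frames; the real
// helper may account for resampler latency or tolerance differently.
static int GetExpectedConversionLength(int sourceLength, WaveFormat source, WaveFormat dest)
{
    double ratio = (double)dest.SampleRate / source.SampleRate
                 * dest.Channels / source.Channels;
    int bytes = (int)(sourceLength * ratio);
    return bytes - bytes % dest.BlockAlign; // align to a whole frame
}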
public void LongerSourceStereoResampleTest()
{
    int samples = 500;
    byte[] testStream = CreateStereoSampleStream(samples);

    // The buffer is deliberately larger than the data so the test verifies
    // that only sourceLength bytes are consumed.
    byte[] stream = new byte[testStream.Length + 1000];
    testStream.CopyTo(stream, 0);

    // Stereo 48 kHz down to mono 44.1 kHz.
    WaveFormat sourceFormat = new WaveFormat(48000, 2);
    WaveFormat destFormat = new WaveFormat(44100, 1);

    int sourceLength = testStream.Length;
    int expectedResultLen = GetExpectedConversionLength(sourceLength, sourceFormat, destFormat);

    int resultLength;
    byte[] resultStream = InputResampler.Resample(stream, sourceLength, sourceFormat, destFormat, out resultLength);

    Assert.AreEqual(expectedResultLen, resultLength);
}
public ArcPath(bool raw, InputResampler inputResampler, Texture texture, Color4 colour, SpriteText output)
{
    InputResampler = inputResampler;

    const int target_raw = 1024;

    RelativeSizeAxes = Axes.Both;
    Texture = texture;
    Colour = colour;

    for (int i = 0; i < target_raw; i++)
    {
        float x = (float)(Math.Sin(i / (double)target_raw * (Math.PI * 0.5)) * 200) + 50.5f;
        float y = (float)(Math.Cos(i / (double)target_raw * (Math.PI * 0.5)) * 200) + 50.5f;

        // Unlike the overload above, every vertex goes through the smoother;
        // `raw` only controls whether coordinates keep their fractional part.
        if (!raw)
        {
            x = (int)x;
            y = (int)y;
        }

        AddSmoothedVertex(new Vector2(x, y));
    }

    output.Text += ": Smoothed=" + NumVertices + ", Raw=" + NumRaw;
}
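// NOTE: a hedged usage sketch of the constructors above; the texture and the
// surrounding test-scene scaffolding are assumed, not shown in this section.
var label = new SpriteText { Text = "Arc" };
var smoothed = new ArcPath(false, new InputResampler(), texture, Color4.Blue, label);
// `label` now reads "Arc: Smoothed=..., Raw=1024", making the smoothing
// ratio visible directly in the test scene.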
public void ChannelCountChangeTest()
{
    int samples = 1000;
    byte[] left = CreateTestStream(samples, 0);
    byte[] right = CreateTestStream(samples, 1);
    byte[] stream = Multiplex(samples, left, right);

    // Stereo to mono at the same sample rate, so the output should be
    // exactly half the input length.
    WaveFormat sourceFormat = new WaveFormat(8000, 2);
    WaveFormat destFormat = new WaveFormat(8000, 1);

    int sourceLength = stream.Length;
    int expectedResultLen = sourceLength / 2;

    int resultLength;
    byte[] resultStream = InputResampler.Resample(stream, sourceLength, sourceFormat, destFormat, out resultLength);
    Assert.AreEqual(expectedResultLen, resultLength);

    for (int i = 0; i < samples; i++)
    {
        // Reassemble little-endian 16-bit samples; the mono result should be
        // the sum of the two channels unless that sum would clip.
        int leftValue = left[i * 2 + 1] << 8 | left[i * 2];
        int rightValue = right[i * 2 + 1] << 8 | right[i * 2];
        int resultValue = resultStream[i * 2 + 1] << 8 | resultStream[i * 2];

        if (leftValue + rightValue < short.MaxValue)
            Assert.AreEqual(leftValue + rightValue, resultValue);
        else
            Assert.Inconclusive("Clipping occurred.");
    }
}
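// NOTE: Multiplex is referenced above but not defined in this section. The
// demultiplexing in the assertions implies the following interleaving; this
// is an assumed sketch, not necessarily the project's actual helper.
static byte[] Multiplex(int samples, byte[] left, byte[] right)
{
    byte[] stereo = new byte[samples * 4]; // 2 bytes per sample, 2 channels
    for (int i = 0; i < samples; i++)
    {
        stereo[i * 4]     = left[i * 2];      // left, low byte
        stereo[i * 4 + 1] = left[i * 2 + 1];  // left, high byte
        stereo[i * 4 + 2] = right[i * 2];     // right, low byte
        stereo[i * 4 + 3] = right[i * 2 + 1]; // right, high byte
    }
    return stereo;
}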
public void HandleAudio(Audio.Codecs.CodecID codecid, byte[] encoded)
{
    if (isDisposed)
        return;

    Audio.Codecs.INetworkChatCodec remoteCodec = Codecs.SingleOrDefault(m => m.CodecID == codecid);
    if (remoteCodec == null)
    {
        Console.WriteLine("Bad Audio Packet: Codec ID {0}", codecid);
        return;
    }

    if (codecid != LastCodec)
    {
        LastCodec = codecid;
        CodecName = remoteCodec.Name();
    }

    TimeSpan buffered = waveOut == null ? TimeSpan.Zero : waveProvider.BufferedDuration;
    bool isPlaying = buffered != TimeSpan.Zero;

    // (Re)start output if it hasn't been created, the codec format changed, or
    // playback stalled. WaveFormat does not overload ==, so compare with Equals.
    if (waveOut == null || !waveProvider.WaveFormat.Equals(remoteCodec.RecordFormat) || (!isPlaying && ShouldTryRestartOutput))
        Start(remoteCodec);
    else if (!isPlaying)
        UnderRuns++;

    if (buffered <= FrameDropThresholdMs)
    {
        byte[] decoded = remoteCodec.Decode(encoded, encoded.Length);
        int length = decoded.Length;

        // Ramp up from silence to avoid an audible pop after an underrun.
        if (!isPlaying && AudioOutWPF.shouldRampUnderruns)
            InputResampler.RampPCM16Volume(ref decoded, length, InputResampler.RampDirection.ZeroToFull);

        var volume = LevelManager.LevelScalar;
        if (volume < 1.0f)
            InputResampler.ScalePCM16VolumeDb(ref decoded, length, volume);

        if (length > 0 && ShouldDropSilence)
        {
            int dropped = DropSilence(silenceThreshhold, ref decoded, ref length);
            DroppedSilence += dropped;
        }
        else if (ShouldAddSilence && length > 5)
        {
            // Treat the frame as silent if its first few 16-bit samples fall
            // below the threshold.
            bool silent = true;
            for (int i = 0; i < 5; i += 2)
            {
                if (decoded[i + 1] != 0 || decoded[i] > addSilenceThreshold)
                {
                    silent = false;
                    break;
                }
            }

            if (silent)
            {
                // Pad with a quarter frame of near-silence to keep the buffer
                // from running dry.
                var silenceBytes = length / 4;
                var silence = new byte[silenceBytes];
                byte silenceLevel = (byte)(addSilenceThreshold / 2);
                for (int i = 0; i < silenceBytes - 1; i += 2)
                {
                    silence[i + 1] = 0;
                    silence[i] = silenceLevel;
                }
                waveProvider.AddSamples(silence, 0, silenceBytes);
                AddedSilence += length;
            }
        }

        waveProvider.AddSamples(decoded, 0, length);
    }
    else
    {
        DroppedPackets++;
    }

    if (shouldUpdateDuration)
    {
        BufferedDuration = buffered;
        shouldUpdateDuration = false;
    }

    LastReceived = DateTime.UtcNow;
}
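// NOTE: DropSilence is called above but not defined in this section. A rough
// sketch of the in-place compaction it appears to perform, assuming 16-bit
// little-endian PCM; the threshold semantics and return value are assumptions.
static int DropSilence(short threshold, ref byte[] buffer, ref int length)
{
    int write = 0;
    for (int read = 0; read + 1 < length; read += 2)
    {
        short sample = (short)(buffer[read + 1] << 8 | buffer[read]);
        if (Math.Abs((int)sample) >= threshold)
        {
            // Keep audible samples, compacting them toward the front.
            buffer[write] = buffer[read];
            buffer[write + 1] = buffer[read + 1];
            write += 2;
        }
    }
    int dropped = length - write;
    length = write;
    return dropped;
}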
public void Cleanup()
{
    InputResampler.Dispose();
}