/// <summary>
/// Initializes a new instance of the <see cref="WahWah"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
/// <exception cref="ArgumentNullException">source is null.</exception>
public WahWah(ISampleSource source) : base(source)
{
    //added for consistency with the other constructors in this library
    if (source == null)
        throw new ArgumentNullException("source");

    //effect defaults
    freq = 1.5f;
    startphase = 0;
    depth = 0.7f;
    freqofs = 0.3f;
    res = 2.5f;

    //setup: derive the LFO phase increment per sample from the sample rate
    lfoskip = (float)(freq * 2 * Math.PI / WaveFormat.SampleRate);
    _skipcount = 0;

    //reset the biquad filter history and coefficients
    xn1 = 0;
    xn2 = 0;
    yn1 = 0;
    yn2 = 0;
    b0 = 0;
    b1 = 0;
    b2 = 0;
    a0 = 0;
    a1 = 0;
    a2 = 0;

    _phase = startphase;
    //todo
}
/// <summary>
/// Initializes a new instance of the <see cref="PeakMeter"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
/// <exception cref="System.ArgumentNullException">source</exception>
public PeakMeter(ISampleSource source)
    : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");

    //one peak value slot per channel of the underlying source
    ChannelPeakValues = new float[source.WaveFormat.Channels];
    Interval = 250;
}
/// <summary>
/// Initializes a new instance of the <see cref="NotificationSource" /> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
/// <exception cref="System.ArgumentNullException">source is null.</exception>
public NotificationSource(ISampleSource source)
    : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");

    //default block size: the number of sample frames in 40 ms of audio
    var format = source.WaveFormat;
    BlockCount = (int) (format.SampleRate * (40.0 / 1000.0));
    _buffer = new List<float>(BlockCount * format.Channels);
}
/// <summary>
/// Creates a new instance of the <see cref="SampleAggregatorBase" /> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
/// <exception cref="ArgumentNullException">source is null.</exception>
public SampleAggregatorBase(ISampleSource source)
{
    if (source == null)
        throw new ArgumentNullException("source");

    //take ownership: the source gets disposed together with this aggregator
    _baseSource = source;
    DisposeBaseSource = true;
}
/// <summary>
/// Initializes a new instance of the <see cref="MonoToStereoSource"/> class.
/// </summary>
/// <param name="source">The underlying mono source.</param>
/// <exception cref="ArgumentException">The <paramref name="source"/> has more or less than one channel.</exception>
public MonoToStereoSource(ISampleSource source)
    : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");
    if (source.WaveFormat.Channels != 1)
        throw new ArgumentException("The WaveFormat of the source has be a mono format (one channel).", "source");

    //output format: same sample rate, 32-bit ieee float, two channels
    _waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 2, AudioEncoding.IeeeFloat);
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleToWaveBase"/> class.
/// </summary>
/// <param name="source">The underlying sample source to convert.</param>
/// <param name="bits">Bits per sample of the output format.</param>
/// <param name="encoding">Encoding of the output format.</param>
/// <exception cref="ArgumentNullException">source is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">bits is less than one.</exception>
public SampleToWaveBase(ISampleSource source, int bits, AudioEncoding encoding)
{
    if (source == null)
        throw new ArgumentNullException("source");
    if (bits < 1)
        throw new ArgumentOutOfRangeException("bits");

    var sourceFormat = source.WaveFormat;
    _waveFormat = new WaveFormat(sourceFormat.SampleRate, (short) bits, sourceFormat.Channels, encoding);

    _source = source;
    //samples arrive as 32-bit floats and leave with "bits" bits each
    _ratio = 32.0 / bits;
}
/// <summary>
/// Initializes a new instance of the <see cref="StereoToMonoSource"/> class.
/// </summary>
/// <param name="source">The underlying stereo source.</param>
/// <exception cref="ArgumentException">The <paramref name="source"/> has more or less than two channels.</exception>
public StereoToMonoSource(ISampleSource source)
    : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");
    if (source.WaveFormat.Channels != 2)
        throw new ArgumentException("The WaveFormat of the source has be a stereo format (two channels).", "source");

    //output format: same sample rate, 32-bit ieee float, single channel
    _waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 1, AudioEncoding.IeeeFloat);
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleSourceBase"/> class, converting
/// the given wave stream to an <see cref="ISampleSource"/> if necessary.
/// </summary>
/// <param name="source">The underlying stream; either an <see cref="ISampleSource"/> or an <see cref="IWaveSource"/>.</param>
/// <exception cref="ArgumentNullException">source is null.</exception>
/// <exception cref="ArgumentException">source implements neither ISampleSource nor IWaveSource.</exception>
public SampleSourceBase(IWaveStream source)
{
    if (source == null)
        throw new ArgumentNullException("source");

    if (source is ISampleSource)
    {
        _source = (ISampleSource) source;
    }
    else if (source is IWaveSource)
    {
        _source = WaveToSampleBase.CreateConverter((IWaveSource) source);
    }
    else
    {
        //previously "source as IWaveSource" could silently pass null to CreateConverter
        throw new ArgumentException("source has to be of type IWaveSource or ISampleSource", "source");
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleToWaveBase"/> class.
/// </summary>
/// <param name="source">The underlying <see cref="ISampleSource"/> which has to get converted to a <see cref="IWaveSource"/>.</param>
/// <param name="bits">The <see cref="CSCore.WaveFormat.BitsPerSample"/> of the Output-<see cref="WaveFormat"/>.</param>
/// <param name="encoding">The <see cref="CSCore.WaveFormat.WaveFormatTag"/> of the Output-<see cref="WaveFormat"/>.</param>
/// <exception cref="ArgumentNullException">The <paramref name="source"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException">Invalid number of bits per sample specified by the <paramref name="bits"/> argument.</exception>
protected SampleToWaveBase(ISampleSource source, int bits, AudioEncoding encoding)
{
    if (source == null)
        throw new ArgumentNullException("source");
    if (bits < 1)
        throw new ArgumentOutOfRangeException("bits");

    //clone the source format and adjust depth/encoding for the output side
    var outputFormat = (WaveFormat) source.WaveFormat.Clone();
    outputFormat.BitsPerSample = bits;
    outputFormat.SetWaveFormatTagInternal(encoding);
    _waveFormat = outputFormat;

    Source = source;
    //samples arrive as 32-bit floats and leave with "bits" bits each
    _ratio = 32.0 / bits;
}
/// <summary>
/// Registers a new <see cref="ISampleSource"/> with this mixer; duplicates are ignored.
/// </summary>
/// <param name="source">The source to add.</param>
/// <exception cref="ArgumentNullException">source is null.</exception>
public void AddSource(ISampleSource source)
{
    if (source == null)
        throw new ArgumentNullException("source");

    //NOTE(review): the source's wave format is intentionally not validated here.
    lock (_lockObj)
    {
        if (!Contains(source))
            _sampleSources.Add(source);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleDataProvider"/> class, converting
/// the given wave stream to an <see cref="ISampleSource"/> if necessary.
/// </summary>
/// <param name="source">The underlying stream; either an <see cref="ISampleSource"/> or an <see cref="IWaveSource"/>.</param>
/// <exception cref="ArgumentNullException">source is null.</exception>
/// <exception cref="ArgumentException">source implements neither ISampleSource nor IWaveSource.</exception>
public SampleDataProvider(IWaveStream source)
{
    //added null-check: previously a null source fell through to the ArgumentException branch
    if (source == null)
        throw new ArgumentNullException("source");

    if (source is ISampleSource)
    {
        _source = (ISampleSource) source;
    }
    else if (source is IWaveSource)
    {
        _source = WaveToSampleBase.CreateConverter((IWaveSource) source);
    }
    else
    {
        //fixed message typo ("has to of type") and removed the dead empty branch
        throw new ArgumentException("source has to be of type IWaveSource or ISampleSource");
    }

    //block size: the number of sample frames in 40 ms of audio
    BlockSize = (int) (_source.WaveFormat.SampleRate * (40.0 / 1000.0));
    _sampleBuffer = new Queue<float>();
    _sampleBuffer1 = new Queue<float>();
}
/// <summary>
/// Handles the "Open" button: lets the user pick an audio file, caches it when decoded
/// by the MediaFoundation decoder, renders its waveform and starts playback.
/// </summary>
/// <param name="sender">The event source.</param>
/// <param name="e">The routed event arguments.</param>
private async void Open_Click(object sender, RoutedEventArgs e)
{
    var ofn = new OpenFileDialog {Filter = CodecFactory.SupportedFilesFilterEn};
    if (ofn.ShowDialog() == true)
    {
        //stop any current playback and release the previous notification source
        _soundOut.Stop();
        if (_notificationSource != null)
            _notificationSource.Dispose();
        var source = CodecFactory.Instance.GetCodec(ofn.FileName);
        //since the mediafoundationdecoder isn't really accurate what position and length concerns
        //read the whole file into a cache
        if (source is MediaFoundationDecoder)
        {
            if (source.Length < 10485760) //10MB
            {
                //small file: cache entirely in memory
                source = new CachedSoundSource(source);
            }
            else
            {
                //large file: cache to a temporary file instead; time the operation for diagnostics
                Stopwatch stopwatch = Stopwatch.StartNew();
                source = new FileCachedSoundSource(source);
                stopwatch.Stop();
                Debug.WriteLine(stopwatch.Elapsed.ToString());
            }
        }
        //rewind before rendering the waveform, and again before playback
        source.Position = 0;
        //load the waveform
        await LoadWaveformsAsync(source);
        source.Position = 0;
        _sampleSource = source.ToSampleSource();
        //raise BlockRead every 100 ms so the UI position marker can follow playback
        _notificationSource = new NotificationSource(_sampleSource) {Interval = 100};
        _notificationSource.BlockRead += (o, args) => { UpdatePosition(); };
        _soundOut.Initialize(_notificationSource.ToWaveSource());
        _soundOut.Play();
    }
}
/// <summary>
/// Wires the given sample source into the visualization pipeline: creates an fft
/// spectrum provider, the two spectrum renderers, and a notification stream that
/// feeds each played sample into the provider.
/// </summary>
/// <param name="aSampleSource">The sample source to visualize and play.</param>
private void SetupSampleSource(ISampleSource aSampleSource)
{
    const FftSize fftSize = FftSize.Fft4096;
    //create a spectrum provider which provides fft data based on some input
    var spectrumProvider = new BasicSpectrumProvider(aSampleSource.WaveFormat.Channels,
        aSampleSource.WaveFormat.SampleRate, fftSize);

    //linespectrum and voiceprint3dspectrum used for rendering some fft data
    //in order to get some fft data, set the previously created spectrumprovider
    _lineSpectrum = new LineSpectrum(fftSize)
    {
        SpectrumProvider = spectrumProvider,
        UseAverage = true,
        BarCount = 50,
        BarSpacing = 2,
        IsXLogScale = true,
        ScalingStrategy = ScalingStrategy.Sqrt
    };
    _voicePrint3DSpectrum = new VoicePrint3DSpectrum(fftSize)
    {
        SpectrumProvider = spectrumProvider,
        UseAverage = true,
        PointCount = 200,
        IsXLogScale = true,
        ScalingStrategy = ScalingStrategy.Sqrt
    };

    //the SingleBlockNotificationStream is used to intercept the played samples
    var notificationSource = new SingleBlockNotificationStream(aSampleSource);
    //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
    notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

    //expose the whole chain as a 16-bit wave source for playback
    _source = notificationSource.ToWaveSource(16);
}
/// <summary>
/// Loads the media located at the given <paramref name="uri"/>: builds the mono
/// playback chain with fft interception, creates a separate sample source for
/// analysis, and starts the sound-out setup.
/// </summary>
/// <param name="uri">The location of the media to load.</param>
public void Load(Uri uri)
{
    //release any previously loaded media first
    Dispose();
    _uri = uri;
    var codec = CSCore.Codecs.CodecFactory.Instance.GetCodec(_uri);
    //down-mix to mono for the playback/fft chain
    _waveSource = codec.ToSampleSource().ToMono().ToWaveSource();
    spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels, _waveSource.WaveFormat.SampleRate, CSCore.DSP.FftSize.Fft4096);
    //the SingleBlockNotificationStream is used to intercept the played samples
    var notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());
    //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
    //NOTE(review): the handler feeds SpectrumProvider (property) while the field
    //spectrumProvider was just assigned — presumably the property wraps that field;
    //verify both refer to the same instance.
    notificationSource.SingleBlockRead += (s, a) => SpectrumProvider.Add(a.Left, a.Right);
    _waveSource = notificationSource.ToWaveSource(16);
    // Load the sample source
    _sampleSource = codec.ToSampleSource();
    RaiseSourceEvent(ESourceEventType.Loaded);
    hasMedia = true;
    LoadSoundOut();
}
/// <summary>
/// Initializes a new instance of the <see cref="LoudMaxStream"/> class with
/// high-pitch reduction enabled.
/// </summary>
/// <param name="provider">The underlying sample source.</param>
/// <param name="frequencyReduce">Reduction factor; the velocity threshold becomes its reciprocal.</param>
public LoudMaxStream(ISampleSource provider, double frequencyReduce)
    : this(provider)
{
    velocityThresh = 1 / frequencyReduce;
    reduceHighPitch = true;
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleSourceWithDelay"/> class.
/// </summary>
/// <param name="sampleSource">The underlying sample source.</param>
/// <param name="delay">The delay to apply.</param>
public SampleSourceWithDelay(ISampleSource sampleSource, int delay)
{
    this.delay = delay;
    this.sampleSource = sampleSource;
}
/// <summary>
/// Computes (and caches on disk) per-frame audio levels for the given audio file.
/// Each byte in the ".levels" file is the normalized peak FFT amplitude of one
/// 25 ms frame, scaled to 0-255.
/// </summary>
/// <param name="audioType">Sub-folder of the shared files directory the audio lives in.</param>
/// <param name="audioFile">The audio file name.</param>
/// <returns>The path of the generated (or previously cached) ".levels" file.</returns>
public string GetLevelsFromAudioFX(string audioType, string audioFile)
{
    string audioFilename = Path.Combine(Executor.Current.ExpanderSharedFiles, audioType, audioFile);
    string levelsFilename = Path.Combine(Executor.Current.ExpanderSharedFiles, audioType, audioFile + ".levels");

    //only generate the levels file once; later calls reuse the cached file
    if (!File.Exists(levelsFilename))
    {
        using (ISampleSource source = CodecFactory.Instance.GetCodec(audioFilename).ToSampleSource())
        {
            var fftProvider = new FftProvider(source.WaveFormat.Channels, FftSize.Fft1024);
            int millisecondsPerFrame = 1000 / 40; //25 ms per frame (40 fps)
            long maxBufferLengthInSamples = source.GetRawElements(millisecondsPerFrame);
            long bufferLength = Math.Min(source.Length, maxBufferLengthInSamples);
            float[] buffer = new float[bufferLength];
            int read = 0;
            int totalSamplesRead = 0;
            var fftData = new float[1024];
            var list = new List<float>();
            float highest = 0;
            do
            {
                //determine how many samples to read
                int samplesToRead = (int)Math.Min(source.Length - totalSamplesRead, buffer.Length);
                read = source.Read(buffer, 0, samplesToRead);
                if (read == 0)
                {
                    break;
                }
                totalSamplesRead += read;
                //add read data to the fftProvider
                fftProvider.Add(buffer, read);
                fftProvider.GetFftData(fftData);
                //peak bin in the lower half of the spectrum is this frame's level
                float highestAmplitude = 0;
                for (int i = 0; i < fftData.Length / 2; i++)
                {
                    if (fftData[i] > highestAmplitude)
                    {
                        highestAmplitude = fftData[i];
                    }
                }
                list.Add(highestAmplitude);
                //track the overall maximum for normalization afterwards
                if (highestAmplitude > highest)
                {
                    highest = highestAmplitude;
                }
            } while (totalSamplesRead < source.Length);
            if (highest > 0)
            {
                // Adjust to equalize
                float adjustment = 1 / highest;
                for (int i = 0; i < list.Count; i++)
                {
                    list[i] *= adjustment;
                }
            }
            //persist one byte (0-255) per frame
            using (var fs = File.Create(levelsFilename))
            {
                fs.Write(list.Select(x => (byte)(x * 255)).ToArray(), 0, list.Count);
            }
        }
    }
    return(levelsFilename);
}
/// <summary>
/// Initializes a new instance of the <see cref="SynchronizerSampleSource"/> class.
/// </summary>
/// <param name="upstream">The upstream sample source.</param>
/// <param name="resetDesyncTime">The desync duration after which a reset occurs.</param>
/// <param name="pipeline">The decoder pipeline to synchronize with.</param>
public SynchronizerSampleSource(ISampleSource upstream, TimeSpan resetDesyncTime, IDecoderPipeline pipeline)
{
    _pipeline = pipeline;
    _resetDesyncTime = resetDesyncTime;
    _upstream = upstream;
}
/// <summary>
/// Returns a new instance of the <see cref="Equalizer" /> class with 10 preset <see cref="EqualizerFilter" />.
/// </summary>
/// <param name="source">The underlying sample source which provides the data for the equalizer.</param>
/// <returns>A new instance of the <see cref="Equalizer" /> class with 10 preset <see cref="EqualizerFilter" />.</returns>
public static Equalizer Create10BandEqualizer(ISampleSource source) =>
    Create10BandEqualizer(source, 18, 0);
private float delay = 1.0f; //echo delay, in seconds

#endregion Fields

#region Constructors

/// <summary>
/// Initializes a new instance of the <see cref="Echo"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
public Echo(ISampleSource source)
    : base(source)
{
    //queue holding the delayed samples that get mixed back into the output
    backBuffer = new Queue<float>();
}
/// <summary>
/// Removes the given <see cref="ISampleSource"/> from this mixer.
/// Unknown or null sources are silently ignored.
/// </summary>
/// <param name="source">The source to remove.</param>
public void RemoveSource(ISampleSource source)
{
    //don't throw null ex here
    lock (_lockObj)
    {
        if (Contains(source))
            _sampleSources.Remove(source);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleToPcm24"/> class.
/// </summary>
/// <param name="source">The underlying <see cref="ISampleSource"/> to convert to 24-bit PCM.</param>
public SampleToPcm24(ISampleSource source)
    : base(source, 24, AudioEncoding.Pcm)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="PureDataSource"/> class.
/// </summary>
/// <param name="waveFormat">The output wave format.</param>
/// <param name="source">The underlying sample source.</param>
public PureDataSource(WaveFormat waveFormat, ISampleSource source)
{
    this.source = source;
    _WaveFormat = waveFormat;
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleToPcm16"/> class.
/// </summary>
/// <param name="source">The underlying <see cref="ISampleSource"/> which has to get converted to a 16-bit PCM <see cref="IWaveSource"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
public SampleToPcm16(ISampleSource source)
    : base(source, 16, AudioEncoding.Pcm)
{
    //NOTE(review): the base constructor runs first, so it would reject a null source
    //before this check is reached; kept to preserve the original behavior exactly.
    if (source == null)
        throw new ArgumentNullException("source");
}
/// <summary>
/// Runs onset detection over the whole audio source: the audio is cut into
/// overlapping slices, the slices are analyzed in parallel batches, and the
/// resulting onsets are merged, sorted and thinned by a minimum time delta.
/// </summary>
/// <param name="audio">The sample source to analyze.</param>
/// <returns>The detected onsets ordered by time.</returns>
public List<Onset> Detect(ISampleSource audio)
{
    //reset detection state from any previous run
    _onsets.Clear();
    _completed = 0;
    _sliceCount = 0;
    _onsets = new List<float>();
    _amplitudes = new List<float>();
    var onsets = new List<Onset>();
    //init detection specific variables
    int sliceSampleSize = (int)Math.Ceiling(_options.SliceLength * audio.WaveFormat.SampleRate); //the size of each slice's sample
    int slicePaddingSize = (int)Math.Ceiling(_options.SlicePaddingLength * audio.WaveFormat.SampleRate);
    _sliceCount = (int)Math.Ceiling((float)audio.Length/audio.WaveFormat.Channels / sliceSampleSize); //the number of slices needed
    var samples = (int)audio.Length / audio.WaveFormat.Channels;
    //init parallel specific variables
    var pOptions = new ParallelOptions();
    if (_options.MaxDegreeOfParallelism != -1) pOptions.MaxDegreeOfParallelism = _options.MaxDegreeOfParallelism;
    ParallelLoopState loopState; //NOTE(review): never used
    List<Wav> wavSlices = new List<Wav>();
    //cut the source into padded slices, reading each slice's samples into its own buffer
    for (int i = 0; i < _sliceCount; i++)
    {
        int baseStart = i * sliceSampleSize;
        //pad the slice start backwards (except for the very first slice)
        int adjustedStart = (baseStart - sliceSampleSize > 0) ? baseStart - slicePaddingSize : 0;
        //clamp the slice length at the end of the source
        int count = (sliceSampleSize + slicePaddingSize + baseStart > samples) ? samples - adjustedStart : sliceSampleSize + (baseStart - adjustedStart) + slicePaddingSize;
        float delay = (float)adjustedStart / audio.WaveFormat.SampleRate;
        float[] buffer = new float[count * audio.WaveFormat.Channels];
        audio.SetPosition(TimeConverter.SampleSourceTimeConverter.ToTimeSpan(audio.WaveFormat, adjustedStart * audio.WaveFormat.Channels));
        audio.Read(buffer, 0, count * audio.WaveFormat.Channels);
        //NOTE(review): "slicePaddingSize / SampleRate" is integer division here — confirm
        //Padding is meant to lose the fractional part.
        wavSlices.Add(new Wav(buffer, audio.WaveFormat.SampleRate, count, audio.WaveFormat.Channels) { Delay = delay, Padding = ((delay > 0) ? slicePaddingSize : 0) / audio.WaveFormat.SampleRate });
    }
    //process slices in buckets of 5, resetting the shared allocator between buckets
    int bucketSize = 5;
    int bucketcount = (int)Math.Ceiling((double)wavSlices.Count / bucketSize);
    MemoryAllocator _allocator = new MemoryAllocator();
    for (int i = 0; i < bucketcount; i++)
    {
        _allocator.Reset();
        int count = bucketSize;
        if ((i + 1) * bucketSize > wavSlices.Count) count = wavSlices.Count - i * bucketSize;
        if (count < 0) continue;
        List<Wav> parallel = wavSlices.GetRange(i * bucketSize, count);
        var ploopResult = Parallel.ForEach(parallel, pOptions, (w, state) => GetOnsets(w, _allocator));
        if (!ploopResult.IsCompleted) throw new Exception();
    }
    //pair each onset time with its amplitude and sort chronologically
    onsets = _onsets.Zip(_amplitudes, (onset, amplitude) => new Onset { OnsetTime = onset, OnsetAmplitude = amplitude }).ToList();
    onsets = onsets.OrderBy(f => f.OnsetTime).ToList();
    //drop onsets that are closer together than the configured minimum time delta
    float prev = 0;
    float combine = 0.03f; //NOTE(review): never used
    var ret = new List<Onset>();
    for (int i = 0; i < onsets.Count; i++)
    {
        if (onsets[i].OnsetTime - prev < _options.MinimumTimeDelta / 1000.0f) continue;
        prev = onsets[i].OnsetTime;
        ret.Add(onsets[i]);
    }
    return ret;
}
/// <summary>
/// Initializes a new instance of the <see cref="PitchShifter"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
public PitchShifter(ISampleSource source)
    : base(source)
{
    //1.0 means the pitch is left unchanged
    PitchShiftFactor = 1.0f;
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleToPcm8"/> class.
/// </summary>
/// <param name="source">The underlying <see cref="ISampleSource"/> to convert to 8-bit PCM.</param>
public SampleToPcm8(ISampleSource source)
    : base(source, 8, AudioEncoding.Pcm)
{
}
/// <summary>
/// Entry point: reads (or interactively creates via /config) the configuration,
/// then starts a WASAPI loopback capture and a silence-watchdog thread.
/// </summary>
/// <param name="args">Command line arguments; supports /? and /config.</param>
static void Main(string[] args)
{
    myConfig = System.Reflection.Assembly.GetExecutingAssembly().GetName().Name + ".cfg";
    //fixed: use short-circuit && instead of non-short-circuit &
    if (!ReadConfig() && args.Length == 0)
    {
        WriteLog("ERROR! (Main): Can't read a configuration file. Please, create a new one.");
        //force interactive configuration
        Array.Resize(ref args, 1);
        args[0] = "/config";
    }
    string myFullName = System.Reflection.Assembly.GetExecutingAssembly().GetName().FullName;
    WriteLog("(Main): Program started. (" + myFullName + ")");
    string sArgs = string.Join(" ", args);
    WriteLog("(Main): Arguments: " + sArgs);
    if (sArgs.Contains(@"/?"))
    {
        Console.WriteLine("");
        Console.WriteLine("/? - Print this message and exit.");
        Console.WriteLine("/config - Configure program and exit.");
        Console.WriteLine("\n Press any key to exit...");
        Console.ReadKey();
        Exit(0);
    }
    if (sArgs.Contains(@"/config"))
    {
        //interactive configuration: application path, arguments and restart delay
        Console.WriteLine("");
        Console.WriteLine($"Please, enter a full path of media player application (default is: {appPath}): ");
        string a = Console.ReadLine();
        //NOTE(review): Console.ReadLine can return null (redirected input); a.Length would then throw
        if (a.Length == 0)
        {
            a = appPath;
        }
        if (!IsValidFullPath(a))
        {
            Console.WriteLine("ERROR! Wrong application path.");
            a = appPath;
        }
        appPath = a;
        appName = string.Join(".", Pop(System.IO.Path.GetFileName(appPath).Split('.')));
        Console.WriteLine($"Please, enter arguments for application if needed (default is: {appARGV}): ");
        a = Console.ReadLine();
        //fixed: with the original non-short-circuit '&' a null ReadLine result
        //would still evaluate a.Length and throw NullReferenceException
        if (a != null && a.Length > 0)
        {
            appARGV = a;
        }
        Console.WriteLine($"Please, enter a value of delay (in seconds >=5 ) before application will be restarted (default is: {delayBeforeRestartProgram}): ");
        a = Console.ReadLine();
        bool e = false;
        if (a.Length == 0)
        {
            a = delayBeforeRestartProgram.ToString();
        }
        int b = StrToInt(a, ref e);
        if (b >= 5 && !e)
        {
            delayBeforeRestartProgram = b;
        }
        else
        {
            Console.WriteLine("ERROR! Wrong delay value! Should be >=5.");
        }
        //let the user confirm the configuration before writing it
        while (true)
        {
            Console.WriteLine("Is configuration below correct?");
            Console.WriteLine($"\nappPath: {appPath}\nappName: {appName}\nappARGV: {appARGV}\ndelayBeforeRestartProgram: {delayBeforeRestartProgram}");
            Console.Write("(Y/N): ");
            a = Console.ReadLine();
            if (a == "Y" || a == "y")
            {
                WriteConfig();
                break;
            }
            else if (a == "N" || a == "n")
            {
                Console.WriteLine("Please, rerun this application with /config argument to try again.");
                break;
            }
        }
        Console.WriteLine("\nPress any key to exit...");
        Console.ReadKey();
        Exit(0);
    }
    //normal run: capture whatever is playing on the default render device
    var soundIn = new WasapiLoopbackCapture();
    WriteLog("(Main): Working with: " + soundIn.Device.FriendlyName);
    try
    {
        soundIn.Initialize();
    }
    catch
    {
        WriteLog("ERROR! (Main): Error while initializing device(39). Exiting.");
        Exit(1);
    }
    var soundInSource = new SoundInSource(soundIn);
    try
    {
        ISampleSource source = soundInSource.ToSampleSource();
        soundInSource.DataAvailable += (s, aEvent) => NewData(source);
    }
    catch
    {
        WriteLog("ERROR! (Main): Error while initializing device(50). Exiting.");
        Exit(1);
    }
    WriteLog("(Main): Trying to start sound capturing...");
    try
    {
        soundIn.Start();
        Thread.Sleep(2000);
        //if no data callback fired within 2 s, start the silence watchdog
        if (!newDataIsRunning && !noSoundIsRunning)
        {
            Thread noSound = new Thread(NoSound);
            noSound.IsBackground = true;
            noSound.Start();
        }
    }
    catch
    {
        WriteLog("ERROR! (Main): Error while sound capturing. Exiting.");
        Exit(1);
    }
    WriteLog("(Main): Started.");
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioSample"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
internal AudioSample(ISampleSource source)
    : base(source)
{
    //by default the sample stays on its channel when playback finishes
    RemoveFromChannelOnFinish = false;
}
/// <summary>
/// Initializes a new instance of the <see cref="SynchronizerSampleSource"/> class.
/// </summary>
/// <param name="upstream">The upstream sample source.</param>
/// <param name="resetDesyncTime">The desync duration after which a reset occurs.</param>
public SynchronizerSampleSource(ISampleSource upstream, TimeSpan resetDesyncTime)
{
    _resetDesyncTime = resetDesyncTime;
    _upstream = upstream;
}
/// <summary>
/// Initializes a new instance of the <see cref="SingleBlockNotificationStream"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
/// <exception cref="System.ArgumentNullException">source</exception>
public SingleBlockNotificationStream(ISampleSource source)
    : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");
}
/// <summary>
/// Initializes a new instance of the <see cref="SimpleNotificationSource" /> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
/// <exception cref="System.ArgumentNullException">source</exception>
public SimpleNotificationSource(ISampleSource source)
    : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");
}
/// <summary>
/// Initializes a new instance of the <see cref="AntiClipping"/> class with
/// high-pitch reduction enabled.
/// </summary>
/// <param name="provider">The underlying sample source.</param>
/// <param name="frequencyReduce">Reduction factor; the velocity threshold becomes its reciprocal.</param>
public AntiClipping(ISampleSource provider, double frequencyReduce)
    : this(provider)
{
    velocityThresh = 1 / frequencyReduce;
    reduceHighPitch = true;
}
/// <summary>
/// Applies the enabled DMO sound effects from <paramref name="ap"/> to the wave
/// source, replacing <paramref name="src"/> with the decorated effect chain.
/// Tail-producing effects (echo/chorus/reverb) first extend the source so their
/// tails are not cut off.
/// </summary>
/// <param name="src">The wave source to decorate; replaced in place.</param>
/// <param name="ap">The effect settings; may be null, in which case nothing happens.</param>
static private void ApplyEffects(ref IWaveSource src, SoundEffectSettings ap) // ap may be null
{
    if (ap != null && ap.Any)
    {
        //work out how much extra tail time (ms) the enabled effects need
        int extend = 0;
        if (ap.echoenabled)
        {
            extend = ap.echodelay * 2;
        }
        if (ap.chorusenabled)
        {
            extend = Math.Max(extend, 50);
        }
        if (ap.reverbenabled)
        {
            extend = Math.Max(extend, 50);
        }
        if (extend > 0)
        {
            //System.Diagnostics.Debug.WriteLine("Extend by " + extend + " ms due to effects");
            src = src.AppendSource(x => new ExtendWaveSource(x, extend));
        }
        //append each enabled effect in a fixed order: chorus, reverb, distortion, gargle, echo
        if (ap.chorusenabled)
        {
            src = src.AppendSource(x => new DmoChorusEffect(x) { WetDryMix = ap.chorusmix, Feedback = ap.chorusfeedback, Delay = ap.chorusdelay, Depth = ap.chorusdepth });
        }
        if (ap.reverbenabled)
        {
            src = src.AppendSource(x => new DmoWavesReverbEffect(x) { InGain = 0, ReverbMix = ap.reverbmix, ReverbTime = ((float)ap.reverbtime) / 1000.0F, HighFrequencyRTRatio = ((float)ap.reverbhfratio) / 1000.0F });
        }
        if (ap.distortionenabled)
        {
            src = src.AppendSource(x => new DmoDistortionEffect(x) { Gain = ap.distortiongain, Edge = ap.distortionedge, PostEQCenterFrequency = ap.distortioncentrefreq, PostEQBandwidth = ap.distortionfreqwidth });
        }
        if (ap.gargleenabled)
        {
            src = src.AppendSource(x => new DmoGargleEffect(x) { RateHz = ap.garglefreq });
        }
        if (ap.echoenabled)
        {
            src = src.AppendSource(x => new DmoEchoEffect(x) { WetDryMix = ap.echomix, Feedback = ap.echofeedback, LeftDelay = ap.echodelay, RightDelay = ap.echodelay });
        }
        //pitch shifting works on samples, so round-trip through an ISampleSource
        if (ap.pitchshiftenabled)
        {
            ISampleSource srs = src.ToSampleSource();
            srs = srs.AppendSource(x => new PitchShifter(x) { PitchShiftFactor = ((float)ap.pitchshift) / 100.0F });
            src = srs.ToWaveSource();
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SoftClipSampleSource"/> class.
/// </summary>
/// <param name="upstream">The sample source to soft-clip.</param>
public SoftClipSampleSource(ISampleSource upstream)
{
    _upstream = upstream;
    //size the clipper for the upstream channel count
    _clipper = new OpusNative.OpusSoftClip(upstream.WaveFormat.Channels);
}
/// <summary>
/// Initializes a new instance of the <see cref="EffectBase"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
public EffectBase(ISampleSource source)
    : base(source)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="PanSource"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
/// <exception cref="System.ArgumentException">Source has to be stereo.</exception>
public PanSource(ISampleSource source)
    : base(source)
{
    //panning only makes sense with exactly two channels
    if (source.WaveFormat.Channels != 2)
        throw new ArgumentException("Source has to be stereo.", "source");
}
/// <summary>
/// Initializes a new instance of the <see cref="FadeInOut" /> class.
/// </summary>
/// <param name="source">The underlying source to use.</param>
/// <exception cref="ArgumentNullException">source is null.</exception>
public FadeInOut(ISampleSource source)
    : base(source)
{
    if (source == null)
        throw new ArgumentNullException("source");
}
/// <summary>
/// Creates a new <see cref="SpatializerStream"/> wrapping the given source.
/// </summary>
/// <param name="source">The sample source to spatialize.</param>
/// <returns>The new spatializer stream.</returns>
public static SpatializerStream CreateSpatializerStream(ISampleSource source)
{
    return new SpatializerStream(source);
}
/// <summary>
/// Returns a new 10-band <see cref="CSCore.Streams.Effects.Equalizer"/> for the given source.
/// </summary>
/// <param name="source">The underlying sample source.</param>
/// <returns>A new 10-band equalizer.</returns>
public CSCore.Streams.Effects.Equalizer Create10BandEqualizer(ISampleSource source) =>
    this.Create10BandEqualizer(source, 18, 0);
/// <summary>
/// Determines whether the given source is registered with this mixer.
/// </summary>
/// <param name="source">The source to look up; null yields false.</param>
/// <returns>True when the source is contained; otherwise false.</returns>
public bool Contains(ISampleSource source)
{
    return source != null && _sampleSources.Contains(source);
}
/// <summary>
/// Initializes a new instance of the <see cref="LoudMaxStream"/> class.
/// </summary>
/// <param name="provider">The underlying sample source.</param>
public LoudMaxStream(ISampleSource provider)
{
    Provider = provider;
    //mirror the provider's output format
    WaveFormat = provider.WaveFormat;
}
/// <summary>
/// Builds the results screen: computes the player's percentage and grade, persists
/// a new high score, then constructs all visual components (title, grade, percent
/// gauge, digit readout, previous best) and loads the score sound and bgm track.
/// </summary>
/// <param name="game">The finished game providing score data and services.</param>
public override void Start(Game game)
{
    //a fix to a bug
    game.DisplayInstance.windowManager.CenterWindow();
    Components = new List<Component>();
    //percentage of notes hit, floored for display
    savePercent = (((float)game.NotesHit / (float)game.TotalNotes) * 100);
    percent = (float)Math.Floor(savePercent);
    grade = "NA"; //NOTE(review): immediately overwritten by the next line
    grade = getGrade(savePercent);
    _lastScore = PlayerSettings.Instance.chartScores[game.SongHash].percent;
    //only persist when the new score beats the stored one
    if (savePercent > PlayerSettings.Instance.chartScores[game.SongHash].percent)
    {
        PlayerSettings.Instance.SaveScore(game.SongHash, grade, percent);
    }
    //visual stuff!
    //songTitle
    songTitle = new Visual();
    songTitle.LoadBMP(Path.Combine(_assetPath, "songTitle.bmp"));
    songTitle.Active = true;
    //center the song name horizontally on row 47
    songTitle.writeText(50 - (game.SongName.Length / 2), 47, game.SongName, ConsoleColor.Black, ConsoleColor.White);
    Components.Add(songTitle);
    //grade bar
    gradeBar = new Visual();
    gradeBar.LoadBMP(Path.Combine(_assetPath, "gradeBar.bmp"));
    Components.Add(gradeBar);
    //grade letter (may merge)
    gradeletter = new Visual();
    gradeletter.z = 1;
    gradeletter.LoadBMP(Path.Combine(_assetPath, $"gradeF.bmp"));
    Components.Add(gradeletter);
    //percent bar
    percentBar = new Visual();
    percentBar.LoadBMP(Path.Combine(_assetPath, "percentBar.bmp"));
    Components.Add(percentBar);
    //guage (god help me)
    gaugeOutline = new Visual();
    gaugeOutline.z = 1;
    gaugeOutline.LoadBMP(Path.Combine(_assetPath, "gaugeOutline.bmp"));
    Components.Add(gaugeOutline);
    gauge = new Visual();
    gauge.Active = false;
    gauge.LoadBMP(Path.Combine(_assetPath, "gauge.bmp"));
    Components.Add(gauge);
    gauge.overrideColor = true;
    gauge.overridefront = ConsoleColor.Red;
    //numbers: three digit slots, spaced numOff columns apart
    numVisuals = new Visual[3];
    for (int i = 0; i < numVisuals.Length; i++)
    {
        numVisuals[i] = new Visual();
        numVisuals[i].z = 1;
        numVisuals[i].x = numOff * i;
        numVisuals[i].Active = true; //fornow
        numVisuals[i].LoadBMP(Path.Combine(_assetPath, "0.bmp"));
        Components.Add(numVisuals[i]);
    }
    //last best: the previous score drawn behind the gauge, offset by its percentage
    lastBest = new Visual();
    lastBest.Active = false;
    lastBest.z = -1;
    lastBest.LoadBMP(Path.Combine(_assetPath, "gauge.bmp"), new int[] { -100 + (int)Math.Ceiling(_lastScore), 0 });
    Components.Add(lastBest);
    lastBest.overrideColor = true;
    lastBest.overridefront = ConsoleColor.Gray;
    //easter egg for doing really badly
    yousuck = new Visual();
    yousuck.Active = false;
    yousuck.z = 10;
    yousuck.LoadBMP(Path.Combine(_assetPath, "yousuck.bmp"));
    Components.Add(yousuck);
    //audio: score "ding" sample resampled to the engine rate, plus background music
    scoreDing = CodecFactory.Instance.GetCodec(Path.Combine(_assetPath, "exp.wav")).ChangeSampleRate(AudioManager.sampleRate).ToStereo().ToSampleSource();
    bgmMusic = game.AudioManagerInstance.addTrack(Path.Combine(_assetPath, bgmName));
}
/// <summary>
/// Prepares the generator for the given sound source.
/// </summary>
/// <param name="soundSource">The sample source to generate frames for.</param>
/// <param name="options">The generator options.</param>
public void Initialize(ISampleSource soundSource, GeneratorOptions options)
{
    totalLength = FrameMath.CalculateTotalFrames(soundSource, options);
    //NOTE(review): duration is scaled by Fps on every call — confirm Initialize
    //is only ever invoked once per instance.
    duration *= options.Fps;
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleToIeeeFloat32"/> class.
/// </summary>
/// <param name="source">The underlying <see cref="ISampleSource"/> to expose as 32-bit IEEE float.</param>
public SampleToIeeeFloat32(ISampleSource source)
    : base(source, 32, AudioEncoding.IeeeFloat)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="GainSource"/> class.
/// </summary>
/// <param name="source">The underlying base source.</param>
public GainSource(ISampleSource source)
    : base(source)
{
    //clip out-of-range samples by default
    ClipOverflows = true;
}
/// <summary>
/// Prepares the generator by computing the total number of frames for the source.
/// </summary>
/// <param name="soundSource">The sample source to generate frames for.</param>
/// <param name="options">The generator options.</param>
public void Initialize(ISampleSource soundSource, GeneratorOptions options)
{
    totalFrames = FrameMath.CalculateTotalFrames(soundSource, options);
}
/// <summary>
/// Initializes a new instance of the <see cref="BiQuadFilterSource"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
public BiQuadFilterSource(ISampleSource source)
    : base(source)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="SoundTouchSource"/> class using a
/// freshly created <see cref="SoundTouch"/> processor.
/// </summary>
/// <param name="sampleSource">The underlying sample source.</param>
/// <param name="latency">The processing latency.</param>
public SoundTouchSource(ISampleSource sampleSource, int latency)
    : this(sampleSource, latency, new SoundTouch())
{
}
/// <summary>
/// Disposes the <see cref="SampleAggregatorBase" /> and the underlying <see cref="BaseSource" />.
/// </summary>
/// <param name="disposing">
/// True to release both managed and unmanaged resources; false to release only unmanaged
/// resources.
/// </param>
protected virtual void Dispose(bool disposing)
{
    //NOTE(review): the disposing flag is ignored, so BaseSource (a managed object)
    //would also be disposed from a finalizer path — confirm this is intended.
    if (DisposeBaseSource && BaseSource != null)
    {
        BaseSource.Dispose();
        //clear the field so a second Dispose call is a no-op
        _baseSource = null;
    }
}
/// <summary>
/// Parses and executes one console command: volume/position queries, playlist
/// loading (local file or web), thread lookup on two imageboards, playback
/// transport controls and help. Unknown commands print an error.
/// </summary>
/// <param name="cmd">The raw command line typed by the user.</param>
private async Task Cmd(string cmd)
{
    Console.ForegroundColor = ConsoleColor.Gray;
    Console.BackgroundColor = ConsoleColor.Black;
    //"vol <value>": set the fade target volume
    if (cmd.StartsWith("vol "))
    {
        float.TryParse(cmd.Substring(4), out targetVolume);
        Console.Write("set volume to " + targetVolume.ToString());
        return;
    }
    //"pos": print the current playback position
    if (cmd.StartsWith("pos"))
    {
        Console.Write("current pos is " + wasapiOut.WaveSource.GetPosition().ToString());
        return;
    }
    //"playlist <path-or-url>": load a playlist from disk or the web
    if (cmd.StartsWith("playlist "))
    {
        //stop any current playback before re-initializing
        if (initialized)
        {
            wasapiOut.Stop();
            initialized = false;
        }
        string path = cmd.Substring(9);
        //treat the argument as a local file first, falling back to a web request
        bool onComputer = true;
        if (!File.Exists(@path))
        {
            onComputer = false;
        }
        Stream stream = Stream.Null;
        if (onComputer)
        {
            stream = File.OpenRead(@path);
        }
        else
        {
            try
            {
                HttpWebRequest req = (HttpWebRequest)WebRequest.Create(path);
                HttpWebResponse response = (HttpWebResponse)req.GetResponse();
                stream = response.GetResponseStream();
            }
            catch { } //best effort: a failed download leaves stream == Stream.Null, reported below
        }
        if (stream == Stream.Null)
        {
            Console.Write("couldn't read " + path);
            return;
        }
        //pick a parser based on the playlist file extension
        string extension = Path.GetExtension(path);
        IPlaylistParser<IBasePlaylist> parser = PlaylistParserFactory.GetPlaylistParser(extension);
        IBasePlaylist playlist = parser.GetFromStream(stream);
        //NOTE(review): each iteration re-initializes wasapiOut, so only the LAST
        //track of the playlist remains initialized — confirm this is intended.
        foreach (string str in playlist.GetTracksPaths())
        {
            currentStream = new Mp3WebStream(str, false);
            //insert a pitch shifter and a per-block tap that records channel amplitudes
            ISampleSource source = currentStream.ToSampleSource().AppendSource(x => new PitchShifter(x), out pitchShifter);
            var notificationSource = new SingleBlockNotificationStream(source);
            notificationSource.SingleBlockRead += (s, a) =>
            {
                leftPitch = Math.Abs(a.Left) * 10;
                rightPitch = Math.Abs(a.Right) * 10;
            };
            currentStream = notificationSource.ToWaveSource();
            currentPath = path;
            wasapiOut.Initialize(currentStream);
            wasapiOut.Volume = 0.0f; //start silent; volume fades toward targetVolume elsewhere
            initialized = true;
        }
        Console.Write("set playlist to " + path);
        return;
    }
    //"thread [board]": scrape 8ch (and, for /a/, the smuglo.li bunker) for r/a/dio threads
    if (cmd.StartsWith("thread"))
    {
        string board = "a";
        if (cmd.Length > 6)
        {
            board = cmd.Substring(7);
        }
        //thread id -> reply count, per site
        Dictionary<int, int> a_threads = new Dictionary<int, int>();
        Dictionary<int, int> smug_threads = new Dictionary<int, int>();
        using (HttpClient a_client = new HttpClient())
        using (HttpResponseMessage a_response = await a_client.GetAsync("https://8ch.net/" + board + "/catalog.html"))
        using (HttpContent a_content = a_response.Content)
        {
            string soykaf = await a_content.ReadAsStringAsync();
            //scan the raw catalog html for data-reply / data-id / data-subject attributes
            string pattern = "data-reply=\"";
            for (int i = 0; i < soykaf.Length - pattern.Length; ++i)
            {
                if (soykaf.Substring(i, pattern.Length) == pattern)
                {
                    int replyCountEnd = FindNext(soykaf.Substring(i + pattern.Length), "\"");
                    string replyCount = soykaf.Substring(i + pattern.Length, replyCountEnd);
                    int threadIdBegin = i + pattern.Length + FindNext(soykaf.Substring(i + pattern.Length), "data-id=\"");
                    string threadId = soykaf.Substring(threadIdBegin + 9, FindNext(soykaf.Substring(threadIdBegin + 9), "\""));
                    int threadNameBegin = threadIdBegin + 9 + FindNext(soykaf.Substring(threadIdBegin + 9), "data-subject=\"");
                    string threadName = soykaf.Substring(threadNameBegin + 14, FindNext(soykaf.Substring(threadNameBegin + 14), "\""));
                    //keep threads whose subject mentions r/a/dio or radio
                    if (FindNext(threadName.ToLower(), "r/a/dio") >= 0 || FindNext(threadName.ToLower(), "radio") >= 0)
                    {
                        //NOTE(review): parse results are NOT checked here (unlike the bunker
                        //loop below), so duplicate/failed ids could throw on Add — verify.
                        int.TryParse(threadId, out int ID);
                        int.TryParse(replyCount, out int REPLY);
                        a_threads.Add(ID, REPLY);
                    }
                }
            }
        }
        Console.Write("got " + a_threads.Count + " r/a/dio thread" + (a_threads.Count > 1 ? "s" : "") + " from 8/" + board + "/");
        //the bunker only mirrors /a/
        if (board == "a")
        {
            using (HttpClient smug_client = new HttpClient())
            using (HttpResponseMessage smug_response = await smug_client.GetAsync("https://smuglo.li/a/catalog.html"))
            using (HttpContent smug_content = smug_response.Content)
            {
                string soykaf = await smug_content.ReadAsStringAsync();
                string pattern = "data-reply=\"";
                for (int i = 0; i < soykaf.Length - pattern.Length; ++i)
                {
                    if (soykaf.Substring(i, pattern.Length) == pattern)
                    {
                        int replyCountEnd = FindNext(soykaf.Substring(i + pattern.Length), "\"");
                        string replyCount = soykaf.Substring(i + pattern.Length, replyCountEnd);
                        int threadIdBegin = i + pattern.Length + FindNext(soykaf.Substring(i + pattern.Length), "data-id=\"");
                        string threadId = soykaf.Substring(threadIdBegin + 9, FindNext(soykaf.Substring(threadIdBegin + 9), "\""));
                        int threadNameBegin = threadIdBegin + 9 + FindNext(soykaf.Substring(threadIdBegin + 9), "data-subject=\"");
                        string threadName = soykaf.Substring(threadNameBegin + 14, FindNext(soykaf.Substring(threadNameBegin + 14), "\""));
                        if (FindNext(threadName.ToLower(), "r/a/dio") >= 0 || FindNext(threadName.ToLower(), "radio") >= 0)
                        {
                            //here both parses ARE validated before adding
                            if (int.TryParse(threadId, out int ID) && int.TryParse(replyCount, out int REPLY))
                            {
                                smug_threads.Add(ID, REPLY);
                            }
                        }
                    }
                }
            }
            Console.Write("\ngot " + smug_threads.Count + " r/a/dio thread" + (smug_threads.Count > 1 ? "s" : "") + " from the bunker");
        }
        Thread.Sleep(500);
        Console.Write("\nopening the most active thread(s)");
        Thread.Sleep(1000);
        //open only the first found thread from each site in the default browser
        foreach (var x in a_threads)
        {
            Process.Start("https://8ch.net/a/res/" + x.Key + ".html");
            break;
        }
        foreach (var x in smug_threads)
        {
            Process.Start("https://smuglo.li/a/res/" + x.Key + ".html");
            break;
        }
        return;
    }
    //transport commands; M3uCheck() guards against an uninitialized output
    if (cmd.StartsWith("play"))
    {
        if (M3uCheck())
        {
            return;
        }
        wasapiOut.Play();
        Console.Write("started playing");
        return;
    }
    if (cmd.StartsWith("stop"))
    {
        if (M3uCheck())
        {
            return;
        }
        wasapiOut.Stop();
        Console.Write("stopped playing");
        return;
    }
    if (cmd.StartsWith("pause"))
    {
        if (M3uCheck())
        {
            return;
        }
        wasapiOut.Pause();
        Console.Write("paused playing");
        return;
    }
    if (cmd.StartsWith("resume"))
    {
        if (M3uCheck())
        {
            return;
        }
        wasapiOut.Resume();
        Console.Write("resumed playing");
        return;
    }
    if (cmd.StartsWith("help"))
    {
        Console.Write(HELP_MESSAGE);
        return;
    }
    //unknown command: flash an error in red
    Console.ForegroundColor = ConsoleColor.Black;
    Console.BackgroundColor = ConsoleColor.Red;
    Console.Write("nANI!?");
    Console.ForegroundColor = ConsoleColor.Red;
    Console.BackgroundColor = ConsoleColor.Black;
    Console.Write("?");
    return;
}
/// <summary>
/// Initializes a new instance of the <see cref="AntiClipping"/> class.
/// </summary>
/// <param name="provider">The underlying sample source.</param>
public AntiClipping(ISampleSource provider)
{
    Provider = provider;
    //mirror the provider's output format
    WaveFormat = provider.WaveFormat;
}
/// <summary>
/// Initializes a new instance of the <see cref="HighShelfFilter"/> class.
/// </summary>
/// <param name="waveFormat">The wave format to filter.</param>
/// <param name="frequency">The shelf frequency.</param>
/// <param name="q">The filter Q.</param>
/// <param name="gain">The shelf gain.</param>
/// <param name="source">The underlying sample source.</param>
public HighShelfFilter(WaveFormat waveFormat, Scalar.Scalar frequency, Scalar.Scalar q, Scalar.Scalar gain, ISampleSource source)
    : base(waveFormat, frequency, q, gain, source)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BiQuadFilter"/> class with a
/// default Q of 1/sqrt(2) and a constant gain of 6.
/// </summary>
/// <param name="waveFormat">The wave format to filter.</param>
/// <param name="frequency">The filter frequency.</param>
/// <param name="source">The underlying sample source.</param>
public BiQuadFilter(WaveFormat waveFormat, Scalar.Scalar frequency, ISampleSource source)
    : this(waveFormat, frequency, new Scalar.ConstantScalar((float)(1.0 / Math.Sqrt(2))), new Scalar.ConstantScalar(6), source)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BiQuadFilter"/> class.
/// </summary>
/// <param name="waveFormat">The wave format to filter.</param>
/// <param name="frequency">The filter frequency.</param>
/// <param name="q">The filter Q.</param>
/// <param name="gain">The filter gain in dB.</param>
/// <param name="source">The underlying sample source.</param>
public BiQuadFilter(WaveFormat waveFormat, Scalar.Scalar frequency, Scalar.Scalar q, Scalar.Scalar gain, ISampleSource source)
    : base(waveFormat)
{
    Source = source;
    Frequency = frequency;
    Q = q;
    GainDB = gain;
    //clear the filter delay line
    Z1 = 0;
    Z2 = 0;
}
/// <summary>
/// Initializes a new instance of the <see cref="SampleToPcm16"/> class.
/// </summary>
/// <param name="source">The underlying <see cref="ISampleSource"/> to convert to 16-bit PCM.</param>
public SampleToPcm16(ISampleSource source)
    : base(source, 16, AudioEncoding.Pcm)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="DataNotificationSource"/> class.
/// </summary>
/// <param name="source">Underlying base source which provides audio data.</param>
public DataNotificationSource(ISampleSource source)
    : base(source)
{
}