Beispiel #1
0
        public IWaveProvider Resample(IWaveProvider inputProvider)
        {
            var output = inputProvider;

            if (_inFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                //This needs to change if the Discord sound format changes
                output = new WaveFloatTo16Provider(inputProvider);
                _log.Debug("Resampling from IeeeFloat using FloatTo16Provider");
            }
            else if (_inFormat.BitsPerSample != SoundSettings.DiscordFormat.BitsPerSample)
            {
                output = new WaveFormatConversionProvider(new WaveFormat(_inFormat.SampleRate, SoundSettings.DiscordFormat.BitsPerSample, _inFormat.Channels), output);
                _log.Debug($"Resampling from {_inFormat.BitsPerSample} to {SoundSettings.DiscordFormat.BitsPerSample} bit using FormatConversionProvider");
            }

            if (_inFormat.Channels != SoundSettings.DiscordFormat.Channels)
            {
                output = new WaveFormatConversionProvider(new WaveFormat(_inFormat.SampleRate, SoundSettings.DiscordFormat.BitsPerSample, SoundSettings.DiscordFormat.Channels), output);
                _log.Debug($"Resampling from {_inFormat.Channels} to {SoundSettings.DiscordFormat.Channels} using FormatConversionProbider");
            }

            if (_inFormat.SampleRate != SoundSettings.DiscordFormat.SampleRate)
            {
                output = new WaveFormatConversionProvider(SoundSettings.DiscordFormat, output);
                _log.Debug($"Resampling from {_inFormat.SampleRate} to {SoundSettings.DiscordFormat.SampleRate} using FormatConversionProbider");
            }

            return(output);
        }
Beispiel #2
0
        /// <summary>
        /// Advances the playback cue: starts preparation for any unprepared
        /// entries, then — if the head of the cue changed — tears down the
        /// current playback chain and rebuilds it for the new head track.
        /// </summary>
        private static void UpdateCue()
        {
            Context con = new Context(true);

            if (cue.Count < 1)
            {
                return;
            }

            foreach (Audio audio in cue)
            {
                if (!audio.Prepared && !audio.Prepairing)
                {
                    con.WriteLine("Preparing " + audio.VideoInfo() + "...");
                    Task t = new Task(() => { audio.Prepare(); new Context(true).WriteLine(audio.VideoInfo() + " is prepared"); });

                    t.Start();

                    // Block until the background task has at least begun preparing
                    // (or already finished). Fix: was a non-short-circuit `|`.
                    SpinWait.SpinUntil(() => audio.Prepairing || audio.Prepared);
                }
            }

            if (current != cue[0])
            {
                Close();
                current = cue[0];

                if (current.Prepairing)
                {
                    con.WriteLine("Still preparing File...");
                    SpinWait.SpinUntil(() => !current.Prepairing);

                    // Short grace period so Prepared has a chance to settle
                    // after Prepairing drops. TODO confirm this race is real.
                    Thread.Sleep(100);

                    if (!current.Prepared)
                    {
                        con.WriteLine("There was a problem, skipping!");
                        ShiftCue();
                        return;
                    }
                }

                fileReader = new AudioFileReader(current.path);

                // Mono 16-bit PCM, then resampled to 48 kHz for playback.
                prov = fileReader.ToSampleProvider().ToMono().ToWaveProvider16();

                convert = new WaveFormatConversionProvider(new WaveFormat(48000, 16, 1), prov);

                read = convert.ToSampleProvider();
                fileReader.Volume = cachedVol;
            }
        }
Beispiel #3
0
        /// <summary>
        /// Converts a captured 32-bit-float 48 kHz stereo buffer into 16-bit mono
        /// PCM at <paramref name="sampleRate"/>. Experimental ("Alpha") path.
        /// </summary>
        /// <param name="e">Capture event carrying the raw buffer and its byte length.</param>
        /// <param name="sampleRate">Target output sample rate in Hz.</param>
        /// <returns>The converted buffer, or null if conversion threw (the app is then shut down).</returns>
        private byte[] convert32bitFloat48000HzStereoPCMTo16bitMonoPCM_Alpha(WaveInEventArgs e, int sampleRate)
        {
            byte[] recorded_buf    = e.Buffer;
            int    recorded_length = e.BytesRecorded;

            byte[] result_buf = null;
            int    result_len = -1;

            try
            {
                // Convert the raw captured data into a playable/processable stream.
                var waveBufferResample = new BufferedWaveProvider(this._WaveIn.WaveFormat);
                waveBufferResample.DiscardOnBufferOverflow = true;
                waveBufferResample.ReadFully    = false; // leave a buffer?
                waveBufferResample.BufferLength = recorded_length;
                var sampleStream = new WaveToSampleProvider(waveBufferResample);

                // Downsample
                var resamplingProvider = new WdlResamplingSampleProvider(sampleStream, sampleRate);

                // Stereo to mono
                var monoProvider = new StereoToMonoSampleProvider(resamplingProvider)
                {
                    LeftVolume  = 1f,
                    RightVolume = 1f
                };

                // Convert to 32bit float to 16bit PCM
                // NOTE(review): this goes 16-bit -> 8-bit -> 16-bit; presumably
                // intentional for this "Alpha" path, but verify — the 8-bit hop
                // loses precision.
                var ieeeToPcm               = new SampleToWaveProvider16(monoProvider);
                var depthConvertProvider    = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 8, 1), ieeeToPcm);
                var depthConvertProviderRev = new WaveFormatConversionProvider(new WaveFormat(sampleRate, 16, 1), depthConvertProvider);

                waveBufferResample.AddSamples(recorded_buf, 0, recorded_length);

                // Output size: float32->int16 halves bytes, resampling scales by
                // 48000/sampleRate, stereo->mono halves again. Assumes the input
                // really is 48 kHz and that 48000 % sampleRate == 0 (integer
                // division otherwise truncates) — TODO confirm.
                result_len = recorded_length / (2 * (48000 / sampleRate) * 2); // depth conv and sampling and ch conv
                result_buf = new byte[result_len];
                depthConvertProviderRev.Read(result_buf, 0, result_len);
            }
            catch (Exception ex)
            {
                // Any conversion failure is treated as fatal for the whole app.
                Console.WriteLine(ex);
                Console.WriteLine("exit...");
                System.Windows.Forms.Application.Exit();
            }

            return(result_buf);
        }
Beispiel #4
0
        /// <summary>
        /// (Re)initializes the WASAPI loopback capture for the current device:
        /// tears down any previous capture/player, rebuilds the provider chain
        /// (first call only), and starts a silent player so the loopback stream
        /// keeps delivering data. Order of operations matters here.
        /// </summary>
        private void initAudioRecorder()
        {
            // Unsubscribe before disposing so stale handlers can't fire.
            if (audio != null)
            {
                audio.DataAvailable    -= Audio_DataAvailable;
                audio.RecordingStopped -= Audio_RecordingStopped;
                audio.Dispose();
            }
            if (blankplayer != null)
            {
                blankplayer.Dispose();
            }
            audio        = new WasapiLoopbackCapture(device);
            sourceFormat = audio.WaveFormat;
            // Provider chain is built once and reused across re-inits:
            // float capture -> 16-bit PCM -> mono -> 24 kHz/16-bit/mono.
            if (sourceProvider == null)
            {
                sourceProvider           = new BufferedWaveProvider(sourceFormat);
                sourceProvider.ReadFully = false;
                wfto16prov         = new WaveFloatTo16Provider(sourceProvider);
                monovolumeprovider = new StereoToMonoProvider16(wfto16prov);
                formatconv         = new WaveFormatConversionProvider(new WaveFormat(24000, 16, 1), monovolumeprovider);
            }


            text_encoding.Text = sourceFormat.Encoding.ToString();
            //var client = device.AudioClient.AudioRenderClient;
            blankplayer = new WasapiOut(device, AudioClientShareMode.Shared, false, 0);

            // Playing silence keeps the loopback capture producing data even
            // when nothing else is rendering on the device.
            silence = new SilenceProvider(sourceFormat).ToSampleProvider();

            AudioDevice_Text.ForeColor = Color.Black;

            try
            {
                blankplayer.Init(silence);
            }
            catch
            {
                // NOTE(review): bare catch — failure is only surfaced via the
                // red label; consider catching a specific exception and logging.
                AudioDevice_Text.ForeColor = Color.Red;
            }
            audio.DataAvailable    += Audio_DataAvailable;
            audio.RecordingStopped += Audio_RecordingStopped;
            AudioMonitor            = device.AudioMeterInformation;
        }
Beispiel #5
0
    /// <summary>
    /// Loads an audio file and registers it with the sound driver under the
    /// "main" key as a float stereo provider (nominally 44100 Hz, 2 ch).
    /// </summary>
    /// <param name="filename">Path of the audio file to load.</param>
    public static void MainFileLoad(string filename)
    {
        IWaveProvider   FloatStereo44100Provider;
        AudioFileReader reader;

        reader          = new AudioFileReader(filename);
        audiofilereader = reader;
        Test.Print("Volume消す");

        IWaveProvider stereo;

        if (reader.WaveFormat.Channels == 1)
        {
            if (reader.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                // Mono float -> 16-bit PCM -> stereo -> back to float.
                // Fix: a WaveFormatConversionProvider was previously constructed
                // here and discarded unused (dead code); removed.
                stereo = new Wave16ToFloatProvider(new MonoToStereoProvider16(new WaveFloatTo16Provider(reader)));
            }
            else if (reader.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
            {
                stereo = new Wave16ToFloatProvider(new MonoToStereoProvider16(reader));
            }
            else
            {
                // Unsupported encoding: silently skip loading.
                return;
            }
        }
        else
        {
            // Already multi-channel; pass the reader through unchanged.
            stereo = reader;
        }

        // Eventually everything is unified to this format; whether to force
        // 44100 Hz here is still open for consideration.
        FloatStereo44100Provider = stereo;

        SoundDriver.AddWaveProvider(FloatStereo44100Provider, "main");
    }
Beispiel #6
0
    /// <summary>
    /// Entry point for one play session: sets up audio playback for
    /// <paramref name="cdjdata"/>, wires up MIDI fader/record inputs, loads
    /// background movies, then runs the DxLib render loop until the process
    /// message pump ends.
    /// </summary>
    /// <param name="cdjdata">Track data (sound file, BPM, cue step data) to play.</param>
    public static void DoMain(CdjData cdjdata)
    {
        //using NAudio.Wave;
        // Simple playback (make two of these for simultaneous playback).

        IWaveProvider   FloatStereo44100Provider;
        AudioFileReader reader;

        reader        = new AudioFileReader(cdjdata.SOUND);
        reader.Volume = 0.1F;



        IWaveProvider stereo;

        if (reader.WaveFormat.Channels == 1)
        {
            if (reader.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                //NAudio.Wave.SampleProviders.MonoToStereoSampleProvider s = new NAudio.Wave.SampleProviders.MonoToStereoSampleProvider(reader);
                // Mono float -> 16-bit PCM -> stereo -> back to float.
                stereo = new Wave16ToFloatProvider(new MonoToStereoProvider16(new WaveFloatTo16Provider(reader)));
                // NOTE(review): `conv` is constructed and never used — dead code.
                WaveFormatConversionProvider conv = new WaveFormatConversionProvider(new WaveFormat(44100, 2), stereo);
            }
            else if (reader.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
            {
                stereo = new Wave16ToFloatProvider(new MonoToStereoProvider16(reader));
            }
            else
            {
                // Unsupported encoding: abort the session.
                return;
            }
        }
        else
        {
            stereo = reader;
        }

        // Eventually everything is unified to this format; whether to force
        // 44100 Hz here is still open for consideration.
        FloatStereo44100Provider = stereo;

        SoundDriver.AddWaveProvider(FloatStereo44100Provider, "main");
        SoundDriver.Play();



        //while (waveOut.PlaybackState == PlaybackState.Playing)
        //{
        //    Application.DoEvents();
        //    this.Text = reader.CurrentTime.ToString();
        //} // wait for playback to finish
        //   // if the WaveOut instance is disposed before playback ends, playback stops at that point

        //inputAnalogFader inpfader = new inputAnalogFader();
        inputMIDIFader  inpfader = new inputMIDIFader(midiinput);
        inputMIDIRecord RecordL  = new inputMIDIRecord(CdjData.Left, midiinput);
        inputMIDIRecord RecordR  = new inputMIDIRecord(CdjData.Right, midiinput);

        inpfader.Initial();

        SetDrawObjects();


        //GHplaylineL = DX.LoadGraph("playline.png");
        //GHplaylineR = DX.LoadGraph("playline2.png");

        int MovieGraphHandle;
        int MovieGraphHandle1, MovieGraphHandle2;

        MovieGraphHandle1 = DX.LoadGraph("B3_TYPE42.avi");
        MovieGraphHandle2 = DX.LoadGraph("E_Map_TYPE01a.avi");

        // Switch the draw target to the back buffer.
        DX.SetDrawScreen(DX.DX_SCREEN_BACK);
        // Initialize state used to move images left and right.

        now.set_startbpm(0, cdjdata.BPM);
        //now.set_startbpm(DX.GetNowCount(), music.BPM);
        Random random = new Random();


        // NOTE(review): `dbg` is never used below — leftover debug counter.
        int dbg = 0;

        // Main loop — runs until the DxLib message pump reports shutdown.
        while (DX.ProcessMessage() != -1)
        {
            // Exit when playback finishes (disabled below).

/*
 *          if ((DX.GetJoypadInputState(DX.DX_INPUT_KEY_PAD1) & DX.PAD_INPUT_9) != 0)
 *          {
 *              waveOut.Dispose();
 *              break;
 *          }
 *          if(waveOut.PlaybackState == PlaybackState.Stopped)
 *          {
 *              waveOut.Dispose();
 *              break;
 *          }
 */
            // Clear the screen.
            DX.ClearDrawScreen();

            // Drive game time from the audio clock so visuals stay in sync.
            now.settime((int)reader.CurrentTime.TotalMilliseconds);
            cdjdata.SetStep(now.judgementlinestep);

            // Movie handling
            {
                // Pick which movie to draw based on joypad input.
                // NOTE(review): MovieGraphHandle is assigned but never drawn
                // with below — confirm whether this selection is still used.
                if ((DX.GetJoypadInputState(DX.DX_INPUT_KEY_PAD1) & DX.PAD_INPUT_RIGHT) != 0)
                {
                    MovieGraphHandle = MovieGraphHandle1;
                }
                else
                {
                    MovieGraphHandle = MovieGraphHandle2;
                }
                // Draw the movie.
                DX.SetDrawBlendMode(DX.DX_BLENDMODE_NOBLEND, 0);
                // Restart either movie once it has stopped playing (loop it).
                if (DX.GetMovieStateToGraph(MovieGraphHandle1) != 1)
                {
                    DX.SeekMovieToGraph(MovieGraphHandle1, 0);
                    DX.PlayMovieToGraph(MovieGraphHandle1);
                }
                if (DX.GetMovieStateToGraph(MovieGraphHandle2) != 1)
                {
                    DX.SeekMovieToGraph(MovieGraphHandle2, 0);
                    DX.PlayMovieToGraph(MovieGraphHandle2);
                }
            }

            // Draw the cueing discs, play lines, cut-in arrows, etc.
            foreach (DiscQueCutData o in cdjdata.lstquedata)
            {
                o.Cutin(inputFader.GetCutInState());
                if (o.ActiveState == EnumActiveState.NEXT)
                {
                    if (o.lr == CdjData.Left)
                    {
                        o.Queing(RecordL.DeltaAngle);
                    }
                    else
                    {
                        o.Queing(RecordR.DeltaAngle);
                    }
                }
            }

            // Fader-side muting (currently disabled — volume calls commented out).
            if (cdjdata.nowlr == 1 && inputFader.GetFaderState() == EnumFaderState.RIGHT)
            {
                //waveOut.Volume = 0;
            }
            else if (cdjdata.nowlr == -1 && inputFader.GetFaderState() == EnumFaderState.LEFT)
            {
                //waveOut.Volume = 0;
            }
            else
            {
                //waveOut.Volume = DEF_VOLUME;
            }


            // Poll MIDI hardware state for this frame.
            RecordL.Update();
            RecordR.Update();
            inpfader.Update();
            midiinput.update(DateTime.Now);
            //inputFader.GetFaderState();
            //----------------------------------------------------------------------------------------
            // Debug overlay

            DX.DrawString(0, 0, "fader" + inputFader.GetFaderValue(), DX.GetColor(255, 255, 255));
            DX.DrawString(0, 20, "cutin" + inputFader.GetCutInState().ToString(), DX.GetColor(255, 255, 255));
            DX.DrawString(0, 40, "angle" + RecordR.DeltaAngle.ToString(), DX.GetColor(255, 255, 255));
            DX.DrawString(0, 60, "Pos " + midiinput.Pos(-1).ToString(), DX.GetColor(255, 255, 255));
            DX.DrawString(0, 80, "Spd " + midiinput.Speed(-1).ToString(), DX.GetColor(255, 255, 255));

            //----------------------------------------------------------------------------------------

            // Drawing
            DoDraw(cdjdata);
        }
    }