// Resolves the raw resource for the given track, tears down any existing
// MediaPlayer, and starts playback of the new track (if the resource exists).
public void InitAndPlayAudio (string trackSource) {
    var resourceId = GetRawResourceId(trackSource);
    Console.WriteLine("Resource id for " + trackSource + " = " + (resourceId.HasValue ? resourceId.Value.ToString() : "(null)"));

    // Release any previous player before creating a new one.
    if (player != null) {
        if (player.IsPlaying) {
            player.Stop ();
        }
        player.Reset ();
        player.Release ();
        player = null;
    }

    if (!resourceId.HasValue) {
        return;
    }

    // MediaPlayer.Create already prepares the player, so no explicit
    // Prepare() call is needed here.
    player = MediaPlayer.Create (Application.Context, resourceId.Value);
    player.Completion += (sender, e) => {
        // Track finished on its own: dispose the player and record the state.
        player.Reset ();
        player.Release ();
        player = null;
        AudioState = AudioState.Stopped;
    };
    AudioState = AudioState.Playing;
    player.Start ();
}
/// <summary>
/// Creates an adapter with music and sound enabled, full music volume,
/// no music repeat, and playback stopped.
/// </summary>
public AudioAdapter()
{
    _audioState = AudioState.Stopped;
    _repeatMusic = false;
    _musicVolume = 1.0f;
    _soundEnabled = true;
    _musicEnabled = true;
}
// Unity initialization: spawn the first guest and wire up its audio state.
void Start()
{
    guestIndex = 0;
    responded = false;

    // Instantiate the first guest at this object's position/orientation
    // and cache its AudioState component.
    current = (GameObject)GameObject.Instantiate(guestList[guestIndex], transform.position, transform.rotation);
    currentAudioState = current.GetComponent<AudioState>();

    StartIntro();
}
// Keeps the transport buttons (play/pause, stop, record) in sync with the
// audio state machine whenever the state changes.
private void UpdateButtonsToAudioState(AudioState previousState, AudioState newState)
{
    Debug.Log("Changing from " + previousState + " to " + newState);

    // Leaving RECORDING: stop the flashing record icon and restore the
    // playback controls.
    if (previousState == AudioState.RECORDING)
    {
        StopCoroutine(flashRoutine);
        recordAudioButton.IconVisible = true; // make sure that the icon is displayed
        playPauseButton.ButtonEnabled = true;
        stopButton.ButtonEnabled = true;
    }

    switch (newState)
    {
        case AudioState.NONE_RECORDED:
            // Nothing to play back yet.
            playPauseButton.ButtonEnabled = false;
            stopButton.ButtonEnabled = false;
            break;

        case AudioState.PLAYING:
            playPauseButton.Text = LocalizationManager.Instance.ResolveString("Pause");
            playPauseButton.Icon = pauseIcon;
            stopButton.ButtonEnabled = true;
            recordAudioButton.ButtonEnabled = false;
            break;

        case AudioState.PAUSED:
            // No longer playing, so the play/pause button reverts to "Play".
            playPauseButton.Text = LocalizationManager.Instance.ResolveString("Play");
            playPauseButton.Icon = playIcon;
            stopButton.ButtonEnabled = true;
            recordAudioButton.ButtonEnabled = true;
            break;

        case AudioState.STOPPED:
            // Same reset as PAUSED, but stopping also disables the stop button.
            playPauseButton.Text = LocalizationManager.Instance.ResolveString("Play");
            playPauseButton.Icon = playIcon;
            stopButton.ButtonEnabled = false;
            recordAudioButton.ButtonEnabled = true;
            break;

        case AudioState.RECORDING:
            flashRoutine = StartCoroutine(FlashRecordIcon());
            // While recording, playback controls are unavailable.
            playPauseButton.ButtonEnabled = false;
            stopButton.ButtonEnabled = false;
            break;
    }
}
/// <summary>
/// Lambda entry point for the Alexa audio sample skill. Routes launch,
/// intent, and AudioPlayer lifecycle requests, and persists per-user
/// playback state (token, index, offset) through <c>AudioStateHelper</c>.
/// </summary>
/// <param name="input">The incoming Alexa skill request.</param>
/// <param name="context">Lambda execution context, used for logging.</param>
/// <returns>
/// The skill response; null for AudioPlayer events that must not be
/// answered (e.g. PlaybackStopped).
/// </returns>
public async Task<SkillResponse> FunctionHandler(SkillRequest input, ILambdaContext context)
{
    var log = context.Logger;
    // For debugging, serialize `input` / `returnResponse` with JsonConvert
    // and log the JSON here.

    SkillResponse returnResponse = new SkillResponse();
    var audioItems = AudioAssets.GetSampleAudioFiles();

    // Initialize a connection to the database; this also initializes the
    // context for the DynamoDB helper.
    var audioStateHelper = new AudioStateHelper();
    await audioStateHelper.VerifyTable();

    // AudioPlayer requests arrive without a session, so fall back to the
    // user id on the request context.
    string userId = "";
    if (input.Session != null)
    {
        userId = input.Session.User.UserId;
    }
    else
    {
        userId = input.Context.System.User.UserId;
    }

    var lastState = await audioStateHelper.GetAudioState(userId);
    var currentState = new AudioState() { UserId = userId };
    currentState.State = lastState.State;

    if (input.GetRequestType() == typeof(LaunchRequest))
    {
        log.LogLine($"Default LaunchRequest made");
        var output = new PlainTextOutputSpeech()
        {
            Text = "Welcome to the Alexa audio sample. " + "You can say, play the audio, to begin."
        };
        var reprompt = new Reprompt()
        {
            OutputSpeech = new PlainTextOutputSpeech()
            {
                Text = "You can say, play the audio, to begin."
            }
        };
        returnResponse = ResponseBuilder.Ask(output, reprompt);
        await audioStateHelper.SaveAudioState(currentState);
    }
    else if (input.GetRequestType() == typeof(IntentRequest))
    {
        var intentRequest = (IntentRequest)input.Request;
        var output = new PlainTextOutputSpeech();
        var reprompt = new Reprompt();
        log.LogLine($"Triggered " + intentRequest.Intent.Name);

        switch (intentRequest.Intent.Name)
        {
            case "PlayAudio":
                // Start the playlist from the first track.
                currentState.State.Token = audioItems.FirstOrDefault().Title;
                currentState.State.State = "PLAY_MODE";
                currentState.State.Index = 0;
                currentState.State.playOrder = new List<int> { 0, 1, 2, 3, 4 };
                returnResponse = ResponseBuilder.AudioPlayerPlay(
                    PlayBehavior.ReplaceAll,
                    audioItems[currentState.State.Index].Url,
                    currentState.State.Token);
                break;

            case BuiltInIntent.Help:
                output.Text = "You can say, play the audio, to begin.";
                reprompt.OutputSpeech = new PlainTextOutputSpeech()
                {
                    Text = "You can say, play the audio, to begin."
                };
                returnResponse = ResponseBuilder.Ask(output, reprompt);
                break;

            case BuiltInIntent.Cancel:
                // Remember where we stopped so Resume can pick up from here.
                currentState.State.OffsetInMS = Convert.ToInt32(input.Context.AudioPlayer.OffsetInMilliseconds);
                currentState.State.Token = input.Context.AudioPlayer.Token;
                currentState.State.State = "PAUSE_MODE";
                returnResponse = ResponseBuilder.AudioPlayerStop();
                break;

            case BuiltInIntent.Next:
                // Advance to the next track, looping back to the start.
                currentState.State.Index++;
                if (currentState.State.Index >= audioItems.Count)
                {
                    currentState.State.Index = 0;
                }
                currentState.State.Token = audioItems[currentState.State.Index].Title;
                currentState.State.OffsetInMS = 0;
                currentState.State.State = "PLAY_MODE";
                returnResponse = ResponseBuilder.AudioPlayerPlay(
                    PlayBehavior.ReplaceAll,
                    audioItems[currentState.State.Index].Url,
                    currentState.State.Token);
                break;

            case BuiltInIntent.Previous:
                // Step back one track, clamping at the first one.
                currentState.State.Index = currentState.State.Index - 1;
                if (currentState.State.Index < 0)
                {
                    currentState.State.Index = 0;
                }
                currentState.State.Token = audioItems[currentState.State.Index].Title;
                currentState.State.OffsetInMS = 0;
                currentState.State.State = "PLAY_MODE";
                returnResponse = ResponseBuilder.AudioPlayerPlay(
                    PlayBehavior.ReplaceAll,
                    audioItems[currentState.State.Index].Url,
                    currentState.State.Token);
                break;

            case BuiltInIntent.Repeat:
                // Restart the current track from offset 0.
                currentState.State.Token = audioItems[currentState.State.Index].Title;
                currentState.State.OffsetInMS = 0;
                currentState.State.State = "PLAY_MODE";
                returnResponse = ResponseBuilder.AudioPlayerPlay(
                    PlayBehavior.ReplaceAll,
                    audioItems[currentState.State.Index].Url,
                    currentState.State.Token,
                    0);
                break;

            case BuiltInIntent.StartOver:
                // Start everything from the beginning.
                currentState.State.Token = audioItems[0].Title;
                currentState.State.OffsetInMS = 0;
                currentState.State.State = "PLAY_MODE";
                returnResponse = ResponseBuilder.AudioPlayerPlay(
                    PlayBehavior.ReplaceAll,
                    audioItems[0].Url,
                    currentState.State.Token,
                    0);
                break;

            case BuiltInIntent.Stop:
                currentState.State.OffsetInMS = Convert.ToInt32(input.Context.AudioPlayer.OffsetInMilliseconds);
                currentState.State.Token = input.Context.AudioPlayer.Token;
                currentState.State.State = "PAUSE_MODE";
                returnResponse = ResponseBuilder.AudioPlayerStop();
                break;

            case BuiltInIntent.Resume:
                // Restart the saved token at the saved millisecond offset.
                returnResponse = ResponseBuilder.AudioPlayerPlay(
                    PlayBehavior.ReplaceAll,
                    audioItems[currentState.State.Index].Url,
                    currentState.State.Token,
                    currentState.State.OffsetInMS);

                // FIX: wrap the "next" index so resuming on the last track no
                // longer throws ArgumentOutOfRangeException on Index + 1.
                int nextIndex = (currentState.State.Index + 1) % audioItems.Count;

                // If there was an enqueued item, re-enqueue it behind the
                // resumed track.
                if (currentState.State.EnqueuedToken != null)
                {
                    returnResponse.Response.Directives.Add(new AudioPlayerPlayDirective()
                    {
                        PlayBehavior = PlayBehavior.Enqueue,
                        AudioItem = new Alexa.NET.Response.Directive.AudioItem()
                        {
                            Stream = new AudioItemStream()
                            {
                                Url = audioItems[nextIndex].Url,
                                Token = audioItems[nextIndex].Title,
                                ExpectedPreviousToken = currentState.State.Token,
                                OffsetInMilliseconds = 0
                            }
                        }
                    });
                }
                currentState.State.EnqueuedToken = audioItems[nextIndex].Title;
                currentState.State.State = "PLAY_MODE";
                break;

            case BuiltInIntent.Pause:
                currentState.State.OffsetInMS = Convert.ToInt32(input.Context.AudioPlayer.OffsetInMilliseconds);
                currentState.State.Token = input.Context.AudioPlayer.Token;
                currentState.State.State = "PAUSE_MODE";
                returnResponse = ResponseBuilder.AudioPlayerStop();
                break;

            default:
                log.LogLine($"Unknown intent: " + intentRequest.Intent.Name);
                // FIX: corrected "Pocast" typo in the spoken welcome message.
                output.Text = "Welcome to Podcast Player";
                reprompt.OutputSpeech = new PlainTextOutputSpeech()
                {
                    Text = "This is your reprompt. Please do something."
                };
                returnResponse = ResponseBuilder.TellWithReprompt(output, reprompt);
                break;
        }
    }
    else if (input.GetRequestType() == typeof(AudioPlayerRequest))
    {
        var audioRequest = input.Request as AudioPlayerRequest;

        if (audioRequest.AudioRequestType == AudioRequestType.PlaybackStarted)
        {
            log.LogLine($"PlaybackStarted Triggered ");
            // Respond with Stop or ClearQueue.
            returnResponse = ResponseBuilder.AudioPlayerClearQueue(ClearBehavior.ClearEnqueued);
        }
        else if (audioRequest.AudioRequestType == AudioRequestType.PlaybackFinished)
        {
            // Audio came to an end on its own.
            log.LogLine($"PlaybackFinished Triggered ");
            if (currentState.State.EnqueuedToken != null)
            {
                // Promote the enqueued item to the current track.
                int itemIndex = audioItems.IndexOf(audioItems.Where(i => i.Title == currentState.State.EnqueuedToken).FirstOrDefault());
                currentState.State.Token = audioItems[itemIndex].Title;
                currentState.State.Index = itemIndex;
                returnResponse = ResponseBuilder.AudioPlayerPlay(
                    PlayBehavior.ReplaceAll,
                    audioItems[itemIndex].Url,
                    currentState.State.Token);
            }
            else
            {
                // Respond with Stop or ClearQueue.
                returnResponse = ResponseBuilder.AudioPlayerClearQueue(ClearBehavior.ClearEnqueued);
            }
        }
        else if (audioRequest.AudioRequestType == AudioRequestType.PlaybackStopped)
        {
            // This is when your audio is explicitly stopped.
            log.LogLine($"PlaybackStopped Triggered ");
            currentState.State.State = "PAUSE_MODE";
            currentState.State.Token = audioRequest.Token;
            currentState.State.EnqueuedToken = audioRequest.EnqueuedToken;
            currentState.State.OffsetInMS = Convert.ToInt32(audioRequest.OffsetInMilliseconds);
            log.LogLine($"Saving AudioState: " + currentState.State.Token + " at " + currentState.State.OffsetInMS.ToString() + "ms");
            // PlaybackStopped must not be answered with directives.
            returnResponse = null;
        }
        else if (audioRequest.AudioRequestType == AudioRequestType.PlaybackNearlyFinished)
        {
            log.LogLine($"PlaybackNearlyFinished Triggered ");
            // Hand back the "next" item in the queue. If something is already
            // enqueued we respond with nothing.
            if (audioRequest.HasEnqueuedItem)
            {
                return (null);
            }

            // Find the currently playing item by its token.
            int itemIndex = audioItems.IndexOf(audioItems.Where(i => i.Title == audioRequest.Token).FirstOrDefault());
            if (itemIndex == -1)
            {
                log.LogLine($"Could not get the index of: " + audioRequest.Token);
            }
            itemIndex++;
            if (itemIndex == audioItems.Count)
            {
                itemIndex = 0;
            }
            currentState.State.EnqueuedToken = audioItems[itemIndex].Title;
            currentState.State.Token = audioRequest.Token;

            // Enqueue the next item behind the current one.
            returnResponse = ResponseBuilder.AudioPlayerPlay(
                PlayBehavior.Enqueue,
                audioItems[itemIndex].Url,
                currentState.State.EnqueuedToken,
                currentState.State.Token,
                0);
        }
        else if (audioRequest.AudioRequestType == AudioRequestType.PlaybackFailed)
        {
            log.LogLine($"PlaybackFailed Triggered");
            // atm, we basically pretend nothing happened and play the first
            // file again on a failure
            // THIS IS A TERRIBLE SOLUTION
            // Figure out a better one for your skill
            currentState.State.Token = audioItems.FirstOrDefault().Title;
            currentState.State.Index = 0;
            currentState.State.State = "PLAY_MODE";
            returnResponse = ResponseBuilder.AudioPlayerPlay(
                PlayBehavior.ReplaceAll,
                audioItems.FirstOrDefault().Url,
                currentState.State.Token);
        }
    }

    // Save our state.
    await audioStateHelper.SaveAudioState(currentState);

    // Return our response.
    return (returnResponse);
}
/// <summary>
/// Transitions the audio state machine to the given state.
/// </summary>
private void ChangeState(AudioState nextState) => State = nextState;
/// <summary>
/// Pauses the underlying player and records the paused state.
/// </summary>
public void PauseAudio ()
{
    player.Pause ();
    AudioState = AudioState.Paused;
}
/// <summary>
/// Builds a <see cref="CheckpointData"/> snapshot from this instance,
/// converting the nested audio state when one is present.
/// </summary>
public CheckpointData Convert()
{
    return new CheckpointData(Level, Name, MapMeta.GetInventory(Inventory), Dreaming, AudioState?.Convert());
}
// Unity lifecycle hook: reset the pause flag and the audio state before
// the first frame.
private void Start()
{
    gamePaused = false;
    currentAudioState = AudioState.NOTPLAYING;
}
// PortAudio input callback for the 4-port configuration (yt7pwr).
// Pulls mic / VAC / loopDLL samples, applies preamp scaling, VOX detection,
// echo mixing and test-signal substitution, then hands the buffers to DttSP.
// Runs on the audio thread; ring buffers are guarded by a Win32 critical
// section. Several suspected copy/paste bugs are flagged inline with
// NOTE(review) — code is left untouched here.
// NOTE(review): brace accounting suggests one closing brace was lost when
// this snippet was extracted (the try block appears unclosed before catch);
// verify against the original file.
unsafe public static int input_Callback4Port(void* input, void* output, int frameCount, // yt7pwr
    PA19.PaStreamCallbackTimeInfo* timeInfo, int statusFlags, void* userData)
{
    try
    {
        if (mox || (vox_enabled && !vac_enabled))
        {
            // `input` is an array of two per-channel sample pointers.
#if(WIN64)
            Int64* array_ptr = (Int64*)input;
            float* in_l_ptr1 = (float*)array_ptr[0];
            float* in_r_ptr1 = (float*)array_ptr[1];
            double* VAC_in = (double*)input;
#endif
#if(WIN32)
            int* array_ptr = (int*)input;
            float* in_l_ptr1 = (float*)array_ptr[0];
            float* in_r_ptr1 = (float*)array_ptr[1];
            double* VAC_in = (double*)input;
#endif
            float* in_l = null, in_l_VAC = null, in_r = null, in_r_VAC = null;
            if (!mox && !voice_message_record) // rx
            {
                in_l = in_l_ptr1;
                in_r = in_r_ptr1;
            }
            else if (mox && !voice_message_record)
            { // tx
                // NOTE(review): GetPlayBuffer receives the LEFT pointer twice —
                // confirm whether the second argument should be in_r_ptr1.
                if (voice_message_playback)
                    voice_msg_file_reader.GetPlayBuffer(in_l_ptr1, in_l_ptr1);
                in_l = in_l_ptr1;
                in_r = in_r_ptr1;
            }
            else if (voice_message_record)
            {
                in_l = in_l_ptr1;
                in_r = in_r_ptr1;
            }
            if (voice_message_record)
            {
                // Recording a voice message: optionally source from the VAC
                // ring buffers, scale by mic preamp, then append to the file.
                try
                {
                    if (vac_enabled)
                    {
                        if (rb_vacIN_l.ReadSpace() >= frameCount && rb_vacIN_r.ReadSpace() >= frameCount)
                        {
                            Win32.EnterCriticalSection(cs_vac);
                            rb_vacIN_l.ReadPtr(in_l_ptr1, frameCount);
                            rb_vacIN_r.ReadPtr(in_r_ptr1, frameCount);
                            Win32.LeaveCriticalSection(cs_vac);
                        }
                        else
                        {
                            // Underflow: emit silence rather than stale data.
                            ClearBuffer(in_l_ptr1, frameCount);
                            ClearBuffer(in_r_ptr1, frameCount);
                            VACDebug("rb_vacIN underflow VoiceMsg record");
                        }
                    }
                    ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp);
                    ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp);
                    voice_msg_file_writer.AddWriteBuffer(in_l, in_r);
                }
                catch (Exception ex)
                {
                    VACDebug("Audio: " + ex.ToString());
                }
            }
            if (loopDLL_enabled)
            {
                // Replace the input with TX samples from the loop DLL.
                int i;
                fixed (double* loopDLL_inl_ptr = &(loopDLL_inl[0]))
                {
                    if (console.loopDLL.ReadTXBuffer(loopDLL_inl_ptr))
                    {
                        for (i = 0; i < frameCount; i++)
                        {
                            in_l[0] = (float)(loopDLL_inl[i] / 1e5);
                            in_r[0] = 0.0f; //(float)(loopDLL_inl[i] / 1e4);
                            in_l++;
                            in_r++;
                        }
                    }
                    else
                    {
                    }
                }
                // Rewind the walked pointers back to the buffer starts.
                in_l = in_l_ptr1;
                in_r = in_r_ptr1;
            }
            switch (current_audio_state1)
            {
                case AudioState.DTTSP:
                    #region VOX
                    float* vox_l = null, vox_r = null;
                    vox_l = in_l_ptr1;
                    // NOTE(review): vox_r points at the LEFT buffer — confirm
                    // this is intentional (mono mic?) and not a typo for in_r_ptr1.
                    vox_r = in_l_ptr1;
                    if (vox_enabled && !vac_enabled)
                    {
                        if (dsp_mode == DSPMode.LSB || dsp_mode == DSPMode.USB || dsp_mode == DSPMode.DSB || dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM || dsp_mode == DSPMode.FMN)
                        {
                            ScaleBuffer(vox_l, vox_l, frameCount, (float)mic_preamp);
                            ScaleBuffer(vox_r, vox_r, frameCount, (float)mic_preamp);
                            Peak = MaxSample(vox_l, vox_r, frameCount);
                            // compare power to threshold
                            if (Peak > vox_threshold)
                                vox_active = true;
                            else
                                vox_active = false;
                        }
                    }
                    #endregion
                    else
                    {
                        // scale input with mic preamp
                        // NOTE(review): && binds tighter than ||, so the
                        // (mox || voice_message_record) guard only applies to
                        // the LSB comparison — parentheses likely intended.
                        if ((mox || voice_message_record) && dsp_mode == DSPMode.LSB || dsp_mode == DSPMode.USB || dsp_mode == DSPMode.DSB || dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM || dsp_mode == DSPMode.FMN)
                        {
                            ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp);
                            ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp);
                        }
                        else
                        {
                            // Mute input in non-voice modes.
                            ScaleBuffer(in_l, in_l, frameCount, 0.0f);
                            ScaleBuffer(in_r, in_r, frameCount, 0.0f);
                        }
                        // NOTE(review): (x != A || x != B) is always true —
                        // likely meant && to exclude the digital modes.
                        if (echo_enable && (dsp_mode != DSPMode.DIGL || dsp_mode != DSPMode.DIGU))
                        {
                            if (!echo_pause)
                            {
                                echoRB.WritePtr(in_l, frameCount);
                                if (echoRB.ReadSpace() > echo_delay - 2)
                                {
                                    EchoMixer(in_l, in_r, frameCount);
                                }
                            }
                        }
                    }
                    #region Input Signal Source
                    // Optionally replace the live input with a test signal.
                    switch (current_input_signal)
                    {
                        case SignalSource.SOUNDCARD:
                            break;
                        case SignalSource.SINE:
                            SineWave(in_l, frameCount, phase_accumulator1, sine_freq1);
                            phase_accumulator1 = CosineWave(in_r, frameCount, phase_accumulator1, sine_freq1);
                            ScaleBuffer(in_l, in_l, frameCount, (float)input_source_scale);
                            ScaleBuffer(in_r, in_r, frameCount, (float)input_source_scale);
                            break;
                        case SignalSource.NOISE:
                            Noise(in_l, frameCount);
                            Noise(in_r, frameCount);
                            break;
                        case SignalSource.TRIANGLE:
                            Triangle(in_l, frameCount, sine_freq1);
                            CopyBuffer(in_l, in_r, frameCount);
                            break;
                        case SignalSource.SAWTOOTH:
                            Sawtooth(in_l, frameCount, sine_freq1);
                            CopyBuffer(in_l, in_r, frameCount);
                            break;
                    }
                    #endregion
                    if (!loopDLL_enabled && vac_enabled && rb_vacIN_l != null && rb_vacIN_r != null && rb_vacOUT_l != null && rb_vacOUT_r != null)
                    {
                        if (mox)
                        {
                            // TX via VAC: pull samples from the VAC input rings.
                            if (rb_vacIN_l.ReadSpace() >= frameCount && rb_vacIN_r.ReadSpace() >= frameCount)
                            {
                                Win32.EnterCriticalSection(cs_vac);
                                rb_vacIN_l.ReadPtr(in_l, frameCount);
                                rb_vacIN_r.ReadPtr(in_r, frameCount);
                                Win32.LeaveCriticalSection(cs_vac);
                            }
                            else
                            {
                                ClearBuffer(in_l, frameCount);
                                ClearBuffer(in_r, frameCount);
                                VACDebug("rb_vacIN underflow inCB4");
                            }
                            ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp);
                            ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp);
                            // NOTE(review): same always-true != || != condition as above.
                            if (echo_enable && (dsp_mode != DSPMode.DIGL || dsp_mode != DSPMode.DIGU))
                            {
                                if (!echo_pause)
                                {
                                    echoRB.WritePtr(in_l, frameCount);
                                    if (echoRB.ReadSpace() > echo_delay - 2)
                                    {
                                        EchoMixer(in_l, in_r, frameCount);
                                    }
                                }
                            }
                        }
                    }
                    else if (!VACDirectI_Q && loopDLL_enabled && mox)
                    {
                        ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp);
                        ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp);
                    }
                    // Hand the (possibly substituted/scaled) block to DttSP.
                    DttSP_mutex.WaitOne();
                    DttSP.ExchangeInputSamples(thread_no, in_l, in_r, frameCount);
                    DttSP_mutex.ReleaseMutex();
                    break;
                case AudioState.CW:
                    break;
                case AudioState.SWITCH:
                    // TX/RX changeover with a sine ramp to avoid clicks.
                    if (!ramp_down && !ramp_up)
                    {
                        ClearBuffer(in_l_ptr1, frameCount);
                        // NOTE(review): left buffer cleared twice; the right
                        // buffer (in_r_ptr1) is never cleared — likely a bug.
                        ClearBuffer(in_l_ptr1, frameCount);
                        if (mox != next_mox)
                            mox = next_mox;
                    }
                    // NOTE(review): left pointer passed for both channels —
                    // confirm the second argument should not be in_r_ptr1.
                    if (mox)
                        DttSP.ExchangeInputSamples(thread_no, in_l_ptr1, in_l_ptr1, frameCount);
                    if (ramp_down)
                    {
                        int i;
                        for (i = 0; i < frameCount; i++)
                        {
                            float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                            in_l[i] *= w;
                            in_r[i] *= w;
                            ramp_val += ramp_step;
                            if (++ramp_count >= ramp_samples)
                            {
                                ramp_down = false;
                                break;
                            }
                        }
                        // Zero out the remainder of the block once fully ramped down.
                        if (ramp_down)
                        {
                            for (; i < frameCount; i++)
                            {
                                in_l[i] = 0.0f;
                                in_r[i] = 0.0f;
                            }
                        }
                    }
                    else if (ramp_up)
                    {
                        for (int i = 0; i < frameCount; i++)
                        {
                            float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                            in_l[i] *= w;
                            in_r[i] *= w;
                            ramp_val += ramp_step;
                            if (++ramp_count >= ramp_samples)
                            {
                                ramp_up = false;
                                break;
                            }
                        }
                    }
                    switch_count--;
                    if (switch_count == ramp_up_num)
                        RampUp = true;
                    if (switch_count == 0)
                        current_audio_state1 = next_audio_state1;
                    break;
            }
            if (VACDirectI_Q && !MultiPSK_server_enable && vac_enabled && !loopDLL_enabled && rb_vacIN_l != null && rb_vacIN_r != null && rb_vacOUT_l != null && rb_vacOUT_r != null)
            {
                // Direct I/Q path: mirror RX samples out to the VAC rings,
                // resampling when the VAC rate differs from the card rate.
                fixed (float* outl_ptr = &(vac_outl[0]))
                fixed (float* outr_ptr = &(vac_outr[0]))
                {
                    if (!mox)
                    {
                        if (sample_rateVAC == sample_rate1)
                        {
                            if ((rb_vacOUT_l.WriteSpace() >= frameCount) && (rb_vacOUT_r.WriteSpace() >= frameCount))
                            {
                                if (vac_correct_iq)
                                    CorrectIQBuffer(in_l, in_r, vac_iq_gain, vac_iq_phase, frameCount);
                                Win32.EnterCriticalSection(cs_vac);
                                rb_vacOUT_l.WritePtr(in_l_ptr1, frameCount);
                                rb_vacOUT_r.WritePtr(in_r_ptr1, frameCount);
                                Win32.LeaveCriticalSection(cs_vac);
                            }
                            else
                            {
                                VACDebug("rb_vacOUT overflow inCB4");
                            }
                        }
                        else
                        {
                            fixed (float* res_outl_ptr = &(res_outl[0]))
                            fixed (float* res_outr_ptr = &(res_outr[0]))
                            {
                                int outsamps;
                                DttSP.DoResamplerF(in_l_ptr1, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l);
                                DttSP.DoResamplerF(in_r_ptr1, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r);
                                if ((rb_vacOUT_l.WriteSpace() >= outsamps) && (rb_vacOUT_r.WriteSpace() >= outsamps))
                                {
                                    if (vac_correct_iq)
                                        CorrectIQBuffer(res_outl_ptr, res_outr_ptr, vac_iq_gain, vac_iq_phase, frameCount);
                                    Win32.EnterCriticalSection(cs_vac);
                                    rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps);
                                    rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps);
                                    Win32.LeaveCriticalSection(cs_vac);
                                }
                                else
                                {
                                    vac_rb_reset = true;
                                    VACDebug("rb_vacOUT overflow inCB4");
                                }
                            }
                        }
                    }
                }
            }
            return callback_return;
    }
    catch (Exception ex)
    {
        Debug.Write(ex.ToString());
        return 0;
    }
}
/// <summary>
/// Invoked when loading the audio resource fails; marks this source as unloaded.
/// </summary>
private void LoadErrorCallback(LoadItem item)
{
    _state = AudioState.UNLOAD;
}
/// <summary>
/// Pauses playback and records the paused state.
/// </summary>
public void Pause()
{
    _state = AudioState.PAUSE;
    _audioSource.Pause();
}
/// <summary>
/// Builds the ffmpeg command used to transcode a single playout item:
/// selects video/audio/subtitle streams, computes playback and watermark
/// settings, assembles the desired audio/frame state, and hands everything
/// to the pipeline builder.
/// </summary>
/// <returns>The fully-assembled ffmpeg <see cref="Command"/>.</returns>
public async Task<Command> ForPlayoutItem(
    string ffmpegPath,
    string ffprobePath,
    bool saveReports,
    Channel channel,
    MediaVersion videoVersion,
    MediaVersion audioVersion,
    string videoPath,
    string audioPath,
    List<Subtitle> subtitles,
    string preferredAudioLanguage,
    string preferredSubtitleLanguage,
    ChannelSubtitleMode subtitleMode,
    DateTimeOffset start,
    DateTimeOffset finish,
    DateTimeOffset now,
    Option<ChannelWatermark> playoutItemWatermark,
    Option<ChannelWatermark> globalWatermark,
    VaapiDriver vaapiDriver,
    string vaapiDevice,
    bool hlsRealtime,
    FillerKind fillerKind,
    TimeSpan inPoint,
    TimeSpan outPoint,
    long ptsOffset,
    Option<int> targetFramerate)
{
    MediaStream videoStream = await _ffmpegStreamSelector.SelectVideoStream(videoVersion);

    Option<MediaStream> maybeAudioStream = await _ffmpegStreamSelector.SelectAudioStream(
        audioVersion,
        channel.StreamingMode,
        channel.Number,
        preferredAudioLanguage);

    Option<Subtitle> maybeSubtitle = await _ffmpegStreamSelector.SelectSubtitleStream(
        videoVersion,
        subtitles,
        channel.StreamingMode,
        channel.Number,
        preferredSubtitleLanguage,
        subtitleMode);

    FFmpegPlaybackSettings playbackSettings = _playbackSettingsCalculator.CalculateSettings(
        channel.StreamingMode,
        channel.FFmpegProfile,
        videoVersion,
        videoStream,
        maybeAudioStream,
        start,
        now,
        inPoint,
        outPoint,
        hlsRealtime,
        targetFramerate);

    Option<WatermarkOptions> watermarkOptions = await _ffmpegProcessService.GetWatermarkOptions(
        ffprobePath,
        channel,
        playoutItemWatermark,
        globalWatermark,
        videoVersion,
        None,
        None);

    // Only intermittent watermarks need fade in/out points.
    Option<List<FadePoint>> maybeFadePoints = watermarkOptions
        .Map(o => o.Watermark)
        .Flatten()
        .Where(wm => wm.Mode == ChannelWatermarkMode.Intermittent)
        .Map(
            wm => WatermarkCalculator.CalculateFadePoints(
                start,
                inPoint,
                outPoint,
                playbackSettings.StreamSeek,
                wm.FrequencyMinutes,
                wm.DurationSeconds));

    string audioFormat = playbackSettings.AudioFormat switch
    {
        FFmpegProfileAudioFormat.Aac => AudioFormat.Aac,
        FFmpegProfileAudioFormat.Ac3 => AudioFormat.Ac3,
        FFmpegProfileAudioFormat.Copy => AudioFormat.Copy,
        // FIX: the message previously interpolated VideoFormat while
        // reporting an unexpected AUDIO format (copy-paste from the video switch).
        _ => throw new ArgumentOutOfRangeException($"unexpected audio format {playbackSettings.AudioFormat}")
    };

    var audioState = new AudioState(
        audioFormat,
        playbackSettings.AudioChannels,
        playbackSettings.AudioBitrate,
        playbackSettings.AudioBufferSize,
        playbackSettings.AudioSampleRate,
        videoPath == audioPath ? playbackSettings.AudioDuration : Option<TimeSpan>.None,
        playbackSettings.NormalizeLoudness);

    var ffmpegVideoStream = new VideoStream(
        videoStream.Index,
        videoStream.Codec,
        AvailablePixelFormats.ForPixelFormat(videoStream.PixelFormat, _logger),
        new FrameSize(videoVersion.Width, videoVersion.Height),
        videoVersion.RFrameRate,
        videoPath != audioPath); // still image when paths are different

    var videoInputFile = new VideoInputFile(videoPath, new List<VideoStream> { ffmpegVideoStream });

    Option<AudioInputFile> audioInputFile = maybeAudioStream.Map(
        audioStream =>
        {
            var ffmpegAudioStream = new AudioStream(audioStream.Index, audioStream.Codec, audioStream.Channels);
            return new AudioInputFile(audioPath, new List<AudioStream> { ffmpegAudioStream }, audioState);
        });

    Option<SubtitleInputFile> subtitleInputFile = maybeSubtitle.Map<Option<SubtitleInputFile>>(
        subtitle =>
        {
            // Embedded text subtitles must be extracted before they can be burned in.
            if (!subtitle.IsImage && subtitle.SubtitleKind == SubtitleKind.Embedded && !subtitle.IsExtracted)
            {
                _logger.LogWarning("Subtitles are not yet available for this item");
                return None;
            }

            var ffmpegSubtitleStream = new ErsatzTV.FFmpeg.MediaStream(
                subtitle.IsImage ? subtitle.StreamIndex : 0,
                subtitle.Codec,
                StreamKind.Video);

            string path = subtitle.IsImage
                ? videoPath
                : Path.Combine(FileSystemLayout.SubtitleCacheFolder, subtitle.Path);

            return new SubtitleInputFile(
                path,
                new List<ErsatzTV.FFmpeg.MediaStream> { ffmpegSubtitleStream },
                false);

            // TODO: figure out HLS direct
            // channel.StreamingMode == StreamingMode.HttpLiveStreamingDirect);
        }).Flatten();

    Option<WatermarkInputFile> watermarkInputFile = GetWatermarkInputFile(watermarkOptions, maybeFadePoints);

    string videoFormat = playbackSettings.VideoFormat switch
    {
        FFmpegProfileVideoFormat.Hevc => VideoFormat.Hevc,
        FFmpegProfileVideoFormat.H264 => VideoFormat.H264,
        FFmpegProfileVideoFormat.Mpeg2Video => VideoFormat.Mpeg2Video,
        FFmpegProfileVideoFormat.Copy => VideoFormat.Copy,
        _ => throw new ArgumentOutOfRangeException($"unexpected video format {playbackSettings.VideoFormat}")
    };

    HardwareAccelerationMode hwAccel = playbackSettings.HardwareAcceleration switch
    {
        HardwareAccelerationKind.Nvenc => HardwareAccelerationMode.Nvenc,
        HardwareAccelerationKind.Qsv => HardwareAccelerationMode.Qsv,
        HardwareAccelerationKind.Vaapi => HardwareAccelerationMode.Vaapi,
        HardwareAccelerationKind.VideoToolbox => HardwareAccelerationMode.VideoToolbox,
        _ => HardwareAccelerationMode.None
    };

    OutputFormatKind outputFormat = channel.StreamingMode == StreamingMode.HttpLiveStreamingSegmenter
        ? OutputFormatKind.Hls
        : OutputFormatKind.MpegTs;

    Option<string> hlsPlaylistPath = outputFormat == OutputFormatKind.Hls
        ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live.m3u8")
        : Option<string>.None;

    Option<string> hlsSegmentTemplate = outputFormat == OutputFormatKind.Hls
        ? Path.Combine(FileSystemLayout.TranscodeFolder, channel.Number, "live%06d.ts")
        : Option<string>.None;

    // normalize songs to yuv420p
    Option<IPixelFormat> desiredPixelFormat = videoPath == audioPath
        ? ffmpegVideoStream.PixelFormat
        : new PixelFormatYuv420P();

    var desiredState = new FrameState(
        playbackSettings.RealtimeOutput,
        false, // TODO: fallback filler needs to loop
        videoFormat,
        desiredPixelFormat,
        await playbackSettings.ScaledSize.Map(ss => new FrameSize(ss.Width, ss.Height))
            .IfNoneAsync(new FrameSize(videoVersion.Width, videoVersion.Height)),
        new FrameSize(channel.FFmpegProfile.Resolution.Width, channel.FFmpegProfile.Resolution.Height),
        playbackSettings.FrameRate,
        playbackSettings.VideoBitrate,
        playbackSettings.VideoBufferSize,
        playbackSettings.VideoTrackTimeScale,
        playbackSettings.Deinterlace);

    var ffmpegState = new FFmpegState(
        saveReports,
        hwAccel,
        VaapiDriverName(hwAccel, vaapiDriver),
        VaapiDeviceName(hwAccel, vaapiDevice),
        playbackSettings.StreamSeek,
        finish - now,
        channel.StreamingMode != StreamingMode.HttpLiveStreamingDirect,
        "ErsatzTV",
        channel.Name,
        maybeAudioStream.Map(s => Optional(s.Language)).Flatten(),
        outputFormat,
        hlsPlaylistPath,
        hlsSegmentTemplate,
        ptsOffset);

    _logger.LogDebug("FFmpeg desired state {FrameState}", desiredState);

    var pipelineBuilder = new PipelineBuilder(
        videoInputFile,
        audioInputFile,
        watermarkInputFile,
        subtitleInputFile,
        FileSystemLayout.FFmpegReportsFolder,
        FileSystemLayout.FontsCacheFolder,
        _logger);

    FFmpegPipeline pipeline = pipelineBuilder.Build(ffmpegState, desiredState);

    return GetCommand(ffmpegPath, videoInputFile, audioInputFile, watermarkInputFile, None, pipeline);
}
/// <summary>
/// Captures the audio state that triggered this event.
/// </summary>
internal AudioStatusChangedEventArgs(AudioState state)
{
    _state = state;
}
// Per-frame music state machine: fades the current track out, starts the
// next (possibly random) track, fades it back in, and re-triggers playback
// after a delay once the source goes silent.
// NOTE(review): musicSource.Stop() is called both inside the Fade(false)
// check and unconditionally right after it — the second call makes the
// first redundant; confirm which was intended.
void Update()
{
    if (musicSource.isPlaying)
    {
        if (!isMusic)
        {
            // Music has been turned off: fade down, then stop.
            if (Fade(false))
            {
                musicSource.Stop();
            }
            musicSource.Stop();
        }
        if (state == AudioState.Default)
        {
            return;
        }
    }
    if (!isMusic)
    {
        return;
    }
    if (state == AudioState.FadeOffForNewMusic)
    {
        if (musicSource.isPlaying)
        {
            // Fade the current track out before switching.
            if (Fade(false))
            {
                if (fadeOn)
                {
                    // Crossfade path: pick a random track and fade it in next.
                    nextTrackName = audioBlock.GetRandomMusic();
                    musicTrackVolume = musicVolumme * audioBlock.music.tracks[nextTrackName].Volume;
                    audioBlock.PlayMusic(musicSource, nextTrackName, 0);
                    state = AudioState.FadeOnForNewMusic;
                    return;
                }
                PlayMusic();
                state = AudioState.Default;
                return;
            }
        }
        else
        {
            // Nothing is playing; start the next track immediately.
            if (fadeOn)
            {
                nextTrackName = audioBlock.GetRandomMusic();
                musicTrackVolume = musicVolumme * audioBlock.music.tracks[nextTrackName].Volume;
                audioBlock.PlayMusic(musicSource, nextTrackName, 0);
                state = AudioState.FadeOnForNewMusic;
                return;
            }
            state = AudioState.Default;
            PlayMusic();
            return;
        }
        return;
    }
    else if (state == AudioState.FadeOnForNewMusic)
    {
        // Ramp the new track's volume up until the fade completes.
        if (Fade(true))
        {
            state = AudioState.Default;
        }
        return;
    }
    // Idle: count down using unscaled time (so pausing the game does not
    // stall the music) and restart playback when the delay expires.
    delay -= Time.unscaledDeltaTime;
    if (delay <= 0)
    {
        PlayMusic();
    }
}
/// <summary>
/// Maps an OpenAL source state onto the engine's channel state; anything
/// that is not playing or paused is reported as stopped.
/// </summary>
private static ChannelState ALSourceStateToChannelState(AudioState alSourceState)
{
    return alSourceState switch
    {
        AudioState.Playing => ChannelState.Playing,
        AudioState.Paused => ChannelState.Paused,
        _ => ChannelState.Stopped,
    };
}
/// <summary>
/// Aggregates every sub-state of the game into a single snapshot.
/// All arguments must be non-null.
/// </summary>
public Gamestate(PlayerUnit playerUnit, Unit unit, Scenery scenery, EventsState events, InputState input, PhysicsState physics, VideoState video, AudioState audio, TimeState time)
{
    // Fail fast on any missing component.
    Assert.Ref(playerUnit, unit, scenery, events, input, physics, video, audio, time);

    PlayerUnit = playerUnit;
    Unit = unit;
    Scenery = scenery;
    Events = events;
    Input = input;
    Physics = physics;
    Video = video;
    Audio = audio;
    Time = time;
}
/// <summary>
/// Asynchronously turns on video mode with the given camera settings.
/// Throws when the native capture object has not been created or when the
/// supplied camera parameters are not valid for recording.
/// </summary>
public void StartVideoModeAsync(CameraParameters setupParams, AudioState audioState, OnVideoModeStartedCallback onVideoModeStartedCallback)
{
    // The native capture object must exist before video mode can start.
    if (this.m_NativePtr == IntPtr.Zero)
    {
        throw new InvalidOperationException("You must create a Video Capture Object before starting its video mode.");
    }

    if (onVideoModeStartedCallback == null)
    {
        throw new ArgumentNullException("onVideoModeStartedCallback");
    }

    bool resolutionUnset = setupParams.cameraResolutionWidth == 0 || setupParams.cameraResolutionHeight == 0;
    if (resolutionUnset)
    {
        throw new ArgumentOutOfRangeException("setupParams", "The camera resolution must be set to a supported resolution.");
    }

    if (setupParams.frameRate == 0f)
    {
        throw new ArgumentOutOfRangeException("setupParams", "The camera frame rate must be set to a supported recording frame rate.");
    }

    this.StartVideoMode_Internal(this.m_NativePtr, (int)audioState, onVideoModeStartedCallback, setupParams.hologramOpacity, setupParams.frameRate, setupParams.cameraResolutionWidth, setupParams.cameraResolutionHeight, (int)setupParams.pixelFormat);
}
/// <summary>
/// Resumes playback after a pause.
/// NOTE: the method name "UnPanuse" is a pre-existing typo kept for
/// caller compatibility.
/// </summary>
public void UnPanuse()
{
    _state = AudioState.PLAY;
    _audioSource.UnPause();
}
/// <summary>
/// Resumes the underlying player and records the playing state.
/// </summary>
public void ResumeAudio ()
{
    player.Start ();
    AudioState = AudioState.Playing;
}
/// <summary>
/// Plays an audio source. If <paramref name="insId"/> refers to a live
/// instance, its state is changed in place; otherwise an idle source is
/// configured and the clip is loaded asynchronously.
/// </summary>
/// <param name="state">Target audio state for the source.</param>
/// <param name="insId">Existing source instance id to reuse, or null to allocate one.</param>
/// <param name="path">Audio resource path.</param>
/// <param name="pathMutex">Whether sources sharing the same path are mutually exclusive.</param>
/// <param name="track">Name of the track the source belongs to.</param>
/// <param name="trackMutex">Whether sources on the same track are mutually exclusive.</param>
/// <param name="group">Name of the group the source belongs to.</param>
/// <param name="groupMutex">Whether sources in the same group are mutually exclusive.</param>
/// <param name="isEffect">True for a sound effect, false for music (affects volume mixing and registry).</param>
/// <param name="isFade">Whether the source fades in/out.</param>
/// <param name="isLoop">Whether playback loops.</param>
/// <param name="defalutVolume">Initial volume. NOTE(review): parameter name is a typo of
/// "defaultVolume"; it is part of the public signature (named arguments), so left unchanged.</param>
/// <param name="minDistance">3D source: minimum distance.</param>
/// <param name="maxDistance">3D source: maximum distance.</param>
/// <param name="follower">3D source: transform the source follows (null for 2D).</param>
/// <param name="onComplete">Callback invoked when playback finishes.</param>
/// <returns>The instance id of the (re)used source, or null when path/group is empty.</returns>
public int?Play(
    AudioState state,
    int?insId,
    string path,
    bool pathMutex,
    string track,
    bool trackMutex,
    string group,
    bool groupMutex,
    bool isEffect,
    bool isFade,
    bool isLoop,
    float defalutVolume,
    float minDistance,
    float maxDistance,
    Transform follower,
    Action onComplete)
{
    // Reuse an existing instance if its state can simply be switched.
    if (null != insId && ChangeAudioState(state, insId.Value))
    {
        return(insId.Value);
    }
    if (string.IsNullOrEmpty(path) || string.IsNullOrEmpty(group))
    {
        return(null);
    }
    AudioSourceInfo audio = null;
    // Same-path mutual exclusion: if any live source already plays this path,
    // retarget that one instead of allocating a new source.
    if (pathMutex && m_pathAudio.ContainsKey(path))
    {
        foreach (var v in m_pathAudio[path])
        {
            if (ChangeAudioState(state, v.InsId))
            {
                return(v.InsId);
            }
        }
    }
    // Configure a fresh (idle) source. 3D positioning only when a follower is given.
    audio = GetIdleAudio(null != follower);
    audio.IsPause = true;
    audio.IsFade = isFade;
    audio.ThePath = path;
    audio.IsPathMutex = pathMutex;
    audio.TheTrackName = track;
    audio.IsTrackMutex = trackMutex;
    audio.TheGroupName = group;
    audio.IsGroupMutex = groupMutex;
    audio.TheDefaultVolume = defalutVolume;
    audio.TheComplete = onComplete;
    audio.TheAudio.loop = isLoop;
    audio.TheAudio.minDistance = minDistance;
    audio.TheAudio.maxDistance = maxDistance;
    // Effects and music use separate master volumes and separate registries.
    if (isEffect)
    {
        audio.TheAudio.volume = defalutVolume * m_effectVolume;
        m_effectAudio.Add(audio.InsId, audio);
    }
    else
    {
        audio.TheAudio.volume = defalutVolume * m_musicVolume;
        m_musicAudio.Add(audio.InsId, audio);
    }
    // Clip loads asynchronously; the track is wired up once it arrives.
    ResourceLoader.AsyncLoadObject(audio.ThePath, typeof(AudioClip), (clip) => { SetAudioTrack(audio, clip as AudioClip, state); });
    return(audio.InsId);
}
/// <summary>
/// PortAudio stream callback for the network-client audio path: pulls samples
/// received over the network, runs them through DttSP, applies test-signal
/// sources, VAC and loop-DLL routing, and writes the result to the output
/// buffers. Must return quickly; runs on the audio thread.
/// </summary>
/// <param name="input">Non-interleaved input buffer array from PortAudio.</param>
/// <param name="output">Non-interleaved output buffer array (two float channels).</param>
/// <param name="frameCount">Samples per channel in this callback.</param>
/// <param name="timeInfo">PortAudio timing info (unused).</param>
/// <param name="statusFlags">PortAudio status flags (unused).</param>
/// <param name="userData">Opaque user pointer (unused).</param>
/// <returns><c>callback_return</c> normally; 0 on exception.</returns>
unsafe public static int NetworkClientCallback1(void* input, void* output, int frameCount,
    PA19.PaStreamCallbackTimeInfo* timeInfo, int statusFlags, void* userData)
{
    try
    {
        if (audio_stop) return callback_return;
        int* out_array_ptr = (int*)output;
        float* out_l_ptr1 = (float*)out_array_ptr[0];
        float* out_r_ptr1 = (float*)out_array_ptr[1];
        // NOTE(review): stackalloc size is frameCount * sizeof(float) ELEMENTS,
        // i.e. 4x more floats than frames. This over-allocation appears to be
        // relied upon by the 8192-byte Marshal.Copy below, so it is left as-is.
        float* tmp_input_l = stackalloc float[frameCount * sizeof(float)];
        float* tmp_input_r = stackalloc float[frameCount * sizeof(float)];
        float* tmp_in_l = (float*)tmp_input_l;
        float* tmp_in_r = (float*)tmp_input_r;
        byte* left = (byte*)tmp_in_l;
        byte* right = (byte*)tmp_in_r;
        if (client_rf_spectar)
        {
            // Copy the raw network buffers straight into the scratch buffers.
            CATNetwork_mutex.WaitOne();
            Marshal.Copy(network_input_bufer_l, 0, new IntPtr(tmp_in_l), 8192);
            Marshal.Copy(network_input_bufer_r, 0, new IntPtr(tmp_in_r), 8192);
            CATNetwork_mutex.ReleaseMutex();
        }
        else
        {
            // De-interleave L/R byte pairs out of the left network buffer
            // (i advances by 2 per iteration: loop i++ plus explicit i++).
            for (int i = 0; i < 1024; i++)
            {
                left[0] = network_input_bufer_l[i];
                right[0] = network_input_bufer_l[i + 1];
                left++;
                right++;
                i++;
            }
            fixed (float* res_outr_ptr = &(res_outr[0]))
            fixed (float* res_outl_ptr = &(res_outl[0]))
            {
                int outsamps;
                DttSP.DoResamplerF(tmp_in_l, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l);
                DttSP.DoResamplerF(tmp_in_r, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r);
                // NOTE(review): these pointers escape the fixed scope below;
                // safe in practice only because res_outl/res_outr stay
                // reachable, but the GC could move them — pre-existing hazard.
                tmp_in_l = res_outl_ptr;
                tmp_in_r = res_outr_ptr;
            }
        }
        if (wave_playback)
        {
            // Replace the input with samples from the wave file being played.
            tmp_in_l = (float*)tmp_input_l;
            tmp_in_r = (float*)tmp_input_r;
            wave_file_reader.GetPlayBuffer(tmp_in_l, tmp_in_r);
        }
        else if ((wave_record && !mox && record_rx_preprocessed) ||
                 (wave_record && mox && record_tx_preprocessed))
        {
            // Record pre-processed audio (before DSP).
            tmp_in_l = (float*)tmp_input_l;
            tmp_in_r = (float*)tmp_input_r;
            wave_file_writer.AddWriteBuffer(tmp_in_l, tmp_in_r);
        }
        else if (voice_message_record && !console.MOX)
        {
            tmp_in_l = (float*)tmp_input_l;
            tmp_in_r = (float*)tmp_input_r;
            wave_file_writer.AddWriteBuffer(tmp_in_l, tmp_in_r);
        }
        if (phase)
        {
            // Snapshot the input for the phase display.
            Marshal.Copy(new IntPtr(tmp_in_l), phase_buf_l, 0, frameCount);
            Marshal.Copy(new IntPtr(tmp_in_r), phase_buf_r, 0, frameCount);
        }
        float* in_l = null, in_r = null, out_l = null, out_r = null, in_l_VAC = null, in_r_VAC = null;
        // Optional I/Q channel swap for both input and output.
        if (!console.RX_IQ_channel_swap)
        {
            in_l = (float*)tmp_in_l;
            in_r = (float*)tmp_in_r;
            out_l = out_l_ptr1;
            out_r = out_r_ptr1;
        }
        else
        {
            in_l = (float*)tmp_in_r;
            in_r = (float*)tmp_in_l;
            out_l = out_r_ptr1;
            out_r = out_l_ptr1;
        }
        if (vac_enabled && loopDLL_enabled && mox)
        {
            // TX via loop DLL: overwrite the input with the DLL's TX buffer.
            int i;
            in_l_VAC = in_l; // save pointer
            in_r_VAC = in_r; // save pointer
            fixed (double* loopDLL_inl_ptr = &(loopDLL_inl[0]))
            {
                if (console.loopDLL.ReadTXBuffer(loopDLL_inl_ptr))
                {
                    for (i = 0; i < frameCount; i++)
                    {
                        in_l[0] = (float)(loopDLL_inl[i] / 1e4);
                        in_r[0] = (float)(loopDLL_inl[i] / 1e4);
                        in_l++;
                        in_r++;
                    }
                }
            }
            in_l = in_l_VAC; // restore pointer
            in_r = in_r_VAC; // restore pointer
        }
        switch (current_audio_state1)
        {
            case AudioState.DTTSP:
                if (dsp_mode == DSPMode.CWU || dsp_mode == DSPMode.CWL)
                {
                    DttSP.CWtoneExchange(out_l_ptr1, out_r_ptr1, frameCount);
                }
                // Scale input with mic preamp (phone/AM-style modes only,
                // and only when VAC isn't supplying the TX audio).
                if (mox && !vac_enabled &&
                    (dsp_mode == DSPMode.LSB ||
                     dsp_mode == DSPMode.USB ||
                     dsp_mode == DSPMode.DSB ||
                     dsp_mode == DSPMode.AM ||
                     dsp_mode == DSPMode.SAM ||
                     dsp_mode == DSPMode.FMN))
                {
                    if (wave_playback)
                    {
                        ScaleBuffer(in_l, in_l, frameCount, (float)wave_preamp);
                        ScaleBuffer(in_r, in_r, frameCount, (float)wave_preamp);
                    }
                    else
                    {
                        ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp);
                        ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp);
                    }
                }
                #region Input Signal Source
                switch (current_input_signal)
                {
                    case SignalSource.SOUNDCARD:
                        break;
                    case SignalSource.SINE:
                        SineWave(in_l, frameCount, phase_accumulator1, sine_freq1);
                        phase_accumulator1 = CosineWave(in_r, frameCount, phase_accumulator1, sine_freq1);
                        ScaleBuffer(in_l, in_l, frameCount, (float)input_source_scale);
                        ScaleBuffer(in_r, in_r, frameCount, (float)input_source_scale);
                        break;
                    case SignalSource.NOISE:
                        Noise(in_l, frameCount);
                        Noise(in_r, frameCount);
                        break;
                    case SignalSource.TRIANGLE:
                        Triangle(in_l, frameCount, sine_freq1);
                        CopyBuffer(in_l, in_r, frameCount);
                        break;
                    case SignalSource.SAWTOOTH:
                        Sawtooth(in_l, frameCount, sine_freq1);
                        CopyBuffer(in_l, in_r, frameCount);
                        break;
                }
                #endregion
                if (vac_enabled && rb_vacIN_l != null && rb_vacIN_r != null &&
                    rb_vacOUT_l != null && rb_vacOUT_r != null)
                {
                    if (mox)
                    {
                        // TX: pull input from the VAC ring buffers, scale, then DSP.
                        if (rb_vacIN_l.ReadSpace() >= frameCount) rb_vacIN_l.ReadPtr(in_l, frameCount);
                        else
                        {
                            ClearBuffer(in_l, frameCount);
                            VACDebug("rb_vacIN underflow");
                        }
                        if (rb_vacIN_r.ReadSpace() >= frameCount) rb_vacIN_r.ReadPtr(in_r, frameCount);
                        else
                        {
                            ClearBuffer(in_r, frameCount);
                            VACDebug("rb_vacIN underflow");
                        }
                        ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp);
                        ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp);
                        DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount);
                    }
                    else
                    {
                        // RX: DSP first, then drain the VAC input ring buffers.
                        DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount);
                        if ((rb_vacIN_l.ReadSpace() >= frameCount) && (rb_vacIN_r.ReadSpace() >= frameCount))
                        {
                            Win32.EnterCriticalSection(cs_vac);
                            rb_vacIN_l.ReadPtr(in_l, frameCount);
                            rb_vacIN_r.ReadPtr(in_r, frameCount);
                            Win32.LeaveCriticalSection(cs_vac);
                        }
                        else
                        {
                            ClearBuffer(in_l, frameCount);
                            ClearBuffer(in_r, frameCount);
                            VACDebug("rb_vacIN underflow");
                        }
                    }
                }
                else
                    DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount);
                #region Output Signal Source
                switch (current_output_signal)
                {
                    case SignalSource.SOUNDCARD:
                        break;
                    case SignalSource.SINE:
                        SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                        phase_accumulator1 = CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                        break;
                    case SignalSource.NOISE:
                        Noise(out_l_ptr1, frameCount);
                        Noise(out_r_ptr1, frameCount);
                        break;
                    case SignalSource.TRIANGLE:
                        Triangle(out_l_ptr1, frameCount, sine_freq1);
                        CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                        break;
                    case SignalSource.SAWTOOTH:
                        Sawtooth(out_l_ptr1, frameCount, sine_freq1);
                        CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                        break;
                }
                #endregion
                break;
            case AudioState.CW:
                if (next_audio_state1 == AudioState.SWITCH)
                {
                    // Transitioning out of CW: feed silence (or VAC input)
                    // through DSP until the switch countdown completes.
                    Win32.memset(tmp_in_l, 0, frameCount * sizeof(float));
                    Win32.memset(tmp_in_r, 0, frameCount * sizeof(float));
                    if (vac_enabled)
                    {
                        if ((rb_vacIN_l.ReadSpace() >= frameCount) && (rb_vacIN_r.ReadSpace() >= frameCount))
                        {
                            Win32.EnterCriticalSection(cs_vac);
                            rb_vacIN_l.ReadPtr(tmp_in_l, frameCount);
                            rb_vacIN_r.ReadPtr(tmp_in_r, frameCount);
                            Win32.LeaveCriticalSection(cs_vac);
                        }
                        else
                        {
                            VACDebug("rb_vacIN underflow switch time!");
                        }
                    }
                    DttSP.ExchangeSamples(thread_no, tmp_in_l, tmp_in_r, out_l_ptr1, out_r_ptr1, frameCount);
                    if (switch_count == 0) next_audio_state1 = AudioState.CW;
                    switch_count--;
                }
                DttSP.CWtoneExchange(out_r_ptr1, out_l_ptr1, frameCount);
                break;
            case AudioState.SINL_COSR:
                // Test signal: sine on left, cosine on right (I/Q pair).
                if (two_tone)
                {
                    double dump;
                    SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out dump, out dump);
                    CosineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                }
                else
                {
                    SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    phase_accumulator1 = CosineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                }
                break;
            case AudioState.SINL_SINR:
                // Test signal: identical sine on both channels.
                if (two_tone)
                {
                    SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                    CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                }
                else
                {
                    phase_accumulator1 = SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                }
                break;
            case AudioState.SINL_NOR:
                // Test signal: sine on left, silence on right.
                if (two_tone)
                {
                    SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                    ClearBuffer(out_r_ptr1, frameCount);
                }
                else
                {
                    phase_accumulator1 = SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    ClearBuffer(out_r_ptr1, frameCount);
                }
                break;
            case AudioState.CW_COSL_SINR:
                // TX tone generation, optionally shifted by the TX IF or the
                // VFO-LO offset, with IQ gain/phase correction applied.
                if (mox)
                {
                    if (two_tone)
                    {
                        double dump;
                        if (console.tx_IF)
                        {
                            CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + console.TX_IF_shift * 1e5, sine_freq2 + console.TX_IF_shift * 1e5,
                                out dump, out dump);
                            SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + console.TX_IF_shift * 1e5, sine_freq2 + console.TX_IF_shift * 1e5,
                                out phase_accumulator1, out phase_accumulator2);
                        }
                        else
                        {
                            double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6;
                            CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + osc, sine_freq2 + osc, out dump, out dump);
                            SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + osc, sine_freq2 + osc, out phase_accumulator1, out phase_accumulator2);
                        }
                    }
                    else
                    {
                        if (console.tx_IF)
                        {
                            CosineWave(out_r, frameCount, phase_accumulator1, sine_freq1 + console.TX_IF_shift * 1e5);
                            phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1,
                                sine_freq1 + console.TX_IF_shift * 1e5);
                        }
                        else
                        {
                            double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6;
                            CosineWave(out_r, frameCount, phase_accumulator1, sine_freq1 + osc);
                            phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1, sine_freq1 + osc);
                        }
                    }
                    float iq_gain = 1.0f + (1.0f - (1.0f + 0.001f * (float)console.SetupForm.udDSPImageGainTX.Value));
                    float iq_phase = 0.001f * (float)console.SetupForm.udDSPImagePhaseTX.Value;
                    CorrectIQBuffer(out_l, out_r, iq_gain, iq_phase, frameCount);
                }
                break;
            case AudioState.COSL_SINR:
                // Test signal: cosine on left, sine on right.
                if (two_tone)
                {
                    double dump;
                    CosineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out dump, out dump);
                    SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                }
                else
                {
                    CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                }
                break;
            case AudioState.NOL_SINR:
                // Test signal: silence on left, sine on right.
                if (two_tone)
                {
                    ClearBuffer(out_l_ptr1, frameCount);
                    SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                }
                else
                {
                    ClearBuffer(out_l_ptr1, frameCount);
                    phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                }
                break;
            case AudioState.NOL_NOR:
                // Silence on both channels.
                ClearBuffer(out_l_ptr1, frameCount);
                ClearBuffer(out_r_ptr1, frameCount);
                break;
            case AudioState.PIPE:
                // Pass the input straight through to the output.
                CopyBuffer(tmp_in_l, out_l_ptr1, frameCount);
                CopyBuffer(tmp_in_r, out_r_ptr1, frameCount);
                break;
            case AudioState.SWITCH:
                // TX/RX changeover: ramp the audio down, swap mox, ramp back up.
                if (!ramp_down && !ramp_up)
                {
                    ClearBuffer(tmp_in_l, frameCount);
                    ClearBuffer(tmp_in_r, frameCount);
                    if (mox != next_mox) mox = next_mox;
                }
                if (vac_enabled)
                {
                    if ((rb_vacIN_l.ReadSpace() >= frameCount) && (rb_vacIN_r.ReadSpace() >= frameCount))
                    {
                        Win32.EnterCriticalSection(cs_vac);
                        rb_vacIN_l.ReadPtr(tmp_in_l, frameCount);
                        rb_vacIN_r.ReadPtr(tmp_in_r, frameCount);
                        Win32.LeaveCriticalSection(cs_vac);
                    }
                    else
                    {
                        VACDebug("rb_vacIN underflow switch time!");
                    }
                }
                DttSP.ExchangeSamples(thread_no, tmp_in_l, tmp_in_r, out_l_ptr1, out_r_ptr1, frameCount);
                if (ramp_down)
                {
                    int i;
                    for (i = 0; i < frameCount; i++)
                    {
                        float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                        out_l_ptr1[i] *= w;
                        out_r_ptr1[i] *= w;
                        ramp_val += ramp_step;
                        if (++ramp_count >= ramp_samples)
                        {
                            ramp_down = false;
                            break;
                        }
                    }
                    // Zero the remainder of the buffer if the ramp is still in
                    // progress. (NOTE(review): with the loop above, i equals
                    // frameCount whenever ramp_down is still true, so this fill
                    // is effectively dead — preserved from the original.)
                    if (ramp_down)
                    {
                        for (; i < frameCount; i++)
                        {
                            out_l_ptr1[i] = 0.0f;
                            out_r_ptr1[i] = 0.0f;
                        }
                    }
                }
                else if (ramp_up)
                {
                    for (int i = 0; i < frameCount; i++)
                    {
                        float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                        out_l_ptr1[i] *= w;
                        out_r_ptr1[i] *= w;
                        ramp_val += ramp_step;
                        if (++ramp_count >= ramp_samples)
                        {
                            ramp_up = false;
                            break;
                        }
                    }
                }
                else
                {
                    ClearBuffer(out_l_ptr1, frameCount);
                    ClearBuffer(out_r_ptr1, frameCount);
                }
                if (next_audio_state1 == AudioState.CW)
                {
                    DttSP.CWtoneExchange(out_l_ptr1, out_r_ptr1, frameCount);
                }
                else if (switch_count == 1)
                    DttSP.CWRingRestart();
                switch_count--;
                if (switch_count == 0) current_audio_state1 = next_audio_state1;
                break;
        }
        // Output volume: monitor level on RX, TX scale (with optional +3 dB
        // for high-power AM/SAM) on TX.
        double vol = monitor_volume_left;
        if (mox)
        {
            vol = TXScale;
            if (high_pwr_am)
            {
                if (dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM) vol *= 1.414;
            }
        }
        if ((wave_record && !mox && !record_rx_preprocessed) || // post process audio
            (wave_record && mox && !record_tx_preprocessed))
            wave_file_writer.AddWriteBuffer(out_l_ptr1, out_r_ptr1);
        if (VACPrimaryAudiodevice && !mox && current_audio_state1 == AudioState.DTTSP && !loopDLL_enabled)
        {
            // VAC owns the primary device on RX: mute the local output.
            ClearBuffer(out_l_ptr1, frameCount);
            ClearBuffer(out_r_ptr1, frameCount);
        }
        if (loopDLL_enabled && vac_enabled && !mox) // rx
        {
            // Hand the RX audio to the loop DLL and mute the local output.
            int i;
            double[] buffer = new double[frameCount];
            fixed (double* buffer_ptr = &(buffer[0]))
            fixed (float* res_outl_ptr = &(res_outl[0]))
            {
                ScaleBuffer(out_l, res_outl_ptr, frameCount, (float)vol);
                ScaleBuffer(out_l, out_l, frameCount, 0.0f);
                ScaleBuffer(out_r, out_r, frameCount, 0.0f);
                for (i = 0; i < frameCount; i++)
                {
                    buffer[i] = (double)1e5 * res_outl[i];
                }
                console.loopDLL.WriteRXBuffer(buffer_ptr);
            }
        }
        else if (loopDLL_enabled && vac_enabled && mox) // tx
        {
            // Resample the TX audio down, interleave it into a byte packet
            // (bytes 0-1 are left untouched as a header), and queue it for
            // the network send thread.
            ScaleBuffer(out_l, out_l, frameCount, (float)vol);
            ScaleBuffer(out_r, out_r, frameCount, (float)vol);
            fixed (float* res_inl_ptr = &(res_inl[0]))
            fixed (float* res_inr_ptr = &(res_inr[0]))
            {
                int outsamps;
                DttSP.DoResamplerF(out_l, res_inl_ptr, frameCount, &outsamps, resampServerPtrIn_l); // down to 6000
                DttSP.DoResamplerF(out_r, res_inr_ptr, frameCount, &outsamps, resampServerPtrIn_r);
                byte* tmp_l_ptr = (byte*)res_inl_ptr;
                byte* tmp_r_ptr = (byte*)res_inr_ptr;
                byte[] buffer = new byte[8192];
                for (int i = 2; i < buffer.Length; i++)
                {
                    buffer[i] = tmp_l_ptr[0];
                    buffer[i + 1] = tmp_r_ptr[0];
                    tmp_l_ptr++;
                    tmp_r_ptr++;
                    i++;
                }
                CATNetwork_mutex.WaitOne();
                fixed (void* rptr = &buffer[0])
                fixed (void* wptr = &console.ClientSocket.send_buffer[0])
                    Win32.memcpy(wptr, rptr, buffer.Length);
                // FIX: was CATNetwork_mutex.WaitOne() — the mutex was acquired
                // a second time (recursively) and never fully released,
                // permanently starving the network threads of the lock.
                CATNetwork_mutex.ReleaseMutex();
            }
            console.ClientSocket.sendEvent.Set();
        }
        else
        {
            ScaleBuffer(out_l, out_l, frameCount, (float)vol);
            ScaleBuffer(out_r, out_r, frameCount, (float)vol);
        }
        return callback_return;
    }
    catch (Exception ex)
    {
        // FIX: release the mutex only if this thread actually holds it —
        // an unconditional ReleaseMutex() throws ApplicationException when
        // the exception occurred outside the locked regions, masking the
        // original error and escaping the audio callback.
        try { CATNetwork_mutex.ReleaseMutex(); }
        catch (ApplicationException) { /* mutex was not held here */ }
        Debug.Write(ex.ToString());
        return 0;
    }
}
/// <summary>
/// OnCollisionEnter is called when this collider/rigidbody has begun
/// touching another rigidbody/collider. Switches the looping surface sound
/// to match the tag of the object that was hit.
/// </summary>
/// <param name="other">The Collision data associated with this collision.</param>
void OnCollisionEnter(Collision other)
{
    // FIX: added the missing separator space (was "Collided withgrass").
    UnityEngine.Debug.Log("Collided with " + other.gameObject.tag);
    // Each case previously repeated the same switch-and-restart logic;
    // SwitchSurface() now decides once whether the sound must (re)start.
    switch (other.gameObject.tag)
    {
        case "grass":
            if (SwitchSurface(AudioState.grass)) StartCoroutine(PlaySound(grassSound, .15f));
            break;
        case "dirt":
            if (SwitchSurface(AudioState.dirt)) StartCoroutine(PlaySound(dirtSound, .15f));
            break;
        case "cave":
            if (SwitchSurface(AudioState.cave)) StartCoroutine(PlaySound(caveSound, .15f));
            break;
        case "ice":
            if (SwitchSurface(AudioState.ice)) StartCoroutine(PlaySound(iceSound, .15f));
            break;
        case "storm":
            if (SwitchSurface(AudioState.storm)) StartCoroutine(PlaySound(stormSound, .15f));
            break;
        case "water":
            if (SwitchSurface(AudioState.water)) StartCoroutine(PlaySound(waterSound, .15f));
            break;
        case "lava":
            if (SwitchSurface(AudioState.lava)) StartCoroutine(PlaySound(lavaSound, .15f));
            break;
        case "stair":
            if (SwitchSurface(AudioState.stair)) StartCoroutine(PlaySound(stairSound, .15f));
            break;
        case "air":
            if (SwitchSurface(AudioState.air)) StartCoroutine(PlaySound(airSound, .15f));
            break;
        default:
            UnityEngine.Debug.Log("No sound");
            break;
    }
}

// Records the requested surface state and reports whether playback should
// (re)start: true when the surface changed or nothing is currently playing.
// When switching, updates currentAudio and stops the source, exactly as each
// original case body did.
private bool SwitchSurface(AudioState state)
{
    newAudio = state;
    if (newAudio == currentAudio && audioSource.isPlaying)
    {
        return false;
    }
    currentAudio = newAudio;
    audioSource.Stop();
    return true;
}
unsafe public static int Callback1(void* input, void* output, int frameCount, // changes yt7pwr PA19.PaStreamCallbackTimeInfo* timeInfo, int statusFlags, void* userData) { try { if (console.CurrentModel == Model.GENESIS_G6 || audio_stop) return callback_return; //audio_run.WaitOne(100); #if(WIN64) Int64* array_ptr = (Int64*)input; float* in_l_ptr1 = (float*)array_ptr[0]; float* in_r_ptr1 = (float*)array_ptr[1]; double* VAC_in = (double*)input; array_ptr = (Int64*)output; float* out_l_ptr1 = (float*)array_ptr[1]; float* out_r_ptr1 = (float*)array_ptr[0]; #endif #if(WIN32) int* array_ptr = (int*)input; float* in_l_ptr1 = (float*)array_ptr[0]; float* in_r_ptr1 = (float*)array_ptr[1]; double* VAC_in = (double*)input; array_ptr = (int*)output; float* out_l_ptr1 = (float*)array_ptr[1]; float* out_r_ptr1 = (float*)array_ptr[0]; #endif if (wave_playback) wave_file_reader.GetPlayBuffer(in_l_ptr1, in_r_ptr1); else if ((wave_record && !mox && record_rx_preprocessed && !vac_primary_audiodev) || (wave_record && mox && record_tx_preprocessed)) wave_file_writer.AddWriteBuffer(in_l_ptr1, in_r_ptr1); if (phase) { //phase_mutex.WaitOne(); Marshal.Copy(new IntPtr(in_l_ptr1), phase_buf_l, 0, frameCount); Marshal.Copy(new IntPtr(in_r_ptr1), phase_buf_r, 0, frameCount); //phase_mutex.ReleaseMutex(); } float* in_l = null, in_l_VAC = null, in_r = null, in_r_VAC = null, out_l = null, out_r = null; if (!mox && !voice_message_record) // rx { if (!console.RX_IQ_channel_swap) { in_l = in_l_ptr1; in_r = in_r_ptr1; out_l = out_l_ptr1; out_r = out_r_ptr1; } else { in_l = in_r_ptr1; in_r = in_l_ptr1; out_l = out_r_ptr1; out_r = out_l_ptr1; } } else if (mox && !voice_message_record) { // tx if (!console.TX_IQ_channel_swap) { in_r = in_l_ptr1; in_l = in_r_ptr1; out_r = out_l_ptr1; out_l = out_r_ptr1; } else { in_r = in_l_ptr1; in_l = in_r_ptr1; out_l = out_l_ptr1; out_r = out_r_ptr1; } if (voice_message_playback) { voice_msg_file_reader.GetPlayBuffer(in_l, in_r); } } else if (voice_message_record) { 
in_l = in_l_ptr1; in_r = in_r_ptr1; out_l = out_l_ptr1; out_r = out_r_ptr1; } if (voice_message_record) { try { if (vac_enabled) { if (rb_vacIN_l.ReadSpace() >= frameCount && rb_vacIN_r.ReadSpace() >= frameCount) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l_ptr1, frameCount); rb_vacIN_r.ReadPtr(in_r_ptr1, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { ClearBuffer(in_l_ptr1, frameCount); ClearBuffer(in_r_ptr1, frameCount); VACDebug("rb_vacIN underflow VoiceMsg record"); } } ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp); voice_msg_file_writer.AddWriteBuffer(in_l, in_r); } catch (Exception ex) { VACDebug("Audio: " + ex.ToString()); } } if (!MultiPSK_server_enable && vac_enabled && loopDLL_enabled && mox) { in_l_VAC = in_l; // save pointer in_r_VAC = in_r; // save pointer fixed (double* loopDLL_inl_ptr = &(loopDLL_inl[0])) { if (console.loopDLL.ReadTXBuffer(loopDLL_inl_ptr)) { for (int i = 0; i < frameCount; i++) { in_l[0] = (float)(loopDLL_inl[i] / 1e5); in_r[0] = 0.0f; // (float)(loopDLL_inl[i] / 1e5); in_l++; in_r++; } } else { } } in_l = in_l_VAC; // restore pointer in_r = in_r_VAC; // restore pointer } else if (mox && MultiPSK_server_enable && console.MultiPSKServer.ClientConnected) { float* tmp_l = in_l; float* tmp_r = in_r; for (int i = 0; i < frameCount; i++) { in_l[0] = MultiPSK_output_bufer_l[i]; in_r[0] = 0.0f; in_l++; in_r++; } in_l = tmp_l; in_r = tmp_r; } switch (current_audio_state1) { case AudioState.DTTSP: // scale input with mic preamp if ((mox || voice_message_record) && !vac_enabled && (dsp_mode == DSPMode.LSB || dsp_mode == DSPMode.USB || dsp_mode == DSPMode.DSB || dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM || dsp_mode == DSPMode.FMN)) { if (wave_playback) { ScaleBuffer(in_l, in_l, frameCount, (float)wave_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)wave_preamp); } else { ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp); 
ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp); } if (echo_enable && (dsp_mode != DSPMode.DIGL || dsp_mode != DSPMode.DIGU)) { if (!echo_pause) { echoRB.WritePtr(in_l, frameCount); if (echoRB.ReadSpace() > echo_delay - 2) { EchoMixer(in_l, in_r, frameCount); } } } } #region Input Signal Source switch (current_input_signal) { case SignalSource.SOUNDCARD: break; case SignalSource.SINE: if (console.RX_IQ_channel_swap) { SineWave(in_r, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(in_l, frameCount, phase_accumulator1, sine_freq1); } else { SineWave(in_l, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(in_r, frameCount, phase_accumulator1 + 0.0001f, sine_freq1); } ScaleBuffer(in_l, in_l, frameCount, (float)input_source_scale); ScaleBuffer(in_r, in_r, frameCount, (float)input_source_scale); break; case SignalSource.NOISE: Noise(in_l, frameCount); Noise(in_r, frameCount); break; case SignalSource.TRIANGLE: Triangle(in_l, frameCount, sine_freq1); CopyBuffer(in_l, in_r, frameCount); break; case SignalSource.SAWTOOTH: Sawtooth(in_l, frameCount, sine_freq1); CopyBuffer(in_l, in_r, frameCount); break; } #endregion if (!loopDLL_enabled && vac_enabled && rb_vacIN_l != null && rb_vacIN_r != null && rb_vacOUT_l != null && rb_vacOUT_r != null && !voice_message_playback) { if (mox && !voice_message_record) { if (rb_vacIN_l.ReadSpace() >= frameCount && rb_vacIN_r.ReadSpace() >= frameCount) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l, frameCount); rb_vacIN_r.ReadPtr(in_r, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { ClearBuffer(in_l, frameCount); ClearBuffer(in_r, frameCount); VACDebug("rb_vacIN underflow CB1"); } ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp); if (echo_enable && (dsp_mode != DSPMode.DIGL || dsp_mode != DSPMode.DIGU)) { if (!echo_pause) { echoRB.WritePtr(in_l, frameCount); if (echoRB.ReadSpace() > echo_delay 
- 2) { EchoMixer(in_l, in_r, frameCount); } } } } else if (voice_message_record) { } } else if (!MultiPSK_server_enable && loopDLL_enabled && mox) { ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp); } else if (mox && MultiPSK_server_enable && console.MultiPSKServer.ClientConnected) { ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp); } DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount); #region Output Signal Source switch (current_output_signal) { case SignalSource.SOUNDCARD: break; case SignalSource.SINE: switch (ChannelTest) { case TestChannels.Left: SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); break; case TestChannels.Right: SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); break; case TestChannels.Both: SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator2 = CosineWave(out_r_ptr1, frameCount, phase_accumulator2, sine_freq1); break; } break; case SignalSource.NOISE: switch (ChannelTest) { case TestChannels.Both: Noise(out_l_ptr1, frameCount); Noise(out_r_ptr1, frameCount); break; case TestChannels.Left: Noise(out_l_ptr1, frameCount); break; case TestChannels.Right: Noise(out_r_ptr1, frameCount); break; } break; case SignalSource.TRIANGLE: switch (ChannelTest) { case TestChannels.Both: Triangle(out_l_ptr1, frameCount, sine_freq1); CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount); break; case TestChannels.Left: Triangle(out_l_ptr1, frameCount, sine_freq1); break; case TestChannels.Right: Triangle(out_r_ptr1, frameCount, sine_freq1); 
break; } break; case SignalSource.SAWTOOTH: switch (ChannelTest) { case TestChannels.Both: Sawtooth(out_l_ptr1, frameCount, sine_freq1); CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount); break; case TestChannels.Left: Sawtooth(out_l_ptr1, frameCount, sine_freq1); break; case TestChannels.Right: Sawtooth(out_r_ptr1, frameCount, sine_freq1); break; } break; } #endregion break; case AudioState.CW: if (next_audio_state1 == AudioState.SWITCH) { Win32.memset(in_l_ptr1, 0, frameCount * sizeof(float)); Win32.memset(in_r_ptr1, 0, frameCount * sizeof(float)); if (vac_enabled) { if ((rb_vacIN_l.ReadSpace() >= frameCount) && (rb_vacIN_r.ReadSpace() >= frameCount)) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l_ptr1, frameCount); rb_vacIN_r.ReadPtr(in_r_ptr1, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { VACDebug("rb_vacIN underflow Switch time CB1!"); } } ClearBuffer(out_l, frameCount); ClearBuffer(out_r, frameCount); if (switch_count == 0) next_audio_state1 = AudioState.CW; switch_count--; } else DttSP.CWtoneExchange(out_l, out_r, frameCount); break; case AudioState.SINL_COSR: if (two_tone) { double dump; SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out dump, out dump); CosineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); } else { SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); } break; case AudioState.SINL_SINR: if (two_tone) { SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount); } else { phase_accumulator1 = SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount); } break; case 
AudioState.SINL_NOR: if (two_tone) { SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); ClearBuffer(out_r_ptr1, frameCount); } else { phase_accumulator1 = SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); ClearBuffer(out_r_ptr1, frameCount); } break; case AudioState.CW_COSL_SINR: if (mox) { if (two_tone) { double dump; if (console.tx_IF) { CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + console.TX_IF_shift * 1e5, sine_freq2 + console.TX_IF_shift * 1e5, out dump, out dump); SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + console.TX_IF_shift * 1e5, sine_freq2 + console.TX_IF_shift * 1e5, out phase_accumulator1, out phase_accumulator2); } else { double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6; CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + osc, sine_freq2 + osc, out dump, out dump); SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + osc, sine_freq2 + osc, out phase_accumulator1, out phase_accumulator2); } } else { if (console.tx_IF) { CosineWave(out_r, frameCount, phase_accumulator1, sine_freq1 + console.TX_IF_shift * 1e5); phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1, sine_freq1 + console.TX_IF_shift * 1e5); } else { double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6; CosineWave(out_r, frameCount, phase_accumulator1, sine_freq1 + osc); phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1, sine_freq1 + osc); } } float iq_gain = 1.0f + (1.0f - (1.0f + 0.001f * (float)console.SetupForm.udDSPImageGainTX.Value)); float iq_phase = 0.001f * (float)console.SetupForm.udDSPImagePhaseTX.Value; CorrectIQBuffer(out_l, out_r, iq_gain, iq_phase, frameCount); } break; case AudioState.COSL_SINR: if (two_tone) { double dump; CosineWave2Tone(out_l_ptr1, frameCount, 
phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out dump, out dump); SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); } else { CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); } break; case AudioState.NOL_SINR: if (two_tone) { ClearBuffer(out_l_ptr1, frameCount); SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); } else { ClearBuffer(out_l_ptr1, frameCount); phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); } break; case AudioState.NOL_NOR: ClearBuffer(out_l_ptr1, frameCount); ClearBuffer(out_r_ptr1, frameCount); break; case AudioState.PIPE: CopyBuffer(in_l_ptr1, out_l_ptr1, frameCount); CopyBuffer(in_r_ptr1, out_r_ptr1, frameCount); break; case AudioState.SWITCH: if (!ramp_down && !ramp_up) { ClearBuffer(in_l, frameCount); ClearBuffer(in_r, frameCount); if (mox != next_mox) mox = next_mox; } if (vac_enabled) { if ((rb_vacIN_l.ReadSpace() >= frameCount) && (rb_vacIN_r.ReadSpace() >= frameCount)) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l_ptr1, frameCount); rb_vacIN_r.ReadPtr(in_r_ptr1, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { VACDebug("rb_vacIN underflow switch time CB1!"); } } DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount); if (ramp_down) { int i; for (i = 0; i < frameCount; i++) { float w = (float)Math.Sin(ramp_val * Math.PI / 2.0); out_l_ptr1[i] *= w; out_r_ptr1[i] *= w; ramp_val += ramp_step; if (++ramp_count >= ramp_samples) { ramp_down = false; break; } } if (ramp_down) { for (; i < frameCount; i++) { out_l[i] = 0.0f; out_r[i] = 0.0f; } } } else if (ramp_up) { for (int i = 0; i < frameCount; i++) { float w = (float)Math.Sin(ramp_val * Math.PI / 
2.0); out_l[i] *= w; out_r[i] *= w; ramp_val += ramp_step; if (++ramp_count >= ramp_samples) { ramp_up = false; break; } } } else { ClearBuffer(out_l, frameCount); ClearBuffer(out_r, frameCount); } if (next_audio_state1 == AudioState.CW) { //cw_delay = 1; DttSP.CWtoneExchange(out_l, out_r, frameCount); } else if (switch_count == 1) DttSP.CWRingRestart(); switch_count--; if (switch_count == ramp_up_num) RampUp = true; if (switch_count == 0) current_audio_state1 = next_audio_state1; break; } if (!MultiPSK_server_enable && vac_enabled && !loopDLL_enabled && rb_vacIN_l != null && rb_vacIN_r != null && rb_vacOUT_l != null && rb_vacOUT_r != null) { fixed (float* outl_ptr = &(vac_outl[0])) fixed (float* outr_ptr = &(vac_outr[0])) { if (!mox) { ScaleBuffer(out_l, outl_ptr, frameCount, (float)vac_rx_scale); ScaleBuffer(out_r, outr_ptr, frameCount, (float)vac_rx_scale); } else if (mox && vac_mon && (dsp_mode == DSPMode.CWU || dsp_mode == DSPMode.CWL)) { ScaleBuffer(out_l, outl_ptr, frameCount, 0.0f); ScaleBuffer(out_r, outr_ptr, frameCount, 0.0f); } else // zero samples going back to VAC since TX monitor is off { ScaleBuffer(out_l, outl_ptr, frameCount, 0.0f); ScaleBuffer(out_r, outr_ptr, frameCount, 0.0f); } int count = 0; while (!(rb_vacOUT_l.WriteSpace() >= frameCount && rb_vacOUT_r.WriteSpace() >= frameCount)) { Thread.Sleep(1); count++; if (count > latency1) break; } if (count > 0 && debug) VACDebug("VAC WriteSpace count: " + count.ToString()); if (!mox) { if (sample_rateVAC == sample_rate1) { if ((rb_vacOUT_l.WriteSpace() >= frameCount) && (rb_vacOUT_r.WriteSpace() >= frameCount)) { if (VACDirectI_Q) { if (vac_correct_iq) CorrectIQBuffer(in_l, in_r, vac_iq_gain, vac_iq_phase, frameCount); Win32.EnterCriticalSection(cs_vac); rb_vacOUT_l.WritePtr(in_l, frameCount); rb_vacOUT_r.WritePtr(in_r, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { Win32.EnterCriticalSection(cs_vac); rb_vacOUT_l.WritePtr(outl_ptr, frameCount); rb_vacOUT_r.WritePtr(outr_ptr, frameCount); 
Win32.LeaveCriticalSection(cs_vac); } } else { VACDebug("rb_vacOUT overflow CB1"); } } else { fixed (float* res_outl_ptr = &(res_outl[0])) fixed (float* res_outr_ptr = &(res_outr[0])) { int outsamps; if (VACDirectI_Q) { DttSP.DoResamplerF(in_l_ptr1, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l); DttSP.DoResamplerF(in_r_ptr1, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r); if ((rb_vacOUT_l.WriteSpace() >= outsamps) && (rb_vacOUT_r.WriteSpace() >= outsamps)) { if (vac_correct_iq) CorrectIQBuffer(res_outl_ptr, res_outr_ptr, vac_iq_gain, vac_iq_phase, frameCount); Win32.EnterCriticalSection(cs_vac); rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps); rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps); Win32.LeaveCriticalSection(cs_vac); } else { VACDebug("rb_vacOUT overflow CB1"); } } else { DttSP.DoResamplerF(outl_ptr, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l); DttSP.DoResamplerF(outr_ptr, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r); if ((rb_vacOUT_l.WriteSpace() >= outsamps) && (rb_vacOUT_r.WriteSpace() >= outsamps)) { Win32.EnterCriticalSection(cs_vac); rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps); rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps); Win32.LeaveCriticalSection(cs_vac); } else { VACDebug("rb_vacOUT overflow CB1"); } } } } } } } if (console.CurrentDisplayMode == DisplayMode.SCOPE || console.CurrentDisplayMode == DisplayMode.PANASCOPE) DoScope(out_l, frameCount); double vol_l = monitor_volume_left; double vol_r = monitor_volume_right; if (mox) { vol_l = TXScale; vol_r = TXScale; if (high_pwr_am) { if (dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM) { vol_l *= 1.414; vol_r *= 1.414; } } } if (wave_record && (!vac_primary_audiodev && !mox && !record_rx_preprocessed) || // post process audio (wave_record && mox && !record_tx_preprocessed)) wave_file_writer.AddWriteBuffer(out_r_ptr1, out_l_ptr1); if (PrimaryDirectI_Q && !mox) { if (primary_correct_iq) { CorrectIQBuffer(in_l, in_r, primary_iq_gain, primary_iq_phase, frameCount); } 
ScaleBuffer(in_l, out_r, frameCount, (float)vol_l); ScaleBuffer(in_r, out_l, frameCount, (float)vol_r); } else if (!MultiPSK_server_enable && loopDLL_enabled && vac_enabled && !mox) { int i; double[] buffer = new double[frameCount]; fixed (double* buffer_ptr = &(buffer[0])) fixed (float* res_outl_ptr = &(res_outl[0])) { ScaleBuffer(out_l, res_outl_ptr, frameCount, (float)vol_l); if (mon) { ScaleBuffer(out_l, out_l, frameCount, (float)monitor_volume_left); ScaleBuffer(out_r, out_r, frameCount, (float)monitor_volume_right); } else { ScaleBuffer(out_l, out_l, frameCount, 0.0f); ScaleBuffer(out_r, out_r, frameCount, 0.0f); } { for (i = 0; i < frameCount; i++) { buffer[i] = (double)1e5 * res_outl[i]; } console.loopDLL.WriteRXBuffer(buffer_ptr); } } } else if (!mox && MultiPSK_server_enable && run_MultiPSK_server_thread) { fixed (float* res_outl_ptr = &(MultiPSK_output_bufer_l[0])) { int outsamps; ScaleBuffer(out_l, out_l, frameCount, (float)vol_l); DttSP.DoResamplerF(out_l, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l); ScaleBuffer(out_l, out_l, frameCount, 0.0f); ScaleBuffer(out_r, out_r, frameCount, 0.0f); } MultiPSK_event.Set(); } else { if (mox) { ScaleBuffer(out_l, out_l, frameCount, (float)vol_l); ScaleBuffer(out_r, out_r, frameCount, (float)vol_r); } else { switch (mute_ch) { case MuteChannels.Left: ScaleBuffer(out_r, out_r, frameCount, (float)vol_r); ScaleBuffer(out_r, out_l, frameCount, 1.0f); break; case MuteChannels.Right: ScaleBuffer(out_l, out_l, frameCount, (float)vol_l); ScaleBuffer(out_l, out_r, frameCount, 1.0f); break; case MuteChannels.Both: ClearBuffer(out_l, frameCount); ClearBuffer(out_r, frameCount); break; case MuteChannels.None: ScaleBuffer(out_l, out_l, frameCount, (float)vol_l); ScaleBuffer(out_r, out_r, frameCount, (float)vol_r); break; } } } return callback_return; } catch (Exception ex) { Debug.Write(ex.ToString()); return 0; } }
// Wires up the "stream" command group on the bot's command manager.
// Commands are split into three sets: those valid only while a track is
// playing, those valid only while idle, and those always available.
void IModule.Install(ModuleManager manager)
{
    _client = manager.Client;
    manager.CreateDynCommands("stream", PermissionLevel.User, group =>
    {
        // commands which can only be called when there is a track currently playing.
        group.CreateGroup("", playingGroup =>
        {
            // Gate every command in this group on playback being active.
            playingGroup.AddCheck((cmd, usr, chnl) => GetAudio(chnl).IsPlaying);
            playingGroup.CreateCommand("goto")
                .Description("Skips to the given point in the track.")
                .Parameter("time")
                .Do(e => GetAudio(e.Channel).SkipToTimeInTrack(TimeSpan.Parse(e.GetArg("time"))));
            playingGroup.CreateCommand("stop")
                .Description("Stops playback of the playlist.")
                .Do(e => GetAudio(e.Channel).StopPlaylist());
            playingGroup.CreateCommand("forcestop")
                .Description("Forcefully stops playback of the playlist, track and leaves the voice channel.")
                .MinPermissions((int)PermissionLevel.ChannelAdmin)
                .Do(e => GetAudio(e.Channel).ForceStop());
            // "next" works by stopping the current track; presumably the player
            // then advances to the following playlist entry on its own — confirm in AudioState.
            playingGroup.CreateCommand("next")
                .Description("Skips the current track and plays the next track in the playlist.")
                .Do(e => GetAudio(e.Channel).StopPlayback());
            playingGroup.CreateCommand("prev")
                .Description("Skips the current track and plays the previus track in the playlist.")
                .Do(e => GetAudio(e.Channel).Previous());
            playingGroup.CreateCommand("current")
                .Description("Displays information about the currently played track.")
                .Do(async e => await GetAudio(e.Channel).PrintCurrentTrack());
            playingGroup.CreateCommand("pause")
                .Description("Pauses/unpauses playback of the current track.")
                .Do(e => GetAudio(e.Channel).Pause());
        });
        // commands which can only be called when there is no track playing.
        group.CreateGroup("", idleGroup =>
        {
            idleGroup.AddCheck((cmd, usr, chnl) => !GetAudio(chnl).IsPlaying);
            idleGroup.CreateCommand("start")
                .Alias("play")
                .Description("Starts the playback of the playlist.")
                .Parameter("channel", ParameterType.Unparsed)
                .Do(async e =>
                {
                    AudioState audio = GetAudio(e.Channel);
                    string channelQuery = e.GetArg("channel");
                    if (string.IsNullOrEmpty(channelQuery))
                    {
                        // No channel given: default to the caller's own voice channel, if any.
                        if (e.User.VoiceChannel != null)
                        {
                            audio.PlaybackChannel = e.User.VoiceChannel;
                        }
                    }
                    else
                    {
                        // Case-insensitive prefix match against the server's voice channels.
                        channelQuery = channelQuery.ToLowerInvariant();
                        Channel voiceChannel = e.Server.VoiceChannels.FirstOrDefault(
                            c => c.Name.ToLowerInvariant().StartsWith(channelQuery));
                        if (voiceChannel == null)
                        {
                            await e.Channel.SafeSendMessage(
                                $"Voice channel with the name of {channelQuery} was not found.");
                            return;
                        }
                        audio.PlaybackChannel = voiceChannel;
                    }
                    if (audio.PlaybackChannel == null)
                    {
                        await e.Channel.SafeSendMessage("Playback channel not set.");
                        return;
                    }
                    await audio.StartPlaylist();
                });
            idleGroup.CreateCommand("startat")
                .Alias("playat")
                .Description("Starts playback at at given point in the track")
                .Parameter("time")
                .Do(async e =>
                {
                    AudioState audio = GetAudio(e.Channel);
                    if (audio.PlaybackChannel == null)
                    {
                        await e.Channel.SafeSendMessage("Playback channel not set.");
                        return;
                    }
                    audio.SkipToTimeInTrack(TimeSpan.Parse(e.GetArg("time")));
                    await audio.StartPlaylist();
                });
        });
        // Always-available playlist management commands.
        group.CreateCommand("add")
            .Description("Adds a track to the music playlist.")
            .Parameter("location", ParameterType.Unparsed)
            .Do(async e =>
            {
                string loc = e.GetArg("location");
                TrackData result = await TrackData.Parse(loc);
                if (result == null)
                {
                    await e.Channel.SafeSendMessage($"Failed getting the stream url for `{loc}.");
                    return;
                }
                GetAudio(e.Channel).Playlist.Add(result);
                await e.Channel.SafeSendMessage($"Added `{result.Name}` to the playlist.");
            });
        // User-facing indices are 1-based; the playlist itself is 0-based.
        group.CreateCommand("setpos")
            .Alias("set")
            .Description("Sets the position of the current played track index to a given number.")
            .Parameter("index")
            .Do(e =>
                GetAudio(e.Channel).SkipToTrack(int.Parse(e.GetArg("index")) - 1));
        group.CreateCommand("remove")
            .Alias("rem")
            .Description("Removes a track at the given position from the playlist.")
            .Parameter("index")
            .Do(async e =>
            {
                AudioState audio = GetAudio(e.Channel);
                int remIndex = int.Parse(e.GetArg("index")) - 1;
                TrackData remData = audio.Playlist[remIndex];
                audio.Playlist.RemoveAt(remIndex);
                await e.Channel.SafeSendMessage($"Removed track `{remData.Name}` from the playlist.");
            });
        group.CreateCommand("list")
            .Description("List the songs in the current playlist.")
            .Do(async e =>
            {
                AudioState audio = GetAudio(e.Channel);
                if (!audio.Playlist.Any())
                {
                    await e.Channel.SafeSendMessage("Playlist is empty.");
                    return;
                }
                StringBuilder builder = new StringBuilder();
                builder.AppendLine("**Playlist:**");
                for (int i = 0; i < audio.Playlist.Count; i++)
                {
                    // Mark the entry currently being played.
                    if (i == audio.TrackIndex && audio.IsPlaying)
                    {
                        builder.Append("Playing: ");
                    }
                    builder.AppendLine($"`{i + 1}: {audio.Playlist[i].Name}`");
                }
                await e.Channel.SafeSendMessage(builder.ToString());
            });
        group.CreateCommand("clear")
            .Description("Stops music and clears the playlist.")
            .MinPermissions((int)PermissionLevel.ServerModerator)
            .Do(async e =>
            {
                GetAudio(e.Channel).ClearPlaylist();
                await e.Channel.SafeSendMessage("Cleared playlist.");
            });
        group.CreateCommand("channel")
            .Description(
                "Sets the channel in which the audio will be played in. Use .c to set it to your current channel.")
            .Parameter("channel", ParameterType.Unparsed)
            .Do(async e =>
            {
                AudioState audio = GetAudio(e.Channel);
                string channelQuery = e.GetArg("channel");
                // ".c" is shorthand for the caller's current voice channel.
                Channel channel = channelQuery == ".c" ?
                    e.User.VoiceChannel :
                    e.Server.FindChannels(channelQuery, ChannelType.Voice).FirstOrDefault();
                if (channel == null)
                {
                    await e.Channel.SafeSendMessage($"Voice channel `{channelQuery}` not found.");
                    return;
                }
                if (audio.IsPlaying)
                {
                    // Mid-playback: move the live stream to the new channel.
                    await audio.SwitchChannel(channel);
                }
                else
                {
                    audio.PlaybackChannel = channel;
                    await e.Channel.SafeSendMessage($"Set playback channel to \"`{audio.PlaybackChannel.Name}`\"");
                }
            });
    });
}
/// <summary>Creates a signals helper bound to the given audio state.</summary>
public AudioSignals(AudioState audioState) => this.audioState = audioState;
/// <summary>
/// Immediately silences the tool audio: marks the state as Off, stops the
/// audio source, and zeroes its volume so no residual sound is heard.
/// </summary>
protected void HardStopAudio()
{
    m_CurrentAudioState = AudioState.Off;
    m_ToolAudio.Stop();
    // Volume is zeroed too; presumably restored elsewhere before the next play — confirm.
    m_ToolAudio.volume = 0.0f;
}
/// <summary>
/// Builds a live CheckpointData from this serialized snapshot, carrying over
/// flags (empty set when none were stored) and the core mode.
/// </summary>
public CheckpointData Convert()
{
    CheckpointData data = new CheckpointData(Level, Name, MapMeta.GetInventory(Inventory), Dreaming, AudioState?.Convert());
    data.Flags = new HashSet <string>(Flags ?? new string[0]);
    data.CoreMode = CoreMode;
    return data;
}
/// <summary>Wraps an existing audio state in a serializable chunk.</summary>
public ChunkEAudioState(AudioState audio) => Audio = audio;
// Deserializes a session snapshot from the binary stream. The read order is
// the wire format — it must exactly match the writer; do not reorder reads.
public void Read(BinaryReader reader)
{
    InSession = reader.ReadBoolean();
    if (!InSession)
    {
        // No active session serialized; nothing else follows.
        return;
    }
    byte bools;
    int count;
    // Optional audio sub-chunk (presence flagged by a boolean).
    if (reader.ReadBoolean())
    {
        ChunkEAudioState audio = new ChunkEAudioState();
        audio.Read(reader);
        Audio = audio.Audio;
    }
    // Optional respawn point (x, y singles).
    if (reader.ReadBoolean())
    {
        RespawnPoint = new Vector2(reader.ReadSingle(), reader.ReadSingle());
    }
    // Inventory: three booleans packed into one byte, then the dash count.
    Inventory = new PlayerInventory();
    bools = reader.ReadByte();
    Inventory.Backpack = UnpackBool(bools, 0);
    Inventory.DreamDash = UnpackBool(bools, 1);
    Inventory.NoRefills = UnpackBool(bools, 2);
    Inventory.Dashes = reader.ReadByte();
    // Each collection below is prefixed by a one-byte count (max 255 entries).
    Flags = new HashSet <string>();
    count = reader.ReadByte();
    for (int i = 0; i < count; i++)
    {
        Flags.Add(reader.ReadNullTerminatedString());
    }
    LevelFlags = new HashSet <string>();
    count = reader.ReadByte();
    for (int i = 0; i < count; i++)
    {
        LevelFlags.Add(reader.ReadNullTerminatedString());
    }
    // EntityID sets are stored as (level name, id) pairs.
    Strawberries = new HashSet <EntityID>();
    count = reader.ReadByte();
    for (int i = 0; i < count; i++)
    {
        Strawberries.Add(new EntityID(reader.ReadNullTerminatedString(), reader.ReadInt32()));
    }
    DoNotLoad = new HashSet <EntityID>();
    count = reader.ReadByte();
    for (int i = 0; i < count; i++)
    {
        DoNotLoad.Add(new EntityID(reader.ReadNullTerminatedString(), reader.ReadInt32()));
    }
    Keys = new HashSet <EntityID>();
    count = reader.ReadByte();
    for (int i = 0; i < count; i++)
    {
        Keys.Add(new EntityID(reader.ReadNullTerminatedString(), reader.ReadInt32()));
    }
    Counters = new List <Session.Counter>();
    count = reader.ReadByte();
    for (int i = 0; i < count; i++)
    {
        Counters.Add(new Session.Counter { Key = reader.ReadNullTerminatedString(), Value = reader.ReadInt32() });
    }
    // Optional strings; Nullify() presumably maps empty strings back to null — confirm.
    FurthestSeenLevel = reader.ReadNullTerminatedString()?.Nullify();
    StartCheckpoint = reader.ReadNullTerminatedString()?.Nullify();
    ColorGrade = reader.ReadNullTerminatedString()?.Nullify();
    // Summit gems: bit-packed booleans, a fresh byte every 8 entries.
    count = reader.ReadByte();
    SummitGems = new bool[count];
    for (int i = 0; i < count; i++)
    {
        if ((i % 8) == 0)
        {
            bools = reader.ReadByte();
        }
        SummitGems[i] = UnpackBool(bools, i % 8);
    }
    // Miscellaneous session flags packed into a single byte.
    bools = reader.ReadByte();
    FirstLevel = UnpackBool(bools, 0);
    Cassette = UnpackBool(bools, 1);
    HeartGem = UnpackBool(bools, 2);
    Dreaming = UnpackBool(bools, 3);
    GrabbedGolden = UnpackBool(bools, 4);
    HitCheckpoint = UnpackBool(bools, 5);
    // Rendering tweaks, elapsed time, and core mode round out the snapshot.
    LightingAlphaAdd = reader.ReadSingle();
    BloomBaseAdd = reader.ReadSingle();
    DarkRoomAlpha = reader.ReadSingle();
    Time = reader.ReadInt64();
    CoreMode = (Session.CoreModes)reader.ReadByte();
}
/// <summary>
/// Produces a fresh AudioState from the collected sound sources and listener
/// position. The <paramref name="old"/> state is not consulted.
/// </summary>
public AudioState Build(AudioState old) => new AudioState(soundSources, listenerPos);
// Patched constructor: forwards straight to the vanilla CheckpointData
// constructor. audioState defaults to null, i.e. no audio override.
public patch_CheckpointData(string level, string name, PlayerInventory?inventory = null, bool dreaming = false, AudioState audioState = null)
    : base(level, name, inventory, dreaming, audioState)
{
}
// Update is called once per frame.
// Advances to the next guest once the current guest's audio has finished
// playing AND the player has responded to it.
void Update()
{
    if (! this.currentAudioState.audioSource.isPlaying && this.responded == true)
    {
        print("changing guest");
        // NOTE(review): guestIndex is incremented with no bounds check; if the
        // last prefab in guestList is not "CircularGuest 1" this will index
        // past the end of the list — confirm against the configured prefabs.
        guestIndex++;
        isACircle = false;
        if (!guestList[guestIndex].name.Equals( "CircularGuest 1"))
        {
            // Regular guest: swap the current instance for the next prefab.
            GameObject.Destroy(current);
            current = (GameObject)GameObject.Instantiate(guestList[guestIndex], this.transform.position, this.transform.rotation);
            this.currentAudioState = current.GetComponent<AudioState>();
            this.responded = false;
        }
        else
        {
            // Special case: the circular guest is driven by CircularAudioState
            // instead; note the previous guest object is not destroyed here.
            print ("circles");
            isACircle = true;
            circles = GameObject.Instantiate(guestList[guestIndex]).GetComponent<CircularAudioState>();
        }
        StartIntro ();
    }
}
/// <summary>
/// Synchronizes the bottom app bar and transport buttons with the current
/// audio state: full controls while playing, play-ready while paused, and a
/// hidden bar for any other (stopped/idle) state.
/// </summary>
private void UpdateAudioControls(AudioState state)
{
    switch (state)
    {
        case AudioState.Playing:
            // Keep the bar expanded and expose stop/pause/skip controls.
            BottomAppBar.Visibility = Visibility.Visible;
            BottomAppBar.ClosedDisplayMode = AppBarClosedDisplayMode.Compact;
            AudioPlayButton.Visibility = Visibility.Collapsed;
            AudioStopButton.Visibility = Visibility.Visible;
            AudioPauseButton.Visibility = Visibility.Visible;
            AudioSkipForwardButton.Visibility = Visibility.Visible;
            AudioSkipBackwardButton.Visibility = Visibility.Visible;
            break;
        case AudioState.Paused:
            // Minimal bar with play available; pause hidden while paused.
            BottomAppBar.Visibility = Visibility.Visible;
            BottomAppBar.ClosedDisplayMode = AppBarClosedDisplayMode.Minimal;
            AudioPlayButton.Visibility = Visibility.Visible;
            AudioStopButton.Visibility = Visibility.Visible;
            AudioPauseButton.Visibility = Visibility.Collapsed;
            AudioSkipForwardButton.Visibility = Visibility.Visible;
            AudioSkipBackwardButton.Visibility = Visibility.Visible;
            break;
        default:
            // Stopped or any other state: hide the whole transport UI.
            BottomAppBar.Visibility = Visibility.Collapsed;
            AudioPlayButton.Visibility = Visibility.Visible;
            AudioStopButton.Visibility = Visibility.Collapsed;
            AudioPauseButton.Visibility = Visibility.Collapsed;
            AudioSkipForwardButton.Visibility = Visibility.Collapsed;
            AudioSkipBackwardButton.Visibility = Visibility.Collapsed;
            break;
    }
}
// Halts playback on the DirectSound buffer (if one was created) and
// records the stream as stopped.
public void stop()
{
    if (audioBuffer != null)
    {
        audioBuffer.Stop();
    }
    // State is updated even when no buffer exists.
    audioState = AudioState.STOPPED;
}
// Promotes the queued state: the pending nextAudioState becomes the
// current audioState.
public void state2Next()
{
    audioState = nextAudioState;
}
// Defers playback start: the buffer begins playing on the next write()
// call, which checks for START_PLAY_AFTER_NEXT_WRITE.
public void startPlayAfterNextWrite()
{
    audioState = AudioState.START_PLAY_AFTER_NEXT_WRITE;
}
// P/Invoke into the native library: retrieves the current audio state for
// the given handle via the out parameter. The int return is presumably a
// native status/error code — confirm against the native header.
internal static extern int GetAudioState(IntPtr handle, out AudioState state);
/// <summary>
/// Stops any active playback and releases the underlying MediaPlayer.
/// </summary>
/// <remarks>
/// Fix: the original dereferenced <c>player</c> unconditionally, throwing a
/// NullReferenceException when nothing was playing — e.g. after the Completion
/// handler in InitAndPlayAudio has already released the player and set it to
/// null. Teardown now guards on null, stops only if actually playing, and
/// calls Reset() before Release(), matching the sequence in InitAndPlayAudio.
/// </remarks>
public void StopAudio ()
{
    if (player != null)
    {
        if (player.IsPlaying)
        {
            player.Stop ();
        }
        player.Reset ();
        player.Release ();
        player = null;
    }
    // Always reflect the stopped state, even if there was nothing to stop.
    AudioState = AudioState.Stopped;
}
// Resets the audio pipeline: stops the DirectSound buffer, rewinds it and
// overwrites it with silence, then clears all position/pts bookkeeping so
// the next write() restarts playback cleanly.
public void flush()
{
    if (audioBuffer != null)
    {
        audioBuffer.Stop();
        audioBuffer.CurrentPosition = 0;
        // Fill with silence so stale samples are not audible when the buffer loops.
        audioBuffer.Write(silence, 0, LockFlags.None);
    }
    // Reset the write offset and all play/pts position tracking.
    offsetBytes = 0;
    prevPlayPos = 0;
    ptsPos = 0;
    prevPtsPos = 0;
    playLoops = 0;
    ptsLoops = 0;
    // Playback resumes automatically on the next write() call.
    audioState = AudioState.START_PLAY_AFTER_NEXT_WRITE;
}
// Plays the start clip on the audio source and moves the state machine to Start.
void audioStartSet()
{
    source.clip = startClip;
    source.Play();
    states = AudioState.Start;
}
// Writes one decoded audio frame into the circular DirectSound buffer and,
// if playback was deferred, starts looping playback after this first write.
void write(VideoLib.AudioFrame frame)
{
    // Nothing to do without a buffer or with an empty frame.
    if (audioBuffer == null || frame.Length == 0) return;
    // store pts for this frame and the byte offset at which this frame is
    // written
    pts = frame.Pts;
    ptsPos = offsetBytes;
    int playPos, writePos;
    audioBuffer.GetCurrentPosition(out playPos, out writePos);
    // If our write offset has fallen inside the region the device is currently
    // consuming, writing there would corrupt live audio — log and jump ahead
    // to the device's safe write position.
    if (playPos <= offsetBytes && offsetBytes < writePos)
    {
        log.Warn("playpos:" + playPos.ToString() + " offset:" + offsetBytes.ToString() + " writePos:" + writePos.ToString() + " dataSize:" + frame.Length.ToString());
        offsetBytes = writePos;
    }
    audioBuffer.Write(frame.Data, 0, frame.Length, offsetBytes, LockFlags.None);
    // Advance the write offset, wrapping around the circular buffer.
    offsetBytes = (offsetBytes + frame.Length) % bufferSizeBytes;
    // Deferred start: begin looping playback now that data is in place.
    if (audioState == AudioState.START_PLAY_AFTER_NEXT_WRITE)
    {
        audioBuffer.Play(0, PlayFlags.Looping);
        audioState = AudioState.PLAYING;
    }
}
/// <summary>
/// Returns true while the given audio handle is audible — fading in,
/// fully playing, or fading out.
/// </summary>
public bool IsPlaying(int audioHandle)
{
    switch (GetAudioState(audioHandle))
    {
        case AudioState.FadeIn:
        case AudioState.Playing:
        case AudioState.FadeOut:
            return true;
        default:
            return false;
    }
}
unsafe public static int Callback4Port(void* input, void* output, int frameCount, // changes yt7pwr PA19.PaStreamCallbackTimeInfo* timeInfo, int statusFlags, void* userData) { try { if (audio_stop) return callback_return; //audio_run.WaitOne(100); #if(WIN64) Int64* array_ptr = (Int64*)input; float* in_l_ptr1 = (float*)array_ptr[0]; float* in_r_ptr1 = (float*)array_ptr[1]; float* in_l_ptr2 = (float*)array_ptr[2]; float* in_r_ptr2 = (float*)array_ptr[3]; array_ptr = (Int64*)output; float* out_l_ptr1 = (float*)array_ptr[0]; float* out_r_ptr1 = (float*)array_ptr[1]; float* out_l_ptr2 = (float*)array_ptr[2]; float* out_r_ptr2 = (float*)array_ptr[3]; #endif #if(WIN32) int* array_ptr = (int*)input; float* in_l_ptr1 = (float*)array_ptr[0]; float* in_r_ptr1 = (float*)array_ptr[1]; float* in_l_ptr2 = (float*)array_ptr[2]; float* in_r_ptr2 = (float*)array_ptr[3]; array_ptr = (int*)output; float* out_l_ptr1 = (float*)array_ptr[0]; float* out_r_ptr1 = (float*)array_ptr[1]; float* out_l_ptr2 = (float*)array_ptr[2]; float* out_r_ptr2 = (float*)array_ptr[3]; #endif float* in_l = null, in_l_VAC = null, in_r = null, in_r_VAC = null; float* out_l1 = null, out_r1 = null, out_l2 = null, out_r2 = null; out_l1 = out_l_ptr1; out_r1 = out_r_ptr1; out_l2 = out_l_ptr2; out_r2 = out_r_ptr2; if (!mox && !voice_message_record) // rx { if (RX_input_1_2) { if (!console.RX_IQ_channel_swap) { in_l = in_l_ptr1; in_r = in_r_ptr1; } else { in_r = in_l_ptr1; in_l = in_r_ptr1; } } else { if (!console.RX_IQ_channel_swap) { in_l = in_l_ptr2; in_r = in_r_ptr2; } else { in_r = in_l_ptr2; in_l = in_r_ptr2; } } } else if (mox || voice_message_record) // tx or voice msg recording { if (console.LineMicShared) { if (RX_input_1_2) { in_l = in_l_ptr1; in_r = in_r_ptr1; } else { in_l = in_l_ptr2; in_r = in_r_ptr2; } } else { if (RX_input_1_2) { in_l = in_l_ptr2; in_r = in_r_ptr2; } else { in_l = in_l_ptr1; in_r = in_r_ptr1; } } if (console.TX_IQ_channel_swap) { if (TX_out_1_2) { out_l1 = out_r_ptr1; out_r1 = 
out_l_ptr1; out_l2 = out_r_ptr2; out_r2 = out_l_ptr2; } else { out_l1 = out_r_ptr2; out_r1 = out_l_ptr2; out_l2 = out_r_ptr1; out_r2 = out_l_ptr1; } } else { if (TX_out_1_2) { out_l1 = out_l_ptr1; out_r1 = out_r_ptr1; out_l2 = out_l_ptr2; out_r2 = out_r_ptr2; } else { out_l1 = out_l_ptr2; out_r1 = out_r_ptr2; out_l2 = out_l_ptr1; out_r2 = out_r_ptr1; } } } if (voice_message_playback) // yt7pwr voice_msg_file_reader.GetPlayBuffer(in_l, in_r); if (wave_playback) wave_file_reader.GetPlayBuffer(in_l, in_r); if (voice_message_record) // yt7pwr { try { if (vac_enabled) { if (rb_vacIN_l.ReadSpace() >= frameCount && rb_vacIN_r.ReadSpace() >= frameCount) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l_ptr1, frameCount); rb_vacIN_r.ReadPtr(in_r_ptr1, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { ClearBuffer(in_l_ptr1, frameCount); ClearBuffer(in_r_ptr1, frameCount); VACDebug("rb_vacIN underflow VoiceMsg record"); } } ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp); voice_msg_file_writer.AddWriteBuffer(in_l, in_r); } catch (Exception ex) { VACDebug("Audio: " + ex.ToString()); } } if (wave_record && !vac_primary_audiodev && !mox && record_rx_preprocessed || (wave_record && !vac_primary_audiodev && mox && record_tx_preprocessed)) wave_file_writer.AddWriteBuffer(in_l, in_r); if (phase) { //phase_mutex.WaitOne(); Marshal.Copy(new IntPtr(in_l), phase_buf_l, 0, frameCount); Marshal.Copy(new IntPtr(in_r), phase_buf_r, 0, frameCount); //phase_mutex.ReleaseMutex(); } if (!MultiPSK_server_enable && vac_enabled && loopDLL_enabled && mox) { int i; in_l_VAC = in_l; // save pointer in_r_VAC = in_r; // save pointer fixed (double* loopDLL_inl_ptr = &(loopDLL_inl[0])) { try { if (console.loopDLL.ReadTXBuffer(loopDLL_inl_ptr)) { for (i = 0; i < frameCount; i++) { in_l[0] = (float)(loopDLL_inl[i] / 1e5); in_r[0] = 0.0f; // (float)(loopDLL_inl[i] / 1e5); in_l++; in_r++; } } else { } } catch (Exception ex) { 
console.chkPower.Checked = false; Thread.Sleep(100); MessageBox.Show("loop.dll error!Check your PATH settings!\n" + ex.ToString()); } } in_l = in_l_VAC; // restore pointer in_r = in_r_VAC; // restore pointer } else if (mox && MultiPSK_server_enable && console.MultiPSKServer.ClientConnected) { float* tmp_l = in_l; float* tmp_r = in_r; for (int i = 0; i < frameCount; i++) { in_l[0] = MultiPSK_output_bufer_l[i]; in_r[0] = 0.0f; // MultiPSK_output_bufer_l[i]; in_l++; in_r++; // i++; } in_l = tmp_l; in_r = tmp_r; } switch (current_audio_state1) { case AudioState.DTTSP: #region VOX float* vox_l = null, vox_r = null; if (vox_enabled && !vac_enabled) { switch (soundcard) { case SoundCard.FIREBOX: case SoundCard.EDIROL_FA_66: vox_l = in_l_ptr1; vox_r = in_r_ptr1; break; case SoundCard.DELTA_44: default: vox_l = in_l_ptr2; vox_r = in_r_ptr2; break; } if (dsp_mode == DSPMode.LSB || dsp_mode == DSPMode.USB || dsp_mode == DSPMode.DSB || dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM || dsp_mode == DSPMode.FMN) { ScaleBuffer(vox_l, vox_l, frameCount, (float)mic_preamp); ScaleBuffer(vox_r, vox_r, frameCount, (float)mic_preamp); Peak = MaxSample(vox_l, vox_r, frameCount); // compare power to threshold if (Peak > vox_threshold) vox_active = true; else vox_active = false; } } else if (vox_enabled && vac_enabled) { if ((rb_vacIN_l.WriteSpace() >= frameCount) && (rb_vacIN_r.WriteSpace() >= frameCount)) { if (mox) { vox_l = in_l; vox_r = in_r; Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l, frameCount); rb_vacIN_r.ReadPtr(in_r, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { fixed (float* vac_inl_ptr = &(vac_inl[0])) fixed (float* vac_inr_ptr = &(vac_inr[0])) { vox_l = vac_inl_ptr; vox_r = vac_inr_ptr; Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(vac_inl_ptr, frameCount); rb_vacIN_r.ReadPtr(vac_inr_ptr, frameCount); Win32.LeaveCriticalSection(cs_vac); } } } if (dsp_mode == DSPMode.LSB || dsp_mode == DSPMode.USB || dsp_mode == DSPMode.DSB || dsp_mode 
== DSPMode.AM || dsp_mode == DSPMode.SAM || dsp_mode == DSPMode.FMN) { Peak = MaxSample(vox_l, vox_r, frameCount); // compare power to threshold if (Peak > vox_threshold) vox_active = true; else vox_active = false; } } #endregion if (dsp_mode == DSPMode.CWU || dsp_mode == DSPMode.CWL) { DttSP.CWtoneExchange(out_l1, out_r1, frameCount); } // scale input with mic preamp if ((mox || voice_message_record) && !vac_enabled && (dsp_mode == DSPMode.LSB || dsp_mode == DSPMode.USB || dsp_mode == DSPMode.DSB || dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM || dsp_mode == DSPMode.FMN)) { if (wave_playback) { ScaleBuffer(in_l, in_l, frameCount, (float)wave_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)wave_preamp); } else { ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp); } if (echo_enable && (dsp_mode != DSPMode.DIGL || dsp_mode != DSPMode.DIGU)) { if (!echo_pause) { echoRB.WritePtr(in_l, frameCount); if (echoRB.ReadSpace() >= echo_delay - 2) { EchoMixer(in_l, in_r, frameCount); } } } } #region Input Signal Source switch (current_input_signal) { case SignalSource.SOUNDCARD: break; case SignalSource.SINE: SineWave(in_l, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(in_r, frameCount, phase_accumulator1, sine_freq1); ScaleBuffer(in_l, in_l, frameCount, (float)input_source_scale); ScaleBuffer(in_r, in_r, frameCount, (float)input_source_scale); break; case SignalSource.NOISE: Noise(in_l, frameCount); Noise(in_r, frameCount); break; case SignalSource.TRIANGLE: Triangle(in_l, frameCount, sine_freq1); CopyBuffer(in_l, in_r, frameCount); break; case SignalSource.SAWTOOTH: Sawtooth(in_l, frameCount, sine_freq1); CopyBuffer(in_l, in_r, frameCount); break; } #endregion if (!loopDLL_enabled && vac_enabled && !voice_message_playback && rb_vacIN_l != null && rb_vacIN_r != null && rb_vacOUT_l != null && rb_vacOUT_r != null) { if (mox) // transmit mode { if (rb_vacIN_l.ReadSpace() >= 
frameCount) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l, frameCount); rb_vacIN_r.ReadPtr(in_r, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { ClearBuffer(in_l, frameCount); ClearBuffer(in_r, frameCount); VACDebug("rb_vacIN underflow switch time Cb4!"); } ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp); if (echo_enable && (dsp_mode != DSPMode.DIGL || dsp_mode != DSPMode.DIGU)) { if (!echo_pause) { echoRB.WritePtr(in_l, frameCount); if (echoRB.ReadSpace() > echo_delay - 2) { EchoMixer(in_l, in_r, frameCount); } } } } } else if (loopDLL_enabled && vac_enabled && mox) { ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp); ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp); } DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l1, out_r1, frameCount); #region Output Signal Source switch (current_output_signal) { case SignalSource.SOUNDCARD: break; case SignalSource.SINE: switch (ChannelTest) { case TestChannels.Left: SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); break; case TestChannels.Right: SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); break; case TestChannels.Both: SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1); SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator2 = CosineWave(out_r_ptr1, frameCount, phase_accumulator2, sine_freq1); break; } break; case SignalSource.NOISE: switch (ChannelTest) { case TestChannels.Both: Noise(out_l_ptr1, frameCount); Noise(out_r_ptr1, frameCount); break; case TestChannels.Left: Noise(out_l_ptr1, frameCount); break; case TestChannels.Right: Noise(out_r_ptr1, frameCount); break; } break; 
case SignalSource.TRIANGLE: switch (ChannelTest) { case TestChannels.Both: Triangle(out_l_ptr1, frameCount, sine_freq1); CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount); break; case TestChannels.Left: Triangle(out_l_ptr1, frameCount, sine_freq1); break; case TestChannels.Right: Triangle(out_r_ptr1, frameCount, sine_freq1); break; } break; case SignalSource.SAWTOOTH: switch (ChannelTest) { case TestChannels.Both: Sawtooth(out_l_ptr1, frameCount, sine_freq1); CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount); break; case TestChannels.Left: Sawtooth(out_l_ptr1, frameCount, sine_freq1); break; case TestChannels.Right: Sawtooth(out_r_ptr1, frameCount, sine_freq1); break; } break; } #endregion break; case AudioState.CW: if (next_audio_state1 == AudioState.SWITCH) { ClearBuffer(in_l, frameCount); ClearBuffer(in_r, frameCount); if (vac_enabled) { if ((rb_vacIN_l.ReadSpace() >= frameCount) && (rb_vacIN_r.ReadSpace() >= frameCount)) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l_ptr1, frameCount); rb_vacIN_r.ReadPtr(in_r_ptr1, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { VACDebug("rb_vacIN underflow switch time CB4!"); } } DttSP.CWtoneExchange(out_l1, out_r1, frameCount); ClearBuffer(out_l1, frameCount); ClearBuffer(out_r1, frameCount); if (switch_count == 0) next_audio_state1 = AudioState.CW; switch_count--; } else { DttSP.CWtoneExchange(out_l1, out_r1, frameCount); } break; case AudioState.SINL_COSR: out_l1 = out_l_ptr1; out_r1 = out_r_ptr1; out_l2 = out_l_ptr2; out_r2 = out_r_ptr2; if (two_tone) { double dump; SineWave2Tone(out_l1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out dump, out dump); CosineWave2Tone(out_r1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); } else { SineWave(out_l2, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = CosineWave(out_r2, frameCount, phase_accumulator1, sine_freq1); } break; case 
AudioState.SINL_SINR: out_l1 = out_l_ptr1; out_r1 = out_r_ptr1; out_l2 = out_l_ptr2; out_r2 = out_r_ptr2; if (two_tone) { SineWave2Tone(out_l1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); CopyBuffer(out_l1, out_r1, frameCount); } else { phase_accumulator1 = SineWave(out_l1, frameCount, phase_accumulator1, sine_freq1); CopyBuffer(out_l1, out_r1, frameCount); } break; case AudioState.SINL_NOR: out_l1 = out_l_ptr1; out_r1 = out_r_ptr1; out_l2 = out_l_ptr2; out_r2 = out_r_ptr2; if (two_tone) { SineWave2Tone(out_l1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); ClearBuffer(out_r1, frameCount); } else { phase_accumulator1 = SineWave(out_l1, frameCount, phase_accumulator1, sine_freq1); ClearBuffer(out_r1, frameCount); } break; case AudioState.CW_COSL_SINR: if (mox) { if (two_tone) { double dump; if (console.tx_IF) { CosineWave2Tone(out_r1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + console.TX_IF_shift * 1e5, sine_freq2 + console.TX_IF_shift * 1e5, out dump, out dump); SineWave2Tone(out_l1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + console.TX_IF_shift * 1e5, sine_freq2 + console.TX_IF_shift * 1e5, out phase_accumulator1, out phase_accumulator2); } else { double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6; CosineWave2Tone(out_r1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + osc, sine_freq2 + osc, out dump, out dump); SineWave2Tone(out_l1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1 + osc, sine_freq2 + osc, out phase_accumulator1, out phase_accumulator2); } } else { if (console.tx_IF) { CosineWave(out_r1, frameCount, phase_accumulator1, sine_freq1 + console.TX_IF_shift * 1e5); phase_accumulator1 = SineWave(out_l1, frameCount, phase_accumulator1, sine_freq1 + console.TX_IF_shift * 1e5); } else { double osc = (console.VFOAFreq - 
console.LOSCFreq) * 1e6; CosineWave(out_r1, frameCount, phase_accumulator1, sine_freq1 + osc); phase_accumulator1 = SineWave(out_l1, frameCount, phase_accumulator1, sine_freq1 + osc); } } float iq_gain = 1.0f + (1.0f - (1.0f + 0.001f * (float)console.SetupForm.udDSPImageGainTX.Value)); float iq_phase = 0.001f * (float)console.SetupForm.udDSPImagePhaseTX.Value; CorrectIQBuffer(out_l1, out_r1, iq_gain, iq_phase, frameCount); } break; case AudioState.COSL_SINR: out_l1 = out_l_ptr1; out_r1 = out_r_ptr1; out_l2 = out_l_ptr2; out_r2 = out_r_ptr2; if (two_tone) { double dump; CosineWave2Tone(out_l1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out dump, out dump); SineWave2Tone(out_r1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); } else { CosineWave(out_l1, frameCount, phase_accumulator1, sine_freq1); phase_accumulator1 = SineWave(out_r1, frameCount, phase_accumulator1, sine_freq1); } break; case AudioState.NOL_SINR: out_l1 = out_l_ptr1; out_r1 = out_r_ptr1; out_l2 = out_l_ptr2; out_r2 = out_r_ptr2; if (two_tone) { ClearBuffer(out_l1, frameCount); SineWave2Tone(out_r1, frameCount, phase_accumulator1, phase_accumulator2, sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2); } else { ClearBuffer(out_l1, frameCount); phase_accumulator1 = SineWave(out_r1, frameCount, phase_accumulator1, sine_freq1); } break; case AudioState.NOL_NOR: ClearBuffer(out_l_ptr1, frameCount); ClearBuffer(out_r_ptr1, frameCount); break; case AudioState.PIPE: CopyBuffer(in_l_ptr1, out_l_ptr1, frameCount); CopyBuffer(in_r_ptr1, out_r_ptr1, frameCount); break; case AudioState.SWITCH: if (!ramp_down && !ramp_up) { switch (dsp_mode) { case DSPMode.CWL: case DSPMode.CWU: break; default: ClearBuffer(in_l_ptr1, frameCount); ClearBuffer(in_r_ptr1, frameCount); break; } if (mox != next_mox) mox = next_mox; } if (vac_enabled) { if ((rb_vacIN_l.ReadSpace() >= frameCount) && 
(rb_vacIN_r.ReadSpace() >= frameCount)) { Win32.EnterCriticalSection(cs_vac); rb_vacIN_l.ReadPtr(in_l_ptr1, frameCount); rb_vacIN_r.ReadPtr(in_r_ptr1, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { VACDebug("rb_vacIN underflow switch time CB4!"); } } DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l1, out_r1, frameCount); if (ramp_down) { int i; for (i = 0; i < frameCount; i++) { float w = (float)Math.Sin(ramp_val * Math.PI / 2.0); out_l_ptr1[i] *= w; out_r_ptr1[i] *= w; ramp_val += ramp_step; if (++ramp_count >= ramp_samples) { ramp_down = false; break; } } if (ramp_down) { for (; i < frameCount; i++) { out_l_ptr1[i] = 0.0f; out_r_ptr1[i] = 0.0f; } } } else if (ramp_up) { for (int i = 0; i < frameCount; i++) { float w = (float)Math.Sin(ramp_val * Math.PI / 2.0); out_l_ptr1[i] *= w; out_r_ptr1[i] *= w; ramp_val += ramp_step; if (++ramp_count >= ramp_samples) { ramp_up = false; break; } } } else { ClearBuffer(out_l_ptr1, frameCount); ClearBuffer(out_r_ptr1, frameCount); } if (next_audio_state1 == AudioState.CW) { //cw_delay = 1; DttSP.CWtoneExchange(out_l1, out_r1, frameCount); } else if (switch_count == 1) DttSP.CWRingRestart(); switch_count--; //if(switch_count == ramp_up_num) RampUp = true; if (switch_count == 0) current_audio_state1 = next_audio_state1; break; } // scale output for VAC if (vac_enabled && !loopDLL_enabled && rb_vacIN_l != null && rb_vacIN_r != null && rb_vacOUT_l != null && rb_vacOUT_r != null) { fixed (float* outl_ptr = &(vac_outl[0])) fixed (float* outr_ptr = &(vac_outr[0])) { if (!mox) { ScaleBuffer(out_l1, outl_ptr, frameCount, (float)vac_rx_scale); ScaleBuffer(out_r1, outr_ptr, frameCount, (float)vac_rx_scale); } else // zero samples going back to VAC since TX monitor is off { ScaleBuffer(out_l1, outl_ptr, frameCount, 0.0f); ScaleBuffer(out_r1, outr_ptr, frameCount, 0.0f); } if (!mox) { if (sample_rateVAC == sample_rate1) { if ((rb_vacOUT_l.WriteSpace() >= frameCount) && (rb_vacOUT_r.WriteSpace() >= frameCount)) { if 
(VACDirectI_Q) { if (vac_correct_iq) CorrectIQBuffer(in_l, in_r, vac_iq_gain, vac_iq_phase, frameCount); Win32.EnterCriticalSection(cs_vac); rb_vacOUT_l.WritePtr(in_l, frameCount); rb_vacOUT_r.WritePtr(in_r, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { Win32.EnterCriticalSection(cs_vac); rb_vacOUT_l.WritePtr(outl_ptr, frameCount); rb_vacOUT_r.WritePtr(outr_ptr, frameCount); Win32.LeaveCriticalSection(cs_vac); } } else { VACDebug("rb_vacOUT overflow Cb4"); } } else { fixed (float* res_outl_ptr = &(res_outl[0])) fixed (float* res_outr_ptr = &(res_outr[0])) { int outsamps = 0; if ((rb_vacOUT_l.WriteSpace() >= outsamps) && (rb_vacOUT_r.WriteSpace() >= outsamps)) { if (VACDirectI_Q) { DttSP.DoResamplerF(in_l, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l); DttSP.DoResamplerF(in_r, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r); if (vac_correct_iq) CorrectIQBuffer(res_outl_ptr, res_outr_ptr, vac_iq_gain, vac_iq_phase, frameCount); rb_vacOUT_l.WritePtr(res_outl_ptr, frameCount); rb_vacOUT_r.WritePtr(res_outr_ptr, frameCount); Win32.LeaveCriticalSection(cs_vac); } else { DttSP.DoResamplerF(outl_ptr, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l); DttSP.DoResamplerF(outr_ptr, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r); Win32.EnterCriticalSection(cs_vac); rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps); rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps); Win32.LeaveCriticalSection(cs_vac); } } else { VACDebug("rb_vacOUT overflow Cb4"); } } } } } } if (wave_record && !vac_primary_audiodev && !mox && !record_rx_preprocessed) // post process audio wave_file_writer.AddWriteBuffer(out_l1, out_r1); else if (wave_record && mox && !record_tx_preprocessed) wave_file_writer.AddWriteBuffer(out_l2, out_r2); if (console.CurrentDisplayMode == DisplayMode.SCOPE || console.CurrentDisplayMode == DisplayMode.PANASCOPE) DoScope(out_l1, frameCount); double vol_l = monitor_volume_left; double vol_r = monitor_volume_right; if (PrimaryDirectI_Q && !mox) { if 
(primary_correct_iq) { CorrectIQBuffer(in_l, in_r, primary_iq_gain, primary_iq_phase, frameCount); } ScaleBuffer(in_l, out_l1, frameCount, (float)vol_l); ScaleBuffer(in_r, out_r1, frameCount, (float)vol_r); } else if (!MultiPSK_server_enable && loopDLL_enabled && vac_enabled && !mox) { int i; double[] buffer = new double[frameCount]; fixed (double* buffer_ptr = &(buffer[0])) fixed (float* res_outl_ptr = &(res_outl[0])) { ScaleBuffer(out_l1, res_outl_ptr, frameCount, (float)vol_l); if (mon) { ScaleBuffer(out_l1, out_l1, frameCount, (float)vol_l); ScaleBuffer(out_r1, out_r1, frameCount, (float)vol_r); ScaleBuffer(out_l1, out_l2, frameCount, (float)vol_l); ScaleBuffer(out_r1, out_r2, frameCount, (float)vol_r); } else { ClearBuffer(out_l1, frameCount); ClearBuffer(out_r1, frameCount); ClearBuffer(out_l2, frameCount); ClearBuffer(out_r2, frameCount); } for (i = 0; i < frameCount; i++) { buffer[i] = (double)1e5 * res_outl[i]; } try { console.loopDLL.WriteRXBuffer(buffer_ptr); } catch (Exception ex) { console.chkPower.Checked = false; Thread.Sleep(100); MessageBox.Show("loop.dll error!Check your PATH settings!\n" + ex.ToString()); } } } else if (!mox && MultiPSK_server_enable && run_MultiPSK_server_thread) { fixed (float* res_outl_ptr = &(MultiPSK_output_bufer_l[0])) { int outsamps; ScaleBuffer(out_l1, out_l1, frameCount, (float)monitor_volume_left); DttSP.DoResamplerF(out_l1, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l); if (mon) { float[] mon_out_l = new float[frameCount]; float[] mon_out_r = new float[frameCount]; Marshal.Copy((IntPtr)out_l1, mon_out_l, 0, frameCount); Marshal.Copy((IntPtr)out_r1, mon_out_r, 0, frameCount); ChangeVolume(mon_out_l, out_l1, frameCount, (float)monitor_volume_left); ChangeVolume(mon_out_r, out_r1, frameCount, (float)monitor_volume_right); ChangeVolume(mon_out_l, out_l2, frameCount, (float)monitor_volume_left); ChangeVolume(mon_out_r, out_r2, frameCount, (float)monitor_volume_right); } else { ClearBuffer(out_l2, frameCount); 
ClearBuffer(out_r2, frameCount); ClearBuffer(out_l2, frameCount); ClearBuffer(out_r2, frameCount); } } MultiPSK_event.Set(); } else { if (!mox) // RX { switch (mute_ch) { case MuteChannels.Left: ScaleBuffer(out_r1, out_r1, frameCount, (float)vol_r); ScaleBuffer(out_r1, out_l1, frameCount, 1.0f); ScaleBuffer(out_r1, out_r2, frameCount, 1.0f); ScaleBuffer(out_r1, out_l2, frameCount, 1.0f); break; case MuteChannels.Right: ScaleBuffer(out_l1, out_l1, frameCount, (float)vol_l); ScaleBuffer(out_l1, out_r1, frameCount, 1.0f); ScaleBuffer(out_l1, out_r2, frameCount, 1.0f); ScaleBuffer(out_l1, out_l2, frameCount, 1.0f); break; case MuteChannels.Both: ClearBuffer(out_l1, frameCount); ClearBuffer(out_r1, frameCount); ClearBuffer(out_l2, frameCount); ClearBuffer(out_r2, frameCount); break; case MuteChannels.None: ScaleBuffer(out_l1, out_l1, frameCount, (float)vol_l); ScaleBuffer(out_r1, out_r1, frameCount, (float)vol_r); ScaleBuffer(out_l1, out_l2, frameCount, (float)vol_l); ScaleBuffer(out_r1, out_r2, frameCount, (float)vol_r); break; } } else { // TX float[] tmp_out_l = new float[frameCount]; float[] tmp_out_r = new float[frameCount]; double tx_vol = TXScale; Marshal.Copy((IntPtr)out_l1, tmp_out_l, 0, frameCount); Marshal.Copy((IntPtr)out_r1, tmp_out_r, 0, frameCount); if (high_pwr_am) { if (dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM) tx_vol *= 1.414; } ChangeVolume(tmp_out_r, out_r1, frameCount, (float)tx_vol); ChangeVolume(tmp_out_l, out_l1, frameCount, (float)tx_vol); if (mon && !vac_mon && (dsp_mode == DSPMode.CWL || dsp_mode == DSPMode.CWU)) { DttSP.CWMonitorExchange(out_l2, out_r2, frameCount); ScaleBuffer(out_l2, out_l2, frameCount, (float)monitor_volume_left); ScaleBuffer(out_r2, out_r2, frameCount, (float)monitor_volume_right); } else if (mon && !vac_mon) { ChangeVolume(tmp_out_l, out_l2, frameCount, (float)monitor_volume_left); ChangeVolume(tmp_out_r, out_r2, frameCount, (float)monitor_volume_right); } else { ClearBuffer(out_l2, frameCount); 
ClearBuffer(out_r2, frameCount); } } } if (soundcard != SoundCard.DELTA_44) { // scale FireBox monitor output to prevent overdrive ScaleBuffer(out_l1, out_l1, frameCount, (float)(1.5f / audio_volts1)); ScaleBuffer(out_r1, out_r1, frameCount, (float)(1.5f / audio_volts1)); } //audio_run.ReleaseMutex(); return callback_return; } catch (Exception ex) { //audio_run.ReleaseMutex(); Debug.Write(ex.ToString()); return 0; } }
/// <summary>
/// Audio callback for the network AF-spectrum server path. Runs one block of
/// frames through the DSP state machine (current_audio_state1), resamples the
/// processed output for the network client and queues it on the server socket,
/// then scales the local output by the monitor/TX volume.
/// Returns 0 on success, -1 if any exception was caught.
/// </summary>
unsafe public static int NetworkServerCallbackAFSpectar(void* input, void* output, int frameCount,
    PA19.PaStreamCallbackTimeInfo* timeInfo, int statusFlags, void* userData)
{
    try
    {
        // Input/output arrive as arrays of channel-buffer pointers (non-interleaved).
        int* array_ptr = (int*)input;
        float* in_l_ptr1 = (float*)array_ptr[0];
        float* in_r_ptr1 = (float*)array_ptr[1];
        int* array_out_ptr = (int*)output;
        float* out_l_ptr1 = (float*)array_out_ptr[0];
        float* out_r_ptr1 = (float*)array_out_ptr[1];

        // Wave-file playback replaces the live input; pre-processed recording taps it here.
        if (wave_playback)
            wave_file_reader.GetPlayBuffer(in_l_ptr1, in_r_ptr1);
        else if ((wave_record && !mox && record_rx_preprocessed) ||
            (wave_record && mox && record_tx_preprocessed))
            wave_file_writer.AddWriteBuffer(in_l_ptr1, in_r_ptr1);
        else if (voice_message_record && !console.MOX)
            wave_file_writer.AddWriteBuffer(in_l_ptr1, in_r_ptr1);

        if (phase)
        {
            // Snapshot raw I/Q for the phase display.
            //phase_mutex.WaitOne();
            Marshal.Copy(new IntPtr(in_l_ptr1), phase_buf_l, 0, frameCount);
            Marshal.Copy(new IntPtr(in_r_ptr1), phase_buf_r, 0, frameCount);
            //phase_mutex.ReleaseMutex();
        }

        // Resolve channel aliases according to RX/TX I/Q swap settings.
        float* in_l = null, in_r = null, out_l = null, out_r = null;
        if (!mox)
        {
            if (!console.RX_IQ_channel_swap)
            {
                in_l = in_l_ptr1; in_r = in_r_ptr1;
                out_l = out_l_ptr1; out_r = out_r_ptr1;
            }
            else
            {
                in_l = in_r_ptr1; in_r = in_l_ptr1;
                out_l = out_r_ptr1; out_r = out_l_ptr1;
            }
        }
        else
        {
            if (voice_message_playback)
                voice_msg_file_reader.GetPlayBuffer(in_l_ptr1, in_r_ptr1);
            if (!console.TX_IQ_channel_swap)
            {
                in_r = in_l_ptr1; in_l = in_r_ptr1;
                out_r = out_l_ptr1; out_l = out_r_ptr1;
            }
            else
            {
                in_l = in_l_ptr1; in_r = in_r_ptr1;
                out_l = out_l_ptr1; out_r = out_r_ptr1;
            }
        }

        switch (current_audio_state1)
        {
            case AudioState.DTTSP:
                if (dsp_mode == DSPMode.CWU || dsp_mode == DSPMode.CWL)
                {
                    DttSP.CWtoneExchange(out_l_ptr1, out_r_ptr1, frameCount);
                }
                // scale input with mic preamp
                if (mox && !vac_enabled &&
                    (dsp_mode == DSPMode.LSB ||
                    dsp_mode == DSPMode.USB ||
                    dsp_mode == DSPMode.DSB ||
                    dsp_mode == DSPMode.AM ||
                    dsp_mode == DSPMode.SAM ||
                    dsp_mode == DSPMode.FMN))
                {
                    ScaleBuffer(in_l, in_l, frameCount, (float)mic_preamp);
                    ScaleBuffer(in_r, in_r, frameCount, (float)mic_preamp);
                }
                if (vac_enabled &&
                    rb_vacIN_l != null && rb_vacIN_r != null &&
                    rb_vacOUT_l != null && rb_vacOUT_r != null)
                {
                    if (mox)
                    {
                        // TX: pull mic audio from the VAC input ring buffers.
                        if (rb_vacIN_l.ReadSpace() >= frameCount)
                            rb_vacIN_l.ReadPtr(in_l, frameCount);
                        else
                        {
                            ClearBuffer(in_l, frameCount);
                            VACDebug("rb_vacIN underflow");
                        }
                        if (rb_vacIN_r.ReadSpace() >= frameCount)
                            rb_vacIN_r.ReadPtr(in_r, frameCount);
                        else
                        {
                            ClearBuffer(in_r, frameCount);
                            VACDebug("rb_vacIN underflow");
                        }
                        ScaleBuffer(in_l, in_l, frameCount, (float)vac_preamp);
                        ScaleBuffer(in_r, in_r, frameCount, (float)vac_preamp);
                        DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount);
                    }
                    else
                    {
                        DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount);
                        if ((rb_vacIN_l.ReadSpace() >= frameCount) &&
                            (rb_vacIN_r.ReadSpace() >= frameCount))
                        {
                            Win32.EnterCriticalSection(cs_vac);
                            rb_vacIN_l.ReadPtr(in_l, frameCount);
                            rb_vacIN_r.ReadPtr(in_r, frameCount);
                            Win32.LeaveCriticalSection(cs_vac);
                        }
                        else
                        {
                            ClearBuffer(in_l, frameCount);
                            ClearBuffer(in_r, frameCount);
                            // FIX: this message was logged twice in a row (copy/paste duplicate).
                            VACDebug("rb_vacIN underflow");
                        }
                    }
                }
                else
                    DttSP.ExchangeSamples(thread_no, in_l, in_r, out_l, out_r, frameCount);
                break;
            case AudioState.CW:
                if (next_audio_state1 == AudioState.SWITCH)
                {
                    Win32.memset(in_l_ptr1, 0, frameCount * sizeof(float));
                    Win32.memset(in_r_ptr1, 0, frameCount * sizeof(float));
                    DttSP.ExchangeSamples(thread_no, in_l_ptr1, in_r_ptr1, out_l_ptr1, out_r_ptr1, frameCount);
                    if (switch_count == 0) next_audio_state1 = AudioState.CW;
                    switch_count--;
                }
                // NOTE(review): the l/r argument order here is swapped relative to the
                // other callbacks' CWtoneExchange calls — confirm this is intentional.
                DttSP.CWtoneExchange(out_r_ptr1, out_l_ptr1, frameCount);
                break;
            case AudioState.SINL_COSR:
                if (two_tone)
                {
                    double dump;
                    SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out dump, out dump);
                    CosineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                }
                else
                {
                    SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    phase_accumulator1 = CosineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                }
                break;
            case AudioState.SINL_SINR:
                if (two_tone)
                {
                    SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                    CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                }
                else
                {
                    phase_accumulator1 = SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                }
                break;
            case AudioState.SINL_NOR:
                // NOTE(review): this case body generates a TX IF quadrature pair into
                // out_l/out_r — it matches the CW_COSL_SINR case of the sibling
                // callbacks, not their SINL_NOR case. Confirm against the state machine.
                if (mox)
                {
                    if (two_tone)
                    {
                        double dump;
                        if (console.tx_IF)
                        {
                            CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + console.TX_IF_shift * 1e5,
                                sine_freq2 + console.TX_IF_shift * 1e5, out dump, out dump);
                            SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + console.TX_IF_shift * 1e5,
                                sine_freq2 + console.TX_IF_shift * 1e5,
                                out phase_accumulator1, out phase_accumulator2);
                        }
                        else
                        {
                            double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6;
                            CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + osc, sine_freq2 + osc, out dump, out dump);
                            SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2,
                                sine_freq1 + osc, sine_freq2 + osc,
                                out phase_accumulator1, out phase_accumulator2);
                        }
                    }
                    else
                    {
                        if (console.tx_IF)
                        {
                            CosineWave(out_r, frameCount, phase_accumulator1,
                                sine_freq1 + console.TX_IF_shift * 1e5);
                            phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1,
                                sine_freq1 + console.TX_IF_shift * 1e5);
                        }
                        else
                        {
                            double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6;
                            CosineWave(out_r, frameCount, phase_accumulator1, sine_freq1 + osc);
                            phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1, sine_freq1 + osc);
                        }
                    }
                    float iq_gain = 1.0f + (1.0f - (1.0f + 0.001f * (float)console.SetupForm.udDSPImageGainTX.Value));
                    float iq_phase = 0.001f * (float)console.SetupForm.udDSPImagePhaseTX.Value;
                    CorrectIQBuffer(out_l, out_r, iq_gain, iq_phase, frameCount);
                }
                break;
            case AudioState.COSL_SINR:
                if (two_tone)
                {
                    double dump;
                    CosineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out dump, out dump);
                    SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                }
                else
                {
                    CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                }
                break;
            case AudioState.NOL_SINR:
                if (two_tone)
                {
                    ClearBuffer(out_l_ptr1, frameCount);
                    SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                        sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                }
                else
                {
                    ClearBuffer(out_l_ptr1, frameCount);
                    phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                }
                break;
            case AudioState.NOL_NOR:
                ClearBuffer(out_l_ptr1, frameCount);
                ClearBuffer(out_r_ptr1, frameCount);
                break;
            case AudioState.PIPE:
                CopyBuffer(in_l_ptr1, out_l_ptr1, frameCount);
                CopyBuffer(in_r_ptr1, out_r_ptr1, frameCount);
                break;
            case AudioState.SWITCH:
                if (!ramp_down && !ramp_up)
                {
                    ClearBuffer(in_l_ptr1, frameCount);
                    ClearBuffer(in_r_ptr1, frameCount);
                    if (mox != next_mox)
                        mox = next_mox;
                }
                DttSP.ExchangeSamples(thread_no, in_l_ptr1, in_r_ptr1, out_l_ptr1, out_r_ptr1, frameCount);
                if (ramp_down)
                {
                    int i;
                    for (i = 0; i < frameCount; i++)
                    {
                        float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                        out_l_ptr1[i] *= w;
                        out_r_ptr1[i] *= w;
                        ramp_val += ramp_step;
                        if (++ramp_count >= ramp_samples)
                        {
                            ramp_down = false;
                            break;
                        }
                    }
                    // FIX: tail-fill must run when the ramp just COMPLETED (ramp_down
                    // was set false above and we broke out early); with the original
                    // "if (ramp_down)" guard the loop below was unreachable and the
                    // remainder of the block kept un-ramped audio.
                    if (!ramp_down)
                    {
                        for (; i < frameCount; i++)
                        {
                            out_l_ptr1[i] = 0.0f;
                            out_r_ptr1[i] = 0.0f;
                        }
                    }
                }
                else if (ramp_up)
                {
                    for (int i = 0; i < frameCount; i++)
                    {
                        float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                        out_l_ptr1[i] *= w;
                        out_r_ptr1[i] *= w;
                        ramp_val += ramp_step;
                        if (++ramp_count >= ramp_samples)
                        {
                            ramp_up = false;
                            break;
                        }
                    }
                }
                else
                {
                    ClearBuffer(out_l_ptr1, frameCount);
                    ClearBuffer(out_r_ptr1, frameCount);
                }
                if (next_audio_state1 == AudioState.CW)
                {
                    //cw_delay = 1;
                    DttSP.CWtoneExchange(out_l1, out_r1, frameCount);
                }
                else if (switch_count == 1)
                    DttSP.CWRingRestart();
                switch_count--;
                //if(switch_count == ramp_up_num) RampUp = true;
                if (switch_count == 0)
                    current_audio_state1 = next_audio_state1;
                break;
        }

        // Downsample the processed audio and hand it to the network server thread.
        fixed (float* res_inl_ptr = &(res_inl[0]))
        fixed (float* res_inr_ptr = &(res_inr[0]))
        {
            int outsamps;
            DttSP.DoResamplerF(out_l, res_inl_ptr, frameCount, &outsamps, resampServerPtrIn_l); // down to 6000
            DttSP.DoResamplerF(out_r, res_inr_ptr, frameCount, &outsamps, resampServerPtrIn_r);
            byte* tmp_l_ptr = (byte*)res_inl_ptr;
            byte* tmp_r_ptr = (byte*)res_inr_ptr;
            byte[] buffer = new byte[8192];
            // Interleave one byte of left with one byte of right per pair, starting
            // at offset 2 (offsets 0-1 left for a header, presumably — verify).
            for (int i = 2; i < buffer.Length; i++)
            {
                buffer[i] = tmp_l_ptr[0];
                buffer[i + 1] = tmp_r_ptr[0];
                tmp_l_ptr++;
                tmp_r_ptr++;
                i++;
            }
            CATNetwork_mutex.WaitOne();
            fixed (void* rptr = &buffer[0])
            fixed (void* wptr = &console.ServerSocket.send_buffer[0])
                Win32.memcpy(wptr, rptr, buffer.Length);
            // FIX: was a second WaitOne() — the mutex was acquired twice and never
            // released, starving every other user of CATNetwork_mutex.
            CATNetwork_mutex.ReleaseMutex();
        }
        console.ServerSocket.sendEvent.Set();

        // Local output level: monitor volume on RX, TX scale (with AM boost) on TX.
        double vol = monitor_volume_left;
        if (mox)
        {
            vol = TXScale;
            if (high_pwr_am)
            {
                if (dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM)
                    vol *= 1.414;
            }
        }
        ScaleBuffer(out_l, out_l, frameCount, (float)vol);
        ScaleBuffer(out_r, out_r, frameCount, (float)vol);
        return 0;
    }
    catch (Exception e)
    {
        Debug.Print(e.ToString());
        return -1;
    }
}
/// <summary> /// Playing different sounds /// </summary> /// <param name="state"></param> public void Play(AudioState state) { if (PlayerPrefs.GetString("Music") != "no") { switch ((int)state) { case 0: { audioSource.clip = bonus; audioSource.Play(); break; } case 1: { audioSource.clip = kickStick; audioSource.Play(); break; } case 2: { audioSource.loop = true; audioSource.clip = stickGrow; audioSource.Play(); break; } case 3: { audioSource.clip = fallStick; audioSource.Play(); break; } case 4: { audioSource.clip = death; audioSource.Play(); break; } case 5: { audioSource.clip = score; audioSource.Play(); break; } case 6: { audioSource.Stop(); audioSource.loop = false; break; } case 7: { audioSource.clip = btnClick; audioSource.Play(); break; } } } #endregion }
/// <summary>
/// Output-only audio callback for the 4-channel (3/4 output pair) card
/// configuration. On TX (when channels 3/4 carry the TX signal) it runs the
/// DSP output state machine, feeds VAC and the wave recorder, and applies the
/// TX/monitor volume; otherwise it only produces the CW sidetone monitor or
/// silence. Returns callback_return on success, 0 if an exception was caught.
/// </summary>
unsafe public static int output_Callback4Port(void* input, void* output, int frameCount,// yt7pwr
    PA19.PaStreamCallbackTimeInfo* timeInfo, int statusFlags, void* userData)
{
    try
    {
        // Output arrives as an array of per-channel buffer pointers; pointer
        // width differs between the 32- and 64-bit builds.
#if(WIN64)
        Int64* array_ptr = (Int64*)output;
        float* out_l_ptr1 = (float*)array_ptr[0];
        float* out_r_ptr1 = (float*)array_ptr[1];
        float* in_l_ptr1 = null;
        float* in_r_ptr1 = null;
#endif
#if(WIN32)
        int* array_ptr = (int*)output;
        float* out_l_ptr1 = (float*)array_ptr[0];
        float* out_r_ptr1 = (float*)array_ptr[1];
        float* in_l_ptr1 = null;
        float* in_r_ptr1 = null;
#endif
        if (mox && (!TX_out_1_2 || num_channels == 4))
        {
            float* in_l_VAC = null, in_r_VAC = null, out_l = null, out_r = null;
            // Resolve channel aliases per the TX I/Q swap setting.
            if (!voice_message_record)
            {
                if (voice_message_playback)
                    voice_msg_file_reader.GetPlayBuffer(in_l_ptr1, in_r_ptr1);
                if (!console.TX_IQ_channel_swap)
                {
                    out_r = out_l_ptr1;
                    out_l = out_r_ptr1;
                }
                else
                {
                    out_l = out_l_ptr1;
                    out_r = out_r_ptr1;
                }
            }
            else if (voice_message_record)
            {
                out_l = out_l_ptr1;
                out_r = out_r_ptr1;
            }
            switch (current_audio_state1)
            {
                case AudioState.DTTSP:
                    if (dsp_mode == DSPMode.CWU || dsp_mode == DSPMode.CWL)
                    {
                        DttSP.CWtoneExchange(out_l_ptr1, out_r_ptr1, frameCount);
                    }
                    DttSP.ExchangeOutputSamples(thread_no, out_l, out_r, frameCount);
                    #region Output Signal Source
                    switch (current_output_signal)
                    {
                        case SignalSource.SOUNDCARD:
                            break;
                        case SignalSource.SINE:
                            SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                            // FIX: the cosine was written into out_l_ptr1, clobbering
                            // the sine just generated there and leaving the right
                            // channel untouched; every sibling callback writes the
                            // cosine to the right channel.
                            phase_accumulator1 = CosineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                            break;
                        case SignalSource.NOISE:
                            Noise(out_l_ptr1, frameCount);
                            Noise(out_r_ptr1, frameCount);
                            break;
                        case SignalSource.TRIANGLE:
                            Triangle(out_l_ptr1, frameCount, sine_freq1);
                            CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                            break;
                        case SignalSource.SAWTOOTH:
                            Sawtooth(out_l_ptr1, frameCount, sine_freq1);
                            CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                            break;
                    }
                    #endregion
                    break;
                case AudioState.CW:
                    if (next_audio_state1 == AudioState.SWITCH)
                    {
                        DttSP.ExchangeOutputSamples(thread_no, out_l_ptr1, out_r_ptr1, frameCount);
                        if (switch_count == 0) next_audio_state1 = AudioState.CW;
                        switch_count--;
                    }
                    else
                        DttSP.CWtoneExchange(out_l, out_r, frameCount);
                    break;
                case AudioState.SINL_COSR:
                    if (two_tone)
                    {
                        double dump;
                        SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                            sine_freq1, sine_freq2, out dump, out dump);
                        CosineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                            sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                    }
                    else
                    {
                        SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                        phase_accumulator1 = CosineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    }
                    break;
                case AudioState.SINL_SINR:
                    if (two_tone)
                    {
                        SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                            sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                        CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                    }
                    else
                    {
                        phase_accumulator1 = SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                        CopyBuffer(out_l_ptr1, out_r_ptr1, frameCount);
                    }
                    break;
                case AudioState.SINL_NOR:
                    if (two_tone)
                    {
                        SineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                            sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                        ClearBuffer(out_r_ptr1, frameCount);
                    }
                    else
                    {
                        phase_accumulator1 = SineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                        ClearBuffer(out_r_ptr1, frameCount);
                    }
                    break;
                case AudioState.CW_COSL_SINR:
                    // TX quadrature tone generation, optionally shifted to the TX IF
                    // or the VFO-LOSC offset, followed by IQ image correction.
                    if (mox)
                    {
                        if (two_tone)
                        {
                            double dump;
                            if (console.tx_IF)
                            {
                                CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2,
                                    sine_freq1 + console.TX_IF_shift * 1e5,
                                    sine_freq2 + console.TX_IF_shift * 1e5, out dump, out dump);
                                SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2,
                                    sine_freq1 + console.TX_IF_shift * 1e5,
                                    sine_freq2 + console.TX_IF_shift * 1e5,
                                    out phase_accumulator1, out phase_accumulator2);
                            }
                            else
                            {
                                double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6;
                                CosineWave2Tone(out_r, frameCount, phase_accumulator1, phase_accumulator2,
                                    sine_freq1 + osc, sine_freq2 + osc, out dump, out dump);
                                SineWave2Tone(out_l, frameCount, phase_accumulator1, phase_accumulator2,
                                    sine_freq1 + osc, sine_freq2 + osc,
                                    out phase_accumulator1, out phase_accumulator2);
                            }
                        }
                        else
                        {
                            if (console.tx_IF)
                            {
                                CosineWave(out_r, frameCount, phase_accumulator1,
                                    sine_freq1 + console.TX_IF_shift * 1e5);
                                phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1,
                                    sine_freq1 + console.TX_IF_shift * 1e5);
                            }
                            else
                            {
                                double osc = (console.VFOAFreq - console.LOSCFreq) * 1e6;
                                CosineWave(out_r, frameCount, phase_accumulator1, sine_freq1 + osc);
                                phase_accumulator1 = SineWave(out_l, frameCount, phase_accumulator1, sine_freq1 + osc);
                            }
                        }
                        float iq_gain = 1.0f + (1.0f - (1.0f + 0.001f * (float)console.SetupForm.udDSPImageGainTX.Value));
                        float iq_phase = 0.001f * (float)console.SetupForm.udDSPImagePhaseTX.Value;
                        CorrectIQBuffer(out_l, out_r, iq_gain, iq_phase, frameCount);
                    }
                    break;
                case AudioState.COSL_SINR:
                    if (two_tone)
                    {
                        double dump;
                        CosineWave2Tone(out_l_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                            sine_freq1, sine_freq2, out dump, out dump);
                        SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                            sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                    }
                    else
                    {
                        CosineWave(out_l_ptr1, frameCount, phase_accumulator1, sine_freq1);
                        phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    }
                    break;
                case AudioState.NOL_SINR:
                    if (two_tone)
                    {
                        ClearBuffer(out_l_ptr1, frameCount);
                        SineWave2Tone(out_r_ptr1, frameCount, phase_accumulator1, phase_accumulator2,
                            sine_freq1, sine_freq2, out phase_accumulator1, out phase_accumulator2);
                    }
                    else
                    {
                        ClearBuffer(out_l_ptr1, frameCount);
                        phase_accumulator1 = SineWave(out_r_ptr1, frameCount, phase_accumulator1, sine_freq1);
                    }
                    break;
                case AudioState.NOL_NOR:
                    ClearBuffer(out_l_ptr1, frameCount);
                    ClearBuffer(out_r_ptr1, frameCount);
                    break;
                case AudioState.PIPE:
                    CopyBuffer(in_l_ptr1, out_l_ptr1, frameCount);
                    CopyBuffer(in_r_ptr1, out_r_ptr1, frameCount);
                    break;
                case AudioState.SWITCH:
                    if (!ramp_down && !ramp_up)
                    {
                        ClearBuffer(out_l_ptr1, frameCount);
                        ClearBuffer(out_r_ptr1, frameCount);
                        if (mox != next_mox)
                            mox = next_mox;
                    }
                    DttSP.ExchangeOutputSamples(thread_no, out_l_ptr1, out_r_ptr1, frameCount);
                    if (ramp_down)
                    {
                        int i;
                        for (i = 0; i < frameCount; i++)
                        {
                            float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                            out_l_ptr1[i] *= w;
                            out_r_ptr1[i] *= w;
                            ramp_val += ramp_step;
                            if (++ramp_count >= ramp_samples)
                            {
                                ramp_down = false;
                                break;
                            }
                        }
                        // FIX: tail-fill must run when the ramp just COMPLETED
                        // (ramp_down was cleared above and we broke out early); with
                        // the original "if (ramp_down)" guard the loop below could
                        // never execute and the rest of the block kept un-ramped audio.
                        if (!ramp_down)
                        {
                            for (; i < frameCount; i++)
                            {
                                out_l_ptr1[i] = 0.0f;
                                out_r_ptr1[i] = 0.0f;
                            }
                        }
                    }
                    else if (ramp_up)
                    {
                        for (int i = 0; i < frameCount; i++)
                        {
                            float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                            out_l_ptr1[i] *= w;
                            out_r_ptr1[i] *= w;
                            ramp_val += ramp_step;
                            if (++ramp_count >= ramp_samples)
                            {
                                ramp_up = false;
                                break;
                            }
                        }
                    }
                    else
                    {
                        ClearBuffer(out_l_ptr1, frameCount);
                        ClearBuffer(out_r_ptr1, frameCount);
                    }
                    if (next_audio_state1 == AudioState.CW)
                    {
                        //cw_delay = 1;
                        DttSP.CWtoneExchange(out_l_ptr1, out_r_ptr1, frameCount);
                    }
                    else if (switch_count == 1)
                        DttSP.CWRingRestart();
                    switch_count--;
                    if (switch_count == ramp_up_num)
                        RampUp = true;
                    if (switch_count == 0)
                        current_audio_state1 = next_audio_state1;
                    break;
            }
            // Feed VAC unless the TX monitor path is taking the samples instead.
            if ((!mox || (mox && (!mon || !vac_mon))) &&
                (vac_enabled && !loopDLL_enabled &&
                rb_vacIN_l != null && rb_vacIN_r != null &&
                rb_vacOUT_l != null && rb_vacOUT_r != null))
            {
                fixed (float* outl_ptr = &(vac_outl[0]))
                fixed (float* outr_ptr = &(vac_outr[0]))
                {
                    if (!mox)
                    {
                        ScaleBuffer(out_l, outl_ptr, frameCount, (float)vac_rx_scale); // RX gain
                        ScaleBuffer(out_r, outr_ptr, frameCount, (float)vac_rx_scale);
                    }
                    else
                    {
                        ScaleBuffer(out_l_ptr1, out_l_ptr1, frameCount, (float)vac_preamp); // TX gain
                        ScaleBuffer(out_r_ptr1, out_r_ptr1, frameCount, (float)vac_preamp);
                    }
                    if (!mox)
                    {
                        if (sample_rateVAC == sample_rate1)
                        {
                            if ((rb_vacOUT_l.WriteSpace() >= frameCount) &&
                                (rb_vacOUT_r.WriteSpace() >= frameCount))
                            {
                                Win32.EnterCriticalSection(cs_vac);
                                rb_vacOUT_l.WritePtr(out_l_ptr1, frameCount);
                                rb_vacOUT_r.WritePtr(out_r_ptr1, frameCount);
                                Win32.LeaveCriticalSection(cs_vac);
                            }
                            else
                            {
                                VACDebug("rb_vacOUT overflow outCB4");
                            }
                        }
                        else
                        {
                            // Sample rates differ: resample before queueing to VAC.
                            fixed (float* res_outl_ptr = &(res_outl[0]))
                            fixed (float* res_outr_ptr = &(res_outr[0]))
                            {
                                int outsamps;
                                DttSP.DoResamplerF(out_l_ptr1, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l);
                                DttSP.DoResamplerF(out_r_ptr1, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r);
                                if ((rb_vacOUT_l.WriteSpace() >= outsamps) &&
                                    (rb_vacOUT_r.WriteSpace() >= outsamps))
                                {
                                    Win32.EnterCriticalSection(cs_vac);
                                    rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps);
                                    rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps);
                                    Win32.LeaveCriticalSection(cs_vac);
                                }
                                else
                                {
                                    VACDebug("rb_vacOUT overflow outCB4");
                                }
                            }
                        }
                    }
                }
            }
            if ((wave_record && !mox && !record_rx_preprocessed) ||
                (wave_record && mox && !record_tx_preprocessed))
                wave_file_writer.AddWriteBuffer(out_l_ptr1, out_r_ptr1);
            if (console.CurrentDisplayMode == DisplayMode.SCOPE ||
                console.CurrentDisplayMode == DisplayMode.PANASCOPE)
                DoScope(out_l, frameCount);
            // Final output level: monitor volume on RX, TX scale (with AM boost) on TX.
            double vol_l = monitor_volume_left;
            double vol_r = monitor_volume_right;
            if (mox)
            {
                vol_l = TXScale;
                vol_r = TXScale;
                if (high_pwr_am)
                {
                    if (dsp_mode == DSPMode.AM || dsp_mode == DSPMode.SAM)
                    {
                        vol_l *= 1.414;
                        vol_r *= 1.414;
                    }
                }
            }
            ScaleBuffer(out_l, out_l, frameCount, (float)vol_l);
            ScaleBuffer(out_r, out_r, frameCount, (float)vol_r);
        }
        else
        {
            // Channels 3/4 are not carrying the TX signal: only the CW sidetone
            // monitor (when enabled) or silence goes out.
            if ((wave_record && !mox && !record_rx_preprocessed) ||
                (wave_record && mox && !record_tx_preprocessed))
                wave_file_writer.AddWriteBuffer(out_l_ptr1, out_r_ptr1);
            if (mon && (dsp_mode == DSPMode.CWL || dsp_mode == DSPMode.CWU))
            {
                DttSP.CWMonitorExchange(out_l_ptr1, out_r_ptr1, frameCount);
                ScaleBuffer(out_l_ptr1, out_l_ptr1, frameCount, (float)monitor_volume_left);
                ScaleBuffer(out_r_ptr1, out_r_ptr1, frameCount, (float)monitor_volume_right);
            }
            else
            {
                ClearBuffer(out_l_ptr1, frameCount);
                ClearBuffer(out_r_ptr1, frameCount);
            }
        }
        return callback_return;
    }
    catch (Exception ex)
    {
        Debug.Write(ex.ToString());
        return 0;
    }
}
internal AudioStateChangedEventArgs(AudioState audioState) { _audioState = audioState; }
/// <summary>
/// Legacy audio callback for the Genesis G6 sound device (kept as "_old" reference path).
/// Unpacks interleaved 24-bit big-endian samples from <paramref name="input"/>, runs one
/// block through the DTTSP / SWITCH / CW audio state machine, optionally exchanges audio
/// with the VAC ring buffers (resampled or at native rate), and packs the processed block
/// back into <paramref name="output"/> as 8-byte stereo frames of 32-bit big-endian samples.
/// </summary>
/// <param name="length">Number of input bytes available (8 bytes per stereo frame).</param>
/// <param name="x">Unused; callback-signature placeholder.</param>
/// <param name="y">Unused; callback-signature placeholder.</param>
/// <param name="input">Raw device input buffer.</param>
/// <param name="output">Raw device output buffer; written when audio is being sent.</param>
/// <returns>1 when audio should be sent to the device (G6_send_audio == 1), else 0.</returns>
unsafe public static int G6AudioCallback_old(int length, int x, float y, void* input, void* output)
{
    int send_audio = 0;
    if (G6_send_audio == 1)
        send_audio = 1;

    try
    {
        int frameCount = Audio.BlockSize;
        int i, j = 0;
        int f = 0;
        byte* input_data = (byte*)input;
        byte* out_data = (byte*)output;
        bool dsp = false;

        // Unpack interleaved 24-bit big-endian samples: each 8-byte frame carries
        // bytes [1..3] for the first channel and [5..7] for the second. The 24 bits
        // are shifted into the top of a 32-bit int (preserving sign) and scaled to
        // [-1, 1) via const_1_div_2147483648_.
        if (console.RX_IQ_channel_swap)
        {
            for (i = 0; i < length - 7; i++)
            {
                f = input_data[i + 1];
                f = f << 8;
                f += input_data[i + 2];
                f = f << 8;
                f += input_data[i + 3];
                f = f << 8;
                right_buffer_input[j] = (float)((double)f * const_1_div_2147483648_);
                f = input_data[i + 5];
                f = f << 8;
                f += input_data[i + 6];
                f = f << 8;
                f += input_data[i + 7];
                f = f << 8;
                left_buffer_input[j] = (float)((double)f * const_1_div_2147483648_);
                i += 7;
                j++;
            }
        }
        else
        {
            for (i = 0; i < length - 7; i++)
            {
                f = input_data[i + 1];
                f = f << 8;
                f += input_data[i + 2];
                f = f << 8;
                f += input_data[i + 3];
                f = f << 8;
                left_buffer_input[j] = (float)((double)f * const_1_div_2147483648_);
                f = input_data[i + 5];
                f = f << 8;
                f += input_data[i + 6];
                f = f << 8;
                f += input_data[i + 7];
                f = f << 8;
                right_buffer_input[j] = (float)((double)f * const_1_div_2147483648_);
                i += 7;
                j++;
            }
        }

        // Stash the freshly unpacked block (j frames) in the G6 ring buffers.
        Win32.EnterCriticalSection(cs_g6_callback);
        G6RB_left.Write(left_buffer_input, j);
        G6RB_right.Write(right_buffer_input, j);
        Win32.LeaveCriticalSection(cs_g6_callback);

        if (vac_resample)
        {
            #region resample
            fixed (float* input_l = &(left_buffer_input[0]))
            fixed (float* output_l = &(left_buffer_output[0]))
            fixed (float* input_r = &(right_buffer_input[0]))
            fixed (float* output_r = &(right_buffer_output[0]))
            {
                if (!mox && wave_playback)
                    wave_file_reader.GetPlayBuffer(input_l, input_r);
                else if (!mox && wave_record && record_rx_preprocessed)
                    wave_file_writer.AddWriteBuffer(input_l, input_r);
                else if (mox && vac_primary_audiodev)
                {
                    if (dsp_mode == DSPMode.CWL || dsp_mode == DSPMode.CWU)
                    {
                        // CW TX: generate the CW monitor signal locally and resample it
                        // out to the VAC device; no VAC input is consumed.
                        dsp = true;
                        int outsamps;
                        fixed (float* res_inl_ptr = &(res_inl[0]))
                        fixed (float* res_inr_ptr = &(res_inr[0]))
                        {
                            DttSP.CWMonitorExchange(res_inl_ptr, res_inr_ptr, frameCount);
                            DttSP.DoResamplerF(res_inl_ptr, res_inl_ptr, frameCount, &outsamps, resampPtrOut_l);
                            DttSP.DoResamplerF(res_inr_ptr, res_inr_ptr, frameCount, &outsamps, resampPtrOut_r);
                            // FIX: compare free space against outsamps (the amount actually
                            // written) rather than frameCount — the resampler's output length
                            // differs from its input length.
                            if (rb_vacOUT_l.WriteSpace() >= outsamps)
                            {
                                Win32.EnterCriticalSection(cs_vac);
                                rb_vacOUT_l.WritePtr(res_inl_ptr, outsamps);
                                rb_vacOUT_r.WritePtr(res_inr_ptr, outsamps);
                                Win32.LeaveCriticalSection(cs_vac);
                            }
                        }
                    }
                    else if (rb_vacIN_l.ReadSpace() >= frameCount)
                    {
                        // Non-CW TX: pull microphone audio from the VAC input rings.
                        Win32.EnterCriticalSection(cs_vac);
                        rb_vacIN_l.ReadPtr(input_l, frameCount);
                        rb_vacIN_r.ReadPtr(input_r, frameCount);
                        Win32.LeaveCriticalSection(cs_vac);
                        dsp = true;
                    }
                }
                else
                {
                    // RX: pull a full block back out of the G6 ring buffers.
                    if (G6RB_left.ReadSpace() >= frameCount)
                    {
                        Win32.EnterCriticalSection(cs_g6_callback);
                        G6RB_left.Read(left_buffer_input, frameCount);
                        G6RB_right.Read(right_buffer_input, frameCount);
                        Win32.LeaveCriticalSection(cs_g6_callback);
                        dsp = true;
                    }
                }

                if (dsp)
                {
                    RunG6AudioStateMachine(input_l, input_r, output_l, output_r, frameCount);
                    ScaleG6Output(output_l, output_r, frameCount);

                    int dec = sample_rate1 / sample_rateVAC;
                    if (!mox)
                    {
                        if (dec < 1)
                        {
                            // VAC rate exceeds the radio rate: interpolate via the DttSP resampler.
                            fixed (float* res_outl_ptr = &(res_outl[0]))
                            fixed (float* res_outr_ptr = &(res_outr[0]))
                            {
                                int outsamps;
                                if (VACDirectI_Q)
                                {
                                    // Raw I/Q out: resample the unprocessed input instead of
                                    // the demodulated output.
                                    DttSP.DoResamplerF(input_l, res_outl_ptr, frameCount, &outsamps, resampPtrIn_l);
                                    DttSP.DoResamplerF(input_r, res_outr_ptr, frameCount, &outsamps, resampPtrIn_r);
                                    if ((rb_vacOUT_l.WriteSpace() >= outsamps) && (rb_vacOUT_r.WriteSpace() >= outsamps))
                                    {
                                        if (vac_correct_iq)
                                            CorrectIQBuffer(res_outl_ptr, res_outr_ptr, vac_iq_gain, vac_iq_phase, frameCount);
                                        Win32.EnterCriticalSection(cs_vac);
                                        rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps);
                                        rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps);
                                        Win32.LeaveCriticalSection(cs_vac);
                                    }
                                    else
                                    {
                                        VACDebug("rb_vacOUT overflow G6 CB1");
                                    }
                                }
                                else
                                {
                                    DttSP.DoResamplerF(output_l, res_outl_ptr, frameCount, &outsamps, resampPtrOut_l);
                                    DttSP.DoResamplerF(output_r, res_outr_ptr, frameCount, &outsamps, resampPtrOut_r);
                                    if ((rb_vacOUT_l.WriteSpace() >= outsamps) && (rb_vacOUT_r.WriteSpace() >= outsamps))
                                    {
                                        Win32.EnterCriticalSection(cs_vac);
                                        rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps);
                                        rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps);
                                        Win32.LeaveCriticalSection(cs_vac);
                                    }
                                    else
                                    {
                                        // Overflow: log, then write anyway (original behavior —
                                        // the ring buffer absorbs/drops the overrun).
                                        VACDebug("rb_vacOUT overflow G6 CB1");
                                        Win32.EnterCriticalSection(cs_vac);
                                        rb_vacOUT_l.WritePtr(res_outl_ptr, outsamps);
                                        rb_vacOUT_r.WritePtr(res_outr_ptr, outsamps);
                                        Win32.LeaveCriticalSection(cs_vac);
                                    }
                                }
                            }
                        }
                        else
                        {
                            // VAC rate divides the radio rate: decimate by keeping every
                            // dec-th sample (j counts the samples kept).
                            j = 0;
                            for (i = 0; i < frameCount; i++)
                            {
                                res_outl[j] = output_l[i];
                                res_outr[j] = output_r[i];
                                j++;
                                i += dec - 1;
                            }
                            fixed (float* res_outl_ptr = &(res_outl[0]))
                            fixed (float* res_outr_ptr = &(res_outr[0]))
                            {
                                if (VACDirectI_Q)
                                {
                                    if (rb_vacOUT_l.WriteSpace() >= frameCount)
                                    {
                                        if (vac_correct_iq)
                                            CorrectIQBuffer(res_outl_ptr, res_outr_ptr, vac_iq_gain, vac_iq_phase, frameCount);
                                        Win32.EnterCriticalSection(cs_vac);
                                        rb_vacOUT_l.WritePtr(res_outl_ptr, j);
                                        rb_vacOUT_r.WritePtr(res_outr_ptr, j);
                                        Win32.LeaveCriticalSection(cs_vac);
                                    }
                                    else
                                    {
                                        VACDebug("rb_vacOUT overflow G6 CB1");
                                    }
                                }
                                else
                                {
                                    if (rb_vacOUT_l.WriteSpace() >= frameCount)
                                    {
                                        Win32.EnterCriticalSection(cs_vac);
                                        rb_vacOUT_l.WritePtr(res_outl_ptr, j);
                                        rb_vacOUT_r.WritePtr(res_outr_ptr, j);
                                        Win32.LeaveCriticalSection(cs_vac);
                                    }
                                    else
                                    {
                                        // Overflow: log, then write anyway (original behavior).
                                        VACDebug("rb_vacOUT overflow G6 CB1");
                                        Win32.EnterCriticalSection(cs_vac);
                                        rb_vacOUT_l.WritePtr(res_outl_ptr, j);
                                        rb_vacOUT_r.WritePtr(res_outr_ptr, j);
                                        Win32.LeaveCriticalSection(cs_vac);
                                    }
                                }
                            }
                        }
                    }

                    ApplyG6OutputTestSignal(output_l, output_r, frameCount);

                    if (mox || (!mox && G6_send_audio == 1))
                        PackG6Output(out_data, output_l, output_r, frameCount);

                    if ((console.CurrentDisplayMode == DisplayMode.SCOPE ||
                         console.CurrentDisplayMode == DisplayMode.PANASCOPE) && !vac_enabled)
                        DoScope(output_l, frameCount);
                }
            }
            #endregion
        }
        else
        {
            #region no resample
            fixed (float* input_l = &(left_buffer_input[0]))
            fixed (float* output_l = &(left_buffer_output[0]))
            fixed (float* input_r = &(right_buffer_input[0]))
            fixed (float* output_r = &(right_buffer_output[0]))
            {
                if (wave_playback && !mox)
                    wave_file_reader.GetPlayBuffer(input_l, input_r);
                else if (!mox && wave_record && record_rx_preprocessed)
                    wave_file_writer.AddWriteBuffer(input_l, input_r);
                else if (mox && vac_primary_audiodev)
                {
                    dsp = true;
                    if (dsp_mode == DSPMode.CWL || dsp_mode == DSPMode.CWU)
                    {
                        // CW TX: the sidetone is produced inside the state machine;
                        // nothing to read from VAC here.
                    }
                    else if (rb_vacIN_l.ReadSpace() >= frameCount)
                    {
                        Win32.EnterCriticalSection(cs_vac);
                        rb_vacIN_l.ReadPtr(input_l, frameCount);
                        rb_vacIN_r.ReadPtr(input_r, frameCount);
                        Win32.LeaveCriticalSection(cs_vac);
                    }
                }
                else
                {
                    // RX: pull a full block back out of the G6 ring buffers.
                    if (G6RB_left.ReadSpace() >= frameCount)
                    {
                        Win32.EnterCriticalSection(cs_g6_callback);
                        G6RB_left.Read(left_buffer_input, frameCount);
                        G6RB_right.Read(right_buffer_input, frameCount);
                        Win32.LeaveCriticalSection(cs_g6_callback);
                        dsp = true;
                    }
                }

                if (dsp)
                {
                    RunG6AudioStateMachine(input_l, input_r, output_l, output_r, frameCount);
                    // NOTE: in this (no-resample) path the test signal is injected before
                    // scaling, whereas the resample path scales first — preserved as-is.
                    ApplyG6OutputTestSignal(output_l, output_r, frameCount);
                    ScaleG6Output(output_l, output_r, frameCount);

                    if (!mox && vac_primary_audiodev)
                    {
                        if (rb_vacOUT_l.WriteSpace() >= frameCount)
                        {
                            Win32.EnterCriticalSection(cs_vac);
                            rb_vacOUT_l.WritePtr(output_l, frameCount);
                            rb_vacOUT_r.WritePtr(output_r, frameCount);
                            Win32.LeaveCriticalSection(cs_vac);
                        }
                        else
                        {
                            // Overflow: log, then write anyway (original behavior).
                            VACDebug("rb_vacOUT overflow G6 CB1");
                            Win32.EnterCriticalSection(cs_vac);
                            rb_vacOUT_l.WritePtr(output_l, frameCount);
                            rb_vacOUT_r.WritePtr(output_r, frameCount);
                            Win32.LeaveCriticalSection(cs_vac);
                        }
                    }

                    // FIX: removed an unused `byte[] q = new byte[3072]` that allocated on
                    // every callback invocation (heap churn in a real-time audio path).
                    if (mox || (!mox && G6_send_audio == 1))
                        PackG6Output(out_data, output_l, output_r, frameCount);

                    if ((console.CurrentDisplayMode == DisplayMode.SCOPE ||
                         console.CurrentDisplayMode == DisplayMode.PANASCOPE) && !vac_enabled)
                        DoScope(output_l, frameCount);
                }
            }
            #endregion
        }

        return send_audio;
    }
    catch (Exception ex)
    {
        Debug.Write(ex.ToString());
        return send_audio;
    }
}

/// <summary>
/// Runs one block through the DTTSP / SWITCH / CW audio state machine shared by the
/// resample and no-resample paths of G6AudioCallback_old: exchanges samples with DttSP,
/// handles TX/RX switching ramps, and generates the CW tone where required.
/// </summary>
private static unsafe void RunG6AudioStateMachine(float* input_l, float* input_r, float* output_l, float* output_r, int frameCount)
{
    int i;
    switch (CurrentAudioState1)
    {
        case AudioState.DTTSP:
            ApplyG6InputTestSignal(input_l, input_r, frameCount);
            DttSP.ExchangeSamples(thread_no, input_l, input_r, output_l, output_r, frameCount);
            break;

        case AudioState.SWITCH:
            if (!ramp_down && !ramp_up)
            {
                // FIX: the original cleared input_l twice; clear both channels.
                ClearBuffer(input_l, frameCount);
                ClearBuffer(input_r, frameCount);
                if (mox != next_mox)
                    mox = next_mox;
            }
            DttSP.ExchangeSamples(thread_no, input_l, input_r, output_l, output_r, frameCount);
            if (ramp_down)
            {
                for (i = 0; i < frameCount; i++)
                {
                    float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                    // FIX: the original applied the ramp to output_l twice; apply to both.
                    output_l[i] *= w;
                    output_r[i] *= w;
                    ramp_val += ramp_step;
                    if (++ramp_count >= ramp_samples)
                    {
                        ramp_down = false;
                        break;
                    }
                }
                if (ramp_down)
                {
                    // Zero the remainder of the block once the ramp-down window is exhausted.
                    for (; i < frameCount; i++)
                    {
                        output_l[i] = 0.0f;
                        output_r[i] = 0.0f; // FIX: original zeroed output_l twice
                    }
                }
            }
            else if (ramp_up)
            {
                for (i = 0; i < frameCount; i++)
                {
                    float w = (float)Math.Sin(ramp_val * Math.PI / 2.0);
                    output_l[i] *= w;
                    output_r[i] *= w; // FIX: original ramped output_l twice
                    ramp_val += ramp_step;
                    if (++ramp_count >= ramp_samples)
                    {
                        ramp_up = false;
                        break;
                    }
                }
            }
            else
            {
                ClearBuffer(output_l, frameCount);
                ClearBuffer(output_r, frameCount); // FIX: original cleared output_l twice
            }
            if (next_audio_state1 == AudioState.CW)
            {
                // FIX: original passed output_l for both channels; the CW state below
                // exchanges into (output_l, output_r).
                DttSP.CWtoneExchange(output_l, output_r, frameCount);
            }
            else if (switch_count == 1)
            {
                DttSP.CWRingRestart();
            }
            switch_count--;
            if (switch_count == ramp_up_num)
                RampUp = true;
            if (switch_count == 0)
                current_audio_state1 = next_audio_state1;
            break;

        case AudioState.CW:
            if (next_audio_state1 == AudioState.SWITCH)
            {
                // Mute while counting down the switch, then fall back to CW.
                Win32.memset(output_l, 0, frameCount * sizeof(float));
                Win32.memset(output_r, 0, frameCount * sizeof(float));
                if (switch_count == 0)
                    next_audio_state1 = AudioState.CW;
                switch_count--;
            }
            else
            {
                DttSP.CWtoneExchange(output_l, output_r, frameCount);
            }
            break;
    }
}

/// <summary>
/// Overwrites the input block with the selected diagnostic test signal, if any
/// (SOUNDCARD leaves the real input untouched).
/// </summary>
private static unsafe void ApplyG6InputTestSignal(float* input_l, float* input_r, int frameCount)
{
    switch (current_input_signal)
    {
        case SignalSource.SOUNDCARD:
            break;
        case SignalSource.SINE:
            if (console.RX_IQ_channel_swap)
            {
                // NOTE(review): both calls write input_l, so CosineWave overwrites the
                // sine — preserved from the original; confirm this is intended.
                SineWave(input_l, frameCount, phase_accumulator1, sine_freq1);
                phase_accumulator1 = CosineWave(input_l, frameCount, phase_accumulator1, sine_freq1);
            }
            else
            {
                SineWave(input_l, frameCount, phase_accumulator1, sine_freq1);
                phase_accumulator1 = CosineWave(input_r, frameCount, phase_accumulator1 + 0.0001f, sine_freq1);
            }
            ScaleBuffer(input_l, input_l, frameCount, (float)input_source_scale);
            ScaleBuffer(input_r, input_r, frameCount, (float)input_source_scale);
            break;
        case SignalSource.NOISE:
            Noise(input_l, frameCount);
            Noise(input_r, frameCount);
            break;
        case SignalSource.TRIANGLE:
            Triangle(input_l, frameCount, sine_freq1);
            CopyBuffer(input_l, input_r, frameCount);
            break;
        case SignalSource.SAWTOOTH:
            Sawtooth(input_l, frameCount, sine_freq1);
            CopyBuffer(input_l, input_r, frameCount);
            break;
    }
}

/// <summary>
/// Overwrites the output block with the selected diagnostic test signal on the
/// channel(s) chosen by ChannelTest (SOUNDCARD leaves the processed audio untouched).
/// </summary>
private static unsafe void ApplyG6OutputTestSignal(float* output_l, float* output_r, int frameCount)
{
    switch (current_output_signal)
    {
        case SignalSource.SOUNDCARD:
            break;
        case SignalSource.SINE:
            switch (ChannelTest)
            {
                case TestChannels.Left:
                    SineWave(output_l, frameCount, phase_accumulator1, sine_freq1);
                    phase_accumulator1 = CosineWave(output_l, frameCount, phase_accumulator1, sine_freq1);
                    break;
                case TestChannels.Right:
                    SineWave(output_r, frameCount, phase_accumulator1, sine_freq1);
                    phase_accumulator1 = CosineWave(output_r, frameCount, phase_accumulator1, sine_freq1);
                    break;
                case TestChannels.Both:
                    SineWave(output_l, frameCount, phase_accumulator1, sine_freq1);
                    phase_accumulator1 = CosineWave(output_l, frameCount, phase_accumulator1, sine_freq1);
                    // FIX: the right channel maintains phase_accumulator2, but the
                    // original seeded its SineWave with phase_accumulator1.
                    SineWave(output_r, frameCount, phase_accumulator2, sine_freq1);
                    phase_accumulator2 = CosineWave(output_r, frameCount, phase_accumulator2, sine_freq1);
                    break;
            }
            break;
        case SignalSource.NOISE:
            switch (ChannelTest)
            {
                case TestChannels.Both:
                    Noise(output_l, frameCount);
                    Noise(output_r, frameCount);
                    break;
                case TestChannels.Left:
                    Noise(output_l, frameCount);
                    break;
                case TestChannels.Right:
                    Noise(output_r, frameCount);
                    break;
            }
            break;
        case SignalSource.TRIANGLE:
            switch (ChannelTest)
            {
                case TestChannels.Both:
                    Triangle(output_l, frameCount, sine_freq1);
                    CopyBuffer(output_l, output_r, frameCount);
                    break;
                case TestChannels.Left:
                    Triangle(output_l, frameCount, sine_freq1);
                    break;
                case TestChannels.Right:
                    Triangle(output_r, frameCount, sine_freq1);
                    break;
            }
            break;
        case SignalSource.SAWTOOTH:
            switch (ChannelTest)
            {
                case TestChannels.Both:
                    Sawtooth(output_l, frameCount, sine_freq1);
                    CopyBuffer(output_l, output_r, frameCount);
                    break;
                case TestChannels.Left:
                    Sawtooth(output_l, frameCount, sine_freq1);
                    break;
                case TestChannels.Right:
                    Sawtooth(output_r, frameCount, sine_freq1);
                    break;
            }
            break;
    }
}

/// <summary>
/// Applies the post-DSP gain: VAC RX scale on receive, high-SWR protection scale
/// times radio volume on transmit.
/// </summary>
private static unsafe void ScaleG6Output(float* output_l, float* output_r, int frameCount)
{
    if (!mox)
    {
        ScaleBuffer(output_l, output_l, frameCount, (float)(vac_rx_scale));
        ScaleBuffer(output_r, output_r, frameCount, (float)(vac_rx_scale));
    }
    else
    {
        ScaleBuffer(output_l, output_l, frameCount, (float)(high_swr_scale * radio_volume));
        ScaleBuffer(output_r, output_r, frameCount, (float)(high_swr_scale * radio_volume));
    }
}

/// <summary>
/// Packs the processed float block into the device output buffer as 8-byte frames of
/// two 32-bit big-endian samples (lowest byte always 0, i.e. 24 significant bits),
/// applying AF volume on receive or PWR on transmit, and honoring TX channel swap.
/// FIX: the original no-resample, non-swapped loop emitted conv[1],conv[2],conv[3]
/// (little-endian) while every other pack loop emitted conv[3],conv[2],conv[1];
/// all paths now share the big-endian order.
/// </summary>
private static unsafe void PackG6Output(byte* out_data, float* output_l, float* output_r, int frameCount)
{
    byte[] conv;
    float vol = (float)(console.AF / 1e6);
    if (mox)
        vol = (float)(console.PWR / 1e6);
    // On a little-endian host, BitConverter.GetBytes(int) yields conv[3] as the MSB.
    float* first = console.TX_IQ_channel_swap ? output_r : output_l;
    float* second = console.TX_IQ_channel_swap ? output_l : output_r;
    int j = 0;
    for (int i = 0; i < frameCount; i++)
    {
        int f = (int)((double)(first[i] * vol * 0x7FFFFFFF));
        conv = System.BitConverter.GetBytes(f);
        out_data[j] = 0;
        out_data[j + 1] = conv[3];
        out_data[j + 2] = conv[2];
        out_data[j + 3] = conv[1];
        f = (int)((double)(second[i] * vol * 0x7FFFFFFF));
        conv = System.BitConverter.GetBytes(f);
        out_data[j + 4] = 0;
        out_data[j + 5] = conv[3];
        out_data[j + 6] = conv[2];
        out_data[j + 7] = conv[1];
        j += 8;
    }
}
// Native-side entry point that starts video capture mode with the given camera
// settings and microphone/audio capture mode, invoking the callback once video
// mode has started.
// NOTE(review): declared `extern` with no [DllImport]/binding attribute visible on
// this line — presumably the internal-call attribute sits on an adjacent line
// outside this view; confirm before relying on this declaration alone.
private extern void StartVideoMode_Internal(CameraParameters cameraParameters, AudioState audioState, OnVideoModeStartedCallback onVideoModeStartedCallback);