/// <summary>
/// Workflow approval action: approves/rejects step <paramref name="Id"/> on behalf
/// of the logged-in user and returns the business-layer result.
/// </summary>
/// <param name="Id">Workflow instance id; must be positive.</param>
/// <param name="AudioStatus">Approval decision value forwarded to the business layer.</param>
/// <param name="Memo">Free-text remark attached to the approval.</param>
/// <returns>A <c>Result</c> whose Succeed/Message reflect validation or the business call.</returns>
public Result ApprovaIsAgree([FromForm] int Id, [FromForm] AudioStatus AudioStatus, [FromForm] string Memo)
{
    int userId = UToken.Id;

    // Fix: the original assigned both validation messages into one variable, so a
    // missing-login error was silently overwritten by the missing-Id error (and a
    // stray empty statement followed the return). Guard clauses report the first
    // failure and fail fast.
    if (userId <= 0)
    {
        return new Result() { Succeed = false, Message = "登录信息丢失" };
    }
    if (Id <= 0)
    {
        return new Result() { Succeed = false, Message = "流程参数丢失" };
    }

    string msg = string.Empty;
    var resultData = workflowMainBusiness.ApprovaIsAgree(Id, AudioStatus, Memo, userId, ref msg);
    return new Result() { Succeed = resultData, Message = msg };
}
/// <summary>
/// Decompiled OGG output constructor: wraps the source stream, sizes a DirectSound
/// secondary buffer divided into 5 notification segments, and primes playback state.
/// Identifier names are decompiler-generated; semantics are noted where the code shows them.
/// </summary>
/// <param name="stream11">Source audio stream, wrapped in a Stream4 with a constant 16 parameter.</param>
/// <param name="device1">DirectSound device to render on; when null a private device is created and owned.</param>
/// <param name="int5">Size hint passed to the stream format's method_0 to compute the buffer size.</param>
public OggOutput(GenericAudioStream stream11, Device device1, int int5)
{
    _stream10 = new Stream4(stream11, 16);
    // Total buffer size from the format object, rounded down to a multiple of 5 so it
    // splits evenly into 5 segments; _int0 is the per-segment size.
    _int1 = _stream10.vmethod_0().method_0(int5);
    _int1 -= _int1 % 5;
    _int0 = _int1 / 5;
    // int_0 * short_1 / 1000 — presumably bytes-per-millisecond (sample rate x block
    // align); TODO confirm against the Stream4 format type.
    _double0 = _stream10.vmethod_0().int_0 *(double)_stream10.vmethod_0().short_1 / 1000.0;
    _byte0 = new byte[_int0];
    _device0 = device1;
    if (_device0 == null)
    {
        // No device supplied: create one bound to the desktop window; _bool0 records
        // that this instance owns the device.
        _device0 = new Device();
        _device0.SetCooperativeLevel(GetDesktopWindow(), CooperativeLevel.Normal);
        _bool0 = true;
    }
    _secondaryBuffer0 = new SecondaryBuffer(new BufferDescription
    {
        BufferBytes = _int1,
        ControlPositionNotify = true,
        CanGetCurrentPosition = true,
        ControlVolume = true,
        GlobalFocus = true,
        StickyFocus = true,
        Format = smethod_0(_stream10.vmethod_0())
    }, _device0);
    _secondaryBuffer0.SetCurrentPosition(0);
    _int2 = 0;
    _secondaryBuffer0.Volume = 0;
    // Position-notify event: DirectSound signals this handle as playback crosses
    // the registered buffer offsets.
    _autoResetEvent0 = new AutoResetEvent(false);
    _bufferPositionNotify1[0].EventNotifyHandle = _autoResetEvent0.Handle;
    _enum10 = AudioStatus.ShouldStopAudio;
}
/// <summary>
/// Starts (or restarts) playback. If a player exists and is actively playing, it is
/// muted and restarted on a thread-pool worker; otherwise the player is rebuilt.
/// </summary>
public void DifferentStartPlaying()
{
    // Already flagged to start — nothing to do. (Original comment noted this
    // branch is never expected to fire.)
    if (this.enum1_0 == AudioStatus.ShouldStartAudio)
    {
        return;
    }
    // A live, playing player: mute it, trigger its restart hook, and finish the
    // work asynchronously on the thread pool.
    if (this.class159_0 != null && this.enum1_0 == AudioStatus.IsCurrentlyPlayingAudio && !this.class159_0.method_5())
    {
        this.enum1_0 = AudioStatus.ShouldStartAudio;
        this.class159_0.SetVolume(0f);
        this.class159_0.method_3();
        // Fix: the original lazily null-checked a local WaitCallback that was always
        // null at this point (decompiler artifact) — queue the delegate directly.
        ThreadPool.QueueUserWorkItem(new WaitCallback(this.method_2));
        return;
    }
    // No usable player: tear down and rebuild from the current format/volume settings.
    this.StopPlaying();
    this.enum1_0 = AudioStatus.ShouldStartAudio;
    this.class159_0 = new AudioPlayer(-1, this.class16_0.waveFormat_0, 200, this.Volume, this.bool_0, new Delegate3(this.method_0));
}
/// <summary>
/// Resumes or restarts playback depending on the current audio state: delegates to
/// the resume helper while playing, or rewinds, refills all five buffer segments and
/// starts looping playback when stopped.
/// </summary>
public void DifferentStartPlaying()
{
    if (this.enum1_0 == AudioStatus.IsCurrentlyPlayingAudio)
    {
        // Already playing: the resume helper handles this case.
        this.method_0();
        return;
    }
    if (this.enum1_0 != AudioStatus.ShouldStopAudio)
    {
        return;
    }
    // Fully stopped: reset counters and position, then prime the buffer.
    this.method_9();
    this.method_2(5);
    this.long_0 = 0L;
    this.int_2 = 0;
    this.secondaryBuffer_0.SetCurrentPosition(0);
    this.method_4();
    int segment = 0;
    while (segment < 5)
    {
        this.method_7();
        segment++;
    }
    this.secondaryBuffer_0.SetCurrentPosition(0);
    this.secondaryBuffer_0.Volume = 0;
    this.secondaryBuffer_0.Play(0, BufferPlayFlags.Looping);
    this.enum1_0 = AudioStatus.ShouldStartAudio;
}
/// <summary>
/// Resumes or restarts playback based on the current state: while playing it calls
/// the resume helper; when fully stopped it rewinds, refills the five buffer
/// segments and starts looping playback.
/// </summary>
public void DifferentStartPlaying()
{
    switch (_enum10)
    {
        case AudioStatus.IsCurrentlyPlayingAudio:
            method_0();
            return;

        case AudioStatus.ShouldStopAudio:
            // Reset counters/position, prime all five segments, then loop.
            method_9();
            method_2(5);
            _long0 = 0L;
            _int2 = 0;
            _secondaryBuffer0.SetCurrentPosition(0);
            method_4();
            for (var segment = 0; segment < 5; segment++)
            {
                method_7();
            }
            _secondaryBuffer0.SetCurrentPosition(0);
            _secondaryBuffer0.Volume = 0;
            _secondaryBuffer0.Play(0, BufferPlayFlags.Looping);
            _enum10 = AudioStatus.ShouldStartAudio;
            return;
    }
}
/// <summary>
/// Appends a chunk of audio to the active ISR session and returns any partial
/// recognition result already available from the engine.
/// </summary>
/// <param name="buffer">Raw audio bytes to send.</param>
/// <param name="offset">Offset within <paramref name="buffer"/> — NOTE(review): the
/// native copy below always starts at index 0, so this parameter is currently
/// ignored; presumably callers pass 0 — confirm.</param>
/// <param name="length">Number of bytes to send; must not exceed BLOCK_LEN.</param>
/// <param name="audio_status">First/continue/last flag for this audio block.</param>
/// <param name="ep_status">Receives the end-point detector (End-point detected) state.</param>
/// <returns>The partial result text, or null when the engine has nothing yet.</returns>
/// <exception cref="ArgumentException">When <paramref name="length"/> exceeds BLOCK_LEN.</exception>
public string AppendAudio(byte[] buffer, int offset, int length, AudioStatus audio_status, out EndPointStatusEnums ep_status)
{
    if (length > BLOCK_LEN)
    {
        // Idiom fix: nameof is refactor-safe (same runtime value as the old literal).
        throw new ArgumentException("length超过了最大发送长度。", nameof(length));
    }

    string partResult = null;
    RecogStatusEnums rec_status; // recognizer state after this write

    // Copy the managed bytes into the pre-allocated native buffer, then push
    // them to the recognition server.
    Marshal.Copy(buffer, 0, ptrBuffer, length);
    IsrInterop.AudioWrite(sessionId, ptrBuffer, (uint)length, audio_status, out ep_status, out rec_status);

    // The server already has a partial result cached in MSC: fetch it and
    // accumulate it into the running result.
    if (rec_status == RecogStatusEnums.ISR_REC_STATUS_SUCCESS)
    {
        partResult = IsrInterop.GetResult(sessionId, out this.rslt_status, waitTime);
        if (!string.IsNullOrEmpty(partResult))
        {
            this.result.Append(partResult);
        }
    }
    return partResult;
}
/// <summary>
/// Starts (or restarts) playback. If a player exists and is actively playing, it is
/// muted and restarted on a thread-pool worker; otherwise the player is rebuilt.
/// </summary>
public void DifferentStartPlaying()
{
    // Already flagged to start — nothing to do. (Original comment noted this
    // branch is never expected to fire.)
    if (_enum10 == AudioStatus.ShouldStartAudio)
    {
        return;
    }
    // A live, playing player: mute it, trigger its restart hook, and finish the
    // work asynchronously on the thread pool.
    if (_class1590 != null && _enum10 == AudioStatus.IsCurrentlyPlayingAudio && !_class1590.method_5())
    {
        _enum10 = AudioStatus.ShouldStartAudio;
        _class1590.SetVolume(0f);
        _class1590.method_3();
        // Fix: the original lazily null-checked a local WaitCallback that was always
        // null at this point (decompiler artifact) — queue the method group directly.
        ThreadPool.QueueUserWorkItem(method_2);
        return;
    }
    // No usable player: tear down and rebuild from the current format/volume settings.
    StopPlaying();
    _enum10 = AudioStatus.ShouldStartAudio;
    _class1590 = new AudioPlayer(-1, _class160.WaveFormat0, 200, _volume, _bool0, method_0);
}
/// <summary>
/// Decompiled OGG output constructor: wraps the source stream, sizes a DirectSound
/// secondary buffer divided into 5 notification segments, and primes playback state.
/// Identifier names are decompiler-generated; semantics are noted where the code shows them.
/// </summary>
/// <param name="stream1_1">Source audio stream, wrapped in a Stream4 with a constant 16 parameter.</param>
/// <param name="device_1">DirectSound device to render on; when null a private device is created and owned.</param>
/// <param name="int_5">Size hint passed to the stream format's method_0 to compute the buffer size.</param>
public OGGOutput(GenericAudioStream stream1_1, Device device_1, int int_5)
{
    this.stream1_0 = new Stream4(stream1_1, 16);
    // Total buffer size from the format object, rounded down to a multiple of 5 so it
    // splits evenly into 5 segments; int_0 is the per-segment size.
    this.int_1 = this.stream1_0.vmethod_0().method_0(int_5);
    this.int_1 -= this.int_1 % 5;
    this.int_0 = this.int_1 / 5;
    // int_0 * short_1 / 1000 — presumably bytes-per-millisecond (sample rate x block
    // align); TODO confirm against the Stream4 format type.
    this.double_0 = (double)this.stream1_0.vmethod_0().int_0 *(double)this.stream1_0.vmethod_0().short_1 / 1000.0;
    this.byte_0 = new byte[this.int_0];
    this.device_0 = device_1;
    if (this.device_0 == null)
    {
        // No device supplied: create one bound to the desktop window; bool_0 records
        // that this instance owns the device.
        this.device_0 = new Device();
        this.device_0.SetCooperativeLevel(OGGOutput.GetDesktopWindow(), CooperativeLevel.Normal);
        this.bool_0 = true;
    }
    this.secondaryBuffer_0 = new SecondaryBuffer(new BufferDescription
    {
        BufferBytes = this.int_1,
        ControlPositionNotify = true,
        CanGetCurrentPosition = true,
        ControlVolume = true,
        GlobalFocus = true,
        StickyFocus = true,
        Format = OGGOutput.smethod_0(this.stream1_0.vmethod_0())
    }, this.device_0);
    this.secondaryBuffer_0.SetCurrentPosition(0);
    this.int_2 = 0;
    this.secondaryBuffer_0.Volume = 0;
    // Position-notify event: DirectSound signals this handle as playback crosses
    // the registered buffer offsets.
    this.autoResetEvent_0 = new AutoResetEvent(false);
    this.bufferPositionNotify_1[0].EventNotifyHandle = this.autoResetEvent_0.Handle;
    this.enum1_0 = AudioStatus.ShouldStopAudio;
}
/// <summary>
/// Play/pause toggle button: restarts from the beginning when stopped, resumes
/// when paused, and pauses when playing. Updates the button glyph to match.
/// </summary>
private void BTNplay_Click(object sender, RoutedEventArgs e)
{
    if (status == AudioStatus.stop)
    {
        // Restart from the top and kick off progress tracking.
        ICONplay.Glyph = "\xE103";
        end = false;
        MEvoice.Position = new TimeSpan(0);
        pre = new TimeSpan(0);
        MEvoice.Play();
        status = AudioStatus.playing;
        getProgress();
    }
    else if (status == AudioStatus.pause)
    {
        // Resume where we left off.
        ICONplay.Glyph = "\xE103";
        status = AudioStatus.playing;
        MEvoice.Play();
    }
    else if (status == AudioStatus.playing)
    {
        // Pause and swap back to the other glyph.
        ICONplay.Glyph = "\xE102";
        status = AudioStatus.pause;
        MEvoice.Pause();
    }
}
/// <summary>
/// Stops playback. From a thread other than the worker, the stop request is
/// marshalled onto the worker via the command queue; on the worker thread itself
/// the wave-out device is reset directly.
/// </summary>
/// <exception cref="Exception4">When waveOutReset reports a failure.</exception>
public void StopPlaying()
{
    bool onWorkerThread = Thread.CurrentThread.ManagedThreadId == this.thread_0.ManagedThreadId;
    if (!onWorkerThread)
    {
        // Hand the stop command to the worker and wake it up.
        lock (this.queue_1)
        {
            this.queue_1.Enqueue(new WaveOutput.Class164(WaveOutput.Enum19.const_1, null));
            this.autoResetEvent_0.Set();
        }
        return;
    }
    this.enum1_0 = AudioStatus.ShouldStopAudio;
    this.bool_0 = false;
    Enum18 resetResult;
    lock (this.object_0)
    {
        resetResult = Class162.waveOutReset(this.intptr_0);
    }
    if (resetResult != Enum18.const_0)
    {
        throw new Exception4(resetResult, "waveOutReset");
    }
    this.stopwatch_0.Reset();
}
/// <summary>
/// Stops playback. Cross-thread calls are marshalled onto the worker via the
/// command queue; on the worker thread the wave-out device is reset directly.
/// </summary>
/// <exception cref="Exception4">When waveOutReset reports a failure.</exception>
public void StopPlaying()
{
    var callerThreadId = Thread.CurrentThread.ManagedThreadId;
    if (callerThreadId != _thread0.ManagedThreadId)
    {
        // Not on the worker: enqueue a stop command and signal the worker loop.
        lock (_queue1)
        {
            _queue1.Enqueue(new Class164(Enum19.Const1, null));
            _autoResetEvent0.Set();
        }
        return;
    }

    _enum10 = AudioStatus.ShouldStopAudio;
    _bool0 = false;

    Enum18 deviceResult;
    lock (_object0)
    {
        deviceResult = Class162.waveOutReset(_intptr0);
    }
    if (deviceResult != Enum18.Const0)
    {
        throw new Exception4(deviceResult, "waveOutReset");
    }
    _stopwatch0.Reset();
}
/// <summary>
/// Per-frame poll: flips the status to Stop once the attached AudioSource has
/// finished playing.
/// </summary>
public void Update()
{
    if (Status != AudioStatus.Play)
    {
        return;
    }
    if (!GetComponent<AudioSource>().isPlaying)
    {
        Status = AudioStatus.Stop;
    }
}
/// <summary>
/// Initializes the audio control: three buttons, stopped state, and one-time
/// GUI setup pending (performed on the first OnGUI call).
/// </summary>
public ControlAudio()
{
    Status = AudioStatus.Stop;
    NumButtons = 3;
    first = true;
}
/// <summary>
/// Resume helper: restarts looping playback of the secondary buffer when the
/// state machine says audio is currently playing, then flags a pending start.
/// </summary>
private void method_0()
{
    if (this.enum1_0 != AudioStatus.IsCurrentlyPlayingAudio)
    {
        return;
    }
    this.secondaryBuffer_0.Play(0, BufferPlayFlags.Looping);
    this.enum1_0 = AudioStatus.ShouldStartAudio;
}
/// <summary>
/// Transitions from the pending-start state: stops the secondary buffer and
/// marks the audio as currently playing.
/// </summary>
public void StartPlaying()
{
    if (this.enum1_0 != AudioStatus.ShouldStartAudio)
    {
        return;
    }
    this.secondaryBuffer_0.Stop();
    this.enum1_0 = AudioStatus.IsCurrentlyPlayingAudio;
}
/// <summary>
/// Hard-stops OpenAL playback: aborts the worker thread, stops the source and
/// releases its queued buffers. Call order matters — the thread is killed before
/// the source is touched.
/// NOTE(review): Thread.Abort is obsolete and throws PlatformNotSupportedException
/// on .NET 5+ — confirm the target runtime before reusing this code.
/// </summary>
public void StopPlaying()
{
    this.thread_0.Abort();
    this.enum1_0 = AudioStatus.ShouldStopAudio;
    Class119.alSourceStop(this.intptr_1);
    this.stopwatch_0.Reset();
    // Releases intptr_2.Length buffers associated with the source — presumably
    // an unqueue/delete helper; confirm against Class119.
    Class119.smethod_7(this.intptr_1, this.intptr_2.Length);
}
/// <summary>
/// Hard-stops OpenAL playback: aborts the worker thread, stops the source and
/// releases its queued buffers. Call order matters — the thread is killed before
/// the source is touched.
/// NOTE(review): Thread.Abort is obsolete and throws PlatformNotSupportedException
/// on .NET 5+ — confirm the target runtime before reusing this code.
/// </summary>
public void StopPlaying()
{
    _thread0.Abort();
    _enum10 = AudioStatus.ShouldStopAudio;
    Class119.alSourceStop(_intptr1);
    _stopwatch0.Reset();
    // Releases _intptr2.Length buffers associated with the source — presumably
    // an unqueue/delete helper; confirm against Class119.
    Class119.smethod_7(_intptr1, _intptr2.Length);
}
/// <summary>
/// Stops DirectSound playback in a fixed order: mute first (-10000 is DirectSound's
/// minimum volume, i.e. full attenuation — presumably to avoid an audible click),
/// run the two teardown helpers, stop the buffer, then record the stopped state.
/// </summary>
public void StopPlaying()
{
    this.secondaryBuffer_0.Volume = -10000;
    this.method_0();
    this.method_9();
    this.secondaryBuffer_0.Stop();
    this.enum1_0 = AudioStatus.ShouldStopAudio;
}
/// <summary>
/// Stops DirectSound playback in a fixed order: mute first (-10000 is DirectSound's
/// minimum volume, i.e. full attenuation — presumably to avoid an audible click),
/// run the two teardown helpers, stop the buffer, then record the stopped state.
/// </summary>
public void StopPlaying()
{
    _secondaryBuffer0.Volume = -10000;
    method_0();
    method_9();
    _secondaryBuffer0.Stop();
    _enum10 = AudioStatus.ShouldStopAudio;
}
/// <summary>
/// Resume helper: restarts looping playback of the secondary buffer when the
/// state machine says audio is currently playing, then flags a pending start.
/// </summary>
private void method_0()
{
    if (_enum10 != AudioStatus.IsCurrentlyPlayingAudio)
    {
        return;
    }
    _secondaryBuffer0.Play(0, BufferPlayFlags.Looping);
    _enum10 = AudioStatus.ShouldStartAudio;
}
/// <summary>
/// Transitions from the pending-start state: stops the secondary buffer and
/// marks the audio as currently playing.
/// </summary>
public void StartPlaying()
{
    if (_enum10 != AudioStatus.ShouldStartAudio)
    {
        return;
    }
    _secondaryBuffer0.Stop();
    _enum10 = AudioStatus.IsCurrentlyPlayingAudio;
}
/// <summary>
/// MediaElement end-of-track handler: flags completion (under the shared lock,
/// because the progress loop also reads <c>end</c>), resets the status to stopped
/// and restores the play glyph.
/// </summary>
private void MEvoice_MediaEnded(object sender, RoutedEventArgs e)
{
    lock (o)
    {
        end = true;
    }
    status = AudioStatus.stop;
    // \xE102 — presumably the "play" icon in the glyph font; confirm against
    // the font used by ICONplay.
    ICONplay.Glyph = "\xE102";
}
/// <summary>
/// Transitions from "should start" to "playing" and kicks the underlying player.
/// No-op when no player exists or the state is not pending-start.
/// </summary>
public void StartPlaying()
{
    if (_class1590 == null || _enum10 != AudioStatus.ShouldStartAudio)
    {
        return;
    }
    _enum10 = AudioStatus.IsCurrentlyPlayingAudio;
    _class1590.method_4();
}
/// <summary>
/// Starts playback from the pending-start state: captures the resume offset
/// before StopPlaying resets device state, seeks the stream there, and flips to
/// the playing state.
/// </summary>
public void StartPlaying()
{
    if (this.enum1_0 != AudioStatus.ShouldStartAudio)
    {
        return;
    }
    int resumeOffset = this.vmethod_0();
    this.StopPlaying();
    this.int_3 = resumeOffset;
    this.stream1_0.Position = (long)resumeOffset;
    this.enum1_0 = AudioStatus.IsCurrentlyPlayingAudio;
}
/// <summary>
/// Submits buffered audio to the iFlytek ISR session in chunks, finalizes the
/// stream, and polls the engine for the recognized text.
/// </summary>
/// <param name="data">Audio chunks to submit in order; the first is flagged FIRST,
/// the rest CONTINUE, followed by an empty LAST write.</param>
/// <returns>The recognized text, or "" when the final write fails.</returns>
private string Recognition(IList<byte[]> data)
{
    string rec_result = "";
    int error_code = 0;

    // Submit the audio chunk by chunk.
    for (int i = 0; i < data.Count; i++)
    {
        aud_stat = (i == 0)
            ? AudioStatus.ISR_AUDIO_SAMPLE_FIRST
            : AudioStatus.ISR_AUDIO_SAMPLE_CONTINUE;
        Console.Write(">");
        error_code = iFlyDll.QISRAudioWrite(session_id, data[i], (uint)data[i].Length, aud_stat, ref ep_stat, ref rec_stat);
        if ((int)ErrorCode.MSP_SUCCESS != error_code)
        {
            // NOTE(review): the session is ended here but the loop keeps writing to
            // it — preserved as-is; confirm whether a break/return was intended.
            iFlyDll.QISRSessionEnd(session_id, null);
        }
    }

    // Tell the engine the audio stream is complete.
    error_code = iFlyDll.QISRAudioWrite(session_id, null, 0, AudioStatus.ISR_AUDIO_SAMPLE_LAST, ref ep_stat, ref rec_stat);
    if ((int)ErrorCode.MSP_SUCCESS != error_code)
    {
        // Fix: the original concatenated error_code into the format string
        // ("...{0}" + error_code), leaving the {0} placeholder unfilled, which
        // makes String.Format throw FormatException at runtime.
        Log(String.Format("QISRAudioWrite failed! error code: {0}", error_code));
        return "";
    }

    // Poll until the engine reports the utterance is complete, accumulating
    // partial results as they arrive.
    while (RecogStatus.MSP_REC_STATUS_SPEECH_COMPLETE != rec_stat)
    {
        IntPtr rslt = iFlyDll.QISRGetResult(session_id, ref rec_stat, 0, ref error_code);
        if ((int)ErrorCode.MSP_SUCCESS != error_code)
        {
            Log(String.Format("QISRGetResult failed, error code: {0}", error_code));
            break;
        }
        if (IntPtr.Zero != rslt)
        {
            rec_result = rec_result + Utils.Ptr2Str(rslt);
            if (rec_result.Length >= BUFFER_SIZE)
            {
                Log("no enough buffer for rec_result !");
                break;
            }
        }
    }

    // Close the session; the original captured the return into an unused local.
    iFlyDll.QISRSessionEnd(session_id, "end");
    return rec_result;
}
/// <summary>
/// Transitions from "should start" to "playing" and kicks the underlying player.
/// No-op when no player exists or the state is not pending-start.
/// </summary>
public void StartPlaying()
{
    if (this.class159_0 == null)
    {
        return;
    }
    if (this.enum1_0 != AudioStatus.ShouldStartAudio)
    {
        return;
    }
    this.enum1_0 = AudioStatus.IsCurrentlyPlayingAudio;
    this.class159_0.method_4();
}
/// <summary>
/// Immediate-mode GUI: draws the play/pause/stop buttons inside a transparent
/// group and drives the attached AudioSource according to the current status.
/// The highlighted ("selected") texture is shown for the active state's button.
/// </summary>
public void OnGUI()
{
    if (first)
    {
        // One-time layout/texture setup on the first frame.
        initGUI();
        first = false;
    }
    SetupGUI();
    GUI.backgroundColor = Color.clear;
    GUI.BeginGroup(new Rect(BoxOffsetX, BoxOffsetY, BoxWidth, BoxHeight));

    var playRect = new Rect(ButtonOffsetX, ButtonOffsetY, ButtonWidth, ButtonHeight);
    var pauseRect = new Rect(ButtonOffsetX + ButtonOffsetXstep, ButtonOffsetY, ButtonWidth, ButtonHeight);
    var stopRect = new Rect(ButtonOffsetX + ButtonOffsetXstep * 2, ButtonOffsetY, ButtonWidth, ButtonHeight);

    // Play: start playback unless already playing.
    if (GUI.Button(playRect, Status == AudioStatus.Play ? AudioPlaySelected : AudioPlay)
        && Status != AudioStatus.Play)
    {
        GetComponent<AudioSource>().Play();
        Status = AudioStatus.Play;
    }

    // Pause: toggles pause/resume; does nothing when stopped.
    if (GUI.Button(pauseRect, Status == AudioStatus.Pause ? AudioPauseSelected : AudioPause))
    {
        if (Status == AudioStatus.Play)
        {
            GetComponent<AudioSource>().Pause();
            Status = AudioStatus.Pause;
        }
        else if (Status == AudioStatus.Pause)
        {
            GetComponent<AudioSource>().Play();
            Status = AudioStatus.Play;
        }
    }

    // Stop: halt playback unless already stopped.
    if (GUI.Button(stopRect, Status == AudioStatus.Stop ? AudioStopSelected : AudioStop)
        && Status != AudioStatus.Stop)
    {
        GetComponent<AudioSource>().Stop();
        Status = AudioStatus.Stop;
    }

    GUI.EndGroup();
}
/// <summary>
/// Starts playback from the pending-start state: captures the resume offset
/// before StopPlaying resets device state, seeks the stream there, and flips to
/// the playing state.
/// </summary>
public void StartPlaying()
{
    if (_enum10 != AudioStatus.ShouldStartAudio)
    {
        return;
    }
    var resumeOffset = vmethod_0();
    StopPlaying();
    _int3 = resumeOffset;
    _stream10.Position = resumeOffset;
    _enum10 = AudioStatus.IsCurrentlyPlayingAudio;
}
/// <summary>
/// Lazily starts the five stream segments on first use (flagging a pending
/// start), then services the queue via method_6.
/// </summary>
private void method_5()
{
    if (!_bool0)
    {
        _enum10 = AudioStatus.ShouldStartAudio;
        var segment = 0;
        while (segment < 5)
        {
            _class1580[segment].method_1();
            segment++;
        }
        _bool0 = true;
    }
    method_6();
}
/// <summary>
/// Lazily starts the five stream segments on first use (flagging a pending
/// start), then services the queue via method_6.
/// </summary>
private void method_5()
{
    if (this.bool_0)
    {
        this.method_6();
        return;
    }
    this.enum1_0 = AudioStatus.ShouldStartAudio;
    for (int index = 0; index < 5; index++)
    {
        this.class158_0[index].method_1();
    }
    this.bool_0 = true;
    this.method_6();
}
/// <summary>
/// Helper for handling album commands: searches for an album, lazily sets up the
/// TCP audio stream to the speakers on first use, then either plays the album or
/// adds it to the queue depending on the STT action tag.
/// NOTE(review): async void — exceptions thrown here are unobservable; acceptable
/// only if this is invoked as an event handler. The TcpListener(int) constructor
/// is obsolete (binds to any address) — confirm this is intended.
/// </summary>
/// <param name="query">The album to search for.</param>
/// <param name="tags">The tags from STT; tags["action"] is "play" or "add".</param>
private async void HandleAlbums(string query, dynamic tags)
{
    // Only the first search hit is considered (page 0, count 1).
    Search search = await session.SearchAlbums(query, 0, 1);
    if (search.TotalAlbums != 0)
    {
        Album album = await search.Albums[0];
        if (client == null)
        {
            // First use: ask the audioOutput service to connect to our stream
            // port, then block until it does (AcceptTcpClient is synchronous).
            listener = new TcpListener(port);
            listener.Start();
            await Query("audioOutput", "stream_spotify", new { port = port, ip = ipAddress }, new string[] { "speakers" });
            client = listener.AcceptTcpClient();
        }
        if (tags["action"] == "play")
        {
            session.Play(await queue.PlayAlbum(album));
            audioStatus = AudioStatus.Playing;
        }
        else if (tags["action"] == "add")
        {
            // "add" while stopped behaves like "play"; otherwise just enqueue.
            if (audioStatus == AudioStatus.Stopped)
            {
                session.Play(await queue.PlayAlbum(album));
                audioStatus = AudioStatus.Playing;
            }
            else
            {
                queue.AddAlbum(album);
            }
        }
    }
}
/// <summary>
/// End-of-track callback: records that playback stopped, then advances to the
/// next queued song if one is available.
/// </summary>
/// <param name="sender">The session raising the event.</param>
/// <param name="e">The event args.</param>
private void session_EndOfTrack(Session sender, SessionEventArgs e)
{
    audioStatus = AudioStatus.Stopped;
    if (queue.IsEmpty())
    {
        return;
    }
    NextSong();
}
/// <summary>
/// Plays the next song in the play queue: dequeues a track, swaps it into the
/// player (unload before load — presumably required by the session API; confirm),
/// starts playback and records the playing state.
/// </summary>
private void NextSong()
{
    Track track = queue.Dequeue();
    session.PlayerUnload();
    session.PlayerLoad(track);
    session.PlayerPlay();
    audioStatus = AudioStatus.Playing;
}
/// <summary>
/// Dispatches a music-player voice command: resume ("play" while paused),
/// pause ("pause" while playing), clear the queue, or skip to the next track.
/// </summary>
/// <param name="tags">The tags received from STT; tags["action"] selects the command.</param>
private void HandleCommands(dynamic tags)
{
    var action = tags["action"];
    if (action == "play" && audioStatus == AudioStatus.Paused)
    {
        session.PlayerPlay();
        audioStatus = AudioStatus.Playing;
    }
    else if (action == "pause" && audioStatus == AudioStatus.Playing)
    {
        session.PlayerPause();
        audioStatus = AudioStatus.Paused;
    }
    else if (action == "clear queue")
    {
        queue.Clear();
    }
    else if (action == "next" && !queue.IsEmpty())
    {
        NextSong();
    }
}
/// <summary>
/// P/Invoke into the iFlytek MSC library: writes a block of audio to an active
/// recognition session and reports the detector/recognizer states.
/// </summary>
/// <param name="sessionID">Session handle — presumably returned by QISRSessionBegin; confirm.</param>
/// <param name="waveData">Pointer to the raw audio bytes (IntPtr.Zero for the final empty write).</param>
/// <param name="waveLen">Number of bytes at <paramref name="waveData"/>.</param>
/// <param name="audioStatus">First/continue/last flag for this block.</param>
/// <param name="epStatus">Receives the end-point detector state.</param>
/// <param name="recogStatus">Receives the recognizer state.</param>
/// <returns>An engine error code; 0 (MSP_SUCCESS) on success.</returns>
public static extern int QISRAudioWrite(string sessionID, IntPtr waveData, uint waveLen, AudioStatus audioStatus, ref EpStatus epStatus, ref RecogStatus recogStatus);