void IInjectInitialize.Initialize()
{
    Console.WriteLine("PspAudioWaveOutImpl.Initialize()!");
    if (m_Player == null)
    {
        m_Player = new WaveOutPlayer(
            -1,
            new WaveFormat(rate: Frequency, bits: 16, channels: NumberOfChannels),
            BufferSize,
            NumberOfBuffers,
            BufferFillEventHandler);
    }
    Initialized = true;
}
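The last constructor argument above is a fill callback that WaveOutPlayer invokes whenever a device buffer needs more data. Below is a minimal sketch of such a callback, assuming WaveLib's delegate shape (void Filler(IntPtr data, int size)) and a hypothetical m_Fifo.Read(byte[], int, int) sample source; the project's actual handler may differ:

private void BufferFillEventHandler(IntPtr data, int size)
{
    byte[] buffer = new byte[size];
    // Pull queued PCM from the FIFO (hypothetical API); pad any shortfall with silence.
    int read = m_Fifo.Read(buffer, 0, size);
    for (int i = read; i < size; i++)
        buffer[i] = 0;
    // Copy the managed buffer into the unmanaged wave buffer.
    System.Runtime.InteropServices.Marshal.Copy(buffer, 0, data, size);
}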
public void Start(int bufSize)
{
    Stop();
    if (m_AudioStream != null)
    {
        m_AudioStream.Position = 0;
        m_Player = new WaveOutPlayer(-1, m_Format, bufSize, 3, new BufferFillEventHandler(Filler));
        this.IsRunning = true;
    }
}
public void Stop()
{
    if (m_Player != null)
    {
        try
        {
            m_Player.Dispose();
        }
        finally
        {
            m_Player = null;
        }
    }
    this.IsRunning = false;
}
private void Stop()
{
    if (m_Player != null)
    {
        try
        {
            m_Player.Dispose();
        }
        finally
        {
            m_Player = null;
        }
    }
}
private void Play()
{
    // Reiniciar() resets each oscillator before playback starts.
    osc1.Reiniciar();
    osc2.Reiniciar();
    osc3.Reiniciar();
    if (m_Player != null)
    {
        return;  // already playing
    }
    m_Player = new WaveOutPlayer(-1, format, 2048, 3, new WaveLib.BufferFillEventHandler(Filler));
}
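The Filler passed above presumably mixes the three oscillators into the 16-bit output buffer. The following is a sketch under that assumption; NextSample() is a hypothetical oscillator method, not the project's real API:

private void Filler(IntPtr data, int size)
{
    short[] samples = new short[size / 2];  // 16-bit samples
    for (int i = 0; i < samples.Length; i++)
    {
        // Sum the oscillators and clamp to the 16-bit range.
        int mixed = osc1.NextSample() + osc2.NextSample() + osc3.NextSample();
        samples[i] = (short)Math.Max(short.MinValue, Math.Min(short.MaxValue, mixed));
    }
    System.Runtime.InteropServices.Marshal.Copy(samples, 0, data, samples.Length);
}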
public override void StopSynchronized()
{
    //Console.ReadKey();
#if false
    Initialized = false;
    if (m_Player != null)
    {
        m_Player.Stop();
        m_Player = null;
    }
#endif
}
private void ProcessBuffer(ref Array inpBuf, ref int segmentID, ref int cEndPosSec, ref int cStartPosSec)
{
    /* Read the initial time. */
    startTime = DateTime.Now;
    //bufSize = (short)(bufferSize - 1);
    bufSize = bufferSize;
    Console.WriteLine("Size of the buffer is: " + bufSize.ToString());
    processBufferResult = nmsCOMcallee.nmsProcessBuffer(ref inpBuf, ref bufSize, ref emoValsArray,
        ref aIres, ref bStr, ref testbuf, ref testBufLeng, ref brderS);
    cEndPosSec += 2;
    Console.WriteLine("Sound captured and processed");
    Console.WriteLine(processBufferResult);

    /* If the analysis is ready */
    if (processBufferResult == NMS_PROCESS_ANALYSISREADY)
    {
        silenceCount = 0;
        emoVals = CopyValuesFromEmoArrayIntoEmotionsStructure(emoValsArray, aIres);
        String fvStr = CopyValuesFromEmoArrayIntoString(emoValsArray, aIres);
        Console.WriteLine("Features extracted!");
        Console.WriteLine(fvStr);

        /* Patch the feature vector into line 24 of both ARFF files. */
        string[] lines = System.IO.File.ReadAllLines("FeatureVectorValence.arff");
        lines[23] = fvStr;
        System.IO.File.WriteAllLines("FeatureVectorValence.arff", lines);
        lines = System.IO.File.ReadAllLines("FeatureVectorArousal.arff");
        lines[23] = fvStr;
        System.IO.File.WriteAllLines("FeatureVectorArousal.arff", lines);

        // Run command prompt command: start the child process.
        System.Diagnostics.Process p = new System.Diagnostics.Process();
        // Redirect the output stream of the child process.
        p.StartInfo.UseShellExecute = false;
        p.StartInfo.RedirectStandardOutput = true;
        p.StartInfo.FileName = "classify.bat";
        p.Start();
        // Do not wait for the child process to exit before reading to the end
        // of its redirected stream: read the output first and then wait.
        // p.WaitForExit();
        string output = p.StandardOutput.ReadToEnd();
        p.WaitForExit();
        p.Close();
        //System.Console.WriteLine(output);

        string[] tokens = output.Split(':');
        //for (int i = 0; i < tokens.Length; i++)
        //{
        //    Console.WriteLine("#" + i + "-" + tokens[i]);
        //}

        // Parse out the valence and arousal (sign plus first digit of the token).
        if (tokens[3][0] == '-')
            valence = "-" + tokens[3][1].ToString();
        else
            valence = tokens[3][0].ToString();
        if (tokens[6][0] == '-')
            arousal = "-" + tokens[6][1].ToString();
        else
            arousal = tokens[6][0].ToString();
        Console.WriteLine("Valence: " + valence);
        Console.WriteLine("Arousal: " + arousal);

        log = File.AppendText("LogFeatureVector.csv");
        log.WriteLine(dataArrivedTime + "," + fvStr + "," + valence + "," + arousal);
        log.Close();

        voiceOutput = File.AppendText("..\\..\\voiceOutput.txt");
        voiceOutput.WriteLine(valence.ToString() + "," + arousal.ToString());
        voiceOutput.Close();

        nmsCOMcallee.nmsQA_CollectAgentScoreData();
        if (segmentID >= lioNetResultsCache.Count)
        {
            for (int i = 0; i <= 100; i++)
                lioNetResultsCache.Add(string.Empty);
        }
        lioNetResultsCache[segmentID] = bStr;
        nmsCOMcallee.nmsQA_Logdata(ref segmentID, ref cStartPosSec, ref cEndPosSec);
        nmsCOMcallee.nmsSD_LogData();
        nmsCOMcallee.nmsCollectProfiler();
        cStartPosSec = 0;
        segmentID++;

        /* Read the time. */
        stopTime = DateTime.Now;
        ProcDuration += stopTime - startTime;
    }
    /* Voice detected */
    else if (processBufferResult == NMS_PROCESS_VOICEDETECTED && count < countNum && cStartPosSec == 0)
    {
        cEndPosSec -= 2;
        cStartPosSec = cEndPosSec;
    }
    /* The QA5Core failed to identify the buffer */
    else if (processBufferResult == -1 && count < countNum)
    {
        cStartPosSec = 0;
    }
    /* Silence detected */
    else if (processBufferResult == NMS_PROCESS_SILENCE && count < countNum)
    {
        cStartPosSec = 0;
        silenceCount++;
    }

    /* Reset silenceCount if no voice was detected. */
    if (shortTermCount == 0 && silenceCount == 2)
        silenceCount = 0;

    /* Return the dominant emotion after two non-sequential silences. */
    if (silenceCount == 2 && processBufferResult == NMS_PROCESS_SILENCE)
    {
        /* Read the end time. */
        DateTime stoptime = DateTime.Now;
        /* The processing duration. */
        Console.Write("Processing Time: ");
        Console.WriteLine(ProcDuration);
        ProcDuration = TimeSpan.Zero;
    }

    /* If the program is running for a determined time, shut down once it is reached. */
    if (count == countNum)
    {
        cEndPosSec = 0;
        cStartPosSec = 0;
        /* Stop audio output. */
        if (m_Player != null)
        {
            try { m_Player.Dispose(); }
            finally { m_Player = null; }
        }
        /* Stop audio input. */
        if (m_Recorder != null)
        {
            try { m_Recorder.Dispose(); }
            finally { m_Recorder = null; }
        }
        /* Clear all pending data. */
        m_Fifo.Flush();
    }
    /* Count this iteration of the defined run time. */
    count++;

    /* Block until the consumer deletes voiceOutput.txt. */
    while (File.Exists(@"..\..\voiceOutput.txt"))
    {
        Console.WriteLine("Waiting for Alex...");
        Thread.Sleep(100);
    }
    Console.WriteLine("Start timer...");
    oldRecordedTime = Int32.Parse(DateTime.Now.ToString("HHmmssfff"));
}
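The tokens[3]/tokens[6] indexing above is tightly coupled to the exact layout of classify.bat's output. A more defensive variant of the same sign-plus-first-digit extraction is sketched below; the assumption that the values sit in those colon-separated fields is carried over unverified:

private static string ParseSignedDigit(string token)
{
    // Keep an optional leading '-' plus the first digit, mirroring the logic above.
    var m = System.Text.RegularExpressions.Regex.Match(token.TrimStart(), @"^(-?\d)");
    return m.Success ? m.Groups[1].Value : "0";
}
// usage: valence = ParseSignedDigit(tokens[3]); arousal = ParseSignedDigit(tokens[6]);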
private void ProcessBuffer(ref Array inpBuf, ref int segmentID, ref int cEndPosSec, ref int cStartPosSec)
{
    /* Read the initial time. */
    startTime = DateTime.Now;
    bufSize = (short)(bufferSize - 1);
    processBufferResult = nmsCOMcallee.nmsProcessBuffer(ref inpBuf, ref bufSize, ref emoValsArray,
        ref aIres, ref bStr, ref testbuf, ref testBufLeng, ref brderS);
    cEndPosSec += 2;

    /* If the analysis is ready */
    if (processBufferResult == NMS_PROCESS_ANALYSISREADY)
    {
        silenceCount = 0;
        emoVals = CopyValuesFromEmoArrayIntoEmotionsStructure(emoValsArray, aIres);
        String fvStr = CopyValuesFromEmoArrayIntoString(emoValsArray, aIres);
        Console.WriteLine("Features extracted!");
        Console.WriteLine(fvStr);

        /* Patch the feature vector into line 24 of the ARFF file. */
        string[] lines = System.IO.File.ReadAllLines("FeatureVector.arff");
        Console.WriteLine(lines.Length);
        lines[23] = fvStr;
        System.IO.File.WriteAllLines("FeatureVector.arff", lines);

        //Run command prompt command
        //string strCmdText;
        //strCmdText = "/C java -cp weka.jar weka.classifiers.functions.Logistic -T FeatureVector.arff -l logistic_dmd.model -p 0";
        //System.Diagnostics.Process.Start(@"C:\Windows\System32\cmd.exe", strCmdText);

        // Start the child process and redirect its output stream.
        System.Diagnostics.Process p = new System.Diagnostics.Process();
        p.StartInfo.UseShellExecute = false;
        p.StartInfo.RedirectStandardOutput = true;
        p.StartInfo.FileName = "classify.bat";
        p.Start();
        // Do not wait for the child process to exit before reading to the end
        // of its redirected stream: read the output first and then wait.
        // p.WaitForExit();
        string output = p.StandardOutput.ReadToEnd();
        p.WaitForExit();
        System.Console.WriteLine(output);

        // Map the predicted class label to fixed valence/arousal coordinates.
        if (output.Contains("angry"))
        {
            Console.WriteLine("Angry");
            Console.WriteLine("Valence: -0.40");
            Console.WriteLine("Arousal: 0.79");
        }
        else if (output.Contains("neutral"))
        {
            Console.WriteLine("Neutral");
            Console.WriteLine("Valence: 0.0");
            Console.WriteLine("Arousal: 0.0");
        }
        else if (output.Contains("sadness"))
        {
            Console.WriteLine("Sad");
            Console.WriteLine("Valence: -0.81");
            Console.WriteLine("Arousal: -0.40");
        }
        else if (output.Contains("happy"))
        {
            Console.WriteLine("Happy");
            Console.WriteLine("Valence: 0.89");
            Console.WriteLine("Arousal: 0.17");
        }

        tw = File.AppendText("VoiceAnalysisResults.txt");
        tw.WriteLine("---------- Start Segment ----------");
        tw.Write("Energy ");             tw.WriteLine(emoVals.Energy);
        tw.Write("Content ");            tw.WriteLine(emoVals.content);
        tw.Write("Upset ");              tw.WriteLine(emoVals.upset);
        tw.Write("Angry ");              tw.WriteLine(emoVals.angry);
        tw.Write("Stress ");             tw.WriteLine(emoVals.stress);
        tw.Write("Concentration ");      tw.WriteLine(emoVals.concentration_level);
        tw.Write("Intensive Thinking "); tw.WriteLine(emoVals.intensive_thinking);
        tw.Write("SAF ");                tw.WriteLine(emoVals.saf);
        tw.Write("Excitement ");         tw.WriteLine(emoVals.excitement);
        tw.Write("Atmosphere ");         tw.WriteLine(emoVals.Atmos);
        tw.Write("EmoCog Ratio ");       tw.WriteLine(emoVals.EmoCogRatio);
        tw.Write("Embarrassment ");      tw.WriteLine(emoVals.embarrassment);
        tw.Write("Hesitation ");         tw.WriteLine(emoVals.hesitation);
        tw.Write("Imagination ");        tw.WriteLine(emoVals.imagination_activity);
        tw.Write("Extreme State ");      tw.WriteLine(emoVals.extremeState);
        tw.Write("Uncertainty ");        tw.WriteLine(emoVals.uncertainty);
        tw.Write("Brain Power ");        tw.WriteLine(emoVals.BrainPower);
        tw.Write("Max Volume ");         tw.WriteLine(emoVals.maxAmpVol);
        tw.Write("Voice Energy ");       tw.WriteLine(emoVals.VoiceEnergy);
        tw.WriteLine("---------- End Segment ----------");
        tw.Close();

        nmsCOMcallee.nmsQA_CollectAgentScoreData();
        if (segmentID >= lioNetResultsCache.Count)
        {
            for (int i = 0; i <= 100; i++)
                lioNetResultsCache.Add(string.Empty);
        }
        lioNetResultsCache[segmentID] = bStr;
        nmsCOMcallee.nmsQA_Logdata(ref segmentID, ref cStartPosSec, ref cEndPosSec);
        nmsCOMcallee.nmsSD_LogData();
        nmsCOMcallee.nmsCollectProfiler();
        cStartPosSec = 0;
        segmentID++;

        /* Read the time. */
        stopTime = DateTime.Now;
        ProcDuration += stopTime - startTime;
    }
    /* Voice detected */
    else if (processBufferResult == NMS_PROCESS_VOICEDETECTED && count < countNum && cStartPosSec == 0)
    {
        cEndPosSec -= 2;
        cStartPosSec = cEndPosSec;
    }
    /* The QA5Core failed to identify the buffer */
    else if (processBufferResult == -1 && count < countNum)
    {
        cStartPosSec = 0;
    }
    /* Silence detected */
    else if (processBufferResult == NMS_PROCESS_SILENCE && count < countNum)
    {
        cStartPosSec = 0;
        silenceCount++;
    }

    /* Reset silenceCount if no voice was detected. */
    if (shortTermCount == 0 && silenceCount == 2)
        silenceCount = 0;

    /* Return the dominant emotion after two non-sequential silences. */
    if (silenceCount == 2 && processBufferResult == NMS_PROCESS_SILENCE)
    {
        /* Read the end time. */
        DateTime stoptime = DateTime.Now;
        /* The processing duration. */
        Console.Write("Processing Time: ");
        Console.WriteLine(ProcDuration);
        ProcDuration = TimeSpan.Zero;
    }

    /* If the program is running for a determined time, shut down once it is reached. */
    if (count == countNum)
    {
        cEndPosSec = 0;
        cStartPosSec = 0;
        /* Stop audio output. */
        if (m_Player != null)
        {
            try { m_Player.Dispose(); }
            finally { m_Player = null; }
        }
        /* Stop audio input. */
        if (m_Recorder != null)
        {
            try { m_Recorder.Dispose(); }
            finally { m_Recorder = null; }
        }
        /* Clear all pending data. */
        m_Fifo.Flush();
    }
    /* Count this iteration of the defined run time. */
    count++;
}
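The fixed growth loop above (adding 101 empty slots) assumes segmentID never outruns the cache by more than 100 entries. A sketch that grows the cache exactly as far as needed, under the same List<string> assumption:

while (lioNetResultsCache.Count <= segmentID)
    lioNetResultsCache.Add(string.Empty);
lioNetResultsCache[segmentID] = bStr;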
public override void InitializeComponent()
{
    m_Player = new WaveOutPlayer(
        -1,
        new WaveFormat(rate: Frequency, bits: 16, channels: NumberOfChannels),
        BufferSize,
        NumberOfBuffers,
        BufferFillEventHandler);
    Initialized = true;
}
public void Stop()
{
    if (_player != null)
    {
        _player.Dispose();
        _player = null;
    }
}
public void Play(bool looped)
{
    try
    {
        Stop();
        _looped = looped;
        _lastBlock = -1;
        _state = new DviAdpcmDecoder.AdpcmState();
        _player = new WaveOutPlayer(-1, _format, _wave.BlockSize * 4, 3, Filler);
        _leftOverBuffer = null;
    }
    catch
    {
        MessageBox.Show("Audio play error.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
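The bare catch above swallows the failure reason. If surfacing it is acceptable, the same guard could be written as sketched here (a fragment of Play, not the project's code):

try
{
    // ... body of Play as above ...
}
catch (Exception ex)
{
    MessageBox.Show("Audio play error: " + ex.Message, "Error",
        MessageBoxButtons.OK, MessageBoxIcon.Error);
}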