Inheritance: IDisposable
コード例 #1
0
ファイル: Wave.cs プロジェクト: funtomi/WaveToImage
        /// <summary>
        /// Begins a new recording session. Does nothing if a recording is
        /// already in progress; otherwise tears down any previous session,
        /// recreates the temp file, and opens the wave-in recorder.
        /// </summary>
        public void Start()
        {
            if (_isRecord)
            {
                return;
            }

            // Make sure any previous session is fully torn down first.
            Stop();

            try {
                // Remove a stale temp file left over from an earlier run.
                FileInfo tmpFile = new FileInfo(tmpName);
                if (tmpFile.Exists)
                {
                    tmpFile.Delete();
                }

                fs_tmp = new FileStream(tmpName, System.IO.FileMode.Create);
                bw_tmp = new BinaryWriter(fs_tmp);

                dt_Start  = DateTime.Now;
                _isRecord = true;

                // Fixed capture format: 16 kHz, 16-bit, mono.
                m_Format = new WaveFormat(16000, 16, 1);

                m_Recorder = new WaveInRecorder(-1, m_Format, 16384, 3, new BufferDoneEventHandler(DataArrived));
            } catch (Exception e) {
                OnError(e, "启动录音失败!");
                Stop();
            }
        }
コード例 #2
0
ファイル: WaveIn.cs プロジェクト: j1top/opentx-1
 /// <summary>
 /// Releases this buffer's native resources: unprepares the wave header
 /// with the device, then frees the pinned GCHandles for the header and
 /// its data block.
 /// </summary>
 public void Close()
 {
     // The header must be unprepared before the pinned memory it points to
     // is released back to the GC.
     WaveInRecorder.ThrowOnError(WaveNative.waveInUnprepareHeader(m_WaveIn, ref m_Header, Marshal.SizeOf(m_Header)));
     m_HeaderHandle.Free();
     m_Header.lpData = IntPtr.Zero;
     m_HeaderDataHandle.Free();
 }
コード例 #3
0
 /// <summary>
 /// Starts recording with the given buffer size, first stopping any
 /// recorder that is already running, and raises the running flags.
 /// </summary>
 public void Start(int bufSize)
 {
     if (m_Recorder != null)
     {
         this.Stop();
     }

     m_Recorder = new WaveInRecorder(-1, m_Format, bufSize, 3,
         new BufferDoneEventHandler(DataArrived));

     WaveLib.Recorder.IsRunning = true;
     IAmRunning = true;
 }
コード例 #4
0
 /// <summary>
 /// Starts recording on the default device (WAVE_MAPPER) with a fixed
 /// 16384-byte buffer, restarting the recorder if one is already active.
 /// </summary>
 public void Start()
 {
     Console.WriteLine("Rec START CALLED");

     if (m_Recorder != null)
     {
         this.Stop();
     }
     Console.WriteLine("Rec Restarted");

     m_Recorder = new WaveInRecorder(WaveNative.WAVE_MAPPER, m_Format, 16384, 3,
         new BufferDoneEventHandler(DataArrived));

     WaveLib.Recorder.IsRunning = true;
     IAmRunning = true;
 }
コード例 #5
0
ファイル: WaveIn.cs プロジェクト: j1top/opentx-1
        /// <summary>
        /// Allocates a pinned capture buffer of <paramref name="size"/> bytes
        /// and prepares its header with the wave-in device.
        /// </summary>
        /// <param name="waveInHandle">Handle of the open wave-in device.</param>
        /// <param name="size">Buffer size in bytes.</param>
        public WaveInBuffer(IntPtr waveInHandle, int size)
        {
            m_WaveIn = waveInHandle;

            // Pin the header and data array so the native driver can write
            // into them without the GC relocating the memory.
            m_HeaderHandle          = GCHandle.Alloc(m_Header, GCHandleType.Pinned);
            // Stash a handle to this instance in dwUser so native callbacks
            // can map a completed header back to its WaveInBuffer.
            m_Header.dwUser         = (IntPtr)GCHandle.Alloc(this);
            m_HeaderData            = new byte[size];
            m_HeaderDataHandle      = GCHandle.Alloc(m_HeaderData, GCHandleType.Pinned);
            m_Header.lpData         = m_HeaderDataHandle.AddrOfPinnedObject();
            m_Header.dwBufferLength = size;
            WaveInRecorder.ThrowOnError(WaveNative.waveInPrepareHeader(m_WaveIn, ref m_Header, Marshal.SizeOf(m_Header)));
        }
コード例 #6
0
 /// <summary>
 /// Stops recording: disposes the recorder (if any), guarantees the
 /// reference is cleared even when Dispose throws, and lowers the
 /// running flags.
 /// </summary>
 public void Stop()
 {
     if (m_Recorder != null)
     {
         try
         {
             m_Recorder.Dispose();
         }
         finally
         {
             // Always drop the reference so a failed Dispose cannot leave
             // a half-dead recorder behind.
             m_Recorder = null;
         }
     }

     WaveLib.Recorder.IsRunning = false;
     IAmRunning = false;
 }
コード例 #7
0
ファイル: WaveIn.cs プロジェクト: j1top/opentx-1
        /// <summary>
        /// Opens the wave-in device with the requested format, prepares two
        /// pinned capture buffers, queues both, starts capture, and spins up
        /// the worker thread that services completed buffers.
        /// </summary>
        /// <param name="device">Wave-in device id (-1 selects the default mapper).</param>
        /// <param name="format">PCM format to capture in.</param>
        /// <param name="bufferSize">Size in bytes of each capture buffer.</param>
        /// <param name="doneProc">Callback invoked for each filled buffer.</param>
        public WaveInRecorder(int device, WaveFormat format, int bufferSize, BufferDoneEventHandler doneProc)
        {
            m_DoneProc = doneProc;
            WaveInRecorder.ThrowOnError(WaveNative.waveInOpen(out m_WaveIn, device, format, m_BufferProc, 0, WaveNative.CALLBACK_FUNCTION));

            // Double-buffering: while the driver fills one buffer, the worker
            // thread drains the other.
            buffer1 = new WaveInBuffer(m_WaveIn, bufferSize);
            buffer2 = new WaveInBuffer(m_WaveIn, bufferSize);

            buffer1.Record();
            buffer2.Record();

            m_CurrentBuffer = buffer1;

            WaveInRecorder.ThrowOnError(WaveNative.waveInStart(m_WaveIn));
            m_Thread = new Thread(new ThreadStart(ThreadProc));
            m_Thread.Start();
        }
コード例 #8
0
ファイル: WaveIn.cs プロジェクト: j1top/opentx-1
        /// <summary>
        /// Stops capture and tears the recorder down in the required order:
        /// reset the device (returns all pending buffers), join the worker
        /// thread, wait for and close both buffers, then close the device.
        /// </summary>
        public void Close()
        {
            m_Finished = true;
            // waveInReset stops input and marks all queued buffers as done,
            // which lets the worker thread and the WaitFor calls below finish.
            WaveInRecorder.ThrowOnError(WaveNative.waveInReset(m_WaveIn));

            m_Thread.Join();
            m_Thread   = null;
            m_DoneProc = null;

            buffer1.WaitFor();
            buffer2.WaitFor();

            buffer1.Close();
            buffer2.Close();

            WaveInRecorder.ThrowOnError(WaveNative.waveInClose(m_WaveIn));
            m_WaveIn = IntPtr.Zero;
        }
コード例 #9
0
ファイル: Wave.cs プロジェクト: funtomi/WaveToImage
        /// <summary>
        /// Stops the current recording: disposes the recorder first so no
        /// further buffers arrive, then closes the temp writer, assembles the
        /// final file, and resets the recording state.
        /// </summary>
        public void Stop()
        {
            if (m_Recorder != null)
            {
                try {
                    // Dispose the recorder BEFORE closing the temp writer.
                    // The original order closed bw_tmp first, so a late
                    // DataArrived callback could write into a closed stream;
                    // it also leaked the recorder if Close() threw.
                    m_Recorder.Dispose();
                    bw_tmp.Close();

                    WriteToFile();

                    _recordSize = 0;
                    _isRecord   = false;
                } finally {
                    m_Recorder = null;
                }
            }
        }
コード例 #10
0
ファイル: Program.cs プロジェクト: Grawp/opentx
        /// <summary>
        /// Records microphone input (16 kHz, 16-bit, mono) to out.wav until
        /// the user presses Enter. The recorder and the file writer are now
        /// closed via try/finally, so they are released even if recording
        /// throws (the original leaked both on any exception).
        /// </summary>
        void Go()
        {
            int samplerate = 16000;
            int bits = 16;  // 8 or 16
            int channels = 1;  // 1 or 2

            filewriter = new WavFileWriter("out.wav", samplerate, bits, channels);
            try
            {
                WaveFormat fmt = new WaveFormat(samplerate, bits, channels);

                // devicenumber, wavformat, buffersize, callback
                int buffersize = 16384;
                WaveInRecorder rec = new WaveInRecorder(-1, fmt, buffersize, this.DataArrived);
                tmparray = new byte[buffersize];
                try
                {
                    Console.WriteLine("Recording - press Enter to end");
                    Console.ReadLine();
                }
                finally
                {
                    rec.Close();
                }
            }
            finally
            {
                filewriter.Close();
            }

            Console.WriteLine("Bye");
        }
コード例 #11
0
ファイル: Recorder.cs プロジェクト: master20151121/mmoments
        /// <summary>
        /// Stops recording, writes a canonical RIFF/WAVE header followed by
        /// the captured PCM data to the output file, and returns the file
        /// name. The recorder is disposed first so no more data arrives while
        /// the stream is being serialized.
        /// </summary>
        /// <returns>The name of the WAV file that was written.</returns>
        static public string Stop()
        {
            WaveLib.WaveFormat m_Format = new WaveLib.WaveFormat(44100, 16, 2);
            if (m_Recorder != null)
            {
                try
                {
                    // Stop capture before serializing so DataArrived cannot
                    // append to RecorderOutputStream mid-write.
                    m_Recorder.Dispose();

                    // chunksize is length of wave data and the header.
                    long chunksize = RecorderOutputStream.Length + 36;

                    // writing wave header and data
                    System.IO.BinaryWriter bw = new BinaryWriter(fs);

                    WriteChars(bw, "RIFF");
                    bw.Write((int)chunksize);
                    WriteChars(bw, "WAVEfmt ");
                    bw.Write((int)16);
                    bw.Write(m_Format.wFormatTag);
                    bw.Write(m_Format.nChannels);
                    bw.Write(m_Format.nSamplesPerSec);
                    bw.Write(m_Format.nAvgBytesPerSec);
                    bw.Write(m_Format.nBlockAlign);
                    bw.Write(m_Format.wBitsPerSample);
                    WriteChars(bw, "data");
                    // BUG FIX: MemoryStream.Length is a long; writing it
                    // directly emitted 8 bytes where the RIFF data-chunk size
                    // field must be exactly 4, corrupting every file.
                    bw.Write((int)RecorderOutputStream.Length);
                    bw.Write(RecorderOutputStream.ToArray());
                    bw.Close();
                    fs.Close();
                }
                finally
                {
                    m_Recorder = null;
                }
            }
            return filename_;
        }
コード例 #12
0
ファイル: Recorder.cs プロジェクト: master20151121/mmoments
        /// <summary>
        /// Starts a new recording session, writing to the next unused
        /// "recording{N}.wav" file. Any failure tears the session back down
        /// and rethrows.
        /// </summary>
        static public void Start()
        {
            Stop();
            try
            {
                const string baseName = "recording";
                const string extension = ".wav";

                // Advance the counter past any output files that already exist.
                while (File.Exists(baseName + counter + extension))
                {
                    counter++;
                }

                filename_ = baseName + counter.ToString() + extension;

                // Create the output file plus an in-memory buffer for the
                // captured samples, then open the recorder.
                fs = new FileStream(filename_, System.IO.FileMode.Create);
                RecorderOutputStream = new MemoryStream();
                WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(44100, 16, 2);
                m_Recorder = new WaveLib.WaveInRecorder(-1, fmt, 16384, 3, new WaveLib.BufferDoneEventHandler(DataArrived));
            }
            catch
            {
                Stop();
                throw;
            }
        }
コード例 #13
0
        /// <summary>
        /// Feeds one captured audio buffer to the NMS voice-analysis engine and
        /// reacts to its result code: on a completed analysis, classifies the
        /// segment by running an external classify.bat (Weka) process, logs the
        /// emotion values to VoiceAnalysisResults.txt, and advances the segment
        /// bookkeeping; on voice/silence/failure results, only the position and
        /// silence counters are updated. Once <c>count</c> reaches
        /// <c>countNum</c>, the audio player and recorder are disposed and the
        /// FIFO is flushed.
        /// </summary>
        /// <param name="inpBuf">Raw audio samples for this call.</param>
        /// <param name="segmentID">Running segment index; incremented on a completed analysis.</param>
        /// <param name="cEndPosSec">Segment end position in seconds; advanced by 2 each call.</param>
        /// <param name="cStartPosSec">Segment start position in seconds; latched or reset per result.</param>
        private void ProcessBuffer(ref Array inpBuf,
                                   ref int segmentID,
                                   ref int cEndPosSec,
                                   ref int cStartPosSec)
        {
            /* Read the initial time. */
            startTime = DateTime.Now;
            
            bufSize = (short)(bufferSize - 1);
            processBufferResult = nmsCOMcallee.nmsProcessBuffer(ref inpBuf,
                                                                    ref bufSize,
                                                                    ref emoValsArray,
                                                                    ref aIres,
                                                                    ref bStr,
                                                                    ref testbuf,
                                                                    ref testBufLeng,
                                                                    ref brderS);

            cEndPosSec += 2;
            /* If Analysis is ready */
            if (processBufferResult == NMS_PROCESS_ANALYSISREADY)
            {
                silenceCount = 0;        
                emoVals = CopyValuesFromEmoArrayIntoEmotionsStructure(emoValsArray, aIres);
                String fvStr = CopyValuesFromEmoArrayIntoString(emoValsArray, aIres);
                Console.WriteLine("Features extracted!");
                Console.WriteLine(fvStr);

                // NOTE(review): assumes line 24 of FeatureVector.arff is the
                // data row to overwrite — confirm against the file layout
                // written by Start().
                string[] lines = System.IO.File.ReadAllLines("FeatureVector.arff");
                Console.WriteLine(lines.Length);
                lines[23] = fvStr;
                System.IO.File.WriteAllLines("FeatureVector.arff", lines);

                //Run command prompt command
                //string strCmdText;
                //strCmdText = "/C java -cp weka.jar weka.classifiers.functions.Logistic -T FeatureVector.arff -l logistic_dmd.model -p 0";
                //System.Diagnostics.Process.Start(@"C:\Windows\System32\cmd.exe", strCmdText);
                // Start the child process.
                System.Diagnostics.Process p = new System.Diagnostics.Process();
                // Redirect the output stream of the child process.
                p.StartInfo.UseShellExecute = false;
                p.StartInfo.RedirectStandardOutput = true;
                p.StartInfo.FileName = "classify.bat";
                p.Start();
                // Do not wait for the child process to exit before
                // reading to the end of its redirected stream.
                // p.WaitForExit();
                // Read the output stream first and then wait.
                string output = p.StandardOutput.ReadToEnd();
                p.WaitForExit();
                System.Console.WriteLine(output);
                // Map the classifier's textual label to hard-coded
                // valence/arousal console output.
                if (output.Contains("angry"))
                {
                    Console.WriteLine("Angry");
                    Console.WriteLine("Valence: -0.40");
                    Console.WriteLine("Arousal: 0.79");
                }
                else if (output.Contains("neutral"))
                {
                    Console.WriteLine("Neutral");
                    Console.WriteLine("Valence: 0.0");
                    Console.WriteLine("Arousal: 0.0");
                }
                else if (output.Contains("sadness"))
                {
                    Console.WriteLine("Sad");
                    Console.WriteLine("Valence: -0.81");
                    Console.WriteLine("Arousal: -0.40");
                }
                else if (output.Contains("happy"))
                {
                    Console.WriteLine("Happy");
                    Console.WriteLine("Valence: 0.89");
                    Console.WriteLine("Arousal: 0.17");
                }

                // Append the full emotion-value breakdown for this segment.
                tw = File.AppendText("VoiceAnalysisResults.txt");
                tw.WriteLine("---------- Start Segment ----------");
                tw.Write("Energy                ");
                tw.WriteLine(emoVals.Energy);
                tw.Write("Content               ");
                tw.WriteLine(emoVals.content);
                tw.Write("Upset                 ");
                tw.WriteLine(emoVals.upset);
                tw.Write("Angry                 ");
                tw.WriteLine(emoVals.angry);
                tw.Write("Stress                ");
                tw.WriteLine(emoVals.stress);
                tw.Write("Concentration         ");
                tw.WriteLine(emoVals.concentration_level);
                tw.Write("Intensive Thinking    ");
                tw.WriteLine(emoVals.intensive_thinking);
                tw.Write("SAF                   ");
                tw.WriteLine(emoVals.saf);
                tw.Write("Excitement            ");
                tw.WriteLine(emoVals.excitement);
                tw.Write("Atmosphere            ");
                tw.WriteLine(emoVals.Atmos);
                tw.Write("EmoCog Ratio          ");
                tw.WriteLine(emoVals.EmoCogRatio);
                tw.Write("Embarrassment         ");
                tw.WriteLine(emoVals.embarrassment);
                tw.Write("Hesitation            ");
                tw.WriteLine(emoVals.hesitation);
                tw.Write("Imagination           ");
                tw.WriteLine(emoVals.imagination_activity);
                tw.Write("Extreme State         ");
                tw.WriteLine(emoVals.extremeState);
                tw.Write("Uncertainty           ");
                tw.WriteLine(emoVals.uncertainty);
                tw.Write("Brain Power           ");
                tw.WriteLine(emoVals.BrainPower);
                tw.Write("Max Volume            ");
                tw.WriteLine(emoVals.maxAmpVol);
                tw.Write("Voice Energy          ");
                tw.WriteLine(emoVals.VoiceEnergy);
                tw.WriteLine("---------- End Segment ----------");
                tw.Close();
                nmsCOMcallee.nmsQA_CollectAgentScoreData();

                // Grow the results cache in chunks so segmentID always has a slot.
                if (segmentID >= lioNetResultsCache.Count)
                {
                    for (int i = 0; i <= 100; i++)
                        lioNetResultsCache.Add(string.Empty);
                }

                lioNetResultsCache[segmentID] = bStr;

                nmsCOMcallee.nmsQA_Logdata(ref segmentID, ref cStartPosSec, ref cEndPosSec);
                nmsCOMcallee.nmsSD_LogData();
                nmsCOMcallee.nmsCollectProfiler();
                cStartPosSec = 0;
                segmentID ++;

                /* Read the time. */
                stopTime = DateTime.Now;
                ProcDuration += stopTime - startTime;
            }
            /* Voice Detected */
            else if (processBufferResult == NMS_PROCESS_VOICEDETECTED && count < countNum && (cStartPosSec == 0))
            {
                // Latch the segment start at the position before this buffer.
                cEndPosSec -= 2;
                cStartPosSec = cEndPosSec;
            }
            /* The QA5Core fail to identify the buffer */
            else if (processBufferResult == -1 && count < countNum)
            {
                cStartPosSec = 0;
            }
            /* Silence Detected*/
            else if (processBufferResult == NMS_PROCESS_SILENCE && count < countNum)
            {
                cStartPosSec = 0;
                silenceCount++;
            }
            /* Reset silenceCount if no voice was detected */
            if (shortTermCount == 0 && silenceCount == 2) silenceCount = 0;
            /* Return the Dominant Emotion after two non sequential silences */
            if (silenceCount == 2 && processBufferResult == NMS_PROCESS_SILENCE)
            {
                /* Read the end time */
                DateTime stoptime = DateTime.Now;

                /* The processing duration*/
                Console.Write("Processing Time: "); Console.WriteLine(ProcDuration);
                ProcDuration = TimeSpan.Zero;
                
            }
            /* If Program is running with determined time */
            if ((count == countNum))
            {
                cEndPosSec = 0;
                cStartPosSec = 0;
                /* Stop audio output */
                if ((m_Player != null)) 
                    try
                    {
                        m_Player.Dispose();
                    }
                    finally
                    {
                        m_Player = null;
                    }

                /* Stop audio input */
                if ((m_Recorder != null))
                    try
                    {
                        m_Recorder.Dispose();
                    }
                    finally
                    {
                        m_Recorder = null;
                    }

                /* Clear All Pending Data */
                m_Fifo.Flush(); 
            }

            /* Running during defined time*/
            count++;

        }       
コード例 #14
0
ファイル: nmsFunctions.cs プロジェクト: thealexhong/starship
        /// <summary>
        /// Starts capture and creates the session's output files: the CSV
        /// feature log, the valence and arousal ARFF headers, and an empty
        /// voiceOutput.txt marker file.
        /// </summary>
        public void Start()
        {
            Console.ForegroundColor = ConsoleColor.White;

            WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(sampleRate, definition, 1);
            m_Recorder = new WaveLib.WaveInRecorder(-1, fmt, sampleRate*segmentLength*(definition/8), 1, new WaveLib.BufferDoneEventHandler(DataArrived));

            log = File.CreateText("LogFeatureVector.csv");
            log.WriteLine("Time,Angry,ConcentrationLevel,Embarrassment,Excitement,Hesitation,ImaginationActivity,IntensiveThinking,Content,SAF,Upset,ExtremeState,Stress,Uncertainty,Energy,BrainPower,EmoCogRatio,MaxAmpVol,VoiceEnergy,Unknown,Valence,Arousal");
            log.Close();

            // Numeric feature attributes shared by both ARFF headers; the
            // loop below writes exactly the same "@attribute X numeric" lines
            // the previous hand-unrolled version did.
            string[] features =
            {
                "Angry", "ConcentrationLevel", "Embarrassment", "Excitement",
                "Hesitation", "ImaginationActivity", "IntensiveThinking",
                "Content", "SAF", "Upset", "ExtremeState", "Stress",
                "Uncertainty", "Energy", "BrainPower", "EmoCogRatio",
                "MaxAmpVol", "VoiceEnergy"
            };

            v_arff = File.CreateText("FeatureVectorValence.arff");
            v_arff.WriteLine("@relation valence\n");
            foreach (string feature in features)
            {
                v_arff.WriteLine("@attribute " + feature + " numeric");
            }
            v_arff.WriteLine("@attribute valence {-2, -1, 0, 1, 2}\n");
            v_arff.WriteLine("@data\n");
            v_arff.Close();

            a_arff = File.CreateText("FeatureVectorArousal.arff");
            a_arff.WriteLine("@relation arousal\n");
            foreach (string feature in features)
            {
                a_arff.WriteLine("@attribute " + feature + " numeric");
            }
            a_arff.WriteLine("@attribute arousal {-2, -1, 0, 1, 2}\n");
            a_arff.WriteLine("@data\n");
            a_arff.Close();

            voiceOutput = File.CreateText(@"..\..\voiceOutput.txt");
            voiceOutput.Close();

            Console.WriteLine("Recording...");
        }
コード例 #15
0
ファイル: nmsFunctions.cs プロジェクト: thealexhong/starship
        /// <summary>
        /// Feeds one captured buffer to the NMS voice-analysis engine. On a
        /// completed analysis, updates both ARFF files with the feature
        /// vector, runs classify.bat, parses valence/arousal from its output,
        /// appends them to the CSV log and voiceOutput.txt, and advances the
        /// segment bookkeeping; other result codes only adjust the position
        /// and silence counters. When <c>count</c> reaches <c>countNum</c>,
        /// the player/recorder are disposed and the FIFO is flushed. Finally,
        /// this blocks until an external consumer deletes voiceOutput.txt.
        /// </summary>
        /// <param name="inpBuf">Raw audio samples for this call.</param>
        /// <param name="segmentID">Running segment index; incremented on a completed analysis.</param>
        /// <param name="cEndPosSec">Segment end position in seconds; advanced by 2 each call.</param>
        /// <param name="cStartPosSec">Segment start position in seconds; latched or reset per result.</param>
        private void ProcessBuffer(ref Array inpBuf,
                                   ref int segmentID,
                                   ref int cEndPosSec,
                                   ref int cStartPosSec)
        {
            /* Read the initial time. */
            startTime = DateTime.Now;

            //bufSize = (short)(bufferSize - 1);
            bufSize = bufferSize;
            Console.WriteLine("Size of the buffer is: " + bufSize.ToString());
            processBufferResult = nmsCOMcallee.nmsProcessBuffer(ref inpBuf,
                                                                    ref bufSize,
                                                                    ref emoValsArray,
                                                                    ref aIres,
                                                                    ref bStr,
                                                                    ref testbuf,
                                                                    ref testBufLeng,
                                                                    ref brderS);

            cEndPosSec += 2;
            /* If Analysis is ready */
            Console.WriteLine("Sound captured and processed");
            Console.WriteLine(processBufferResult);
            if (processBufferResult == NMS_PROCESS_ANALYSISREADY)
            {
                silenceCount = 0;
                emoVals = CopyValuesFromEmoArrayIntoEmotionsStructure(emoValsArray, aIres);
                String fvStr = CopyValuesFromEmoArrayIntoString(emoValsArray, aIres);
                Console.WriteLine("Features extracted!");
                Console.WriteLine(fvStr);

                // NOTE(review): assumes line 24 of each ARFF file is the data
                // row to overwrite — confirm against the headers written by
                // Start().
                string[] lines = System.IO.File.ReadAllLines("FeatureVectorValence.arff");
                lines[23] = fvStr;
                System.IO.File.WriteAllLines("FeatureVectorValence.arff", lines);
                lines = System.IO.File.ReadAllLines("FeatureVectorArousal.arff");
                lines[23] = fvStr;
                System.IO.File.WriteAllLines("FeatureVectorArousal.arff", lines);

                //Run command prompt command
                // Start the child process.
                System.Diagnostics.Process p = new System.Diagnostics.Process();
                // Redirect the output stream of the child process.
                p.StartInfo.UseShellExecute = false;
                p.StartInfo.RedirectStandardOutput = true;
                p.StartInfo.FileName = "classify.bat";
                p.Start();
                // Do not wait for the child process to exit before
                // reading to the end of its redirected stream.
                // p.WaitForExit();
                // Read the output stream first and then wait.
                string output = p.StandardOutput.ReadToEnd();
                p.WaitForExit();
                p.Close();
                //System.Console.WriteLine(output);
                string[] tokens = output.Split(':');
                //for(int i=0; i<tokens.Length; i++)
                //{
                //    Console.WriteLine("#"+i+"-"+tokens[i]);
                //}

                //Parse out the valence and arousal
                // NOTE(review): relies on classify.bat output having valence
                // after the 3rd ':' and arousal after the 6th — verify against
                // the classifier's output format.
                if(tokens[3][0] == '-')
                    valence = "-" + tokens[3][1].ToString();
                else
                    valence = tokens[3][0].ToString();
                if (tokens[6][0] == '-')
                    arousal = "-" + tokens[6][1].ToString();
                else
                    arousal = tokens[6][0].ToString();

                Console.WriteLine("Valence: " + valence);
                Console.WriteLine("Arousal: " + arousal);

                log = File.AppendText("LogFeatureVector.csv");
                log.WriteLine(dataArrivedTime + "," + fvStr + "," + valence + "," + arousal);
                log.Close();

                voiceOutput = File.AppendText("..\\..\\voiceOutput.txt");
                voiceOutput.WriteLine(valence.ToString() + "," + arousal.ToString());
                voiceOutput.Close();

                nmsCOMcallee.nmsQA_CollectAgentScoreData();

                // Grow the results cache in chunks so segmentID always has a slot.
                if (segmentID >= lioNetResultsCache.Count)
                {
                    for (int i = 0; i <= 100; i++)
                        lioNetResultsCache.Add(string.Empty);
                }

                lioNetResultsCache[segmentID] = bStr;

                nmsCOMcallee.nmsQA_Logdata(ref segmentID, ref cStartPosSec, ref cEndPosSec);
                nmsCOMcallee.nmsSD_LogData();
                nmsCOMcallee.nmsCollectProfiler();
                cStartPosSec = 0;
                segmentID ++;

                /* Read the time. */
                stopTime = DateTime.Now;
                ProcDuration += stopTime - startTime;
            }
            /* Voice Detected */
            else if (processBufferResult == NMS_PROCESS_VOICEDETECTED && count < countNum && (cStartPosSec == 0))
            {
                // Latch the segment start at the position before this buffer.
                cEndPosSec -= 2;
                cStartPosSec = cEndPosSec;
            }
            /* The QA5Core fail to identify the buffer */
            else if (processBufferResult == -1 && count < countNum)
            {
                cStartPosSec = 0;
            }
            /* Silence Detected*/
            else if (processBufferResult == NMS_PROCESS_SILENCE && count < countNum)
            {
                cStartPosSec = 0;
                silenceCount++;
            }
            /* Reset silenceCount if no voice was detected */
            if (shortTermCount == 0 && silenceCount == 2) silenceCount = 0;
            /* Return the Dominant Emotion after two non sequential silences */
            if (silenceCount == 2 && processBufferResult == NMS_PROCESS_SILENCE)
            {
                /* Read the end time */
                DateTime stoptime = DateTime.Now;

                /* The processing duration*/
                Console.Write("Processing Time: "); Console.WriteLine(ProcDuration);
                ProcDuration = TimeSpan.Zero;

            }
            /* If Program is running with determined time */
            if (count == countNum)
            {
                cEndPosSec = 0;
                cStartPosSec = 0;
                /* Stop audio output */
                if ((m_Player != null))
                    try
                    {
                        m_Player.Dispose();
                    }
                    finally
                    {
                        m_Player = null;
                    }
                /* Stop audio input */
                if ((m_Recorder != null))
                    try
                    {
                        m_Recorder.Dispose();
                    }
                    finally
                    {
                        m_Recorder = null;
                    }
                /* Clear All Pending Data */
                m_Fifo.Flush();
            }
            /* Running during defined time*/
            count++;
            // Busy-wait until the external consumer deletes voiceOutput.txt.
            while (File.Exists(@"..\..\voiceOutput.txt"))
            {
                Console.WriteLine("Waiting for Alex...");
                Thread.Sleep(100);
            }
            Console.WriteLine("Start timer...");
            oldRecordedTime = Int32.Parse(DateTime.Now.ToString("HHmmssfff"));
        }
コード例 #16
0
        /// <summary>
        /// Builds the Logistic classifier, starts audio capture (11025 Hz,
        /// 16-bit, mono), and creates the results log plus the ARFF header
        /// for the emotion feature vector.
        /// </summary>
        public void Start() //int *R, int* H //jc
        {
            /* Creates Logistic model */
            Model.setMaxIts(2000);
            Model.buildClassifier();

            Console.ForegroundColor = ConsoleColor.White;
            WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(11025, 16, 1);
            m_Recorder = new WaveLib.WaveInRecorder(-1, fmt, 11025, 1, new WaveLib.BufferDoneEventHandler(DataArrived));
            Console.WriteLine();

            // Truncate the results file for this run.
            tw = File.CreateText("VoiceAnalysisResults.txt");
            tw.Close();

            arff = File.CreateText("FeatureVector.arff");
            arff.WriteLine("@relation newfeaturevector\n");

            // One "@attribute X numeric" line per extracted feature; the loop
            // writes exactly the lines the previous hand-unrolled version did.
            string[] features =
            {
                "Angry", "ConcentrationLevel", "Embarrassment", "Excitement",
                "Hesitation", "ImaginationActivity", "IntensiveThinking",
                "Content", "SAF", "Upset", "ExtremeState", "Stress",
                "Uncertainty", "Energy", "BrainPower", "EmoCogRatio",
                "MaxAmpVol", "VoiceEnergy"
            };
            foreach (string feature in features)
            {
                arff.WriteLine("@attribute " + feature + " numeric");
            }

            arff.WriteLine("@attribute emotion {neutral, happy, sadness, angry}\n");
            arff.WriteLine("@data\n");
            arff.Close();

            Console.WriteLine("Recording...");
        }
コード例 #17
0
ファイル: WaveIn.cs プロジェクト: j1top/opentx-1
 /// <summary>
 /// Resets this buffer's completion event and queues the buffer to the
 /// wave-in device to be filled with captured audio.
 /// </summary>
 public void Record()
 {
     m_RecordEvent.Reset();
     WaveInRecorder.ThrowOnError(WaveNative.waveInAddBuffer(m_WaveIn, ref m_Header, Marshal.SizeOf(m_Header)));
 }