// Audio-engine PCM callback: fills i_data with buffered samples from the internal
// ring buffer (buffer/size with monotonically increasing ptrRead/ptrWrite),
// then hands the frame to AudioProcessing.
// i_data: destination sample buffer supplied by the audio system; fully overwritten.
private void OnPCMRead(float[] i_data)
{
    Array.Clear(i_data, 0, i_data.Length);

    // If the writer has lapped the reader, drop the oldest data and catch up.
    ptrRead = Mathf.Max(ptrWrite - size, ptrRead);

    int _samples = Mathf.Min(ptrWrite - ptrRead, i_data.Length);
    int _round2 = (ptrRead + _samples) / size;
    int _round1 = ptrRead / size;

    if (ptrRead == ptrWrite)
    {
        // Nothing buffered: emit silence and signal end-of-stream.
        Array.Clear(i_data, 0, i_data.Length);
        this.PlayEOF = true;
    }
    else if (_round2 != _round1)
    {
        // Read span wraps past the end of the ring buffer: copy in two pieces.
        // All Buffer.BlockCopy offsets/counts are in BYTES (float = 4 bytes).
        int _offset0 = ptrRead % size;
        int _offset1 = (ptrRead + _samples) % size;
        Buffer.BlockCopy(buffer, _offset0 * 4, i_data, 0, (size - _offset0) * 4);
        // BUGFIX: byte count must be _offset1 * 4; the original passed _offset1
        // (a float count), copying only a quarter of the wrapped tail.
        Buffer.BlockCopy(buffer, 0, i_data, (size - _offset0) * 4, _offset1 * 4);
    }
    else
    {
        // Contiguous read: single copy.
        Buffer.BlockCopy(buffer, ptrRead % size * 4, i_data, 0, _samples * 4);
    }

    ptrRead += _samples;
    AudioProcessing.SetBuffData(i_data);
}
// Runs the full transcription pipeline on a sample video: cleans up prior output,
// optionally trims the video, extracts the audio, transcribes it, and writes the
// results to disk.
static void TranscribeVideo(
    SampleVideo sample,                 // sample video to use
    string fixedTags,                   // file in which to save the fixed transcription
    string audio,                       // file in which to save the extracted audio
    bool useSmallSample,                // if true, use a small sample of the video/audio
    bool useAudioFileAlreadyInCloud,    // if true, use prior audio in cloud if it exists
    string rawTranscription)            // file in which to save the raw transcription
{
    // Clean up from last run.
    File.Delete(audio);
    File.Delete(fixedTags);

    AudioProcessing audioProcessing = new AudioProcessing();
    string sourceVideo = sample.filepath;

    // Optionally work with a short slice instead of the whole recording.
    if (useSmallSample)
    {
        string trimmedVideo = sourceVideo.Replace(".mp4", "-3min.mp4");
        audioProcessing.ExtractPart(sourceVideo, trimmedVideo, 60, 3 * 60);
        sourceVideo = trimmedVideo;
    }

    audioProcessing.Extract(sourceVideo, audio);

    TranscribeParameters transParams = new TranscribeParameters
    {
        audiofilePath = audio,
        objectName = sample.objectname,
        GoogleCloudBucketName = "govmeeting-transcribe",
        useAudioFileAlreadyInCloud = useAudioFileAlreadyInCloud,
        language = "en",
        MinSpeakerCount = 2,
        MaxSpeakerCount = 6,
        phrases = sample.phrases
    };

    GMFileAccess.SetGoogleCredentialsEnvironmentVariable();

    // Transcribe the audio file, then persist the response twice: once as the
    // fixed-tags file and once via WriteCopyOfResponse.
    TranscribeAudio transcribe = new TranscribeAudio();
    Transcribed_Dto response = transcribe.TranscribeAudioFile(transParams, rawTranscription);
    string responseString = JsonConvert.SerializeObject(response, Formatting.Indented);
    File.WriteAllText(fixedTags, responseString);
    WriteCopyOfResponse(responseString, fixedTags);
}
// Copies the video into the meeting folder (optionally shortened per config),
// extracts its audio track, transcribes it, and writes the transcript JSON
// alongside it as "transcribed.json".
public void Process(string videoFile, string meetingFolder, string language)
{
    AudioProcessing audioProcessing = new AudioProcessing();

    /////// Copy video to meeting folder /////////
    string videofileCopy = Path.Combine(meetingFolder, "video.mp4");

    // #### If MaxRecordingSize is not zero, we shorten the recording. ####
    if (config.MaxRecordingSize == 0)
    {
        File.Copy(videoFile, videofileCopy);
    }
    else
    {
        audioProcessing.ExtractPart(videoFile, videofileCopy, 0, config.MaxRecordingSize);
    }

    /////// Extract the audio. ////////////////////////
    // NOTE(review): removed an unused "ExtractAudio extract = new ExtractAudio();"
    // local — extraction is performed by audioProcessing.Extract below.
    string audioFile = Path.Combine(meetingFolder, "audio.flac");
    audioProcessing.Extract(videofileCopy, audioFile);

    /////// Transcribe the audio file. /////////////
    // We want the object name in the cloud to be the original video file name
    // with a ".flac" extension.
    string objectName = Path.GetFileNameWithoutExtension(videoFile) + ".flac";

    TranscribeParameters transParams = new TranscribeParameters
    {
        audiofilePath = audioFile,
        objectName = objectName,
        GoogleCloudBucketName = config.GoogleCloudBucketName,
        useAudioFileAlreadyInCloud = config.UseAudioFileAlreadyInCloud,
        language = language,
        MinSpeakerCount = 2,
        MaxSpeakerCount = 6
        // TODO Add "phrases" field: names of officers
    };

    Transcribed_Dto transcript = transcribeAudio.TranscribeAudioFile(transParams);
    string stringValue = JsonConvert.SerializeObject(transcript, Formatting.Indented);
    string outputJsonFile = Path.Combine(meetingFolder, "transcribed.json");
    File.WriteAllText(outputJsonFile, stringValue);
}
// Integration test: trims a known sample video, extracts its audio, and (once the
// TranscribeInCloud signature is settled) transcribes it and converts the result
// to the meeting-edit JSON format.
public void TestMoveToCloudAndTranscribe(string language)
{
    AudioProcessing audioProcessing = new AudioProcessing();

    string baseName = "USA_ME_LincolnCounty_BoothbayHarbor_Selectmen_EN_2017-02-15";
    string videoFile = Path.Combine(config.TestdataPath, baseName + ".mp4");
    string outputFolder = Path.Combine(config.TestdataPath, "TestMoveToCloudAndTranscribe");
    GMFileAccess.DeleteAndCreateDirectory(outputFolder);

    string outputBasePath = Path.Combine(outputFolder, baseName);
    string shortFile = outputBasePath + ".mp4";
    string audioFile = outputBasePath + ".flac";
    string jsonFile = outputBasePath + ".json";

    // Extract a short version of the recording, then its audio track.
    audioProcessing.ExtractPart(videoFile, shortFile, 60, 4 * 60);
    audioProcessing.Extract(shortFile, audioFile);

    // Transcribe.
    // TODO - signature of TranscribeInCloud has changed; until it is updated,
    // the response stays an empty placeholder:
    // response = transcribe.MoveToCloudAndTranscribe(audioFile, baseName + ".flac",
    //     config.GoogleCloudBucketName, config.UseAudioFileAlreadyInCloud, language);
    Transcribed_Dto response = new Transcribed_Dto();
    File.WriteAllText(outputBasePath + "-rsp.json",
        JsonConvert.SerializeObject(response, Formatting.Indented));

    // Convert the transcript to the meeting-edit JSON format and save it.
    ModifyTranscriptJson mt = new ModifyTranscriptJson();
    EditMeeting_Dto meetingEditDto = mt.Modify(response);
    File.WriteAllText(jsonFile,
        JsonConvert.SerializeObject(meetingEditDto, Formatting.Indented));
}
// Decodes an incoming voice frame, applies a gain boost to every sample, and
// feeds the result to the playback stream.
// data: encoded audio frame bytes, decoded via AudioProcessing.DeFrames.
// len:  declared payload length (currently unused; DeFrames works from the array itself).
// gain: amplification factor applied per sample. Defaults to 3f, the previously
//       hard-coded value, so existing callers are unaffected.
//       NOTE(review): gains > 1 can push samples outside [-1, 1]; no clipping is
//       applied here — confirm downstream handles that.
public void SendVoidMessage(byte[] data, int len, float gain = 3f)
{
    float[] enbuff = AudioProcessing.DeFrames(data);
    for (int i = 0; i < enbuff.Length; i++)
    {
        enbuff[i] *= gain;
    }
    _stream.Input(enbuff);
}
// Initializes a MediaCapture for audio-only capture from the first available
// audio capture device, using the given media category and audio-processing mode.
// Throws InvalidOperationException if no audio capture device is present.
public static async Task<MediaCapture> Init(MediaCategory mediaCategory, AudioProcessing audioProcessingType)
{
    await MicrophoneHandler.EnableMicrophone();

    var devices = await DeviceInformation.FindAllAsync(DeviceClass.AudioCapture);

    // BUGFIX: guard against an empty device list — indexing devices[0] threw an
    // opaque out-of-range exception on machines with no microphone.
    if (devices.Count == 0)
    {
        throw new InvalidOperationException("No audio capture devices were found.");
    }

    var microphoneId = devices[0].Id;

    var mediaCapture = new MediaCapture();
    await mediaCapture.InitializeAsync(new MediaCaptureInitializationSettings
    {
        MediaCategory = mediaCategory,
        StreamingCaptureMode = StreamingCaptureMode.Audio,
        AudioDeviceId = microphoneId,
        AudioProcessing = audioProcessingType
    });

    return mediaCapture;
}
// Splits a recording into consecutive segments of segmentSize seconds (each
// extended by segmentOverlap seconds, except the last, which runs to the end of
// the recording). Segments are written to subfolders part01, part02, ... under
// outputFolder as "ToFix.mp4". Returns the number of segments created.
public int Split(string inputFile, string outputFolder, int segmentSize, int segmentOverlap)
{
    if (segmentSize <= 0)
    {
        // Guard: prevents a DivideByZeroException below and rejects nonsensical sizes.
        throw new ArgumentOutOfRangeException(nameof(segmentSize), "Segment size must be positive.");
    }

    AudioProcessing audioProcessing = new AudioProcessing();
    Directory.CreateDirectory(outputFolder);

    int videoLength = audioProcessing.RecordingLength(inputFile);
    int numberOfSections = videoLength / segmentSize;
    int mod = videoLength % segmentSize;

    // If the leftover is greater than 1/2 segment size, put it in its own segment;
    // otherwise it is absorbed into the final segment (which extracts to the end).
    if (mod > (segmentSize / 2))
    {
        numberOfSections++;
    }

    // Create subfolders part01, part02, part03, etc.
    for (int x = 1; x <= numberOfSections; x++)
    {
        int start = (x - 1) * segmentSize;
        string segmentFolder = Path.Combine(outputFolder, $"part{x:D2}");
        Directory.CreateDirectory(segmentFolder);
        string outputFile = Path.Combine(segmentFolder, "ToFix.mp4");

        if (x < numberOfSections)
        {
            audioProcessing.ExtractPart(inputFile, outputFile, start, segmentSize + segmentOverlap);
        }
        else
        {
            audioProcessing.ExtractPart(inputFile, outputFile, start); // extract to end
        }
    }

    return numberOfSections;
}
// Seeds the data folder tree (RECEIVED/PROCESSING/COMPLETED) with test recordings
// copied from testfilesPath. Video files are trimmed to their first 9 minutes.
// Returns null on success, or a short error description.
public static string CopyTestData(string testfilesPath, string datafilesPath, bool deleteProcessing)
{
    if (!Directory.Exists(datafilesPath))
    {
        Directory.CreateDirectory(datafilesPath);
        // Use Path.Combine for platform-correct separators (was "/" concatenation,
        // inconsistent with the rest of this method).
        Directory.CreateDirectory(Path.Combine(datafilesPath, "RECEIVED"));
        Directory.CreateDirectory(Path.Combine(datafilesPath, "PROCESSING"));
        Directory.CreateDirectory(Path.Combine(datafilesPath, "COMPLETED"));
    }
    else if (deleteProcessing)
    {
        GMFileAccess.DeleteDirectoryContents(Path.Combine(datafilesPath, "PROCESSING"));
    }

    if (!Directory.Exists(testfilesPath))
    {
        Directory.CreateDirectory(testfilesPath);
        return "TESTDATA folder missing";
    }

    // These are the test files that we will copy.
    string[] files = new string[]
    {
        // This meeting exists in MeetingRepository_Stub.cs
        // as meeting #4 and status = "Received"
        "USA_PA_Philadelphia_Philadelphia_CityCouncil_en_2017-12-07.pdf",

        // This meeting exists in MeetingRepository_Stub.cs
        // as meeting #5 and status = "Received"
        "USA_ME_LincolnCounty_BoothbayHarbor_Selectmen_en_2017-01-09.mp4",

        // This meeting is not present in MeetingRepository_Stub.cs.
        // ProcessIncomingFiles in WorkflowApp should recognize that fact,
        // and create a new meeting record for this file.
        "USA_ME_LincolnCounty_BoothbayHarbor_Selectmen_en_2017-02-15.mp4"
    };

    if (files.Length == 0)
    {
        return "TESTDATA folder empty";
    }

    foreach (string file in files)
    {
        string source = Path.Combine(testfilesPath, file);
        if (!File.Exists(source))
        {
            continue; // missing test fixture: skip silently, as before
        }

        string destination = Path.Combine(datafilesPath, "RECEIVED", file);
        if (File.Exists(destination))
        {
            continue; // already staged from an earlier run
        }

        // For testing, use only the first 9 minutes of the video recordings.
        // Explicit Ordinal comparison preserves the original case-sensitive match.
        if (file.EndsWith(".mp4", StringComparison.Ordinal))
        {
            AudioProcessing audioProcessing = new AudioProcessing();
            audioProcessing.ExtractPart(source, destination, 0, 540); // 9 * 60 sec.
        }
        else
        {
            File.Copy(source, destination);
        }
    }

    return null;
}
// Creates the instance with its own AudioProcessing helper.
public WorkSegments() => audioProcessing = new AudioProcessing();