public string GetTimecodesFileName(string mediaFileName)
{
    if (string.IsNullOrWhiteSpace(mediaFileName))
    {
        throw new ArgumentException("No filename was provided!", nameof(mediaFileName));
    }

    // Get the cache filename
    string cacheFileName = $"{mediaFileName}.ffindex".GetNewFileName();

    // Get the timecodes filename
    string timeCodesFileName = $"{mediaFileName}.tcodes.txt".GetNewFileName();

    // Get the AviSynth script
    string scriptFileName = CreateAviSynthTimecodesScript(mediaFileName, cacheFileName, timeCodesFileName);

    // Open the AviSynth script to generate the timecodes
    using (_aviSynthFileService.OpenAviSynthScriptFile(scriptFileName))
    {
    }

    // Delete temporary files
    File.Delete(cacheFileName);
    File.Delete(scriptFileName);

    // Return the timecodes file
    return timeCodesFileName;
}
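// Illustrative only: a minimal, hypothetical call site for GetTimecodesFileName, assuming an
// instance of this service class is available. The instance name and the media path below are
// placeholders and are not part of the original source.
//
//     string timecodesFile = timecodesService.GetTimecodesFileName(@"D:\media\episode01.mkv");
//     Debug.WriteLine($"Timecodes written to: {timecodesFile}");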
public int Encode(MediaAnalyzeInfo mediaAnalyzeInfo, IAudioEncoder audioEncoder, IAudioEncoderSettings settings,
    Action<string> logAction, Action<string> progressAction, out string outputFileName)
{
    // Get the AviSynth script service
    AviSynthScriptService aviSynthScriptService = ServiceFactory.GetService<AviSynthScriptService>();

    // Get the AviSynth file service
    AviSynthFileService aviSynthFileService = ServiceFactory.GetService<AviSynthFileService>();

    // Get the AviSynth audio script
    string avsScript = aviSynthScriptService.CreateAviSynthAudioScript(mediaAnalyzeInfo);

    // Try to open the AVS script
    IAviSynthAudioSourceService audioSourceService = null;
    while (true)
    {
        try
        {
            // Probe the script; if it opens successfully, dispose it and exit the retry loop
            using (var avsFile = aviSynthFileService.OpenAviSynthScriptFile(avsScript))
            {
                break;
            }
        }
        catch (Exception)
        {
            // Check if we already tried again
            if (audioSourceService != null)
            {
                throw;
            }

            // In case it fails, try to create the audio AviSynth script with DirectShowSource
            audioSourceService = ServiceFactory.GetService<AviSynthDirectShowAudioSourceService>();
            avsScript = aviSynthScriptService.CreateAviSynthAudioScript(mediaAnalyzeInfo, audioSourceService: audioSourceService);
            continue;
        }
    }

    // Determine the output filename
    outputFileName = $"{mediaAnalyzeInfo.Filename}.reencode.{settings.FileExtension}".GetNewFileName();

    // Open the AviSynth script to feed the decoded audio to the encoder
    using (var avsFile = aviSynthFileService.OpenAviSynthScriptFile(avsScript))
    {
        // Check for audio existence
        if (avsFile.Clip.AudioSamplesCount == 0)
        {
            throw new ApplicationException("Can't find audio stream!");
        }

        // Calculate the total size in bytes
        long totalSizeInBytes = avsFile.Clip.AudioSamplesCount * avsFile.Clip.AudioBytesPerSample * avsFile.Clip.AudioChannelsCount;

        // Define the format type tag (1 for int, 3 for float)
        int formatTypeTag = 1;
        if (avsFile.Clip.AudioSampleType == AvsAudioSampleType.FLOAT)
        {
            formatTypeTag = 3;
        }

        using (var process = new Process())
        {
            // Create the ProcessStartInfo object
            ProcessStartInfo info = new ProcessStartInfo
            {
                // Command line arguments, to be passed to the encoder:
                // {0} output file name
                // {1} sample rate in Hz
                // {2} bits per sample
                // {3} channel count
                // {4} sample count
                // {5} size in bytes
                // {6} format (1 int, 3 float)
                // {7} target bitrate
                Arguments = string.Format(
                    audioEncoder.ExecutableArguments,
                    outputFileName,
                    avsFile.Clip.AudioSampleRate,
                    avsFile.Clip.AudioBitsPerSample,
                    avsFile.Clip.AudioChannelsCount,
                    avsFile.Clip.AudioSamplesCount,
                    totalSizeInBytes,
                    formatTypeTag,
                    mediaAnalyzeInfo.TargetAudioBitrate
                ),
                FileName = audioEncoder.EncoderFileName,
                UseShellExecute = false,
                RedirectStandardInput = true,
                RedirectStandardOutput = true,
                RedirectStandardError = true,
                CreateNoWindow = true
            };
            process.StartInfo = info;

            Debug.WriteLine(info.Arguments);

            // Start the process
            process.Start();

            // TODO: Revisit that
            //process.PriorityClass = m_processPriority;

            // Read the standard output character by character
            Task.Run(() => process.ReadStreamPerCharacter(true, new Action<Process, string>((p, str) => Debug.WriteLine(str))));

            // Read the standard error character by character
            Task.Run(() => process.ReadStreamPerCharacter(false, new Action<Process, string>((p, str) => Debug.WriteLine(str))));

            try
            {
                using (Stream processInputStream = process.StandardInput.BaseStream)
                {
                    // Check if we need to write a WAV header
                    if (audioEncoder.WriteHeader)
                    {
                        logAction?.Invoke($"Audio encoding: {mediaAnalyzeInfo.Filename} Writing header data to encoder's StdIn...");
                        WriteHeader(audioEncoder.HeaderType, processInputStream, avsFile, totalSizeInBytes, settings.ChannelMask, formatTypeTag);
                    }

                    logAction?.Invoke($"Audio encoding: {mediaAnalyzeInfo.Filename} Writing PCM data to encoder's StdIn...");

                    // Calculate the frame buffer total size
                    int frameBufferTotalSize = MAX_SAMPLES_PER_ONCE * avsFile.Clip.AudioChannelsCount * avsFile.Clip.AudioBitsPerSample / 8;

                    // Allocate the frame buffer
                    byte[] frameBuffer = new byte[frameBufferTotalSize];

                    // Pin the frame buffer so its address can be passed to ReadAudioSamples
                    GCHandle bufferHandle = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);
                    try
                    {
                        // Set a current frame sample indicator
                        int currentFrameSample = 0;

                        // Start passing the audio frames to the encoder's standard input stream
                        while (currentFrameSample < avsFile.Clip.AudioSamplesCount)
                        {
                            // Check for unexpected process exit
                            if (process != null && process.HasExited)
                            {
                                throw new ApplicationException($"Unexpected encoder termination with exit code: {process.ExitCode}");
                            }

                            // Calculate how many frame samples to read
                            int framesSamplesToRead = Math.Min((int)(avsFile.Clip.AudioSamplesCount - currentFrameSample), MAX_SAMPLES_PER_ONCE);
                            int bytesRead = framesSamplesToRead * avsFile.Clip.AudioBytesPerSample * avsFile.Clip.AudioChannelsCount;

                            // Read the audio frame samples and copy them to the frame buffer
                            avsFile.ReadAudioSamples(bufferHandle.AddrOfPinnedObject(), currentFrameSample, framesSamplesToRead);

                            // Calculate the current progress
                            double progress = ((double)currentFrameSample / (double)avsFile.Clip.AudioSamplesCount) * 100.0;
                            progressAction?.Invoke($"Progress {progress:#0.00}%");

                            // Write the frame samples to the encoder's standard input stream
                            processInputStream.Write(frameBuffer, 0, bytesRead);
                            processInputStream.Flush();

                            // Advance the current frame sample indicator
                            currentFrameSample += framesSamplesToRead;

                            // Signal the OS to run other threads in our time slice
                            Thread.Yield();
                        }
                    }
                    finally
                    {
                        // Free the frame buffer handle
                        bufferHandle.Free();
                    }
                }

                if (process != null)
                {
                    logAction?.Invoke($"Audio encoding: {mediaAnalyzeInfo.Filename} Finalizing encoder");

                    // Wait for the process to exit
                    process.WaitForExit();

                    // Debug write the exit code
                    Debug.WriteLine($"Exit code: {process.ExitCode}");
                }
            }
            finally
            {
                // Sanity check for a process that has not exited
                if (process != null && !process.HasExited)
                {
                    // Kill the process
                    process.Kill();

                    // Wait for the process to exit
                    process.WaitForExit();

                    // Debug write the exit code
                    Debug.WriteLine($"Exit code: {process.ExitCode}");
                }
            }

            // Return the process exit code
            return process?.ExitCode ?? 0;
        }
    }
}
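// Illustrative only: a minimal, hypothetical call site for Encode, assuming the analysis info,
// encoder and settings objects were created elsewhere. All names below are placeholders and are
// not part of the original source.
//
//     int exitCode = Encode(
//         mediaAnalyzeInfo,
//         audioEncoder,
//         encoderSettings,
//         msg => Debug.WriteLine(msg),      // logAction
//         msg => Debug.WriteLine(msg),      // progressAction
//         out string encodedAudioFile);
//
//     if (exitCode != 0)
//     {
//         throw new ApplicationException($"Audio encoder failed with exit code {exitCode}");
//     }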