public static void Init(this IAudioEncoderSettings settings, AudioPCMConfig pcm = null)
 {
     // Iterate through each property and call ResetValue()
     foreach (PropertyDescriptor property in TypeDescriptor.GetProperties(settings))
     {
         property.ResetValue(settings);
     }
     settings.EncoderMode = settings.DefaultMode;
     settings.PCM         = pcm;
 }
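A minimal usage sketch, not part of the original source: VorbisEncoderSettings below is a hypothetical IAudioEncoderSettings implementation. Init resets every property to its default value, applies DefaultMode as the current EncoderMode, and stores the optional AudioPCMConfig.

 IAudioEncoderSettings settings = new VorbisEncoderSettings();   // hypothetical implementation

 // Reset all properties and select the encoder's default mode; PCM stays null here.
 settings.Init();

 // A PCM configuration can be attached at the same time when one is already known:
 // settings.Init(pcm: pcmConfig);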
 public static void SetEncoderModeIndex(this IAudioEncoderSettings settings, int value)
 {
     string[] modes = settings.SupportedModes.Split(' ');
     if (modes.Length == 0 && value < 0)
     {
         return;
     }
     if (value < 0 || value >= modes.Length)
     {
         throw new IndexOutOfRangeException();
     }
     settings.EncoderMode = modes[value];
 }
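A short sketch of the index-to-mode mapping, reusing the hypothetical settings object from the previous sketch and assuming its SupportedModes is the space-separated string "fast normal best" (illustrative values only):

 // SupportedModes "fast normal best" maps to indices 0, 1, 2.
 settings.SetEncoderModeIndex(1);        // EncoderMode is now "normal"
 // settings.SetEncoderModeIndex(5);     // would throw IndexOutOfRangeException

 // GetEncoderModeIndex() (shown further below) performs the reverse lookup
 // and returns -1 when EncoderMode is not in the SupportedModes list.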
        public static bool HasBrowsableAttributes(this IAudioEncoderSettings settings)
        {
            bool hasBrowsable = false;

            foreach (PropertyDescriptor property in TypeDescriptor.GetProperties(settings))
            {
                bool isBrowsable = true;
                foreach (var attribute in property.Attributes)
                {
                    var browsable = attribute as BrowsableAttribute;
                    isBrowsable &= browsable == null || browsable.Browsable;
                }
                hasBrowsable |= isBrowsable;
            }
            return(hasBrowsable);
        }
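A small usage sketch (the dialog helper is illustrative, not part of this source): HasBrowsableAttributes reports whether at least one property of the settings object is visible to a property grid, i.e. not marked [Browsable(false)].

 // Skip the configuration UI when the encoder exposes no user-editable properties.
 if (settings.HasBrowsableAttributes())
 {
     ShowEncoderSettingsDialog(settings);   // hypothetical UI helper
 }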
 public static int GuessEncoderMode(this IAudioEncoderSettings settings)
 {
     // return new List<string>(settings.SupportedModes.Split(' ')).FindIndex(m => settings.HasDefaultValuesForMode(m));
     string[] modes = settings.SupportedModes.Split(' ');
     if (modes == null || modes.Length < 1)
     {
         return(-1);
     }
     for (int i = 0; i < modes.Length; i++)
     {
         if (settings.HasDefaultValuesForMode(i))
         {
             return(i);
         }
     }
     return(-1);
 }
        public static bool HasDefaultValuesForMode(this IAudioEncoderSettings settings, int index)
        {
            bool res = true;

            foreach (PropertyDescriptor property in TypeDescriptor.GetProperties(settings))
            {
                foreach (var attribute in property.Attributes)
                {
                    if (attribute is DefaultValueForModeAttribute)
                    {
                        var defaultValueForMode = attribute as DefaultValueForModeAttribute;
                        res &= object.Equals(property.GetValue(settings), defaultValueForMode.m_values[index]);
                    }
                }
            }
            return(res);
        }
 public static void SetDefaultValuesForMode(this IAudioEncoderSettings settings)
 {
     foreach (PropertyDescriptor property in TypeDescriptor.GetProperties(settings))
     {
         if (!property.CanResetValue(settings))
         {
             foreach (var attribute in property.Attributes)
             {
                 if (attribute is DefaultValueForModeAttribute)
                 {
                     var defaultValueForMode = attribute as DefaultValueForModeAttribute;
                     property.SetValue(settings, defaultValueForMode.m_values[settings.GetEncoderModeIndex()]);
                 }
             }
         }
     }
 }
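A sketch of the per-mode defaults these helpers work with. The exact shape of DefaultValueForModeAttribute is an assumption here (only its m_values field is visible above); the property name and values are illustrative.

 // Hypothetical settings property for an encoder whose SupportedModes is "fast normal best":
 // one default value per mode, in the same order as the mode list.
 [DefaultValueForMode(1, 4, 8)]
 public int CompressionLevel { get; set; }

 // With the property left at its base default, SetDefaultValuesForMode() writes the value
 // for the currently selected mode (e.g. 4 for "normal"), while GuessEncoderMode() returns
 // the index of the first mode whose per-mode defaults all match the current values.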
 public AudioEncoderSettingsViewModel(IAudioEncoderSettings settings)
 {
     this.Settings = settings;
 }
Example #8
        public int Encode(MediaAnalyzeInfo mediaAnalyzeInfo, IAudioEncoder audioEncoder, IAudioEncoderSettings settings, Action <string> logAction, Action <string> progressAction, out string outputFileName)
        {
            // Get the AviSynth script service
            AviSynthScriptService aviSynthScriptService = ServiceFactory.GetService <AviSynthScriptService>();

            // Get the AviSynth file service
            AviSynthFileService aviSynthFileService = ServiceFactory.GetService <AviSynthFileService>();

            // Get the AviSynth audio script
            string avsScript = aviSynthScriptService.CreateAviSynthAudioScript(mediaAnalyzeInfo);

            // Audio source service used for the DirectShowSource fallback; stays null on the first attempt
            IAviSynthAudioSourceService audioSourceService = null;

            // Try to open the AVS script; if that fails, rebuild it with DirectShowSource and retry once
            while (true)
            {
                try
                {
                    using (var avsFile = aviSynthFileService.OpenAviSynthScriptFile(avsScript))
                    {
                        break;
                    }
                }
                catch (Exception)
                {
                    // If we have already retried with the DirectShowSource fallback, give up and re-throw
                    if (audioSourceService != null)
                    {
                        throw;
                    }

                    // The first attempt failed: recreate the audio AviSynth script using DirectShowSource
                    audioSourceService = ServiceFactory.GetService <AviSynthDirectShowAudioSourceService>();

                    avsScript = aviSynthScriptService.CreateAviSynthAudioScript(mediaAnalyzeInfo, audioSourceService: audioSourceService);

                    continue;
                }
            }

            // Determine the output filename
            outputFileName = $"{mediaAnalyzeInfo.Filename}.reencode.{settings.FileExtension}".GetNewFileName();

            // Open the AviSynth Script to generate the timecodes
            using (var avsFile = aviSynthFileService.OpenAviSynthScriptFile(avsScript))
            {
                // Check for audio existence
                if (avsFile.Clip.AudioSamplesCount == 0)
                {
                    throw new ApplicationException("Can't find audio stream!");
                }

                // Calculate Size in Bytes
                long totalSizeInBytes = avsFile.Clip.AudioSamplesCount * avsFile.Clip.AudioBytesPerSample * avsFile.Clip.AudioChannelsCount;

                // Define format type tag
                // 1 for int, 3 for float
                int formatTypeTag = 1;
                if (avsFile.Clip.AudioSampleType == AvsAudioSampleType.FLOAT)
                {
                    formatTypeTag = 3;
                }

                using (var process = new Process())
                {
                    // Create the ProcessStartInfo object
                    ProcessStartInfo info = new ProcessStartInfo
                    {
                        // Command line arguments, to be passed to encoder
                        // {0} means output file name
                        // {1} means samplerate in Hz
                        // {2} means bits per sample
                        // {3} means channel count
                        // {4} means samplecount
                        // {5} means size in bytes
                        // {6} means format (1 int, 3 float)
                        // {7} means target bitrate
                        Arguments = string.Format(
                            audioEncoder.ExecutableArguments,
                            outputFileName,
                            avsFile.Clip.AudioSampleRate,
                            avsFile.Clip.AudioBitsPerSample,
                            avsFile.Clip.AudioChannelsCount,
                            avsFile.Clip.AudioSamplesCount,
                            totalSizeInBytes,
                            formatTypeTag,
                            mediaAnalyzeInfo.TargetAudioBitrate
                            ),

                        FileName = audioEncoder.EncoderFileName,

                        UseShellExecute        = false,
                        RedirectStandardInput  = true,
                        RedirectStandardOutput = true,
                        RedirectStandardError  = true,
                        CreateNoWindow         = true
                    };

                    process.StartInfo = info;

                    Debug.WriteLine(info.Arguments);

                    // Start the process
                    process.Start();

                    // TODO: Revisit this (process priority handling)
                    //process.PriorityClass = m_processPriority;

                    // Read the Standard output character by character
                    Task.Run(() => process.ReadStreamPerCharacter(true, new Action <Process, string>((p, str) => Debug.WriteLine(str))));

                    // Read the Standard error character by character
                    Task.Run(() => process.ReadStreamPerCharacter(false, new Action <Process, string>((p, str) => Debug.WriteLine(str))));

                    try
                    {
                        using (Stream processInputStream = process.StandardInput.BaseStream)
                        {
                            // Check if we need to write WAV Header
                            if (audioEncoder.WriteHeader)
                            {
                                logAction?.Invoke($"Audio encoding: {mediaAnalyzeInfo.Filename} Writing header data to encoder's StdIn...");
                                WriteHeader(audioEncoder.HeaderType, processInputStream, avsFile, totalSizeInBytes, settings.ChannelMask, formatTypeTag);
                            }

                            logAction?.Invoke($"Audio encoding: {mediaAnalyzeInfo.Filename} Writing PCM data to encoder's StdIn...");

                            // Calculate the frame buffer total size
                            int frameBufferTotalSize = MAX_SAMPLES_PER_ONCE * avsFile.Clip.AudioChannelsCount * avsFile.Clip.AudioBitsPerSample / 8;

                            // Allocate the frame buffer
                            byte[] frameBuffer = new byte[frameBufferTotalSize];

                            // Get the handle for the frame buffer
                            GCHandle bufferHandle = GCHandle.Alloc(frameBuffer, GCHandleType.Pinned);

                            try
                            {
                                // Track the current sample position within the clip
                                int currentFrameSample = 0;

                                // Start passing the audio frames to the encoder's standard input stream
                                while (currentFrameSample < avsFile.Clip.AudioSamplesCount)
                                {
                                    // Check for unexpected process exit
                                    if (process != null && process.HasExited)
                                    {
                                        throw new ApplicationException($"Unexpected encoder termination with exit code: {process.ExitCode}");
                                    }

                                    // Calculate how many frame samples to read
                                    int framesSamplesToRead = Math.Min((int)(avsFile.Clip.AudioSamplesCount - currentFrameSample), MAX_SAMPLES_PER_ONCE);

                                    int bytesRead = framesSamplesToRead * avsFile.Clip.AudioBytesPerSample * avsFile.Clip.AudioChannelsCount;

                                    // Read the audio frame samples and copy them to the frame buffer
                                    avsFile.ReadAudioSamples(bufferHandle.AddrOfPinnedObject(), currentFrameSample, framesSamplesToRead);

                                    // Calculate the current progress
                                    double progress = ((double)currentFrameSample / (double)avsFile.Clip.AudioSamplesCount) * 100.0;
                                    progressAction?.Invoke($"Progress {progress:#0.00}%");

                                    // Write the frame samples to the encoder's standard input stream
                                    processInputStream.Write(frameBuffer, 0, bytesRead);
                                    processInputStream.Flush();

                                    // Advance the current frame sample indicator
                                    currentFrameSample += framesSamplesToRead;

                                    // Signal the OS to run other threads in our time slice
                                    Thread.Yield();
                                }
                            }
                            finally
                            {
                                // Free the frame buffer handle
                                bufferHandle.Free();
                            }
                        }

                        if (process != null)
                        {
                            logAction?.Invoke($"Audio encoding: {mediaAnalyzeInfo.Filename} Finalizing encoder");

                            // Wait for the process to exit
                            process.WaitForExit();

                            // Debug write the exit code
                            Debug.WriteLine($"Exit code: {process.ExitCode}");
                        }
                    }
                    finally
                    {
                        // Sanity check: if the process has not exited yet, terminate it
                        if (process != null && !process.HasExited)
                        {
                            // Kill the process
                            process.Kill();

                            // Wait for the process to exit
                            process.WaitForExit();

                            // Debug write the exit code
                            Debug.WriteLine($"Exit code: {process.ExitCode}");
                        }
                    }

                    // Return the process exit code
                    return(process?.ExitCode ?? 0);
                }
            }
        }
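A hedged call-site sketch for Encode. Only the method signature above comes from the source; the encoder, settings, and service instances below are hypothetical placeholders.

 IAudioEncoder encoder = new QaacEncoder();               // hypothetical CLI-encoder wrapper
 IAudioEncoderSettings settings = new QaacSettings();     // hypothetical settings type
 settings.Init();

 string outputFile;
 int exitCode = audioEncoderService.Encode(               // instance of the class shown above
     mediaInfo,                                           // a prepared MediaAnalyzeInfo
     encoder,
     settings,
     msg => Debug.WriteLine(msg),                         // log callback
     msg => Debug.WriteLine(msg),                         // progress callback
     out outputFile);

 if (exitCode != 0)
 {
     Debug.WriteLine($"Audio encoding failed with exit code {exitCode}");
 }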
 public static int GetEncoderModeIndex(this IAudioEncoderSettings settings)
 {
     return(new List <string>(settings.SupportedModes.Split(' ')).FindIndex(m => m == settings.EncoderMode));
 }
Example #10
 public AudioEncoder(string path, IAudioEncoderSettings settings)
 {
     m_settings = settings;
 }