Example #1
 public void Should_throw_if_no_implementation_specified()
 {
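     // Passing a null filter implementation should cause the constructor to throw.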
     target = new ComplexFilter(
         new WaveFormat(),
         Substitute.For<IWindowFunction>(),
         null);
 }
Example #2
        private void BuildAudioInChain(WaveFormat monoFormat)
        {
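            // Signal chain: ASIO input -> flanger -> low-pass FIR filter -> ASIO output,
            // with a Fourier transform tapping the filtered signal and feeding
            // fourierControl for display.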
            filter = new ComplexFilter(
                monoFormat,
                new BlackmanHarrisWindowFunction(),
                new FirFilter());

            filter.Filters.Add(new DigitalFilter
            {
                FilterFunction       = new LowPassFilterFunction(),
                LowerCutOffFrequency = 10000f
            });

            filterNode = new MonoSignalNode(monoFormat, filter.FilterImplementation);

            fourier = new FourierTransform(
                new FastFourierTransformProvider(),
                new BlackmanHarrisWindowFunction(),
                2048);
            fourierNode        = new MonoSignalNode(monoFormat, fourier);
            fourier.DataReady += fourierControl.fourier_DataReady;

            flanger     = new Flanger(new SineWave());
            flangerNode = new MonoSignalNode(monoFormat, flanger);

            flangerNode.CentreIn.Source = asioInput.Sources.First();
            //flangerNode.CentreIn.Source = waveCard.Inputs.First();
            filterNode.CentreIn.Source           = flangerNode.CentreOut;
            fourierNode.CentreIn.Source          = filterNode.CentreOut;
            asioOutput.Sinks.ElementAt(0).Source = filterNode.CentreOut;
            asioOutput.Sinks.ElementAt(1).Source = filterNode.CentreOut;
            //waveCard.Outputs.ElementAt(0).Source = filterNode.CentreOut;
            //waveCard.Outputs.ElementAt(1).Source = filterNode.CentreOut;
        }
Example #3
 public void Should_throw_if_no_format_specified()
 {
     target = new ComplexFilter(
         null,
         Substitute.For<IWindowFunction>(),
         Substitute.For<IFilterImplementation>());
 }
Example #4
 public unsafe ComplexFilter(Complex[] kernel)
     : base(ComplexFilter.GetFFTSize(kernel.Length))
 {
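     // Record the true kernel length, then allocate an unmanaged, FFT-sized buffer
     // so the padded kernel can be used directly in FFT-based (fast) convolution.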
     this._actualKernelLength = kernel.Length;
     this._kernelBuffer       = UnsafeBuffer.Create(base.FFTSize, sizeof(Complex));
     this._kernelPtr          = (Complex*)(void*)this._kernelBuffer;
     this.SetKernel(kernel);
 }
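GetFFTSize is not shown in this snippet. A common sizing rule for FFT-based filtering is the next power of two that is at least twice the kernel length, so the circular convolution has room for the full linear result. A minimal sketch under that assumption (not necessarily this library's actual implementation):

 // Hypothetical sketch of the sizing rule; an assumption, not the library's code.
 private static int GetFFTSize(int kernelLength)
 {
     // Round up to the next power of two that is at least twice the kernel
     // length, leaving room for the linear convolution result.
     int size = 2;
     while (size < kernelLength * 2)
     {
         size <<= 1;
     }
     return size;
 }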
Example #5
        public void Should_throw_if_window_function_returns_the_wrong_number_of_coefficients()
        {
            windowFunction.CalculateCoefficients(Arg.Any<int>())
            .Returns(new float[] { });

            var target = new ComplexFilter(
                new WaveFormat(),
                windowFunction,
                Substitute.For<IFilterImplementation>());
        }
Example #6
        public void TestInitialise()
        {
            format         = new WaveFormat(44100, 2);
            windowFunction = Substitute.For<IWindowFunction>();
            implementation = Substitute.For<IFilterImplementation>();

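            // Stub the window so the filter under test always sees a 21-tap
            // (order-20) coefficient set, matching the tests that follow.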
            windowFunction
            .CalculateCoefficients(Arg.Any<int>())
            .Returns(new float[21]);

            target = new ComplexFilter(format, windowFunction, implementation);
        }
Example #7
        public void Should_update_implementation_coefficients_when_filter_collection_changes()
        {
            var windowFunctionCoefficients = new[]
            {
                1f, 2f, 3f, 4f, 5f,
                1f, 2f, 3f, 4f, 5f,
                1f, 2f, 3f, 4f, 5f,
                1f, 2f, 3f, 4f, 5f,
                1f
            };

            var filterFunctionCoefficients = new[]
            {
                7f, 5f, 2f, 7f, 5f,
                2f, 7f, 5f, 2f, 7f,
                5f, 2f, 7f, 5f, 2f,
                7f, 5f, 2f, 7f, 5f,
                2f
            };

            var expected = new[]
            {
                // Product of filter and window functions
                7f, 10f, 6f, 28f, 25f,
                2f, 14f, 15f, 8f, 35f,
                5f, 4f, 21f, 20f, 10f,
                7f, 10f, 6f, 28f, 25f,
                2f
            };

            windowFunction.CalculateCoefficients(Arg.Any<int>())
            .Returns(windowFunctionCoefficients);

            var filter = Substitute.For<IDigitalFilter>();

            filter
            .CalculateCoefficients(Arg.Is<int>(20), Arg.Is<int>(44100))
            .Returns(filterFunctionCoefficients);

            target = new ComplexFilter(
                new WaveFormat(44100, 2),
                windowFunction,
                implementation);

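            // Adding a filter to the collection should trigger recalculation of the
            // combined (window x filter) coefficients pushed into the implementation.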
            target.Filters.Add(filter);

            CollectionAssert.AreEqual(expected, implementation.Coefficients, new FloatComparer());
        }
Example #8
        public void Should_apply_window_function_to_coefficients_on_instantiation()
        {
            var expected = new[]
            {
                1f, 2f, 3f, 4f, 5f,
                6f, 7f, 8f, 9f, 10f,
                11f,
                12f, 13f, 14f, 15f, 16f,
                17f, 18f, 19f, 20f, 21f
            };

            windowFunction.CalculateCoefficients(Arg.Is<int>(20)).Returns(expected);

            target = new ComplexFilter(new WaveFormat(), windowFunction, implementation);

            CollectionAssert.AreEqual(expected, implementation.Coefficients, new FloatComparer());
        }
Example #9
 /// <summary>
 /// Remove a <see cref="SpatialFilter"/> from this source.
 /// </summary>
 public void RemoveFilter(Filter target)
 {
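     // If the target is the spatial filter itself, clear it; otherwise treat the
     // spatial filter as a ComplexFilter wrapper. A wrapper whose only entry is
     // the target collapses to null rather than being left empty.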
     if (SpatialFilter == target)
     {
         SpatialFilter = null;
     }
     else
     {
         ComplexFilter complex = (ComplexFilter)SpatialFilter;
         if (complex.Filters.Count == 1 && complex.Filters[0] == target)
         {
             SpatialFilter = null;
         }
         else
         {
             complex.Filters.Remove(target);
         }
     }
 }
Example #10
        public void Process(Complex* buffer, int length)
        {
            if (!_spectrumAnalyzer.Visible)
            {
                return;
            }

            if (_displayBeforeFilter)
            {
                if (_iqStream.Length < length * 4)
                {
                    _iqStream.Write(buffer, length);
                }
            }

            if (_enableFilter)
            {
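                // Build the filter kernel lazily on first use, and rebuild it only
                // when the filter settings have changed since the last pass.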
                if (_complexFilter == null)
                {
                    var kernel = FilterBuilder.MakeComplexKernel(_sampleRate, DefaultFilterOrder, _filterbandwidth, _filterOffset, WindowType.BlackmanHarris7);
                    _complexFilter = new ComplexFilter(kernel);
                }
                else if (_updateFilter)
                {
                    var kernel = FilterBuilder.MakeComplexKernel(_sampleRate, DefaultFilterOrder, _filterbandwidth, _filterOffset, WindowType.BlackmanHarris7);
                    _complexFilter.SetKernel(kernel);
                    _updateFilter = false;
                }

                _complexFilter.Process(buffer, length);
            }

            if (!_displayBeforeFilter)
            {
                if (_iqStream.Length < length * 4)
                {
                    _iqStream.Write(buffer, length);
                }
            }
        }
Example #11
        /// <summary>
        /// This is an example of how to override the MarkThree.Windows.Controls.Report class.
        /// </summary>
        public ReportEquityWorkingOrder()
        {
            // All records in the presentation layer of the report require a unique identifier.  When the report is updated, this
            // identifier is used to map the data to an existing record or to create a new one.  The starting point for the report
            // is the header record which uses this identifier.  The rest of the records in the report will generally use the
            // source DataRow as the unique identifier.
            this.guid = Guid.NewGuid();

            this.reportId = Guid.Empty;

            // These objects are required for sorting, filtering and ordering the report.
            this.prefilter = new ComplexFilter<WorkingOrderRow>();
            this.prefilter.Add(this.FilterBlotters);
            this.filter   = new ComplexFilter<Schema.EquityWorkingOrder.WorkingOrder>();
            this.comparer = new ComplexComparer<Schema.EquityWorkingOrder.WorkingOrder>();
            this.comparer.Add(new Schema.EquityWorkingOrder.MarketValueComparer(), SortOrder.Descending);

            // This is the list of all the blotters on display in this report.  A single blotter can be displayed or several may be
            // aggregated.  The blotter list is used by the 'prefilter' to determine which WorkingOrder rows from the data model
            // should be transformed into the presentation layer objects.
            this.setBlotterFilterHandler = new SetBlotterFilterHandler(OnSetBlotterFilter);
            this.blotterList             = new List<Guid>();

            // This is needed to satisfy the compiler.  In practice, this value is loaded from the user settings and defaulted
            // through the same mechanism.
            this.AnimationSpeed = AnimationSpeed.Off;

            this.CommandBindings.Add(new CommandBinding(FluidTradeCommands.SortReport, OnSortReport));

            // These handlers will update the middle tier in response to changes in the report.
            this.AddHandler(ToggleButton.CheckedEvent, new RoutedEventHandler(OnToggleButtonChange));
            this.AddHandler(ToggleButton.UncheckedEvent, new RoutedEventHandler(OnToggleButtonChange));
            this.AddHandler(Selector.SelectionChangedEvent, new RoutedEventHandler(OnSelectorSelectionChanged));
            this.AddHandler(TextBox.TextChangedEvent, new RoutedEventHandler(OnTextChanged));
            this.AddHandler(TextBox.LostFocusEvent, new RoutedEventHandler(OnTextBoxLostFocus));

            // These handlers take care of installing and uninstalling this window in the data model update events.
            this.Loaded   += new RoutedEventHandler(OnLoaded);
            this.Unloaded += new RoutedEventHandler(OnUnloaded);
        }
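In these report classes, ComplexFilter<T> plays a different role from the DSP examples above: it aggregates row-level predicates used to decide which rows reach the presentation layer. A minimal sketch of such a predicate aggregator, assuming AND semantics (the Contains name and the exact API are assumptions, not the actual FluidTrade types):

        using System;
        using System.Collections.Generic;

        public class ComplexFilter<T>
        {
            private readonly List<Func<T, bool>> predicates = new List<Func<T, bool>>();

            // Register another predicate, e.g. prefilter.Add(this.FilterBlotters).
            public void Add(Func<T, bool> predicate)
            {
                this.predicates.Add(predicate);
            }

            // A row is included only when every registered predicate accepts it.
            public bool Contains(T item)
            {
                return this.predicates.TrueForAll(p => p(item));
            }
        }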
Example #12
        /// <summary>
        /// This is an example of how to override the MarkThree.Windows.Controls.Report class.
        /// </summary>
        public ReportDebtNegotiatorPaymentSummary()
        {
            // All records in the presentation layer of the report require a unique identifier.  When the report is updated, this
            // identifier is used to map the data to an existing record or to create a new one.  The starting point for the report
            // is the header record which uses this identifier.  The rest of the records in the report will generally use the
            // source DataRow as the unique identifier.
            this.guid = Guid.NewGuid();

            this.reportId = Guid.Empty;

            // These objects are required for sorting, filtering and ordering the report.
            this.prefilter = new ComplexFilter<ConsumerTrustPaymentRow>();
            this.prefilter.Add(this.FilterBlotters);
            this.filter   = new ComplexFilter<Schema.DebtNegotiatorPaymentSummary.PaymentSummary>();
            this.comparer = new ComplexComparer<Schema.DebtNegotiatorPaymentSummary.PaymentSummary>();
            this.comparer.Add(new Schema.DebtNegotiatorPaymentSummary.EffectivePaymentDateComparer(), SortOrder.Ascending);

            // This is needed to satisfy the compiler.  In practice, this value is loaded from the user settings and defaulted
            // through the same mechanism.
            this.AnimationSpeed = AnimationSpeed.Off;

            this.CommandBindings.Add(new CommandBinding(FluidTradeCommands.SortReport, OnSortReport));

            // These handlers will update the middle tier in response to changes in the report.
            this.AddHandler(FluidTrade.Actipro.DateTimePicker.DateTimeChangedEvent, new RoutedEventHandler(OnDateTimePicker));
            this.AddHandler(FluidTrade.Guardian.Windows.Controls.StatusComboBox.PersistentSelectedValueChangedEvent, new RoutedEventHandler(OnComboBoxChange));
            this.AddHandler(ToggleButton.CheckedEvent, new RoutedEventHandler(OnToggleButtonChange));
            this.AddHandler(ToggleButton.UncheckedEvent, new RoutedEventHandler(OnToggleButtonChange));
            this.AddHandler(TextBox.TextChangedEvent, new RoutedEventHandler(OnTextChanged));
            this.AddHandler(TextBox.LostFocusEvent, new RoutedEventHandler(OnTextBoxLostFocus));
            this.AddHandler(ReportGrid.ShowToolTipEvent, new ReportGridtToolTipEventHandler(OnShowToolTipHandler));

            // These handlers take care of installing and uninstalling this window in the data model update events.
            this.Loaded   += new RoutedEventHandler(OnLoaded);
            this.Unloaded += new RoutedEventHandler(OnUnloaded);
        }
Example #13
        private void BuildSineWaveChain(WaveFormat stereoFormat)
        {
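            // Signal chain: sine wave (fed from the ASIO input) -> band-stop FIR filter
            // -> stereo ASIO output, with a Fourier transform tapping the left channel.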
            sineWave     = new SineWave();
            sineWaveNode = new StereoSignalNode(stereoFormat, sineWave);

            sineWaveNode.LeftIn.Source  = asioInput.Sources.ElementAt(0);
            sineWaveNode.RightIn.Source = asioInput.Sources.ElementAt(0);

            filter = new ComplexFilter(
                stereoFormat,
                new BlackmanHarrisWindowFunction(),
                new FirFilter());

            filter.Filters.Add(new DigitalFilter
            {
                FilterFunction       = new BandStopFilterFunction(),
                LowerCutOffFrequency = 10000f,
                UpperCutOffFrequency = 12000f
            });

            stereoFilterNode = new StereoSignalNode(stereoFormat, filter.FilterImplementation);

            stereoFilterNode.LeftIn.Source  = sineWaveNode.LeftOut;
            stereoFilterNode.RightIn.Source = sineWaveNode.RightOut;

            fourier = new FourierTransform(
                new FastFourierTransformProvider(),
                new BlackmanHarrisWindowFunction(),
                2048);
            fourierNode        = new MonoSignalNode(stereoFormat, fourier);
            fourier.DataReady += fourierControl.fourier_DataReady;

            fourierNode.CentreIn.Source = stereoFilterNode.LeftOut;

            asioOutput.Sinks.ElementAt(0).Source = stereoFilterNode.LeftOut;
            asioOutput.Sinks.ElementAt(1).Source = stereoFilterNode.RightOut;
        }
Example #14
    public FFmpegPipeline Build(FFmpegState ffmpegState, FrameState desiredState)
    {
        var allVideoStreams = _videoInputFile.SelectMany(f => f.VideoStreams).ToList();

        // -sc_threshold 0 is unsupported with mpeg2video
        _pipelineSteps.Add(
            allVideoStreams.All(s => s.Codec != VideoFormat.Mpeg2Video) &&
            desiredState.VideoFormat != VideoFormat.Mpeg2Video
                ? new NoSceneDetectOutputOption(0)
                : new NoSceneDetectOutputOption(1_000_000_000));

        if (ffmpegState.SaveReport)
        {
            _pipelineSteps.Add(new FFReportVariable(_reportsFolder, None));
        }
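        // Note: optional values (Start, Finish, and friends) are iterated like
        // zero-or-one element collections, so each foreach below runs at most once.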

        foreach (TimeSpan desiredStart in ffmpegState.Start.Filter(s => s > TimeSpan.Zero))
        {
            var option = new StreamSeekInputOption(desiredStart);
            _audioInputFile.Iter(f => f.AddOption(option));
            _videoInputFile.Iter(f => f.AddOption(option));

            // need to seek text subtitle files
            if (_subtitleInputFile.Map(s => !s.IsImageBased).IfNone(false))
            {
                _pipelineSteps.Add(new StreamSeekFilterOption(desiredStart));
            }
        }

        foreach (TimeSpan desiredFinish in ffmpegState.Finish)
        {
            _pipelineSteps.Add(new TimeLimitOutputOption(desiredFinish));
        }

        foreach (VideoStream videoStream in allVideoStreams)
        {
            bool hasOverlay = _watermarkInputFile.IsSome ||
                              _subtitleInputFile.Map(s => s.IsImageBased && !s.Copy).IfNone(false);

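            // The stream's frame rate arrives as an optional string; parse it into
            // an Option<int> for the initial frame state when present and numeric.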
            Option<int> initialFrameRate = Option<int>.None;
            foreach (string frameRateString in videoStream.FrameRate)
            {
                if (int.TryParse(frameRateString, out int parsedFrameRate))
                {
                    initialFrameRate = parsedFrameRate;
                }
            }

            var currentState = new FrameState(
                false, // realtime
                false, // infinite loop
                videoStream.Codec,
                videoStream.PixelFormat,
                videoStream.FrameSize,
                videoStream.FrameSize,
                initialFrameRate,
                Option<int>.None,
                Option<int>.None,
                Option<int>.None,
                false); // deinterlace

            IEncoder encoder;

            if (IsDesiredVideoState(currentState, desiredState))
            {
                encoder = new EncoderCopyVideo();
                _pipelineSteps.Add(encoder);
            }
            else
            {
                Option<IPipelineStep> maybeAccel = AvailableHardwareAccelerationOptions.ForMode(
                    ffmpegState.HardwareAccelerationMode,
                    ffmpegState.VaapiDevice,
                    _logger);

                if (maybeAccel.IsNone)
                {
                    ffmpegState = ffmpegState with
                    {
                        // disable hw accel if we don't match anything
                        HardwareAccelerationMode = HardwareAccelerationMode.None
                    };
                }

                foreach (IPipelineStep accel in maybeAccel)
                {
                    currentState = accel.NextState(currentState);
                    _pipelineSteps.Add(accel);
                }

                // nvenc requires yuv420p background with yuva420p overlay
                if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Nvenc && hasOverlay)
                {
                    desiredState = desiredState with {
                        PixelFormat = new PixelFormatYuv420P()
                    };
                }

                // qsv should stay nv12
                if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Qsv && hasOverlay)
                {
                    IPixelFormat pixelFormat = desiredState.PixelFormat.IfNone(new PixelFormatYuv420P());
                    desiredState = desiredState with {
                        PixelFormat = new PixelFormatNv12(pixelFormat.Name)
                    };
                }

                foreach (string desiredVaapiDriver in ffmpegState.VaapiDriver)
                {
                    IPipelineStep step = new LibvaDriverNameVariable(desiredVaapiDriver);
                    currentState = step.NextState(currentState);
                    _pipelineSteps.Add(step);
                }

                foreach (IDecoder decoder in AvailableDecoders.ForVideoFormat(
                             ffmpegState,
                             currentState,
                             desiredState,
                             _watermarkInputFile,
                             _subtitleInputFile,
                             _logger))
                {
                    foreach (VideoInputFile videoInputFile in _videoInputFile)
                    {
                        videoInputFile.AddOption(decoder);
                        currentState = decoder.NextState(currentState);
                    }
                }
            }

            if (_subtitleInputFile.Map(s => s.Copy) == Some(true))
            {
                _pipelineSteps.Add(new EncoderCopySubtitle());
            }

            if (videoStream.StillImage)
            {
                var option = new InfiniteLoopInputOption(ffmpegState.HardwareAccelerationMode);
                _videoInputFile.Iter(f => f.AddOption(option));
            }

            if (!IsDesiredVideoState(currentState, desiredState))
            {
                if (desiredState.Realtime)
                {
                    var option = new RealtimeInputOption();
                    _audioInputFile.Iter(f => f.AddOption(option));
                    _videoInputFile.Iter(f => f.AddOption(option));
                }

                if (desiredState.InfiniteLoop)
                {
                    var option = new InfiniteLoopInputOption(ffmpegState.HardwareAccelerationMode);
                    _audioInputFile.Iter(f => f.AddOption(option));
                    _videoInputFile.Iter(f => f.AddOption(option));
                }

                foreach (int desiredFrameRate in desiredState.FrameRate)
                {
                    if (currentState.FrameRate != desiredFrameRate)
                    {
                        IPipelineStep step = new FrameRateOutputOption(desiredFrameRate);
                        currentState = step.NextState(currentState);
                        _pipelineSteps.Add(step);
                    }
                }

                foreach (int desiredTimeScale in desiredState.VideoTrackTimeScale)
                {
                    if (currentState.VideoTrackTimeScale != desiredTimeScale)
                    {
                        IPipelineStep step = new VideoTrackTimescaleOutputOption(desiredTimeScale);
                        currentState = step.NextState(currentState);
                        _pipelineSteps.Add(step);
                    }
                }

                foreach (int desiredBitrate in desiredState.VideoBitrate)
                {
                    if (currentState.VideoBitrate != desiredBitrate)
                    {
                        IPipelineStep step = new VideoBitrateOutputOption(desiredBitrate);
                        currentState = step.NextState(currentState);
                        _pipelineSteps.Add(step);
                    }
                }

                foreach (int desiredBufferSize in desiredState.VideoBufferSize)
                {
                    if (currentState.VideoBufferSize != desiredBufferSize)
                    {
                        IPipelineStep step = new VideoBufferSizeOutputOption(desiredBufferSize);
                        currentState = step.NextState(currentState);
                        _pipelineSteps.Add(step);
                    }
                }

                if (desiredState.Deinterlaced && !currentState.Deinterlaced)
                {
                    IPipelineFilterStep step = AvailableDeinterlaceFilters.ForAcceleration(
                        ffmpegState.HardwareAccelerationMode,
                        currentState,
                        desiredState,
                        _watermarkInputFile,
                        _subtitleInputFile);
                    currentState = step.NextState(currentState);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(step));
                }

                // TODO: this is a software-only flow, will need to be different for hardware accel
                if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.None)
                {
                    if (currentState.ScaledSize != desiredState.ScaledSize ||
                        currentState.PaddedSize != desiredState.PaddedSize)
                    {
                        IPipelineFilterStep scaleStep = new ScaleFilter(
                            currentState,
                            desiredState.ScaledSize,
                            desiredState.PaddedSize);
                        currentState = scaleStep.NextState(currentState);
                        _videoInputFile.Iter(f => f.FilterSteps.Add(scaleStep));

                        // TODO: padding might not be needed, can we optimize this out?
                        IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize);
                        currentState = padStep.NextState(currentState);
                        _videoInputFile.Iter(f => f.FilterSteps.Add(padStep));

                        IPipelineFilterStep sarStep = new SetSarFilter();
                        currentState = sarStep.NextState(currentState);
                        _videoInputFile.Iter(f => f.FilterSteps.Add(sarStep));
                    }
                }
                else if (currentState.ScaledSize != desiredState.ScaledSize)
                {
                    IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
                        ffmpegState.HardwareAccelerationMode,
                        currentState,
                        desiredState.ScaledSize,
                        desiredState.PaddedSize);
                    currentState = scaleFilter.NextState(currentState);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));

                    // TODO: padding might not be needed, can we optimize this out?
                    if (currentState.PaddedSize != desiredState.PaddedSize)
                    {
                        IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize);
                        currentState = padStep.NextState(currentState);
                        _videoInputFile.Iter(f => f.FilterSteps.Add(padStep));
                    }

                    IPipelineFilterStep sarStep = new SetSarFilter();
                    currentState = sarStep.NextState(currentState);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(sarStep));
                }
                else if (currentState.PaddedSize != desiredState.PaddedSize)
                {
                    IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
                        ffmpegState.HardwareAccelerationMode,
                        currentState,
                        desiredState.ScaledSize,
                        desiredState.PaddedSize);
                    currentState = scaleFilter.NextState(currentState);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));

                    if (currentState.PaddedSize != desiredState.PaddedSize)
                    {
                        IPipelineFilterStep padStep = new PadFilter(currentState, desiredState.PaddedSize);
                        currentState = padStep.NextState(currentState);
                        _videoInputFile.Iter(f => f.FilterSteps.Add(padStep));
                    }

                    IPipelineFilterStep sarStep = new SetSarFilter();
                    currentState = sarStep.NextState(currentState);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(sarStep));
                }

                if (hasOverlay && currentState.PixelFormat.Map(pf => pf.FFmpegName) !=
                    desiredState.PixelFormat.Map(pf => pf.FFmpegName))
                {
                    // this should only happen with nvenc?
                    // use scale filter to fix pixel format

                    foreach (IPixelFormat pixelFormat in desiredState.PixelFormat)
                    {
                        if (currentState.FrameDataLocation == FrameDataLocation.Software)
                        {
                            IPipelineFilterStep formatFilter = new PixelFormatFilter(pixelFormat);
                            currentState = formatFilter.NextState(currentState);
                            _videoInputFile.Iter(f => f.FilterSteps.Add(formatFilter));

                            switch (ffmpegState.HardwareAccelerationMode)
                            {
                            case HardwareAccelerationMode.Nvenc:
                                var uploadFilter = new HardwareUploadFilter(ffmpegState);
                                currentState = uploadFilter.NextState(currentState);
                                _videoInputFile.Iter(f => f.FilterSteps.Add(uploadFilter));
                                break;
                            }
                        }
                        else
                        {
                            if (ffmpegState.HardwareAccelerationMode != HardwareAccelerationMode.Qsv)
                            {
                                // the filter re-applies the current pixel format, so we have to set it first
                                currentState = currentState with {
                                    PixelFormat = desiredState.PixelFormat
                                };

                                IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
                                    ffmpegState.HardwareAccelerationMode,
                                    currentState,
                                    desiredState.ScaledSize,
                                    desiredState.PaddedSize);
                                currentState = scaleFilter.NextState(currentState);
                                _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));
                            }
                        }
                    }
                }

                // nvenc custom logic
                if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Nvenc)
                {
                    foreach (VideoInputFile videoInputFile in _videoInputFile)
                    {
                        // if we only deinterlace, we need to set pixel format again (using scale_cuda)
                        bool onlyYadif = videoInputFile.FilterSteps.Count == 1 &&
                                         videoInputFile.FilterSteps.Any(fs => fs is YadifCudaFilter);

                        // if we have no filters and an overlay, we need to set pixel format
                        bool unfilteredWithOverlay = videoInputFile.FilterSteps.Count == 0 && hasOverlay;

                        if (onlyYadif || unfilteredWithOverlay)
                        {
                            // the filter re-applies the current pixel format, so we have to set it first
                            currentState = currentState with {
                                PixelFormat = desiredState.PixelFormat
                            };

                            IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
                                ffmpegState.HardwareAccelerationMode,
                                currentState,
                                desiredState.ScaledSize,
                                desiredState.PaddedSize);
                            currentState = scaleFilter.NextState(currentState);
                            videoInputFile.FilterSteps.Add(scaleFilter);
                        }
                    }
                }

                if (ffmpegState.PtsOffset > 0)
                {
                    foreach (int videoTrackTimeScale in desiredState.VideoTrackTimeScale)
                    {
                        IPipelineStep step = new OutputTsOffsetOption(
                            ffmpegState.PtsOffset,
                            videoTrackTimeScale);
                        currentState = step.NextState(currentState);
                        _pipelineSteps.Add(step);
                    }
                }

                foreach (IPixelFormat desiredPixelFormat in desiredState.PixelFormat)
                {
                    if (currentState.PixelFormat.Map(pf => pf.FFmpegName) != desiredPixelFormat.FFmpegName)
                    {
                        // qsv doesn't seem to like this
                        if (ffmpegState.HardwareAccelerationMode != HardwareAccelerationMode.Qsv)
                        {
                            IPipelineStep step = new PixelFormatOutputOption(desiredPixelFormat);
                            currentState = step.NextState(currentState);
                            _pipelineSteps.Add(step);
                        }
                    }
                }
            }

            // TODO: if all video filters are software, use software pixel format for hwaccel output
            // might be able to skip scale_cuda=format=whatever,hwdownload,format=whatever

            foreach (AudioInputFile audioInputFile in _audioInputFile)
            {
                // always need to specify audio codec so ffmpeg doesn't default to a codec we don't want
                foreach (IEncoder step in AvailableEncoders.ForAudioFormat(audioInputFile.DesiredState, _logger))
                {
                    currentState = step.NextState(currentState);
                    _pipelineSteps.Add(step);
                }

                foreach (int desiredAudioChannels in audioInputFile.DesiredState.AudioChannels)
                {
                    _pipelineSteps.Add(new AudioChannelsOutputOption(desiredAudioChannels));
                }

                foreach (int desiredBitrate in audioInputFile.DesiredState.AudioBitrate)
                {
                    _pipelineSteps.Add(new AudioBitrateOutputOption(desiredBitrate));
                }

                foreach (int desiredBufferSize in audioInputFile.DesiredState.AudioBufferSize)
                {
                    _pipelineSteps.Add(new AudioBufferSizeOutputOption(desiredBufferSize));
                }

                foreach (int desiredSampleRate in audioInputFile.DesiredState.AudioSampleRate)
                {
                    _pipelineSteps.Add(new AudioSampleRateOutputOption(desiredSampleRate));
                }

                if (audioInputFile.DesiredState.NormalizeLoudness)
                {
                    _audioInputFile.Iter(f => f.FilterSteps.Add(new NormalizeLoudnessFilter()));
                }

                foreach (TimeSpan desiredDuration in audioInputFile.DesiredState.AudioDuration)
                {
                    _audioInputFile.Iter(f => f.FilterSteps.Add(new AudioPadFilter(desiredDuration)));
                }
            }

            foreach (SubtitleInputFile subtitleInputFile in _subtitleInputFile)
            {
                if (subtitleInputFile.IsImageBased)
                {
                    // vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software
                    // though videotoolbox uses software decoders, so no need to download for that
                    if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
                    {
                        var downloadFilter = new HardwareDownloadFilter(currentState);
                        currentState = downloadFilter.NextState(currentState);
                        _videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter));
                    }

                    subtitleInputFile.FilterSteps.Add(new SubtitlePixelFormatFilter(ffmpegState));

                    subtitleInputFile.FilterSteps.Add(new SubtitleHardwareUploadFilter(currentState, ffmpegState));
                }
                else
                {
                    _videoInputFile.Iter(f => f.AddOption(new CopyTimestampInputOption()));

                    // text-based subtitles are always added in software, so always try to download the background

                    // nvidia needs some extra format help if the only filter will be the download filter
                    if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Nvenc &&
                        _videoInputFile.Map(f => f.FilterSteps.Count).IfNone(1) == 0)
                    {
                        IPipelineFilterStep scaleFilter = AvailableScaleFilters.ForAcceleration(
                            ffmpegState.HardwareAccelerationMode,
                            currentState,
                            desiredState.ScaledSize,
                            desiredState.PaddedSize);
                        currentState = scaleFilter.NextState(currentState);
                        _videoInputFile.Iter(f => f.FilterSteps.Add(scaleFilter));
                    }

                    var downloadFilter = new HardwareDownloadFilter(currentState);
                    currentState = downloadFilter.NextState(currentState);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter));
                }
            }

            foreach (WatermarkInputFile watermarkInputFile in _watermarkInputFile)
            {
                // vaapi and videotoolbox use a software overlay, so we need to ensure the background is already in software
                // though videotoolbox uses software decoders, so no need to download for that
                if (ffmpegState.HardwareAccelerationMode == HardwareAccelerationMode.Vaapi)
                {
                    var downloadFilter = new HardwareDownloadFilter(currentState);
                    currentState = downloadFilter.NextState(currentState);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(downloadFilter));
                }

                watermarkInputFile.FilterSteps.Add(
                    new WatermarkPixelFormatFilter(ffmpegState, watermarkInputFile.DesiredState));

                foreach (VideoStream watermarkStream in watermarkInputFile.VideoStreams)
                {
                    if (watermarkStream.StillImage == false)
                    {
                        watermarkInputFile.AddOption(new DoNotIgnoreLoopInputOption());
                    }
                    else if (watermarkInputFile.DesiredState.MaybeFadePoints.Map(fp => fp.Count > 0).IfNone(false))
                    {
                        // looping is required to fade a static image in and out
                        watermarkInputFile.AddOption(new InfiniteLoopInputOption(ffmpegState.HardwareAccelerationMode));
                    }
                }

                if (watermarkInputFile.DesiredState.Size == WatermarkSize.Scaled)
                {
                    watermarkInputFile.FilterSteps.Add(
                        new WatermarkScaleFilter(watermarkInputFile.DesiredState, currentState.PaddedSize));
                }

                if (watermarkInputFile.DesiredState.Opacity != 100)
                {
                    watermarkInputFile.FilterSteps.Add(new WatermarkOpacityFilter(watermarkInputFile.DesiredState));
                }

                foreach (List<WatermarkFadePoint> fadePoints in watermarkInputFile.DesiredState.MaybeFadePoints)
                {
                    watermarkInputFile.FilterSteps.AddRange(fadePoints.Map(fp => new WatermarkFadeFilter(fp)));
                }

                watermarkInputFile.FilterSteps.Add(new WatermarkHardwareUploadFilter(currentState, ffmpegState));
            }

            // after everything else is done, apply the encoder
            if (_pipelineSteps.OfType<IEncoder>().All(e => e.Kind != StreamKind.Video))
            {
                foreach (IEncoder e in AvailableEncoders.ForVideoFormat(
                             ffmpegState,
                             currentState,
                             desiredState,
                             _watermarkInputFile,
                             _subtitleInputFile,
                             _logger))
                {
                    encoder = e;
                    _pipelineSteps.Add(encoder);
                    _videoInputFile.Iter(f => f.FilterSteps.Add(encoder));
                    currentState = encoder.NextState(currentState);
                }
            }

            if (ffmpegState.DoNotMapMetadata)
            {
                _pipelineSteps.Add(new DoNotMapMetadataOutputOption());
            }

            foreach (string desiredServiceProvider in ffmpegState.MetadataServiceProvider)
            {
                _pipelineSteps.Add(new MetadataServiceProviderOutputOption(desiredServiceProvider));
            }

            foreach (string desiredServiceName in ffmpegState.MetadataServiceName)
            {
                _pipelineSteps.Add(new MetadataServiceNameOutputOption(desiredServiceName));
            }

            foreach (string desiredAudioLanguage in ffmpegState.MetadataAudioLanguage)
            {
                _pipelineSteps.Add(new MetadataAudioLanguageOutputOption(desiredAudioLanguage));
            }

            switch (ffmpegState.OutputFormat)
            {
            case OutputFormatKind.MpegTs:
                _pipelineSteps.Add(new OutputFormatMpegTs());
                _pipelineSteps.Add(new PipeProtocol());
                // currentState = currentState with { OutputFormat = OutputFormatKind.MpegTs };
                break;

            case OutputFormatKind.Hls:
                foreach (string playlistPath in ffmpegState.HlsPlaylistPath)
                {
                    foreach (string segmentTemplate in ffmpegState.HlsSegmentTemplate)
                    {
                        var step = new OutputFormatHls(
                            desiredState,
                            videoStream.FrameRate,
                            segmentTemplate,
                            playlistPath);
                        currentState = step.NextState(currentState);
                        _pipelineSteps.Add(step);
                    }
                }

                break;
            }

            var complexFilter = new ComplexFilter(
                currentState,
                ffmpegState,
                _videoInputFile,
                _audioInputFile,
                _watermarkInputFile,
                _subtitleInputFile,
                currentState.PaddedSize,
                _fontsFolder);

            _pipelineSteps.Add(complexFilter);
        }

        return new FFmpegPipeline(_pipelineSteps);
    }