Example #1
        /// <summary>
        /// Configure the Opus encoder filter
        /// after the encoder is in the graph and connected
        /// to the source.
        ///
        /// Warning: This might not work for a DVCaptureGraph.
        /// </summary>
        /// <param name="cg"></param>
        /// <param name="j"></param>
        public override void PostConnectConfig(Dictionary <string, Object> args)
        {
            CaptureGraph       cg   = args["CaptureGraph"] as CaptureGraph;
            IAudioCaptureGraph iacg = cg as IAudioCaptureGraph;

            _AMMediaType[] mts   = Pin.GetMediaTypes(cg.Source.OutputPin);
            bool           mtSet = false;

            foreach (_AMMediaType mt in mts)
            {
                WAVEFORMATEX wfex = (WAVEFORMATEX)MediaType.FormatType.MarshalData(mt);
                if ((wfex.SamplesPerSec == Frequency) && (wfex.Channels == Channels) && (wfex.BitsPerSample == Depth))
                {
                    cg.Source.SetMediaType(mt);
                    mtSet = true;
                    break;
                }
            }

            if (!mtSet)
            {
                throw new ApplicationException("The audio device doesn't support the configured values of SamplesPerSec/Channels/BitsPerSample.");
            }

            IOpusEncoderCtl iOpus = (IOpusEncoderCtl)iacg.AudioCompressor.BaseFilter;

            iOpus.SetSignal(Signal);
            int br = BitRate;

            if (br == 0)
            {
                br = ManualBitRate;
            }
            iOpus.SetBitRate(br);
            iOpus.SetComplexity(Complexity);
            iOpus.SetMaxBandwidth(MaxBandwidth);
            iOpus.SetVbr(VBR);
            iOpus.SetVbrConstraint(VBRConstraint);
            iOpus.SetDtx(DTX);
            iOpus.SetPacketLossPerc(PacketLossPerc);
            iOpus.SetLsbDepth(LSBDepth);
            iOpus.SetForcedChannels(ForcedChannels);
        }
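A minimal usage sketch for the configurator above, assuming the graph has already been built and connected. Only the "CaptureGraph" dictionary key is taken from the example; the OpusCompressorConfig class name and the audioCaptureGraph field are hypothetical placeholders.

        // Hypothetical caller: hand the connected audio CaptureGraph to PostConnectConfig.
        Dictionary<string, object> args = new Dictionary<string, object>();
        args["CaptureGraph"] = audioCaptureGraph;                   // an already-connected audio graph (assumed field)

        OpusCompressorConfig config = new OpusCompressorConfig();   // hypothetical type exposing PostConnectConfig
        config.PostConnectConfig(args);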
Example #2

 public frmAudioFormat(CaptureGraph cg)
 {
     InitializeComponent();
     this.captureGraph = cg;
 }
Example #3
        /// <summary>
        /// Display statistics for the specified video capture graph.  It might be a DV graph.
        /// </summary>
        /// <param name="vcg"></param>
        internal void UpdateVideoBox(CaptureGraph vcg)
        {
            btnAdvancedVideoSettings.Enabled = true;
            lblVideoInfo.Enabled = true;
            ckPlayVideo.Enabled = true;

            // Update video info about the camera
            _AMMediaType mt;
            object formatBlock;
            vcg.Source.GetMediaType(out mt, out formatBlock);

            string info = null;
            if (formatBlock is VIDEOINFOHEADER) {
                VIDEOINFOHEADER vih = (VIDEOINFOHEADER)formatBlock;
                BITMAPINFOHEADER bmih = vih.BitmapInfo;
                info = string.Format(CultureInfo.CurrentCulture, Strings.ResolutionStatus,
                    bmih.Width, bmih.Height, vih.FrameRate.ToString("F2", CultureInfo.InvariantCulture));
            }
            else if (formatBlock is DVINFO) {
                info = "DV Video";
                DVCaptureGraph dvcg = vcg as DVCaptureGraph;
                if (dvcg != null) {
                    dvcg.GetVideoMediaType(out mt, out formatBlock);
                    if (formatBlock is VIDEOINFOHEADER) {
                        VIDEOINFOHEADER vih = (VIDEOINFOHEADER)formatBlock;
                        BITMAPINFOHEADER bmih = vih.BitmapInfo;
                        info = string.Format(CultureInfo.CurrentCulture, Strings.ResolutionStatus,
                            bmih.Width, bmih.Height, vih.FrameRate.ToString("F2", CultureInfo.InvariantCulture));
                    }
                }
            }

            if(vcg.Compressor == null)
            {
                info += string.Format(CultureInfo.CurrentCulture, "\r\n" + Strings.CompressorDisabled);
            }
            else
            {
                if (vcg is VideoCaptureGraph) {
                    info += string.Format(CultureInfo.CurrentCulture, "\r\n" + Strings.CompressedBitRate,
                        ((VideoCaptureGraph)vcg).VideoCompressor.QualityInfo.BitRate / 1000);
                }
                else if (vcg is DVCaptureGraph) {
                    if (((DVCaptureGraph)vcg).VideoCompressor == null) {
                        info += string.Format(CultureInfo.CurrentCulture, "\r\n" + Strings.CompressorDisabled);
                    }
                    else {
                        info += string.Format(CultureInfo.CurrentCulture, "\r\n" + Strings.CompressedBitRate,
                            ((DVCaptureGraph)vcg).VideoCompressor.QualityInfo.BitRate / 1000);
                    }
                }
            }

            lblVideoInfo.Text = info;
        }
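A hedged sketch of how UpdateVideoBox might be driven from a device-selection change; the handler name, combo box, and graph lookup are assumptions, not part of the source.

        // Hypothetical caller: refresh the video info label when a different camera is chosen.
        private void cboCameras_SelectedIndexChanged(object sender, EventArgs e)
        {
            CaptureGraph vcg = LookupVideoCaptureGraph(cboCameras.SelectedItem); // assumed helper
            if (vcg != null)
            {
                UpdateVideoBox(vcg);
            }
        }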
Example #4
        public void RenderAndRunVideo(CaptureGraph vcg, bool playIt)
        {
            if(playIt)
            {
                Log("Playing video (render and run graph) - " + vcg.Source.FriendlyName);

                vcg.RenderLocal();

                if (vcg is VideoCaptureGraph) {
                    // This is not working for DV graphs but doesn't seem critical in this context
                    VideoCapability.DisableDXVA(vcg.FilgraphManager);
                }

                // Set device name in the video window and turn off the system menu
                IVideoWindow iVW = (IVideoWindow)vcg.FilgraphManager;
                iVW.Caption = vcg.Source.FriendlyName;
                iVW.WindowStyle &= ~0x00080000; // WS_SYSMENU

                vcg.Run();
            }
            else
            {
                Log("Stop video (stop and unrender graph) - " + vcg.Source.FriendlyName);

                vcg.Stop();
                vcg.RemoveRenderer(MSR.LST.Net.Rtp.PayloadType.dynamicVideo);

                // I have no idea why the video window stays up but this fixes it
                GC.Collect();
            }

            Log(FilterGraph.Debug(vcg.IFilterGraph));
        }
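The 0x00080000 mask in the example is the Win32 WS_SYSMENU style bit. A short sketch of the same tweak with a named constant makes the intent explicit; the constant value comes from WinUser.h, and the rest mirrors the code above.

        // Same window-style tweak with a named constant instead of the magic number.
        private const int WS_SYSMENU = 0x00080000;   // from WinUser.h

        IVideoWindow iVW = (IVideoWindow)vcg.FilgraphManager;
        iVW.Caption = vcg.Source.FriendlyName;
        iVW.WindowStyle &= ~WS_SYSMENU;              // strip the system menu from the video window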
Example #5
 public void RenderAndRunVideo(CaptureGraph vcg)
 {
     RenderAndRunVideo(vcg, ckPlayVideo.Checked);
 }
Example #6
        public void RenderAndRunAudio(CaptureGraph cg, bool playIt)
        {
            if(cg == null)
            {
                throw new ArgumentNullException("cg", Strings.CantRenderAudioGraph);
            }

            if(playIt)
            {
                Log("Playing audio (render and run graph) - " + cg.Source.FriendlyName);

                // Re-add the renderer in case they changed it since the last
                // time they played the audio
                cg.AddAudioRenderer((FilterInfo)cboSpeakers.SelectedItem);
                cg.Run();
            }
            else
            {
                Log("Stop audio (stop and unrender graph) - " + cg.Source.FriendlyName);

                cg.Stop();
                cg.RemoveRenderer(MSR.LST.Net.Rtp.PayloadType.dynamicAudio);
            }

            Log(FilterGraph.Debug(cg.IFilterGraph));
        }
Example #7
 public void RenderAndRunAudio(CaptureGraph cg)
 {
     RenderAndRunAudio(cg, ckPlayAudio.Checked);
 }
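The single-argument overloads in examples #5 and #7 suggest the play checkboxes drive the graphs; a hedged sketch of that wiring follows (handler names and graph fields are assumptions).

        // Hypothetical CheckedChanged handlers; the overloads above read the checkbox state themselves.
        private void ckPlayAudio_CheckedChanged(object sender, EventArgs e)
        {
            RenderAndRunAudio(audioCaptureGraph);    // assumed field holding the audio graph
        }

        private void ckPlayVideo_CheckedChanged(object sender, EventArgs e)
        {
            RenderAndRunVideo(videoCaptureGraph);    // assumed field holding the video graph
        }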