Example #1
        public AudioDevice(string deviceName, int numChannels, int samplingRate, int bitsPerSample)
        {
            if (cfgAudDec != IntPtr.Zero || audDec != IntPtr.Zero || aTxfDec != IntPtr.Zero)
            {
                return;
            }

            //allocate the audio decode configuration context
            cfgAudDec = MixCastAV.getAudioDecodeCfg(deviceName, numChannels, samplingRate, bitsPerSample,
                                                    AUDIO_NUMCHANNELS, AUDIO_SAMPLINGRATE, AUDIO_BITSPERSAMPLE);

            //set up the audio decode codec
            audDec = MixCastAV.getVideoDecodeContext(cfgAudDec);

            //set up the audio transformer for decode
            aTxfDec = MixCastAV.getVideoTransformContext(cfgAudDec);

            if (cfgAudDec != IntPtr.Zero && audDec != IntPtr.Zero && aTxfDec != IntPtr.Zero)
            {
                Debug.Log("Started Audio Device");
            }
            else
            {
                Debug.LogError("Failed to start Audio Device");
                return;
            }

            //udatasize = LibAvStuff.getCfgOutputDataSize(cfgAudDec);

            //create the audio asynchronous interface
            //audAsyncDec = LibAvStuff.createAudioDecodeAsync(audDec, cfgAudDec, aTxfDec);
        }
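
        // Usage sketch (not part of the original source): one way this constructor
        // might be called. The device name and capture parameters below are
        // placeholder assumptions, not values taken from the SDK.
        private static AudioDevice CreateDefaultMicrophone()
        {
            return new AudioDevice("Microphone (USB Audio Device)",
                                   numChannels: 2, samplingRate: 48000, bitsPerSample: 16);
        }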
        /// <summary>
        ///     DirectShowInputFeedStream(string deviceName, int w, int h, int fps, string pixFmtStr, int rtBufSize, int forceRGB = MixCastAV.FORCE_RGBA)
        ///     Builds the decoder with the info supplied in the parameters. This is non-threaded.
        /// </summary>
        /// <param name="deviceName">The device name (alternative hardware names are supported)</param>
        /// <param name="w">The requested width of the device, and also the width of the texture</param>
        /// <param name="h">The requested height of the device, and also the height of the texture</param>
        /// <param name="fps">The requested framerate of the device</param>
        /// <param name="pixFmtStr">The requested pixel format of the supported device</param>
        /// <param name="rtBufSize">The requested real-time buffer size for the capture device</param>
        /// <param name="forceRGB">Forces RGBA output for the texture (defaults to MixCastAV.FORCE_RGBA)</param>
        public DirectShowInputFeedStream(string deviceName, int w, int h, int fps, string pixFmtStr, int rtBufSize, int forceRGB = MixCastAV.FORCE_RGBA)
        {
            width  = w;
            height = h;

            if (cfgVidDec != IntPtr.Zero || vidDec != IntPtr.Zero || vidTxfDec != IntPtr.Zero)
            {
                return;
            }

            //allocate the video decode configuration (input and output use the same size and framerate)
            cfgVidDec = MixCastAV.getVideoDecodeCfg(deviceName, w, h, fps, pixFmtStr, w, h, fps, MixCastAV.texturePixelFormat, 1);

            //the early-out above guarantees vidDec is still IntPtr.Zero here
            vidDec = MixCastAV.getVideoDecodeContext(cfgVidDec);

            if (vidDec == IntPtr.Zero)
            {
                Texture = null;
                return;
            }

            //set up the video transformer for decode
            vidTxfDec = MixCastAV.getVideoTransformContext(cfgVidDec);

            if (vidTxfDec != IntPtr.Zero && vidDec != IntPtr.Zero && cfgVidDec != IntPtr.Zero)
            {
                Debug.Log("Started Device Feed");
            }
            else
            {
                Debug.LogError("Failed to start Device Feed");
                Texture = null;
                return;
            }

            //query the configured output data size
            udatasize = MixCastAV.getCfgOutputDataSize(cfgVidDec);

            //create the RGBA32 target texture (no mipmaps, linear color)
            Texture          = new Texture2D(w, h, TextureFormat.RGBA32, false, true);
            Texture.wrapMode = TextureWrapMode.Clamp;

            // Create the decoder camera thread interface and bind it to the output texture
            decoderInterface = MixCastAV.CreateDecodeInterface(vidDec, cfgVidDec, vidTxfDec);
            MixCastAV.SetDecodeInterfaceTexture(decoderInterface, Texture.GetNativeTexturePtr());
        }
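
        // Usage sketch (not part of the original source): builds a 720p30 feed and
        // binds its texture to a renderer. The device name, pixel-format string,
        // and buffer size are placeholder assumptions.
        private static DirectShowInputFeedStream CreateDefaultFeed(Renderer target)
        {
            var feed = new DirectShowInputFeedStream("HD Webcam", 1280, 720, 30, "yuyv422",
                                                     rtBufSize: 3 * 1280 * 720 * 4);
            if (feed.Texture != null)
            {
                //the constructor succeeded; display the decoded frames
                target.material.mainTexture = feed.Texture;
            }
            return feed;
        }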